From 68940b9b045fcd675276234ac734c4befc9f67dd Mon Sep 17 00:00:00 2001 From: Andrew Svetlov Date: Thu, 12 Apr 2018 16:14:03 +0300 Subject: [PATCH 0001/1511] [3.1] fix resolve cancellation (#2910) (#2931) * fix resolve cancellation * fixes based on review * changes based on review * add changes file * rename (cherry picked from commit a7bbaad) Co-authored-by: Alexander Mohr --- CHANGES/2910.bugfix | 1 + aiohttp/connector.py | 12 ++++++------ 2 files changed, 7 insertions(+), 6 deletions(-) create mode 100644 CHANGES/2910.bugfix diff --git a/CHANGES/2910.bugfix b/CHANGES/2910.bugfix new file mode 100644 index 00000000000..e10a8534d06 --- /dev/null +++ b/CHANGES/2910.bugfix @@ -0,0 +1 @@ +fix cancellation broadcast during DNS resolve diff --git a/aiohttp/connector.py b/aiohttp/connector.py index f506230ad42..556d91fdce0 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -700,10 +700,7 @@ async def _resolve_host(self, host, port, traces=None): await trace.send_dns_resolvehost_start(host) addrs = await \ - asyncio.shield(self._resolver.resolve(host, - port, - family=self._family), - loop=self._loop) + self._resolver.resolve(host, port, family=self._family) if traces: for trace in traces: await trace.send_dns_resolvehost_end(host) @@ -813,10 +810,13 @@ async def _create_direct_connection(self, req, fingerprint = self._get_fingerprint(req) try: - hosts = await self._resolve_host( + # Cancelling this lookup should not cancel the underlying lookup + # or else the cancel event will get broadcast to all the waiters + # across all connections. 
+ hosts = await asyncio.shield(self._resolve_host( req.url.raw_host, req.port, - traces=traces) + traces=traces), loop=self._loop) except OSError as exc: # in case of proxy it is not ClientProxyConnectionError # it is problem of resolving proxy ip itself From a901b54a5beaec472624f089eb1085a1073a1cf4 Mon Sep 17 00:00:00 2001 From: Andrew Svetlov Date: Fri, 13 Apr 2018 12:05:49 +0300 Subject: [PATCH 0002/1511] Bump to 3.1.3 --- CHANGES.rst | 6 ++++++ CHANGES/2910.bugfix | 1 - aiohttp/__init__.py | 2 +- 3 files changed, 7 insertions(+), 2 deletions(-) delete mode 100644 CHANGES/2910.bugfix diff --git a/CHANGES.rst b/CHANGES.rst index 443b6a2671f..85fb7dff3d5 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -14,6 +14,12 @@ Changelog .. towncrier release notes start +3.1.3 (2018-04-12) +================== + +- Fix cancellation broadcast during DNS resolve (#2910) + + 3.1.2 (2018-04-05) ================== diff --git a/CHANGES/2910.bugfix b/CHANGES/2910.bugfix deleted file mode 100644 index e10a8534d06..00000000000 --- a/CHANGES/2910.bugfix +++ /dev/null @@ -1 +0,0 @@ -fix cancellation broadcast during DNS resolve diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 04b35fe26ef..d4087fbd816 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = '3.1.2' +__version__ = '3.1.3' # This relies on each of the submodules having an __all__ variable. 
From ffd704be3389afe88840da1533b86cfde1d9e066 Mon Sep 17 00:00:00 2001 From: Andrew Svetlov Date: Fri, 13 Apr 2018 12:11:35 +0300 Subject: [PATCH 0003/1511] Fix links --- CHANGES.rst | 54 +-- HISTORY.rst | 1210 +++++++++++++++++++++++++-------------------------- 2 files changed, 632 insertions(+), 632 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 85fb7dff3d5..02689ac2a1b 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -17,14 +17,14 @@ Changelog 3.1.3 (2018-04-12) ================== -- Fix cancellation broadcast during DNS resolve (#2910) +- Fix cancellation broadcast during DNS resolve (`#2910 `_) 3.1.2 (2018-04-05) ================== -- Make ``LineTooLong`` exception more detailed about actual data size (#2863) -- Call ``on_chunk_sent`` when write_eof takes as a param the last chunk (#2909) +- Make ``LineTooLong`` exception more detailed about actual data size (`#2863 `_) +- Call ``on_chunk_sent`` when write_eof takes as a param the last chunk (`#2909 `_) 3.1.1 (2018-03-27) @@ -32,7 +32,7 @@ Changelog - Support *asynchronous iterators* (and *asynchronous generators* as well) in both client and server API as request / response BODY - payloads. (#2802) + payloads. (`#2802 `_) 3.1.0 (2018-03-21) @@ -77,48 +77,48 @@ Features -------- - Relax JSON content-type checking in the ``ClientResponse.json()`` to allow - "application/xxx+json" instead of strict "application/json". (#2206) -- Bump C HTTP parser to version 2.8 (#2730) + "application/xxx+json" instead of strict "application/json". (`#2206 `_) +- Bump C HTTP parser to version 2.8 (`#2730 `_) - Accept a coroutine as an application factory in ``web.run_app`` and gunicorn - worker. (#2739) -- Implement application cleanup context (``app.cleanup_ctx`` property). (#2747) -- Make ``writer.write_headers`` a coroutine. (#2762) -- Add tracking signals for getting request/response bodies. (#2767) + worker. (`#2739 `_) +- Implement application cleanup context (``app.cleanup_ctx`` property). 
(`#2747 `_) +- Make ``writer.write_headers`` a coroutine. (`#2762 `_) +- Add tracking signals for getting request/response bodies. (`#2767 `_) - Deprecate ClientResponseError.code in favor of .status to keep similarity - with response classes. (#2781) -- Implement ``app.add_routes()`` method. (#2787) -- Implement ``web.static()`` and ``RouteTableDef.static()`` API. (#2795) + with response classes. (`#2781 `_) +- Implement ``app.add_routes()`` method. (`#2787 `_) +- Implement ``web.static()`` and ``RouteTableDef.static()`` API. (`#2795 `_) - Install a test event loop as default by ``asyncio.set_event_loop()``. The change affects aiohttp test utils but backward compatibility is not broken - for 99.99% of use cases. (#2804) + for 99.99% of use cases. (`#2804 `_) - Refactor ``ClientResponse`` constructor: make logically required constructor - arguments mandatory, drop ``_post_init()`` method. (#2820) -- Use ``app.add_routes()`` in server docs everywhere (#2830) + arguments mandatory, drop ``_post_init()`` method. (`#2820 `_) +- Use ``app.add_routes()`` in server docs everywhere (`#2830 `_) - Websockets refactoring, all websocket writer methods are converted into - coroutines. (#2836) -- Provide ``Content-Range`` header for ``Range`` requests (#2844) + coroutines. (`#2836 `_) +- Provide ``Content-Range`` header for ``Range`` requests (`#2844 `_) Bugfixes -------- -- Fix websocket client return EofStream. (#2784) -- Fix websocket demo. (#2789) +- Fix websocket client return EofStream. (`#2784 `_) +- Fix websocket demo. (`#2789 `_) - Property ``BaseRequest.http_range`` now returns a python-like slice when requesting the tail of the range. It's now indicated by a negative value in - ``range.start`` rather then in ``range.stop`` (#2805) + ``range.start`` rather then in ``range.stop`` (`#2805 `_) - Close a connection if an unexpected exception occurs while sending a request - (#2827) -- Fix firing DNS tracing events. (#2841) + (`#2827 `_) +- Fix firing DNS tracing events. 
(`#2841 `_) Improved Documentation ---------------------- - Change ``ClientResponse.json()`` documentation to reflect that it now - allows "application/xxx+json" content-types (#2206) + allows "application/xxx+json" content-types (`#2206 `_) - Document behavior when cchardet detects encodings that are unknown to Python. - (#2732) -- Add diagrams for tracing request life style. (#2748) + (`#2732 `_) +- Add diagrams for tracing request life style. (`#2748 `_) - Drop removed functionality for passing ``StreamReader`` as data at client - side. (#2793) + side. (`#2793 `_) diff --git a/HISTORY.rst b/HISTORY.rst index 567f5ca02f2..328e1ac1744 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -2,41 +2,41 @@ ================== - Close a connection if an unexpected exception occurs while sending a request - (#2827) + (`#2827 `_) 3.0.8 (2018-03-12) ================== -- Use ``asyncio.current_task()`` on Python 3.7 (#2825) +- Use ``asyncio.current_task()`` on Python 3.7 (`#2825 `_) 3.0.7 (2018-03-08) ================== -- Fix SSL proxy support by client. (#2810) +- Fix SSL proxy support by client. (`#2810 `_) - Restore a imperative check in ``setup.py`` for python version. The check works in parallel to environment marker. As effect a error about unsupported Python versions is raised even on outdated systems with very old - ``setuptools`` version installed. (#2813) + ``setuptools`` version installed. (`#2813 `_) 3.0.6 (2018-03-05) ================== - Add ``_reuse_address`` and ``_reuse_port`` to - ``web_runner.TCPSite.__slots__``. (#2792) + ``web_runner.TCPSite.__slots__``. (`#2792 `_) 3.0.5 (2018-02-27) ================== - Fix ``InvalidStateError`` on processing a sequence of two - ``RequestHandler.data_received`` calls on web server. (#2773) + ``RequestHandler.data_received`` calls on web server. (`#2773 `_) 3.0.4 (2018-02-26) ================== -- Fix ``IndexError`` in HTTP request handling by server. (#2752) -- Fix MultipartWriter.append* no longer returning part/payload. 
(#2759) +- Fix ``IndexError`` in HTTP request handling by server. (`#2752 `_) +- Fix MultipartWriter.append* no longer returning part/payload. (`#2759 `_) 3.0.3 (2018-02-25) @@ -67,120 +67,120 @@ Security Fix Features -------- -- Speed up the `PayloadWriter.write` method for large request bodies. (#2126) -- StreamResponse and Response are now MutableMappings. (#2246) +- Speed up the `PayloadWriter.write` method for large request bodies. (`#2126 `_) +- StreamResponse and Response are now MutableMappings. (`#2246 `_) - ClientSession publishes a set of signals to track the HTTP request execution. - (#2313) -- Content-Disposition fast access in ClientResponse (#2455) -- Added support to Flask-style decorators with class-based Views. (#2472) -- Signal handlers (registered callbacks) should be coroutines. (#2480) -- Support ``async with test_client.ws_connect(...)`` (#2525) + (`#2313 `_) +- Content-Disposition fast access in ClientResponse (`#2455 `_) +- Added support to Flask-style decorators with class-based Views. (`#2472 `_) +- Signal handlers (registered callbacks) should be coroutines. (`#2480 `_) +- Support ``async with test_client.ws_connect(...)`` (`#2525 `_) - Introduce *site* and *application runner* as underlying API for `web.run_app` - implementation. (#2530) -- Only quote multipart boundary when necessary and sanitize input (#2544) + implementation. (`#2530 `_) +- Only quote multipart boundary when necessary and sanitize input (`#2544 `_) - Make the `aiohttp.ClientResponse.get_encoding` method public with the - processing of invalid charset while detecting content encoding. (#2549) + processing of invalid charset while detecting content encoding. (`#2549 `_) - Add optional configurable per message compression for - `ClientWebSocketResponse` and `WebSocketResponse`. (#2551) + `ClientWebSocketResponse` and `WebSocketResponse`. (`#2551 `_) - Add hysteresis to `StreamReader` to prevent flipping between paused and - resumed states too often. 
(#2555) -- Support `.netrc` by `trust_env` (#2581) + resumed states too often. (`#2555 `_) +- Support `.netrc` by `trust_env` (`#2581 `_) - Avoid to create a new resource when adding a route with the same name and - path of the last added resource (#2586) -- `MultipartWriter.boundary` is `str` now. (#2589) + path of the last added resource (`#2586 `_) +- `MultipartWriter.boundary` is `str` now. (`#2589 `_) - Allow a custom port to be used by `TestServer` (and associated pytest - fixtures) (#2613) -- Add param access_log_class to web.run_app function (#2615) -- Add ``ssl`` parameter to client API (#2626) + fixtures) (`#2613 `_) +- Add param access_log_class to web.run_app function (`#2615 `_) +- Add ``ssl`` parameter to client API (`#2626 `_) - Fixes performance issue introduced by #2577. When there are no middlewares - installed by the user, no additional and useless code is executed. (#2629) -- Rename PayloadWriter to StreamWriter (#2654) + installed by the user, no additional and useless code is executed. (`#2629 `_) +- Rename PayloadWriter to StreamWriter (`#2654 `_) - New options *reuse_port*, *reuse_address* are added to `run_app` and - `TCPSite`. (#2679) -- Use custom classes to pass client signals parameters (#2686) -- Use ``attrs`` library for data classes, replace `namedtuple`. (#2690) -- Pytest fixtures renaming, add ``aiohttp_`` prefix (#2578) + `TCPSite`. (`#2679 `_) +- Use custom classes to pass client signals parameters (`#2686 `_) +- Use ``attrs`` library for data classes, replace `namedtuple`. (`#2690 `_) +- Pytest fixtures renaming, add ``aiohttp_`` prefix (`#2578 `_) - Add ``aiohttp-`` prefix for ``pytest-aiohttp`` command line - parameters (#2578) + parameters (`#2578 `_) Bugfixes -------- - Correctly process upgrade request from server to HTTP2. ``aiohttp`` does not support HTTP2 yet, the protocol is not upgraded but response is handled - correctly. (#2277) + correctly. 
(`#2277 `_) - Fix ClientConnectorSSLError and ClientProxyConnectionError for proxy - connector (#2408) -- Fix connector convert OSError to ClientConnectorError (#2423) -- Fix connection attempts for multiple dns hosts (#2424) -- Fix writing to closed transport by raising `asyncio.CancelledError` (#2499) + connector (`#2408 `_) +- Fix connector convert OSError to ClientConnectorError (`#2423 `_) +- Fix connection attempts for multiple dns hosts (`#2424 `_) +- Fix writing to closed transport by raising `asyncio.CancelledError` (`#2499 `_) - Fix warning in `ClientSession.__del__` by stopping to try to close it. - (#2523) -- Fixed race-condition for iterating addresses from the DNSCache. (#2620) -- Fix default value of `access_log_format` argument in `web.run_app` (#2649) -- Freeze sub-application on adding to parent app (#2656) -- Do percent encoding for `.url_for()` parameters (#2668) + (`#2523 `_) +- Fixed race-condition for iterating addresses from the DNSCache. (`#2620 `_) +- Fix default value of `access_log_format` argument in `web.run_app` (`#2649 `_) +- Freeze sub-application on adding to parent app (`#2656 `_) +- Do percent encoding for `.url_for()` parameters (`#2668 `_) - Correctly process request start time and multiple request/response - headers in access log extra (#2641) + headers in access log extra (`#2641 `_) Improved Documentation ---------------------- - Improve tutorial docs, using `literalinclude` to link to the actual files. - (#2396) -- Small improvement docs: better example for file uploads. (#2401) -- Rename `from_env` to `trust_env` in client reference. (#2451) + (`#2396 `_) +- Small improvement docs: better example for file uploads. (`#2401 `_) +- Rename `from_env` to `trust_env` in client reference. 
(`#2451 `_) - Fixed mistype in `Proxy Support` section where `trust_env` parameter was used in `session.get("http://python.org", trust_env=True)` method instead of aiohttp.ClientSession constructor as follows: - `aiohttp.ClientSession(trust_env=True)`. (#2688) -- Fix issue with unittest example not compiling in testing docs. (#2717) + `aiohttp.ClientSession(trust_env=True)`. (`#2688 `_) +- Fix issue with unittest example not compiling in testing docs. (`#2717 `_) Deprecations and Removals ------------------------- -- Simplify HTTP pipelining implementation (#2109) -- Drop `StreamReaderPayload` and `DataQueuePayload`. (#2257) -- Drop `md5` and `sha1` finger-prints (#2267) -- Drop WSMessage.tp (#2321) +- Simplify HTTP pipelining implementation (`#2109 `_) +- Drop `StreamReaderPayload` and `DataQueuePayload`. (`#2257 `_) +- Drop `md5` and `sha1` finger-prints (`#2267 `_) +- Drop WSMessage.tp (`#2321 `_) - Drop Python 3.4 and Python 3.5.0, 3.5.1, 3.5.2. Minimal supported Python versions are 3.5.3 and 3.6.0. `yield from` is gone, use `async/await` syntax. - (#2343) -- Drop `aiohttp.Timeout` and use `async_timeout.timeout` instead. (#2348) -- Drop `resolve` param from TCPConnector. (#2377) -- Add DeprecationWarning for returning HTTPException (#2415) + (`#2343 `_) +- Drop `aiohttp.Timeout` and use `async_timeout.timeout` instead. (`#2348 `_) +- Drop `resolve` param from TCPConnector. (`#2377 `_) +- Add DeprecationWarning for returning HTTPException (`#2415 `_) - `send_str()`, `send_bytes()`, `send_json()`, `ping()` and `pong()` are - genuine async functions now. (#2475) + genuine async functions now. (`#2475 `_) - Drop undocumented `app.on_pre_signal` and `app.on_post_signal`. Signal handlers should be coroutines, support for regular functions is dropped. - (#2480) + (`#2480 `_) - `StreamResponse.drain()` is not a part of public API anymore, just use `await StreamResponse.write()`. `StreamResponse.write` is converted to async - function. (#2483) + function. 
(`#2483 `_) - Drop deprecated `slow_request_timeout` param and `**kwargs`` from - `RequestHandler`. (#2500) -- Drop deprecated `resource.url()`. (#2501) -- Remove `%u` and `%l` format specifiers from access log format. (#2506) -- Drop deprecated `request.GET` property. (#2547) + `RequestHandler`. (`#2500 `_) +- Drop deprecated `resource.url()`. (`#2501 `_) +- Remove `%u` and `%l` format specifiers from access log format. (`#2506 `_) +- Drop deprecated `request.GET` property. (`#2547 `_) - Simplify stream classes: drop `ChunksQueue` and `FlowControlChunksQueue`, merge `FlowControlStreamReader` functionality into `StreamReader`, drop - `FlowControlStreamReader` name. (#2555) + `FlowControlStreamReader` name. (`#2555 `_) - Do not create a new resource on `router.add_get(..., allow_head=True)` - (#2585) + (`#2585 `_) - Drop access to TCP tuning options from PayloadWriter and Response classes - (#2604) -- Drop deprecated `encoding` parameter from client API (#2606) + (`#2604 `_) +- Drop deprecated `encoding` parameter from client API (`#2606 `_) - Deprecate ``verify_ssl``, ``ssl_context`` and ``fingerprint`` parameters in - client API (#2626) -- Get rid of the legacy class StreamWriter. (#2651) -- Forbid non-strings in `resource.url_for()` parameters. (#2668) + client API (`#2626 `_) +- Get rid of the legacy class StreamWriter. (`#2651 `_) +- Forbid non-strings in `resource.url_for()` parameters. (`#2668 `_) - Deprecate inheritance from ``ClientSession`` and ``web.Application`` and custom user attributes for ``ClientSession``, ``web.Request`` and - ``web.Application`` (#2691) + ``web.Application`` (`#2691 `_) - Drop `resp = await aiohttp.request(...)` syntax for sake of `async with - aiohttp.request(...) as resp:`. (#2540) + aiohttp.request(...) as resp:`. (`#2540 `_) - Forbid synchronous context managers for `ClientSession` and test - server/client. (#2362) + server/client. 
(`#2362 `_) Misc @@ -192,88 +192,88 @@ Misc 2.3.10 (2018-02-02) =================== -- Fix 100% CPU usage on HTTP GET and websocket connection just after it (#1955) +- Fix 100% CPU usage on HTTP GET and websocket connection just after it (`#1955 `_) -- Patch broken `ssl.match_hostname()` on Python<3.7 (#2674) +- Patch broken `ssl.match_hostname()` on Python<3.7 (`#2674 `_) 2.3.9 (2018-01-16) ================== -- Fix colon handing in path for dynamic resources (#2670) +- Fix colon handing in path for dynamic resources (`#2670 `_) 2.3.8 (2018-01-15) ================== - Do not use `yarl.unquote` internal function in aiohttp. Fix - incorrectly unquoted path part in URL dispatcher (#2662) + incorrectly unquoted path part in URL dispatcher (`#2662 `_) -- Fix compatibility with `yarl==1.0.0` (#2662) +- Fix compatibility with `yarl==1.0.0` (`#2662 `_) 2.3.7 (2017-12-27) ================== -- Fixed race-condition for iterating addresses from the DNSCache. (#2620) -- Fix docstring for request.host (#2591) -- Fix docstring for request.remote (#2592) +- Fixed race-condition for iterating addresses from the DNSCache. (`#2620 `_) +- Fix docstring for request.host (`#2591 `_) +- Fix docstring for request.remote (`#2592 `_) 2.3.6 (2017-12-04) ================== -- Correct `request.app` context (for handlers not just middlewares). (#2577) +- Correct `request.app` context (for handlers not just middlewares). (`#2577 `_) 2.3.5 (2017-11-30) ================== -- Fix compatibility with `pytest` 3.3+ (#2565) +- Fix compatibility with `pytest` 3.3+ (`#2565 `_) 2.3.4 (2017-11-29) ================== - Make `request.app` point to proper application instance when using nested - applications (with middlewares). (#2550) + applications (with middlewares). (`#2550 `_) - Change base class of ClientConnectorSSLError to ClientSSLError from - ClientConnectorError. (#2563) + ClientConnectorError. (`#2563 `_) - Return client connection back to free pool on error in `connector.connect()`. 
- (#2567) + (`#2567 `_) 2.3.3 (2017-11-17) ================== - Having a `;` in Response content type does not assume it contains a charset - anymore. (#2197) + anymore. (`#2197 `_) - Use `getattr(asyncio, 'async')` for keeping compatibility with Python 3.7. - (#2476) + (`#2476 `_) - Ignore `NotImplementedError` raised by `set_child_watcher` from `uvloop`. - (#2491) + (`#2491 `_) - Fix warning in `ClientSession.__del__` by stopping to try to close it. - (#2523) + (`#2523 `_) - Fixed typo's in Third-party libraries page. And added async-v20 to the list - (#2510) + (`#2510 `_) 2.3.2 (2017-11-01) ================== -- Fix passing client max size on cloning request obj. (#2385) +- Fix passing client max size on cloning request obj. (`#2385 `_) - Fix ClientConnectorSSLError and ClientProxyConnectionError for proxy - connector. (#2408) -- Drop generated `_http_parser` shared object from tarball distribution. (#2414) -- Fix connector convert OSError to ClientConnectorError. (#2423) -- Fix connection attempts for multiple dns hosts. (#2424) + connector. (`#2408 `_) +- Drop generated `_http_parser` shared object from tarball distribution. (`#2414 `_) +- Fix connector convert OSError to ClientConnectorError. (`#2423 `_) +- Fix connection attempts for multiple dns hosts. (`#2424 `_) - Fix ValueError for AF_INET6 sockets if a preexisting INET6 socket to the - `aiohttp.web.run_app` function. (#2431) -- `_SessionRequestContextManager` closes the session properly now. (#2441) -- Rename `from_env` to `trust_env` in client reference. (#2451) + `aiohttp.web.run_app` function. (`#2431 `_) +- `_SessionRequestContextManager` closes the session properly now. (`#2441 `_) +- Rename `from_env` to `trust_env` in client reference. 
(`#2451 `_) 2.3.1 (2017-10-18) ================== -- Relax attribute lookup in warning about old-styled middleware (#2340) +- Relax attribute lookup in warning about old-styled middleware (`#2340 `_) 2.3.0 (2017-10-18) @@ -282,104 +282,104 @@ Misc Features -------- -- Add SSL related params to `ClientSession.request` (#1128) -- Make enable_compression work on HTTP/1.0 (#1828) -- Deprecate registering synchronous web handlers (#1993) +- Add SSL related params to `ClientSession.request` (`#1128 `_) +- Make enable_compression work on HTTP/1.0 (`#1828 `_) +- Deprecate registering synchronous web handlers (`#1993 `_) - Switch to `multidict 3.0`. All HTTP headers preserve casing now but compared - in case-insensitive way. (#1994) + in case-insensitive way. (`#1994 `_) - Improvement for `normalize_path_middleware`. Added possibility to handle URLs - with query string. (#1995) -- Use towncrier for CHANGES.txt build (#1997) -- Implement `trust_env=True` param in `ClientSession`. (#1998) -- Added variable to customize proxy headers (#2001) -- Implement `router.add_routes` and router decorators. (#2004) + with query string. (`#1995 `_) +- Use towncrier for CHANGES.txt build (`#1997 `_) +- Implement `trust_env=True` param in `ClientSession`. (`#1998 `_) +- Added variable to customize proxy headers (`#2001 `_) +- Implement `router.add_routes` and router decorators. 
(`#2004 `_) - Deprecated `BaseRequest.has_body` in favor of `BaseRequest.can_read_body` Added `BaseRequest.body_exists` - attribute that stays static for the lifetime of the request (#2005) -- Provide `BaseRequest.loop` attribute (#2024) + attribute that stays static for the lifetime of the request (`#2005 `_) +- Provide `BaseRequest.loop` attribute (`#2024 `_) - Make `_CoroGuard` awaitable and fix `ClientSession.close` warning message - (#2026) + (`#2026 `_) - Responses to redirects without Location header are returned instead of - raising a RuntimeError (#2030) + raising a RuntimeError (`#2030 `_) - Added `get_client`, `get_server`, `setUpAsync` and `tearDownAsync` methods to - AioHTTPTestCase (#2032) -- Add automatically a SafeChildWatcher to the test loop (#2058) -- add ability to disable automatic response decompression (#2110) + AioHTTPTestCase (`#2032 `_) +- Add automatically a SafeChildWatcher to the test loop (`#2058 `_) +- add ability to disable automatic response decompression (`#2110 `_) - Add support for throttling DNS request, avoiding the requests saturation when there is a miss in the DNS cache and many requests getting into the connector - at the same time. (#2111) + at the same time. (`#2111 `_) - Use request for getting access log information instead of message/transport pair. Add `RequestBase.remote` property for accessing to IP of client - initiated HTTP request. (#2123) + initiated HTTP request. (`#2123 `_) - json() raises a ContentTypeError exception if the content-type does not meet - the requirements instead of raising a generic ClientResponseError. (#2136) + the requirements instead of raising a generic ClientResponseError. (`#2136 `_) - Make the HTTP client able to return HTTP chunks when chunked transfer - encoding is used. (#2150) + encoding is used. (`#2150 `_) - add `append_version` arg into `StaticResource.url` and `StaticResource.url_for` methods for getting an url with hash (version) of - the file. (#2157) + the file. 
(`#2157 `_) - Fix parsing the Forwarded header. * commas and semicolons are allowed inside quoted-strings; * empty forwarded-pairs (as in for=_1;;by=_2) are allowed; * non-standard parameters are allowed (although this alone could be easily done - in the previous parser). (#2173) + in the previous parser). (`#2173 `_) - Don't require ssl module to run. aiohttp does not require SSL to function. The code paths involved with SSL will only be hit upon SSL usage. Raise `RuntimeError` if HTTPS protocol is required but ssl module is not present. - (#2221) -- Accept coroutine fixtures in pytest plugin (#2223) -- Call `shutdown_asyncgens` before event loop closing on Python 3.6. (#2227) -- Speed up Signals when there are no receivers (#2229) + (`#2221 `_) +- Accept coroutine fixtures in pytest plugin (`#2223 `_) +- Call `shutdown_asyncgens` before event loop closing on Python 3.6. (`#2227 `_) +- Speed up Signals when there are no receivers (`#2229 `_) - Raise `InvalidURL` instead of `ValueError` on fetches with invalid URL. - (#2241) -- Move `DummyCookieJar` into `cookiejar.py` (#2242) -- `run_app`: Make `print=None` disable printing (#2260) + (`#2241 `_) +- Move `DummyCookieJar` into `cookiejar.py` (`#2242 `_) +- `run_app`: Make `print=None` disable printing (`#2260 `_) - Support `brotli` encoding (generic-purpose lossless compression algorithm) - (#2270) + (`#2270 `_) - Add server support for WebSockets Per-Message Deflate. Add client option to add deflate compress header in WebSockets request header. If calling ClientSession.ws_connect() with `compress=15` the client will support deflate - compress negotiation. (#2273) + compress negotiation. (`#2273 `_) - Support `verify_ssl`, `fingerprint`, `ssl_context` and `proxy_headers` by - `client.ws_connect`. (#2292) + `client.ws_connect`. 
(`#2292 `_) - Added `aiohttp.ClientConnectorSSLError` when connection fails due - `ssl.SSLError` (#2294) -- `aiohttp.web.Application.make_handler` support `access_log_class` (#2315) -- Build HTTP parser extension in non-strict mode by default. (#2332) + `ssl.SSLError` (`#2294 `_) +- `aiohttp.web.Application.make_handler` support `access_log_class` (`#2315 `_) +- Build HTTP parser extension in non-strict mode by default. (`#2332 `_) Bugfixes -------- -- Clear auth information on redirecting to other domain (#1699) -- Fix missing app.loop on startup hooks during tests (#2060) +- Clear auth information on redirecting to other domain (`#1699 `_) +- Fix missing app.loop on startup hooks during tests (`#2060 `_) - Fix issue with synchronous session closing when using `ClientSession` as an - asynchronous context manager. (#2063) + asynchronous context manager. (`#2063 `_) - Fix issue with `CookieJar` incorrectly expiring cookies in some edge cases. - (#2084) + (`#2084 `_) - Force use of IPv4 during test, this will make tests run in a Docker container - (#2104) + (`#2104 `_) - Warnings about unawaited coroutines now correctly point to the user's code. - (#2106) + (`#2106 `_) - Fix issue with `IndexError` being raised by the `StreamReader.iter_chunks()` - generator. (#2112) -- Support HTTP 308 Permanent redirect in client class. (#2114) -- Fix `FileResponse` sending empty chunked body on 304. (#2143) + generator. (`#2112 `_) +- Support HTTP 308 Permanent redirect in client class. (`#2114 `_) +- Fix `FileResponse` sending empty chunked body on 304. (`#2143 `_) - Do not add `Content-Length: 0` to GET/HEAD/TRACE/OPTIONS requests by default. - (#2167) -- Fix parsing the Forwarded header according to RFC 7239. (#2170) -- Securely determining remote/scheme/host #2171 (#2171) -- Fix header name parsing, if name is split into multiple lines (#2183) + (`#2167 `_) +- Fix parsing the Forwarded header according to RFC 7239. 
(`#2170 `_) +- Securely determining remote/scheme/host #2171 (`#2171 `_) +- Fix header name parsing, if name is split into multiple lines (`#2183 `_) - Handle session close during connection, `KeyError: - ` (#2193) + ` (`#2193 `_) - Fixes uncaught `TypeError` in `helpers.guess_filename` if `name` is not a - string (#2201) + string (`#2201 `_) - Raise OSError on async DNS lookup if resolved domain is an alias for another - one, which does not have an A or CNAME record. (#2231) -- Fix incorrect warning in `StreamReader`. (#2251) -- Properly clone state of web request (#2284) + one, which does not have an A or CNAME record. (`#2231 `_) +- Fix incorrect warning in `StreamReader`. (`#2251 `_) +- Properly clone state of web request (`#2284 `_) - Fix C HTTP parser for cases when status line is split into different TCP - packets. (#2311) -- Fix `web.FileResponse` overriding user supplied Content-Type (#2317) + packets. (`#2311 `_) +- Fix `web.FileResponse` overriding user supplied Content-Type (`#2317 `_) Improved Documentation @@ -387,32 +387,32 @@ Improved Documentation - Add a note about possible performance degradation in `await resp.text()` if charset was not provided by `Content-Type` HTTP header. Pass explicit - encoding to solve it. (#1811) -- Drop `disqus` widget from documentation pages. (#2018) -- Add a graceful shutdown section to the client usage documentation. (#2039) -- Document `connector_owner` parameter. (#2072) -- Update the doc of web.Application (#2081) -- Fix mistake about access log disabling. (#2085) + encoding to solve it. (`#1811 `_) +- Drop `disqus` widget from documentation pages. (`#2018 `_) +- Add a graceful shutdown section to the client usage documentation. (`#2039 `_) +- Document `connector_owner` parameter. (`#2072 `_) +- Update the doc of web.Application (`#2081 `_) +- Fix mistake about access log disabling. (`#2085 `_) - Add example usage of on_startup and on_shutdown signals by creating and - disposing an aiopg connection engine. 
(#2131) + disposing an aiopg connection engine. (`#2131 `_) - Document `encoded=True` for `yarl.URL`, it disables all yarl transformations. - (#2198) + (`#2198 `_) - Document that all app's middleware factories are run for every request. - (#2225) + (`#2225 `_) - Reflect the fact that default resolver is threaded one starting from aiohttp - 1.1 (#2228) + 1.1 (`#2228 `_) Deprecations and Removals ------------------------- -- Drop deprecated `Server.finish_connections` (#2006) +- Drop deprecated `Server.finish_connections` (`#2006 `_) - Drop %O format from logging, use %b instead. Drop %e format from logging, - environment variables are not supported anymore. (#2123) -- Drop deprecated secure_proxy_ssl_header support (#2171) + environment variables are not supported anymore. (`#2123 `_) +- Drop deprecated secure_proxy_ssl_header support (`#2171 `_) - Removed TimeService in favor of simple caching. TimeService also had a bug - where it lost about 0.5 seconds per second. (#2176) -- Drop unused response_factory from static files API (#2290) + where it lost about 0.5 seconds per second. (`#2176 `_) +- Drop unused response_factory from static files API (`#2290 `_) Misc @@ -425,13 +425,13 @@ Misc ================== - Don't raise deprecation warning on - `loop.run_until_complete(client.close())` (#2065) + `loop.run_until_complete(client.close())` (`#2065 `_) 2.2.4 (2017-08-02) ================== - Fix issue with synchronous session closing when using ClientSession - as an asynchronous context manager. (#2063) + as an asynchronous context manager. (`#2063 `_) 2.2.3 (2017-07-04) ================== @@ -449,51 +449,51 @@ Misc - Relax `yarl` requirement to 0.11+ -- Backport #2026: `session.close` *is* a coroutine (#2029) +- Backport #2026: `session.close` *is* a coroutine (`#2029 `_) 2.2.0 (2017-06-20) ================== -- Add doc for add_head, update doc for add_get. (#1944) +- Add doc for add_head, update doc for add_get. 
(`#1944 `_) - Fixed consecutive calls for `Response.write_eof`. - Retain method attributes (e.g. :code:`__doc__`) when registering synchronous - handlers for resources. (#1953) + handlers for resources. (`#1953 `_) -- Added signal TERM handling in `run_app` to gracefully exit (#1932) +- Added signal TERM handling in `run_app` to gracefully exit (`#1932 `_) -- Fix websocket issues caused by frame fragmentation. (#1962) +- Fix websocket issues caused by frame fragmentation. (`#1962 `_) - Raise RuntimeError is you try to set the Content Length and enable - chunked encoding at the same time (#1941) + chunked encoding at the same time (`#1941 `_) - Small update for `unittest_run_loop` -- Use CIMultiDict for ClientRequest.skip_auto_headers (#1970) +- Use CIMultiDict for ClientRequest.skip_auto_headers (`#1970 `_) - Fix wrong startup sequence: test server and `run_app()` are not raise - `DeprecationWarning` now (#1947) + `DeprecationWarning` now (`#1947 `_) -- Make sure cleanup signal is sent if startup signal has been sent (#1959) +- Make sure cleanup signal is sent if startup signal has been sent (`#1959 `_) -- Fixed server keep-alive handler, could cause 100% cpu utilization (#1955) +- Fixed server keep-alive handler, could cause 100% cpu utilization (`#1955 `_) - Connection can be destroyed before response get processed if - `await aiohttp.request(..)` is used (#1981) + `await aiohttp.request(..)` is used (`#1981 `_) -- MultipartReader does not work with -OO (#1969) +- MultipartReader does not work with -OO (`#1969 `_) -- Fixed `ClientPayloadError` with blank `Content-Encoding` header (#1931) +- Fixed `ClientPayloadError` with blank `Content-Encoding` header (`#1931 `_) -- Support `deflate` encoding implemented in `httpbin.org/deflate` (#1918) +- Support `deflate` encoding implemented in `httpbin.org/deflate` (`#1918 `_) -- Fix BadStatusLine caused by extra `CRLF` after `POST` data (#1792) +- Fix BadStatusLine caused by extra `CRLF` after `POST` data (`#1792 `_) -- 
Keep a reference to `ClientSession` in response object (#1985) +- Keep a reference to `ClientSession` in response object (`#1985 `_) -- Deprecate undocumented `app.on_loop_available` signal (#1978) +- Deprecate undocumented `app.on_loop_available` signal (`#1978 `_) @@ -504,54 +504,54 @@ Misc https://github.com/PyO3/tokio - Write to transport ``\r\n`` before closing after keepalive timeout, - otherwise client can not detect socket disconnection. (#1883) + otherwise client can not detect socket disconnection. (`#1883 `_) - Only call `loop.close` in `run_app` if the user did *not* supply a loop. Useful for allowing clients to specify their own cleanup before closing the asyncio loop if they wish to tightly control loop behavior -- Content disposition with semicolon in filename (#917) +- Content disposition with semicolon in filename (`#917 `_) -- Added `request_info` to response object and `ClientResponseError`. (#1733) +- Added `request_info` to response object and `ClientResponseError`. (`#1733 `_) -- Added `history` to `ClientResponseError`. (#1741) +- Added `history` to `ClientResponseError`. (`#1741 `_) -- Allow to disable redirect url re-quoting (#1474) +- Allow to disable redirect url re-quoting (`#1474 `_) -- Handle RuntimeError from transport (#1790) +- Handle RuntimeError from transport (`#1790 `_) -- Dropped "%O" in access logger (#1673) +- Dropped "%O" in access logger (`#1673 `_) - Added `args` and `kwargs` to `unittest_run_loop`. Useful with other - decorators, for example `@patch`. (#1803) + decorators, for example `@patch`. (`#1803 `_) -- Added `iter_chunks` to response.content object. (#1805) +- Added `iter_chunks` to response.content object. (`#1805 `_) - Avoid creating TimerContext when there is no timeout to allow - compatibility with Tornado. (#1817) (#1180) + compatibility with Tornado. (`#1817 `_) (`#1180 `_) - Add `proxy_from_env` to `ClientRequest` to read from environment - variables. (#1791) + variables. 
(`#1791 `_) -- Add DummyCookieJar helper. (#1830) +- Add DummyCookieJar helper. (`#1830 `_) -- Fix assertion errors in Python 3.4 from noop helper. (#1847) +- Fix assertion errors in Python 3.4 from noop helper. (`#1847 `_) -- Do not unquote `+` in match_info values (#1816) +- Do not unquote `+` in match_info values (`#1816 `_) - Use Forwarded, X-Forwarded-Scheme and X-Forwarded-Host for better scheme and - host resolution. (#1134) + host resolution. (`#1134 `_) -- Fix sub-application middlewares resolution order (#1853) +- Fix sub-application middlewares resolution order (`#1853 `_) -- Fix applications comparison (#1866) +- Fix applications comparison (`#1866 `_) -- Fix static location in index when prefix is used (#1662) +- Fix static location in index when prefix is used (`#1662 `_) -- Make test server more reliable (#1896) +- Make test server more reliable (`#1896 `_) - Extend list of web exceptions, add HTTPUnprocessableEntity, - HTTPFailedDependency, HTTPInsufficientStorage status codes (#1920) + HTTPFailedDependency, HTTPInsufficientStorage status codes (`#1920 `_) 2.0.7 (2017-04-12) @@ -559,57 +559,57 @@ Misc - Fix *pypi* distribution -- Fix exception description (#1807) +- Fix exception description (`#1807 `_) -- Handle socket error in FileResponse (#1773) +- Handle socket error in FileResponse (`#1773 `_) -- Cancel websocket heartbeat on close (#1793) +- Cancel websocket heartbeat on close (`#1793 `_) 2.0.6 (2017-04-04) ================== -- Keeping blank values for `request.post()` and `multipart.form()` (#1765) +- Keeping blank values for `request.post()` and `multipart.form()` (`#1765 `_) -- TypeError in data_received of ResponseHandler (#1770) +- TypeError in data_received of ResponseHandler (`#1770 `_) - Fix ``web.run_app`` not to bind to default host-port pair if only socket is - passed (#1786) + passed (`#1786 `_) 2.0.5 (2017-03-29) ================== -- Memory leak with aiohttp.request (#1756) +- Memory leak with aiohttp.request (`#1756 `_) - 
Disable cleanup closed ssl transports by default. - Exception in request handling if the server responds before the body - is sent (#1761) + is sent (`#1761 `_) 2.0.4 (2017-03-27) ================== -- Memory leak with aiohttp.request (#1756) +- Memory leak with aiohttp.request (`#1756 `_) -- Encoding is always UTF-8 in POST data (#1750) +- Encoding is always UTF-8 in POST data (`#1750 `_) -- Do not add "Content-Disposition" header by default (#1755) +- Do not add "Content-Disposition" header by default (`#1755 `_) 2.0.3 (2017-03-24) ================== -- Call https website through proxy will cause error (#1745) +- Call https website through proxy will cause error (`#1745 `_) -- Fix exception on multipart/form-data post if content-type is not set (#1743) +- Fix exception on multipart/form-data post if content-type is not set (`#1743 `_) 2.0.2 (2017-03-21) ================== -- Fixed Application.on_loop_available signal (#1739) +- Fixed Application.on_loop_available signal (`#1739 `_) - Remove debug code @@ -617,21 +617,21 @@ Misc 2.0.1 (2017-03-21) ================== -- Fix allow-head to include name on route (#1737) +- Fix allow-head to include name on route (`#1737 `_) -- Fixed AttributeError in WebSocketResponse.can_prepare (#1736) +- Fixed AttributeError in WebSocketResponse.can_prepare (`#1736 `_) 2.0.0 (2017-03-20) ================== -- Added `json` to `ClientSession.request()` method (#1726) +- Added `json` to `ClientSession.request()` method (`#1726 `_) - Added session's `raise_for_status` parameter, automatically calls - raise_for_status() on any request. (#1724) + raise_for_status() on any request. (`#1724 `_) - `response.json()` raises `ClientReponseError` exception if response's - content type does not match (#1723) + content type does not match (`#1723 `_) - Cleanup timer and loop handle on any client exception. 
@@ -641,25 +641,25 @@ Misc `2.0.0rc1` (2017-03-15) ======================= -- Properly handle payload errors (#1710) +- Properly handle payload errors (`#1710 `_) -- Added `ClientWebSocketResponse.get_extra_info()` (#1717) +- Added `ClientWebSocketResponse.get_extra_info()` (`#1717 `_) - It is not possible to combine Transfer-Encoding and chunked parameter, - same for compress and Content-Encoding (#1655) + same for compress and Content-Encoding (`#1655 `_) - Connector's `limit` parameter indicates total concurrent connections. - New `limit_per_host` added, indicates total connections per endpoint. (#1601) + New `limit_per_host` added, indicates total connections per endpoint. (`#1601 `_) -- Use url's `raw_host` for name resolution (#1685) +- Use url's `raw_host` for name resolution (`#1685 `_) -- Change `ClientResponse.url` to `yarl.URL` instance (#1654) +- Change `ClientResponse.url` to `yarl.URL` instance (`#1654 `_) -- Add max_size parameter to web.Request reading methods (#1133) +- Add max_size parameter to web.Request reading methods (`#1133 `_) -- Web Request.post() stores data in temp files (#1469) +- Web Request.post() stores data in temp files (`#1469 `_) -- Add the `allow_head=True` keyword argument for `add_get` (#1618) +- Add the `allow_head=True` keyword argument for `add_get` (`#1618 `_) - `run_app` and the Command Line Interface now support serving over Unix domain sockets for faster inter-process communication. @@ -668,54 +668,54 @@ Misc e.g. for socket-based activated applications, when binding of a socket is done by the parent process. 
-- Implementation for Trailer headers parser is broken (#1619) +- Implementation for Trailer headers parser is broken (`#1619 `_) - Fix FileResponse to not fall on bad request (range out of file size) - Fix FileResponse to correct stream video to Chromes -- Deprecate public low-level api (#1657) +- Deprecate public low-level api (`#1657 `_) - Deprecate `encoding` parameter for ClientSession.request() method -- Dropped aiohttp.wsgi (#1108) +- Dropped aiohttp.wsgi (`#1108 `_) - Dropped `version` from ClientSession.request() method -- Dropped websocket version 76 support (#1160) +- Dropped websocket version 76 support (`#1160 `_) -- Dropped: `aiohttp.protocol.HttpPrefixParser` (#1590) +- Dropped: `aiohttp.protocol.HttpPrefixParser` (`#1590 `_) - Dropped: Servers response's `.started`, `.start()` and - `.can_start()` method (#1591) + `.can_start()` method (`#1591 `_) - Dropped: Adding `sub app` via `app.router.add_subapp()` is deprecated - use `app.add_subapp()` instead (#1592) + use `app.add_subapp()` instead (`#1592 `_) -- Dropped: `Application.finish()` and `Application.register_on_finish()` (#1602) +- Dropped: `Application.finish()` and `Application.register_on_finish()` (`#1602 `_) - Dropped: `web.Request.GET` and `web.Request.POST` - Dropped: aiohttp.get(), aiohttp.options(), aiohttp.head(), aiohttp.post(), aiohttp.put(), aiohttp.patch(), aiohttp.delete(), and - aiohttp.ws_connect() (#1593) + aiohttp.ws_connect() (`#1593 `_) -- Dropped: `aiohttp.web.WebSocketResponse.receive_msg()` (#1605) +- Dropped: `aiohttp.web.WebSocketResponse.receive_msg()` (`#1605 `_) - Dropped: `ServerHttpProtocol.keep_alive_timeout` attribute and - `keep-alive`, `keep_alive_on`, `timeout`, `log` constructor parameters (#1606) + `keep-alive`, `keep_alive_on`, `timeout`, `log` constructor parameters (`#1606 `_) - Dropped: `TCPConnector's`` `.resolve`, `.resolved_hosts`, `.clear_resolved_hosts()` attributes and `resolve` constructor - parameter (#1607) + parameter (`#1607 `_) -- Dropped 
`ProxyConnector` (#1609) +- Dropped `ProxyConnector` (`#1609 `_) 1.3.5 (2017-03-16) ================== -- Fixed None timeout support (#1720) +- Fixed None timeout support (`#1720 `_) 1.3.4 (2017-03-14) @@ -729,30 +729,30 @@ Misc - Fix file_sender to correct stream video to Chromes -- Fix NotImplementedError server exception (#1703) +- Fix NotImplementedError server exception (`#1703 `_) -- Clearer error message for URL without a host name. (#1691) +- Clearer error message for URL without a host name. (`#1691 `_) -- Silence deprecation warning in __repr__ (#1690) +- Silence deprecation warning in __repr__ (`#1690 `_) -- IDN + HTTPS = `ssl.CertificateError` (#1685) +- IDN + HTTPS = `ssl.CertificateError` (`#1685 `_) 1.3.3 (2017-02-19) ================== -- Fixed memory leak in time service (#1656) +- Fixed memory leak in time service (`#1656 `_) 1.3.2 (2017-02-16) ================== -- Awaiting on WebSocketResponse.send_* does not work (#1645) +- Awaiting on WebSocketResponse.send_* does not work (`#1645 `_) - Fix multiple calls to client ws_connect when using a shared header - dict (#1643) + dict (`#1643 `_) -- Make CookieJar.filter_cookies() accept plain string parameter. (#1636) +- Make CookieJar.filter_cookies() accept plain string parameter. 
(`#1636 `_) 1.3.1 (2017-02-09) @@ -760,65 +760,65 @@ Misc - Handle CLOSING in WebSocketResponse.__anext__ -- Fixed AttributeError 'drain' for server websocket handler (#1613) +- Fixed AttributeError 'drain' for server websocket handler (`#1613 `_) 1.3.0 (2017-02-08) ================== - Multipart writer validates the data on append instead of on a - request send (#920) + request send (`#920 `_) - Multipart reader accepts multipart messages with or without their epilogue - to consistently handle valid and legacy behaviors (#1526) (#1581) + to consistently handle valid and legacy behaviors (`#1526 `_) (`#1581 `_) - Separate read + connect + request timeouts # 1523 -- Do not swallow Upgrade header (#1587) +- Do not swallow Upgrade header (`#1587 `_) -- Fix polls demo run application (#1487) +- Fix polls demo run application (`#1487 `_) -- Ignore unknown 1XX status codes in client (#1353) +- Ignore unknown 1XX status codes in client (`#1353 `_) -- Fix sub-Multipart messages missing their headers on serialization (#1525) +- Fix sub-Multipart messages missing their headers on serialization (`#1525 `_) - Do not use readline when reading the content of a part - in the multipart reader (#1535) + in the multipart reader (`#1535 `_) -- Add optional flag for quoting `FormData` fields (#916) +- Add optional flag for quoting `FormData` fields (`#916 `_) -- 416 Range Not Satisfiable if requested range end > file size (#1588) +- 416 Range Not Satisfiable if requested range end > file size (`#1588 `_) -- Having a `:` or `@` in a route does not work (#1552) +- Having a `:` or `@` in a route does not work (`#1552 `_) - Added `receive_timeout` timeout for websocket to receive complete - message. (#1325) + message. (`#1325 `_) - Added `heartbeat` parameter for websocket to automatically send - `ping` message. (#1024) (#777) + `ping` message. 
(`#1024 `_) (`#777 `_) -- Remove `web.Application` dependency from `web.UrlDispatcher` (#1510) +- Remove `web.Application` dependency from `web.UrlDispatcher` (`#1510 `_) -- Accepting back-pressure from slow websocket clients (#1367) +- Accepting back-pressure from slow websocket clients (`#1367 `_) -- Do not pause transport during set_parser stage (#1211) +- Do not pause transport during set_parser stage (`#1211 `_) -- Lingering close does not terminate before timeout (#1559) +- Lingering close does not terminate before timeout (`#1559 `_) -- `setsockopt` may raise `OSError` exception if socket is closed already (#1595) +- `setsockopt` may raise `OSError` exception if socket is closed already (`#1595 `_) -- Lots of CancelledError when requests are interrupted (#1565) +- Lots of CancelledError when requests are interrupted (`#1565 `_) - Allow users to specify what should happen to decoding errors - when calling a responses `text()` method (#1542) + when calling a responses `text()` method (`#1542 `_) -- Back port std module `http.cookies` for python3.4.2 (#1566) +- Back port std module `http.cookies` for python3.4.2 (`#1566 `_) -- Maintain url's fragment in client response (#1314) +- Maintain url's fragment in client response (`#1314 `_) -- Allow concurrently close WebSocket connection (#754) +- Allow concurrently close WebSocket connection (`#754 `_) -- Gzipped responses with empty body raises ContentEncodingError (#609) +- Gzipped responses with empty body raises ContentEncodingError (`#609 `_) - Return 504 if request handle raises TimeoutError. @@ -828,25 +828,25 @@ Misc message during client response release - Abort closed ssl client transports, broken servers can keep socket - open un-limit time (#1568) + open un-limit time (`#1568 `_) - Log warning instead of `RuntimeError` is websocket connection is closed. 
- Deprecated: `aiohttp.protocol.HttpPrefixParser` - will be removed in 1.4 (#1590) + will be removed in 1.4 (`#1590 `_) - Deprecated: Servers response's `.started`, `.start()` and - `.can_start()` method will be removed in 1.4 (#1591) + `.can_start()` method will be removed in 1.4 (`#1591 `_) - Deprecated: Adding `sub app` via `app.router.add_subapp()` is deprecated - use `app.add_subapp()` instead, will be removed in 1.4 (#1592) + use `app.add_subapp()` instead, will be removed in 1.4 (`#1592 `_) - Deprecated: aiohttp.get(), aiohttp.options(), aiohttp.head(), aiohttp.post(), aiohttp.put(), aiohttp.patch(), aiohttp.delete(), and aiohttp.ws_connect() - will be removed in 1.4 (#1593) + will be removed in 1.4 (`#1593 `_) - Deprecated: `Application.finish()` and `Application.register_on_finish()` - will be removed in 1.4 (#1602) + will be removed in 1.4 (`#1602 `_) 1.2.0 (2016-12-17) @@ -854,13 +854,13 @@ Misc - Extract `BaseRequest` from `web.Request`, introduce `web.Server` (former `RequestHandlerFactory`), introduce new low-level web server - which is not coupled with `web.Application` and routing (#1362) + which is not coupled with `web.Application` and routing (`#1362 `_) -- Make `TestServer.make_url` compatible with `yarl.URL` (#1389) +- Make `TestServer.make_url` compatible with `yarl.URL` (`#1389 `_) -- Implement range requests for static files (#1382) +- Implement range requests for static files (`#1382 `_) -- Support task attribute for StreamResponse (#1410) +- Support task attribute for StreamResponse (`#1410 `_) - Drop `TestClient.app` property, use `TestClient.server.app` instead (BACKWARD INCOMPATIBLE) @@ -871,84 +871,84 @@ Misc - `TestClient.server` property returns a test server instance, was `asyncio.AbstractServer` (BACKWARD INCOMPATIBLE) -- Follow gunicorn's signal semantics in `Gunicorn[UVLoop]WebWorker` (#1201) +- Follow gunicorn's signal semantics in `Gunicorn[UVLoop]WebWorker` (`#1201 `_) - Call worker_int and worker_abort callbacks in - 
`Gunicorn[UVLoop]WebWorker` (#1202) + `Gunicorn[UVLoop]WebWorker` (`#1202 `_) -- Has functional tests for client proxy (#1218) +- Has functional tests for client proxy (`#1218 `_) -- Fix bugs with client proxy target path and proxy host with port (#1413) +- Fix bugs with client proxy target path and proxy host with port (`#1413 `_) -- Fix bugs related to the use of unicode hostnames (#1444) +- Fix bugs related to the use of unicode hostnames (`#1444 `_) -- Preserve cookie quoting/escaping (#1453) +- Preserve cookie quoting/escaping (`#1453 `_) -- FileSender will send gzipped response if gzip version available (#1426) +- FileSender will send gzipped response if gzip version available (`#1426 `_) - Don't override `Content-Length` header in `web.Response` if no body - was set (#1400) + was set (`#1400 `_) -- Introduce `router.post_init()` for solving (#1373) +- Introduce `router.post_init()` for solving (`#1373 `_) - Fix raise error in case of multiple calls of `TimeServive.stop()` -- Allow to raise web exceptions on router resolving stage (#1460) +- Allow to raise web exceptions on router resolving stage (`#1460 `_) -- Add a warning for session creation outside of coroutine (#1468) +- Add a warning for session creation outside of coroutine (`#1468 `_) - Avoid a race when application might start accepting incoming requests but startup signals are not processed yet e98e8c6 - Raise a `RuntimeError` when trying to change the status of the HTTP response - after the headers have been sent (#1480) + after the headers have been sent (`#1480 `_) -- Fix bug with https proxy acquired cleanup (#1340) +- Fix bug with https proxy acquired cleanup (`#1340 `_) -- Use UTF-8 as the default encoding for multipart text parts (#1484) +- Use UTF-8 as the default encoding for multipart text parts (`#1484 `_) 1.1.6 (2016-11-28) ================== - Fix `BodyPartReader.read_chunk` bug about returns zero bytes before - `EOF` (#1428) + `EOF` (`#1428 `_) 1.1.5 (2016-11-16) ================== -- 
Fix static file serving in fallback mode (#1401) +- Fix static file serving in fallback mode (`#1401 `_) 1.1.4 (2016-11-14) ================== -- Make `TestServer.make_url` compatible with `yarl.URL` (#1389) +- Make `TestServer.make_url` compatible with `yarl.URL` (`#1389 `_) - Generate informative exception on redirects from server which - does not provide redirection headers (#1396) + does not provide redirection headers (`#1396 `_) 1.1.3 (2016-11-10) ================== -- Support *root* resources for sub-applications (#1379) +- Support *root* resources for sub-applications (`#1379 `_) 1.1.2 (2016-11-08) ================== -- Allow starting variables with an underscore (#1379) +- Allow starting variables with an underscore (`#1379 `_) -- Properly process UNIX sockets by gunicorn worker (#1375) +- Properly process UNIX sockets by gunicorn worker (`#1375 `_) - Fix ordering for `FrozenList` -- Don't propagate pre and post signals to sub-application (#1377) +- Don't propagate pre and post signals to sub-application (`#1377 `_) 1.1.1 (2016-11-04) ================== -- Fix documentation generation (#1120) +- Fix documentation generation (`#1120 `_) 1.1.0 (2016-11-03) ================== @@ -956,23 +956,23 @@ Misc - Drop deprecated `WSClientDisconnectedError` (BACKWARD INCOMPATIBLE) - Use `yarl.URL` in client API. The change is 99% backward compatible - but `ClientResponse.url` is an `yarl.URL` instance now. (#1217) + but `ClientResponse.url` is an `yarl.URL` instance now. (`#1217 `_) -- Close idle keep-alive connections on shutdown (#1222) +- Close idle keep-alive connections on shutdown (`#1222 `_) -- Modify regex in AccessLogger to accept underscore and numbers (#1225) +- Modify regex in AccessLogger to accept underscore and numbers (`#1225 `_) - Use `yarl.URL` in web server API. `web.Request.rel_url` and `web.Request.url` are added. URLs and templates are percent-encoded - now. (#1224) + now. 
(`#1224 `_) -- Accept `yarl.URL` by server redirections (#1278) +- Accept `yarl.URL` by server redirections (`#1278 `_) -- Return `yarl.URL` by `.make_url()` testing utility (#1279) +- Return `yarl.URL` by `.make_url()` testing utility (`#1279 `_) -- Properly format IPv6 addresses by `aiohttp.web.run_app` (#1139) +- Properly format IPv6 addresses by `aiohttp.web.run_app` (`#1139 `_) -- Use `yarl.URL` by server API (#1288) +- Use `yarl.URL` by server API (`#1288 `_) * Introduce `resource.url_for()`, deprecate `resource.url()`. @@ -983,38 +983,38 @@ Misc * Drop old-style routes: `Route`, `PlainRoute`, `DynamicRoute`, `StaticRoute`, `ResourceAdapter`. -- Revert `resp.url` back to `str`, introduce `resp.url_obj` (#1292) +- Revert `resp.url` back to `str`, introduce `resp.url_obj` (`#1292 `_) -- Raise ValueError if BasicAuth login has a ":" character (#1307) +- Raise ValueError if BasicAuth login has a ":" character (`#1307 `_) - Fix bug when ClientRequest send payload file with opened as - open('filename', 'r+b') (#1306) + open('filename', 'r+b') (`#1306 `_) -- Enhancement to AccessLogger (pass *extra* dict) (#1303) +- Enhancement to AccessLogger (pass *extra* dict) (`#1303 `_) -- Show more verbose message on import errors (#1319) +- Show more verbose message on import errors (`#1319 `_) -- Added save and load functionality for `CookieJar` (#1219) +- Added save and load functionality for `CookieJar` (`#1219 `_) -- Added option on `StaticRoute` to follow symlinks (#1299) +- Added option on `StaticRoute` to follow symlinks (`#1299 `_) -- Force encoding of `application/json` content type to utf-8 (#1339) +- Force encoding of `application/json` content type to utf-8 (`#1339 `_) -- Fix invalid invocations of `errors.LineTooLong` (#1335) +- Fix invalid invocations of `errors.LineTooLong` (`#1335 `_) -- Websockets: Stop `async for` iteration when connection is closed (#1144) +- Websockets: Stop `async for` iteration when connection is closed (`#1144 `_) -- Ensure TestClient 
HTTP methods return a context manager (#1318) +- Ensure TestClient HTTP methods return a context manager (`#1318 `_) - Raise `ClientDisconnectedError` to `FlowControlStreamReader` read function - if `ClientSession` object is closed by client when reading data. (#1323) + if `ClientSession` object is closed by client when reading data. (`#1323 `_) -- Document deployment without `Gunicorn` (#1120) +- Document deployment without `Gunicorn` (`#1120 `_) - Add deprecation warning for MD5 and SHA1 digests when used for fingerprint - of site certs in TCPConnector. (#1186) + of site certs in TCPConnector. (`#1186 `_) -- Implement sub-applications (#1301) +- Implement sub-applications (`#1301 `_) - Don't inherit `web.Request` from `dict` but implement `MutableMapping` protocol. @@ -1039,55 +1039,55 @@ Misc boost of your application -- a couple DB requests and business logic is still the main bottleneck. -- Boost performance by adding a custom time service (#1350) +- Boost performance by adding a custom time service (`#1350 `_) - Extend `ClientResponse` with `content_type` and `charset` - properties like in `web.Request`. (#1349) + properties like in `web.Request`. (`#1349 `_) -- Disable aiodns by default (#559) +- Disable aiodns by default (`#559 `_) - Don't flap `tcp_cork` in client code, use TCP_NODELAY mode by default. 
-- Implement `web.Request.clone()` (#1361) +- Implement `web.Request.clone()` (`#1361 `_) 1.0.5 (2016-10-11) ================== - Fix StreamReader._read_nowait to return all available - data up to the requested amount (#1297) + data up to the requested amount (`#1297 `_) 1.0.4 (2016-09-22) ================== - Fix FlowControlStreamReader.read_nowait so that it checks - whether the transport is paused (#1206) + whether the transport is paused (`#1206 `_) 1.0.2 (2016-09-22) ================== -- Make CookieJar compatible with 32-bit systems (#1188) +- Make CookieJar compatible with 32-bit systems (`#1188 `_) -- Add missing `WSMsgType` to `web_ws.__all__`, see (#1200) +- Add missing `WSMsgType` to `web_ws.__all__`, see (`#1200 `_) -- Fix `CookieJar` ctor when called with `loop=None` (#1203) +- Fix `CookieJar` ctor when called with `loop=None` (`#1203 `_) -- Fix broken upper-casing in wsgi support (#1197) +- Fix broken upper-casing in wsgi support (`#1197 `_) 1.0.1 (2016-09-16) ================== - Restore `aiohttp.web.MsgType` alias for `aiohttp.WSMsgType` for sake - of backward compatibility (#1178) + of backward compatibility (`#1178 `_) - Tune alabaster schema. - Use `text/html` content type for displaying index pages by static file handler. -- Fix `AssertionError` in static file handling (#1177) +- Fix `AssertionError` in static file handling (`#1177 `_) - Fix access log formats `%O` and `%b` for static file handling @@ -1099,9 +1099,9 @@ Misc ================== - Change default size for client session's connection pool from - unlimited to 20 (#977) + unlimited to 20 (`#977 `_) -- Add IE support for cookie deletion. (#994) +- Add IE support for cookie deletion. 
(`#994 `_) - Remove deprecated `WebSocketResponse.wait_closed` method (BACKWARD INCOMPATIBLE) @@ -1110,26 +1110,26 @@ Misc method (BACKWARD INCOMPATIBLE) - Avoid using of mutable CIMultiDict kw param in make_mocked_request - (#997) + (`#997 `_) - Make WebSocketResponse.close a little bit faster by avoiding new task creating just for timeout measurement - Add `proxy` and `proxy_auth` params to `client.get()` and family, - deprecate `ProxyConnector` (#998) + deprecate `ProxyConnector` (`#998 `_) - Add support for websocket send_json and receive_json, synchronize - server and client API for websockets (#984) + server and client API for websockets (`#984 `_) - Implement router shourtcuts for most useful HTTP methods, use `app.router.add_get()`, `app.router.add_post()` etc. instead of - `app.router.add_route()` (#986) + `app.router.add_route()` (`#986 `_) -- Support SSL connections for gunicorn worker (#1003) +- Support SSL connections for gunicorn worker (`#1003 `_) - Move obsolete examples to legacy folder -- Switch to multidict 2.0 and title-cased strings (#1015) +- Switch to multidict 2.0 and title-cased strings (`#1015 `_) - `{FOO}e` logger format is case-sensitive now @@ -1145,9 +1145,9 @@ Misc - Remove deprecated decode param from resp.read(decode=True) -- Use 5min default client timeout (#1028) +- Use 5min default client timeout (`#1028 `_) -- Relax HTTP method validation in UrlDispatcher (#1037) +- Relax HTTP method validation in UrlDispatcher (`#1037 `_) - Pin minimal supported asyncio version to 3.4.2+ (`loop.is_close()` should be present) @@ -1157,84 +1157,84 @@ Misc - Link header for 451 status code is mandatory -- Fix test_client fixture to allow multiple clients per test (#1072) +- Fix test_client fixture to allow multiple clients per test (`#1072 `_) -- make_mocked_request now accepts dict as headers (#1073) +- make_mocked_request now accepts dict as headers (`#1073 `_) - Add Python 3.5.2/3.6+ compatibility patch for async generator - protocol change 
(#1082) + protocol change (`#1082 `_) -- Improvement test_client can accept instance object (#1083) +- Improvement test_client can accept instance object (`#1083 `_) -- Simplify ServerHttpProtocol implementation (#1060) +- Simplify ServerHttpProtocol implementation (`#1060 `_) - Add a flag for optional showing directory index for static file - handling (#921) + handling (`#921 `_) -- Define `web.Application.on_startup()` signal handler (#1103) +- Define `web.Application.on_startup()` signal handler (`#1103 `_) -- Drop ChunkedParser and LinesParser (#1111) +- Drop ChunkedParser and LinesParser (`#1111 `_) -- Call `Application.startup` in GunicornWebWorker (#1105) +- Call `Application.startup` in GunicornWebWorker (`#1105 `_) - Fix client handling hostnames with 63 bytes when a port is given in - the url (#1044) + the url (`#1044 `_) -- Implement proxy support for ClientSession.ws_connect (#1025) +- Implement proxy support for ClientSession.ws_connect (`#1025 `_) -- Return named tuple from WebSocketResponse.can_prepare (#1016) +- Return named tuple from WebSocketResponse.can_prepare (`#1016 `_) -- Fix access_log_format in `GunicornWebWorker` (#1117) +- Fix access_log_format in `GunicornWebWorker` (`#1117 `_) -- Setup Content-Type to application/octet-stream by default (#1124) +- Setup Content-Type to application/octet-stream by default (`#1124 `_) - Deprecate debug parameter from app.make_handler(), use - `Application(debug=True)` instead (#1121) + `Application(debug=True)` instead (`#1121 `_) -- Remove fragment string in request path (#846) +- Remove fragment string in request path (`#846 `_) -- Use aiodns.DNSResolver.gethostbyname() if available (#1136) +- Use aiodns.DNSResolver.gethostbyname() if available (`#1136 `_) -- Fix static file sending on uvloop when sendfile is available (#1093) +- Fix static file sending on uvloop when sendfile is available (`#1093 `_) -- Make prettier urls if query is empty dict (#1143) +- Make prettier urls if query is empty dict 
(`#1143 `_) -- Fix redirects for HEAD requests (#1147) +- Fix redirects for HEAD requests (`#1147 `_) -- Default value for `StreamReader.read_nowait` is -1 from now (#1150) +- Default value for `StreamReader.read_nowait` is -1 from now (`#1150 `_) - `aiohttp.StreamReader` is not inherited from `asyncio.StreamReader` from now - (BACKWARD INCOMPATIBLE) (#1150) + (BACKWARD INCOMPATIBLE) (`#1150 `_) -- Streams documentation added (#1150) +- Streams documentation added (`#1150 `_) -- Add `multipart` coroutine method for web Request object (#1067) +- Add `multipart` coroutine method for web Request object (`#1067 `_) -- Publish ClientSession.loop property (#1149) +- Publish ClientSession.loop property (`#1149 `_) -- Fix static file with spaces (#1140) +- Fix static file with spaces (`#1140 `_) -- Fix piling up asyncio loop by cookie expiration callbacks (#1061) +- Fix piling up asyncio loop by cookie expiration callbacks (`#1061 `_) - Drop `Timeout` class for sake of `async_timeout` external library. 
`aiohttp.Timeout` is an alias for `async_timeout.timeout` - `use_dns_cache` parameter of `aiohttp.TCPConnector` is `True` by - default (BACKWARD INCOMPATIBLE) (#1152) + default (BACKWARD INCOMPATIBLE) (`#1152 `_) - `aiohttp.TCPConnector` uses asynchronous DNS resolver if available by - default (BACKWARD INCOMPATIBLE) (#1152) + default (BACKWARD INCOMPATIBLE) (`#1152 `_) -- Conform to RFC3986 - do not include url fragments in client requests (#1174) +- Conform to RFC3986 - do not include url fragments in client requests (`#1174 `_) -- Drop `ClientSession.cookies` (BACKWARD INCOMPATIBLE) (#1173) +- Drop `ClientSession.cookies` (BACKWARD INCOMPATIBLE) (`#1173 `_) -- Refactor `AbstractCookieJar` public API (BACKWARD INCOMPATIBLE) (#1173) +- Refactor `AbstractCookieJar` public API (BACKWARD INCOMPATIBLE) (`#1173 `_) - Fix clashing cookies with have the same name but belong to different - domains (BACKWARD INCOMPATIBLE) (#1125) + domains (BACKWARD INCOMPATIBLE) (`#1125 `_) -- Support binary Content-Transfer-Encoding (#1169) +- Support binary Content-Transfer-Encoding (`#1169 `_) 0.22.5 (08-02-2016) @@ -1245,17 +1245,17 @@ Misc 0.22.3 (07-26-2016) =================== -- Do not filter cookies if unsafe flag provided (#1005) +- Do not filter cookies if unsafe flag provided (`#1005 `_) 0.22.2 (07-23-2016) =================== -- Suppress CancelledError when Timeout raises TimeoutError (#970) +- Suppress CancelledError when Timeout raises TimeoutError (`#970 `_) - Don't expose `aiohttp.__version__` -- Add unsafe parameter to CookieJar (#968) +- Add unsafe parameter to CookieJar (`#968 `_) - Use unsafe cookie jar in test client tools @@ -1266,88 +1266,88 @@ Misc =================== - Large cookie expiration/max-age does not break an event loop from now - (fixes (#967)) + (fixes (`#967 `_)) 0.22.0 (07-15-2016) =================== -- Fix bug in serving static directory (#803) +- Fix bug in serving static directory (`#803 `_) -- Fix command line arg parsing (#797) +- Fix command 
line arg parsing (`#797 `_) -- Fix a documentation chapter about cookie usage (#790) +- Fix a documentation chapter about cookie usage (`#790 `_) -- Handle empty body with gzipped encoding (#758) +- Handle empty body with gzipped encoding (`#758 `_) -- Support 451 Unavailable For Legal Reasons http status (#697) +- Support 451 Unavailable For Legal Reasons http status (`#697 `_) -- Fix Cookie share example and few small typos in docs (#817) +- Fix Cookie share example and few small typos in docs (`#817 `_) -- UrlDispatcher.add_route with partial coroutine handler (#814) +- UrlDispatcher.add_route with partial coroutine handler (`#814 `_) -- Optional support for aiodns (#728) +- Optional support for aiodns (`#728 `_) -- Add ServiceRestart and TryAgainLater websocket close codes (#828) +- Add ServiceRestart and TryAgainLater websocket close codes (`#828 `_) -- Fix prompt message for `web.run_app` (#832) +- Fix prompt message for `web.run_app` (`#832 `_) -- Allow to pass None as a timeout value to disable timeout logic (#834) +- Allow to pass None as a timeout value to disable timeout logic (`#834 `_) -- Fix leak of connection slot during connection error (#835) +- Fix leak of connection slot during connection error (`#835 `_) - Gunicorn worker with uvloop support - `aiohttp.worker.GunicornUVLoopWebWorker` (#878) + `aiohttp.worker.GunicornUVLoopWebWorker` (`#878 `_) -- Don't send body in response to HEAD request (#838) +- Don't send body in response to HEAD request (`#838 `_) -- Skip the preamble in MultipartReader (#881) +- Skip the preamble in MultipartReader (`#881 `_) -- Implement BasicAuth decode classmethod. (#744) +- Implement BasicAuth decode classmethod. 
(`#744 `_) -- Don't crash logger when transport is None (#889) +- Don't crash logger when transport is None (`#889 `_) - Use a create_future compatibility wrapper instead of creating - Futures directly (#896) + Futures directly (`#896 `_) -- Add test utilities to aiohttp (#902) +- Add test utilities to aiohttp (`#902 `_) -- Improve Request.__repr__ (#875) +- Improve Request.__repr__ (`#875 `_) -- Skip DNS resolving if provided host is already an ip address (#874) +- Skip DNS resolving if provided host is already an ip address (`#874 `_) -- Add headers to ClientSession.ws_connect (#785) +- Add headers to ClientSession.ws_connect (`#785 `_) -- Document that server can send pre-compressed data (#906) +- Document that server can send pre-compressed data (`#906 `_) -- Don't add Content-Encoding and Transfer-Encoding if no body (#891) +- Don't add Content-Encoding and Transfer-Encoding if no body (`#891 `_) -- Add json() convenience methods to websocket message objects (#897) +- Add json() convenience methods to websocket message objects (`#897 `_) -- Add client_resp.raise_for_status() (#908) +- Add client_resp.raise_for_status() (`#908 `_) -- Implement cookie filter (#799) +- Implement cookie filter (`#799 `_) -- Include an example of middleware to handle error pages (#909) +- Include an example of middleware to handle error pages (`#909 `_) -- Fix error handling in StaticFileMixin (#856) +- Fix error handling in StaticFileMixin (`#856 `_) -- Add mocked request helper (#900) +- Add mocked request helper (`#900 `_) -- Fix empty ALLOW Response header for cls based View (#929) +- Fix empty ALLOW Response header for cls based View (`#929 `_) -- Respect CONNECT method to implement a proxy server (#847) +- Respect CONNECT method to implement a proxy server (`#847 `_) -- Add pytest_plugin (#914) +- Add pytest_plugin (`#914 `_) - Add tutorial - Add backlog option to support more than 128 (default value in - "create_server" function) concurrent connections (#892) + 
"create_server" function) concurrent connections (`#892 `_) -- Allow configuration of header size limits (#912) +- Allow configuration of header size limits (`#912 `_) -- Separate sending file logic from StaticRoute dispatcher (#901) +- Separate sending file logic from StaticRoute dispatcher (`#901 `_) - Drop deprecated share_cookies connector option (BACKWARD INCOMPATIBLE) @@ -1360,28 +1360,28 @@ Misc - Drop all mentions about api changes in documentation for versions older than 0.16 -- Allow to override default cookie jar (#963) +- Allow to override default cookie jar (`#963 `_) - Add manylinux wheel builds -- Dup a socket for sendfile usage (#964) +- Dup a socket for sendfile usage (`#964 `_) 0.21.6 (05-05-2016) =================== -- Drop initial query parameters on redirects (#853) +- Drop initial query parameters on redirects (`#853 `_) 0.21.5 (03-22-2016) =================== -- Fix command line arg parsing (#797) +- Fix command line arg parsing (`#797 `_) 0.21.4 (03-12-2016) =================== - Fix ResourceAdapter: don't add method to allowed if resource is not - match (#826) + match (`#826 `_) - Fix Resource: append found method to returned allowed methods @@ -1389,12 +1389,12 @@ Misc =================== - Fix a regression: support for handling ~/path in static file routes was - broken (#782) + broken (`#782 `_) 0.21.1 (02-10-2016) =================== -- Make new resources classes public (#767) +- Make new resources classes public (`#767 `_) - Add `router.resources()` view @@ -1403,22 +1403,22 @@ Misc 0.21.0 (02-04-2016) =================== -- Introduce on_shutdown signal (#722) +- Introduce on_shutdown signal (`#722 `_) -- Implement raw input headers (#726) +- Implement raw input headers (`#726 `_) -- Implement web.run_app utility function (#734) +- Implement web.run_app utility function (`#734 `_) - Introduce on_cleanup signal - Deprecate Application.finish() / Application.register_on_finish() in favor of on_cleanup. 
-- Get rid of bare aiohttp.request(), aiohttp.get() and family in docs (#729) +- Get rid of bare aiohttp.request(), aiohttp.get() and family in docs (`#729 `_) -- Deprecate bare aiohttp.request(), aiohttp.get() and family (#729) +- Deprecate bare aiohttp.request(), aiohttp.get() and family (`#729 `_) -- Refactor keep-alive support (#737): +- Refactor keep-alive support (`#737 `_): - Enable keepalive for HTTP 1.0 by default @@ -1437,18 +1437,18 @@ Misc - don't send `Connection` header for HTTP 1.0 - Add version parameter to ClientSession constructor, - deprecate it for session.request() and family (#736) + deprecate it for session.request() and family (`#736 `_) -- Enable access log by default (#735) +- Enable access log by default (`#735 `_) - Deprecate app.router.register_route() (the method was not documented intentionally BTW). - Deprecate app.router.named_routes() in favor of app.router.named_resources() -- route.add_static accepts pathlib.Path now (#743) +- route.add_static accepts pathlib.Path now (`#743 `_) -- Add command line support: `$ python -m aiohttp.web package.main` (#740) +- Add command line support: `$ python -m aiohttp.web package.main` (`#740 `_) - FAQ section was added to docs. 
Enjoy and fill free to contribute new topics @@ -1456,32 +1456,32 @@ Misc - Document ClientResponse's host, method, url properties -- Use CORK/NODELAY in client API (#748) +- Use CORK/NODELAY in client API (`#748 `_) - ClientSession.close and Connector.close are coroutines now - Close client connection on exception in ClientResponse.release() -- Allow to read multipart parts without content-length specified (#750) +- Allow to read multipart parts without content-length specified (`#750 `_) -- Add support for unix domain sockets to gunicorn worker (#470) +- Add support for unix domain sockets to gunicorn worker (`#470 `_) -- Add test for default Expect handler (#601) +- Add test for default Expect handler (`#601 `_) - Add the first demo project -- Rename `loader` keyword argument in `web.Request.json` method. (#646) +- Rename `loader` keyword argument in `web.Request.json` method. (`#646 `_) -- Add local socket binding for TCPConnector (#678) +- Add local socket binding for TCPConnector (`#678 `_) 0.20.2 (01-07-2016) =================== -- Enable use of `await` for a class based view (#717) +- Enable use of `await` for a class based view (`#717 `_) -- Check address family to fill wsgi env properly (#718) +- Check address family to fill wsgi env properly (`#718 `_) -- Fix memory leak in headers processing (thanks to Marco Paolini) (#723) +- Fix memory leak in headers processing (thanks to Marco Paolini) (`#723 `_) 0.20.1 (12-30-2015) =================== @@ -1489,7 +1489,7 @@ Misc - Raise RuntimeError is Timeout context manager was used outside of task context. 
-- Add number of bytes to stream.read_nowait (#700) +- Add number of bytes to stream.read_nowait (`#700 `_) - Use X-FORWARDED-PROTO for wsgi.url_scheme when available @@ -1500,19 +1500,19 @@ Misc - Extend list of web exceptions, add HTTPMisdirectedRequest, HTTPUpgradeRequired, HTTPPreconditionRequired, HTTPTooManyRequests, HTTPRequestHeaderFieldsTooLarge, HTTPVariantAlsoNegotiates, - HTTPNotExtended, HTTPNetworkAuthenticationRequired status codes (#644) + HTTPNotExtended, HTTPNetworkAuthenticationRequired status codes (`#644 `_) -- Do not remove AUTHORIZATION header by WSGI handler (#649) +- Do not remove AUTHORIZATION header by WSGI handler (`#649 `_) -- Fix broken support for https proxies with authentication (#617) +- Fix broken support for https proxies with authentication (`#617 `_) - Get REMOTE_* and SEVER_* http vars from headers when listening on - unix socket (#654) + unix socket (`#654 `_) -- Add HTTP 308 support (#663) +- Add HTTP 308 support (`#663 `_) - Add Tf format (time to serve request in seconds, %06f format) to - access log (#669) + access log (`#669 `_) - Remove one and a half years long deprecated ClientResponse.read_and_close() method @@ -1521,77 +1521,77 @@ Misc on sending chunked encoded data - Use TCP_CORK and TCP_NODELAY to optimize network latency and - throughput (#680) + throughput (`#680 `_) -- Websocket XOR performance improved (#687) +- Websocket XOR performance improved (`#687 `_) -- Avoid sending cookie attributes in Cookie header (#613) +- Avoid sending cookie attributes in Cookie header (`#613 `_) - Round server timeouts to seconds for grouping pending calls. That - leads to less amount of poller syscalls e.g. epoll.poll(). (#702) + leads to less amount of poller syscalls e.g. epoll.poll(). 
(`#702 `_) -- Close connection on websocket handshake error (#703) +- Close connection on websocket handshake error (`#703 `_) -- Implement class based views (#684) +- Implement class based views (`#684 `_) -- Add *headers* parameter to ws_connect() (#709) +- Add *headers* parameter to ws_connect() (`#709 `_) -- Drop unused function `parse_remote_addr()` (#708) +- Drop unused function `parse_remote_addr()` (`#708 `_) -- Close session on exception (#707) +- Close session on exception (`#707 `_) -- Store http code and headers in WSServerHandshakeError (#706) +- Store http code and headers in WSServerHandshakeError (`#706 `_) -- Make some low-level message properties readonly (#710) +- Make some low-level message properties readonly (`#710 `_) 0.19.0 (11-25-2015) =================== -- Memory leak in ParserBuffer (#579) +- Memory leak in ParserBuffer (`#579 `_) - Support gunicorn's `max_requests` settings in gunicorn worker -- Fix wsgi environment building (#573) +- Fix wsgi environment building (`#573 `_) -- Improve access logging (#572) +- Improve access logging (`#572 `_) -- Drop unused host and port from low-level server (#586) +- Drop unused host and port from low-level server (`#586 `_) -- Add Python 3.5 `async for` implementation to server websocket (#543) +- Add Python 3.5 `async for` implementation to server websocket (`#543 `_) - Add Python 3.5 `async for` implementation to client websocket - Add Python 3.5 `async with` implementation to client websocket -- Add charset parameter to web.Response constructor (#593) +- Add charset parameter to web.Response constructor (`#593 `_) - Forbid passing both Content-Type header and content_type or charset params into web.Response constructor -- Forbid duplicating of web.Application and web.Request (#602) +- Forbid duplicating of web.Application and web.Request (`#602 `_) -- Add an option to pass Origin header in ws_connect (#607) +- Add an option to pass Origin header in ws_connect (`#607 `_) -- Add json_response 
function (#592) +- Add json_response function (`#592 `_) -- Make concurrent connections respect limits (#581) +- Make concurrent connections respect limits (`#581 `_) -- Collect history of responses if redirects occur (#614) +- Collect history of responses if redirects occur (`#614 `_) -- Enable passing pre-compressed data in requests (#621) +- Enable passing pre-compressed data in requests (`#621 `_) -- Expose named routes via UrlDispatcher.named_routes() (#622) +- Expose named routes via UrlDispatcher.named_routes() (`#622 `_) -- Allow disabling sendfile by environment variable AIOHTTP_NOSENDFILE (#629) +- Allow disabling sendfile by environment variable AIOHTTP_NOSENDFILE (`#629 `_) - Use ensure_future if available -- Always quote params for Content-Disposition (#641) +- Always quote params for Content-Disposition (`#641 `_) -- Support async for in multipart reader (#640) +- Support async for in multipart reader (`#640 `_) -- Add Timeout context manager (#611) +- Add Timeout context manager (`#611 `_) 0.18.4 (13-11-2015) =================== @@ -1602,12 +1602,12 @@ Misc 0.18.3 (25-10-2015) =================== -- Fix formatting for _RequestContextManager helper (#590) +- Fix formatting for _RequestContextManager helper (`#590 `_) 0.18.2 (22-10-2015) =================== -- Fix regression for OpenSSL < 1.0.0 (#583) +- Fix regression for OpenSSL < 1.0.0 (`#583 `_) 0.18.1 (20-10-2015) =================== @@ -1619,7 +1619,7 @@ Misc =================== - Use errors.HttpProcessingError.message as HTTP error reason and - message (#459) + message (`#459 `_) - Optimize cythonized multidict a bit @@ -1627,27 +1627,27 @@ Misc - default headers in ClientSession are now case-insensitive -- Make '=' char and 'wss://' schema safe in urls (#477) +- Make '=' char and 'wss://' schema safe in urls (`#477 `_) -- `ClientResponse.close()` forces connection closing by default from now (#479) +- `ClientResponse.close()` forces connection closing by default from now (`#479 `_) N.B. 
Backward incompatible change: was `.close(force=False) Using `force` parameter for the method is deprecated: use `.release()` instead. -- Properly requote URL's path (#480) +- Properly requote URL's path (`#480 `_) -- add `skip_auto_headers` parameter for client API (#486) +- add `skip_auto_headers` parameter for client API (`#486 `_) -- Properly parse URL path in aiohttp.web.Request (#489) +- Properly parse URL path in aiohttp.web.Request (`#489 `_) -- Raise RuntimeError when chunked enabled and HTTP is 1.0 (#488) +- Raise RuntimeError when chunked enabled and HTTP is 1.0 (`#488 `_) -- Fix a bug with processing io.BytesIO as data parameter for client API (#500) +- Fix a bug with processing io.BytesIO as data parameter for client API (`#500 `_) -- Skip auto-generation of Content-Type header (#507) +- Skip auto-generation of Content-Type header (`#507 `_) -- Use sendfile facility for static file handling (#503) +- Use sendfile facility for static file handling (`#503 `_) - Default `response_factory` in `app.router.add_static` now is `StreamResponse`, not `None`. The functionality is not changed if @@ -1656,17 +1656,17 @@ Misc - Drop `ClientResponse.message` attribute, it was always implementation detail. - Streams are optimized for speed and mostly memory in case of a big - HTTP message sizes (#496) + HTTP message sizes (`#496 `_) - Fix a bug for server-side cookies for dropping cookie and setting it again without Max-Age parameter. -- Don't trim redirect URL in client API (#499) +- Don't trim redirect URL in client API (`#499 `_) -- Extend precision of access log "D" to milliseconds (#527) +- Extend precision of access log "D" to milliseconds (`#527 `_) - Deprecate `StreamResponse.start()` method in favor of - `StreamResponse.prepare()` coroutine (#525) + `StreamResponse.prepare()` coroutine (`#525 `_) `.start()` is still supported but responses begun with `.start()` does not call signal for response preparing to be sent. 
@@ -1674,48 +1674,48 @@ Misc - Add `StreamReader.__repr__` - Drop Python 3.3 support, from now minimal required version is Python - 3.4.1 (#541) + 3.4.1 (`#541 `_) -- Add `async with` support for `ClientSession.request()` and family (#536) +- Add `async with` support for `ClientSession.request()` and family (`#536 `_) -- Ignore message body on 204 and 304 responses (#505) +- Ignore message body on 204 and 304 responses (`#505 `_) -- `TCPConnector` processed both IPv4 and IPv6 by default (#559) +- `TCPConnector` processed both IPv4 and IPv6 by default (`#559 `_) -- Add `.routes()` view for urldispatcher (#519) +- Add `.routes()` view for urldispatcher (`#519 `_) -- Route name should be a valid identifier name from now (#567) +- Route name should be a valid identifier name from now (`#567 `_) -- Implement server signals (#562) +- Implement server signals (`#562 `_) - Drop a year-old deprecated *files* parameter from client API. -- Added `async for` support for aiohttp stream (#542) +- Added `async for` support for aiohttp stream (`#542 `_) 0.17.4 (09-29-2015) =================== -- Properly parse URL path in aiohttp.web.Request (#489) +- Properly parse URL path in aiohttp.web.Request (`#489 `_) - Add missing coroutine decorator, the client api is await-compatible now 0.17.3 (08-28-2015) =================== -- Remove Content-Length header on compressed responses (#450) +- Remove Content-Length header on compressed responses (`#450 `_) - Support Python 3.5 -- Improve performance of transport in-use list (#472) +- Improve performance of transport in-use list (`#472 `_) -- Fix connection pooling (#473) +- Fix connection pooling (`#473 `_) 0.17.2 (08-11-2015) =================== -- Don't forget to pass `data` argument forward (#462) +- Don't forget to pass `data` argument forward (`#462 `_) -- Fix multipart read bytes count (#463) +- Fix multipart read bytes count (`#463 `_) 0.17.1 (08-10-2015) =================== @@ -1725,28 +1725,28 @@ Misc 0.17.0 (08-04-2015) 
=================== -- Make StaticRoute support Last-Modified and If-Modified-Since headers (#386) +- Make StaticRoute support Last-Modified and If-Modified-Since headers (`#386 `_) - Add Request.if_modified_since and Stream.Response.last_modified properties -- Fix deflate compression when writing a chunked response (#395) +- Fix deflate compression when writing a chunked response (`#395 `_) - Request`s content-length header is cleared now after redirect from - POST method (#391) + POST method (`#391 `_) -- Return a 400 if server received a non HTTP content (#405) +- Return a 400 if server received a non HTTP content (`#405 `_) -- Fix keep-alive support for aiohttp clients (#406) +- Fix keep-alive support for aiohttp clients (`#406 `_) -- Allow gzip compression in high-level server response interface (#403) +- Allow gzip compression in high-level server response interface (`#403 `_) -- Rename TCPConnector.resolve and family to dns_cache (#415) +- Rename TCPConnector.resolve and family to dns_cache (`#415 `_) -- Make UrlDispatcher ignore quoted characters during url matching (#414) +- Make UrlDispatcher ignore quoted characters during url matching (`#414 `_) Backward-compatibility warning: this may change the url matched by - your queries if they send quoted character (like %2F for /) (#414) + your queries if they send quoted character (like %2F for /) (`#414 `_) -- Use optional cchardet accelerator if present (#418) +- Use optional cchardet accelerator if present (`#418 `_) - Borrow loop from Connector in ClientSession if loop is not set @@ -1755,50 +1755,50 @@ Misc - Add toplevel get(), post(), put(), head(), delete(), options(), patch() coroutines. 
-- Fix IPv6 support for client API (#425) +- Fix IPv6 support for client API (`#425 `_) -- Pass SSL context through proxy connector (#421) +- Pass SSL context through proxy connector (`#421 `_) - Make the rule: path for add_route should start with slash - Don't process request finishing by low-level server on closed event loop -- Don't override data if multiple files are uploaded with same key (#433) +- Don't override data if multiple files are uploaded with same key (`#433 `_) - Ensure multipart.BodyPartReader.read_chunk read all the necessary data to avoid false assertions about malformed multipart payload -- Don't send body for 204, 205 and 304 http exceptions (#442) +- Don't send body for 204, 205 and 304 http exceptions (`#442 `_) -- Correctly skip Cython compilation in MSVC not found (#453) +- Correctly skip Cython compilation in MSVC not found (`#453 `_) -- Add response factory to StaticRoute (#456) +- Add response factory to StaticRoute (`#456 `_) -- Don't append trailing CRLF for multipart.BodyPartReader (#454) +- Don't append trailing CRLF for multipart.BodyPartReader (`#454 `_) 0.16.6 (07-15-2015) =================== -- Skip compilation on Windows if vcvarsall.bat cannot be found (#438) +- Skip compilation on Windows if vcvarsall.bat cannot be found (`#438 `_) 0.16.5 (06-13-2015) =================== -- Get rid of all comprehensions and yielding in _multidict (#410) +- Get rid of all comprehensions and yielding in _multidict (`#410 `_) 0.16.4 (06-13-2015) =================== - Don't clear current exception in multidict's `__repr__` (cythonized - versions) (#410) + versions) (`#410 `_) 0.16.3 (05-30-2015) =================== -- Fix StaticRoute vulnerability to directory traversal attacks (#380) +- Fix StaticRoute vulnerability to directory traversal attacks (`#380 `_) 0.16.2 (05-27-2015) @@ -1808,26 +1808,26 @@ Misc 3.4.1 instead of 3.4.0 - Add check for presence of loop.is_closed() method before call the - former (#378) + former (`#378 `_) 0.16.1 
(05-27-2015) =================== -- Fix regression in static file handling (#377) +- Fix regression in static file handling (`#377 `_) 0.16.0 (05-26-2015) =================== -- Unset waiter future after cancellation (#363) +- Unset waiter future after cancellation (`#363 `_) -- Update request url with query parameters (#372) +- Update request url with query parameters (`#372 `_) - Support new `fingerprint` param of TCPConnector to enable verifying - SSL certificates via MD5, SHA1, or SHA256 digest (#366) + SSL certificates via MD5, SHA1, or SHA256 digest (`#366 `_) - Setup uploaded filename if field value is binary and transfer - encoding is not specified (#349) + encoding is not specified (`#349 `_) - Implement `ClientSession.close()` method @@ -1842,20 +1842,20 @@ Misc - Add `__del__` to client-side objects: sessions, connectors, connections, requests, responses. -- Refactor connections cleanup by connector (#357) +- Refactor connections cleanup by connector (`#357 `_) -- Add `limit` parameter to connector constructor (#358) +- Add `limit` parameter to connector constructor (`#358 `_) -- Add `request.has_body` property (#364) +- Add `request.has_body` property (`#364 `_) -- Add `response_class` parameter to `ws_connect()` (#367) +- Add `response_class` parameter to `ws_connect()` (`#367 `_) - `ProxyConnector` does not support keep-alive requests by default - starting from now (#368) + starting from now (`#368 `_) - Add `connector.force_close` property -- Add ws_connect to ClientSession (#374) +- Add ws_connect to ClientSession (`#374 `_) - Support optional `chunk_size` parameter in `router.add_static()` @@ -1865,7 +1865,7 @@ Misc - Fix graceful shutdown handling -- Fix `Expect` header handling for not found and not allowed routes (#340) +- Fix `Expect` header handling for not found and not allowed routes (`#340 `_) 0.15.2 (04-19-2015) @@ -1877,15 +1877,15 @@ Misc - Allow to match any request method with `*` -- Explicitly call drain on transport (#316) +- 
Explicitly call drain on transport (`#316 `_) -- Make chardet module dependency mandatory (#318) +- Make chardet module dependency mandatory (`#318 `_) -- Support keep-alive for HTTP 1.0 (#325) +- Support keep-alive for HTTP 1.0 (`#325 `_) -- Do not chunk single file during upload (#327) +- Do not chunk single file during upload (`#327 `_) -- Add ClientSession object for cookie storage and default headers (#328) +- Add ClientSession object for cookie storage and default headers (`#328 `_) - Add `keep_alive_on` argument for HTTP server handler. @@ -1911,13 +1911,13 @@ Misc - Client WebSockets support -- New Multipart system (#273) +- New Multipart system (`#273 `_) -- Support for "Except" header (#287) (#267) +- Support for "Except" header (`#287 `_) (`#267 `_) -- Set default Content-Type for post requests (#184) +- Set default Content-Type for post requests (`#184 `_) -- Fix issue with construction dynamic route with regexps and trailing slash (#266) +- Fix issue with construction dynamic route with regexps and trailing slash (`#266 `_) - Add repr to web.Request @@ -1927,7 +1927,7 @@ Misc - Add repr for web.Application -- Add repr to UrlMappingMatchInfo (#217) +- Add repr to UrlMappingMatchInfo (`#217 `_) - Gunicorn 19.2.x compatibility @@ -1935,29 +1935,29 @@ Misc 0.14.4 (01-29-2015) =================== -- Fix issue with error during constructing of url with regex parts (#264) +- Fix issue with error during constructing of url with regex parts (`#264 `_) 0.14.3 (01-28-2015) =================== -- Use path='/' by default for cookies (#261) +- Use path='/' by default for cookies (`#261 `_) 0.14.2 (01-23-2015) =================== -- Connections leak in BaseConnector (#253) +- Connections leak in BaseConnector (`#253 `_) -- Do not swallow websocket reader exceptions (#255) +- Do not swallow websocket reader exceptions (`#255 `_) -- web.Request's read, text, json are memorized (#250) +- web.Request's read, text, json are memorized (`#250 `_) 0.14.1 (01-15-2015) 
=================== -- HttpMessage._add_default_headers does not overwrite existing headers (#216) +- HttpMessage._add_default_headers does not overwrite existing headers (`#216 `_) - Expose multidict classes at package level @@ -1996,21 +1996,21 @@ Misc - Server has 75 seconds keepalive timeout now, was non-keepalive by default. -- Application does not accept `**kwargs` anymore ((#243)). +- Application does not accept `**kwargs` anymore ((`#243 `_)). - Request is inherited from dict now for making per-request storage to - middlewares ((#242)). + middlewares ((`#242 `_)). 0.13.1 (12-31-2014) =================== -- Add `aiohttp.web.StreamResponse.started` property (#213) +- Add `aiohttp.web.StreamResponse.started` property (`#213 `_) - HTML escape traceback text in `ServerHttpProtocol.handle_error` - Mention handler and middlewares in `aiohttp.web.RequestHandler.handle_request` - on error ((#218)) + on error ((`#218 `_)) 0.13.0 (12-29-2014) @@ -2020,16 +2020,16 @@ Misc - Chain exceptions when raise `ClientRequestError`. -- Support custom regexps in route variables (#204) +- Support custom regexps in route variables (`#204 `_) - Fixed graceful shutdown, disable keep-alive on connection closing. - Decode HTTP message with `utf-8` encoding, some servers send headers - in utf-8 encoding (#207) + in utf-8 encoding (`#207 `_) -- Support `aiohtt.web` middlewares (#209) +- Support `aiohtt.web` middlewares (`#209 `_) -- Add ssl_context to TCPConnector (#206) +- Add ssl_context to TCPConnector (`#206 `_) 0.12.0 (12-12-2014) @@ -2039,7 +2039,7 @@ Misc Sorry, we have to do this. 
- Automatically force aiohttp.web handlers to coroutines in - `UrlDispatcher.add_route()` (#186) + `UrlDispatcher.add_route()` (`#186 `_) - Rename `Request.POST()` function to `Request.post()` @@ -2068,15 +2068,15 @@ Misc 0.11.0 (11-29-2014) =================== -- Support named routes in `aiohttp.web.UrlDispatcher` (#179) +- Support named routes in `aiohttp.web.UrlDispatcher` (`#179 `_) -- Make websocket subprotocols conform to spec (#181) +- Make websocket subprotocols conform to spec (`#181 `_) 0.10.2 (11-19-2014) =================== -- Don't unquote `environ['PATH_INFO']` in wsgi.py (#177) +- Don't unquote `environ['PATH_INFO']` in wsgi.py (`#177 `_) 0.10.1 (11-17-2014) @@ -2102,54 +2102,54 @@ Misc from 'Can not read status line' to explicit 'Connection closed by server' -- Drop closed connections from connector (#173) +- Drop closed connections from connector (`#173 `_) -- Set server.transport to None on .closing() (#172) +- Set server.transport to None on .closing() (`#172 `_) 0.9.3 (10-30-2014) ================== -- Fix compatibility with asyncio 3.4.1+ (#170) +- Fix compatibility with asyncio 3.4.1+ (`#170 `_) 0.9.2 (10-16-2014) ================== -- Improve redirect handling (#157) +- Improve redirect handling (`#157 `_) -- Send raw files as is (#153) +- Send raw files as is (`#153 `_) -- Better websocket support (#150) +- Better websocket support (`#150 `_) 0.9.1 (08-30-2014) ================== -- Added MultiDict support for client request params and data (#114). +- Added MultiDict support for client request params and data (`#114 `_). -- Fixed parameter type for IncompleteRead exception (#118). +- Fixed parameter type for IncompleteRead exception (`#118 `_). -- Strictly require ASCII headers names and values (#137) +- Strictly require ASCII headers names and values (`#137 `_) -- Keep port in ProxyConnector (#128). +- Keep port in ProxyConnector (`#128 `_). -- Python 3.4.1 compatibility (#131). +- Python 3.4.1 compatibility (`#131 `_). 
0.9.0 (07-08-2014) ================== -- Better client basic authentication support (#112). +- Better client basic authentication support (`#112 `_). -- Fixed incorrect line splitting in HttpRequestParser (#97). +- Fixed incorrect line splitting in HttpRequestParser (`#97 `_). - Support StreamReader and DataQueue as request data. -- Client files handling refactoring (#20). +- Client files handling refactoring (`#20 `_). - Backward incompatible: Replace DataQueue with StreamReader for - request payload (#87). + request payload (`#87 `_). 0.8.4 (07-04-2014) @@ -2272,7 +2272,7 @@ Misc - Better support for server exit. -- Read response body until EOF if content-length is not defined (#14) +- Read response body until EOF if content-length is not defined (`#14 `_) 0.6.2 (02-18-2014) From 96e09c6a5c456475ff20ba2fd4c02549b0e56c66 Mon Sep 17 00:00:00 2001 From: Andrew Svetlov Date: Fri, 13 Apr 2018 12:13:16 +0300 Subject: [PATCH 0004/1511] Add changelog links fixer --- tools/fix_changelog.py | 41 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) create mode 100644 tools/fix_changelog.py diff --git a/tools/fix_changelog.py b/tools/fix_changelog.py new file mode 100644 index 00000000000..e5a73f33219 --- /dev/null +++ b/tools/fix_changelog.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python3 + +import argparse +import re +import sys +from pathlib import Path + + +PATTERN = re.compile("\(#(\d+)\)") + + +def get_root(script_path): + folder = script_path.absolute().parent + while not (folder / '.git').exists(): + folder = folder.parent + if folder == folder.anchor: + raise RuntimeError("git repo not found") + return folder + + +def main(argv): + parser = argparse.ArgumentParser(description='Expand github links.') + parser.add_argument('filename', default='CHANGES.rst', nargs='?', + help="filename to process") + args = parser.parse_args() + here = Path(argv[0]) + root = get_root(here) + fname = root / args.filename + + content = fname.read_text() + new = 
PATTERN.sub( + r'(`#\1 `_)', + content) + + fname.write_text(new) + print(f"Fixed links in {fname}") + return 0 + + +if __name__ == '__main__': + sys.exit(main(sys.argv)) From 77f5633ce02da71c6879d3b25b5fbe8b240647c6 Mon Sep 17 00:00:00 2001 From: Andrew Svetlov Date: Fri, 13 Apr 2018 12:29:16 +0300 Subject: [PATCH 0005/1511] Fix changenote --- CHANGES.rst | 2 -- 1 file changed, 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 02689ac2a1b..ce362dbf32c 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -115,8 +115,6 @@ Bugfixes Improved Documentation ---------------------- -- Change ``ClientResponse.json()`` documentation to reflect that it now - allows "application/xxx+json" content-types (`#2206 `_) - Document behavior when cchardet detects encodings that are unknown to Python. (`#2732 `_) - Add diagrams for tracing request life style. (`#2748 `_) From 72567e375a7c8f9ad2c9fc0297928913c468c402 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Tue, 6 Aug 2019 13:57:50 +0200 Subject: [PATCH 0006/1511] Bump coverage from 4.5.3 to 4.5.4 (#3961) Bumps [coverage](https://github.com/nedbat/coveragepy) from 4.5.3 to 4.5.4. 
- [Release notes](https://github.com/nedbat/coveragepy/releases) - [Changelog](https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst) - [Commits](https://github.com/nedbat/coveragepy/compare/coverage-4.5.3...coverage-4.5.4) Signed-off-by: dependabot-preview[bot] --- requirements/ci-wheel.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/ci-wheel.txt b/requirements/ci-wheel.txt index 0cefb432df8..9f54d031592 100644 --- a/requirements/ci-wheel.txt +++ b/requirements/ci-wheel.txt @@ -5,7 +5,7 @@ async-timeout==3.0.1 brotlipy==0.7.0 cchardet==2.1.4 chardet==3.0.4 -coverage==4.5.3 +coverage==4.5.4 gunicorn==19.9.0 multidict==4.5.2 pytest==5.0.1 From e553cc2bc2776cde73e69000a43580c415560d9d Mon Sep 17 00:00:00 2001 From: Sviatoslav Sydorenko Date: Tue, 10 Sep 2019 00:42:36 +0200 Subject: [PATCH 0007/1511] Replace PyPI creds with API token in Travis CI Resolves #3941 --- .travis.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 97f2ff62e42..39b014729d3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -85,10 +85,11 @@ _helpers: # This happens when other CI (AppVeyor etc.) has already uploaded # the very same dist (usually sdist). 
skip-existing: true - user: aio-libs-bot + user: __token__ password: # Encrypted with `travis encrypt -r aio-libs/aiohttp --api-endpoint 'https://api.travis-ci.com/'`: - secure: bWF8GkGmjxfisMx+LqOmZgf9EXPT4yBMhiIbN+rU5gs53PJJyn1r287c5zEijnIHlpQaJVwWJ9ZFl1n34y37P1yvhlz0M6esr5K10B9fJ6nkUoAivtOWARcHLQ3bC+WGC/V9S0v0pZ11qFuSNzJzFZqfRabRw0H8muVWGhuBuYp97EdJWCdSNpXqsj2Ts8ytPMYv+5m3iPgXq833svFbWRjZ+HgX8HwvF2lo+ej+tsFJNACbQmj+eQDGlWQZzP2s4/3grHivd2retpqfW1cYgZaZX68/UB2ghsCtkxhcNpGaM8I/n3udAHkPSqz3MC2FJL0RdkyvQ8UZkcmcisQxz0voQM25995BHGWktwpDh7BxFtGJishXV7hiFz9zVOZLM9u5AzIO4hoN770SsZDewWdhzowXPYT8DXiHVg+roEkszg8FeBmisx1cw34CK9H0iLUSQ9EF2vuz8T4bqEpbT6Fyta90wZTvJ7GpJ4yXJXR6VAvLgiX4zXeFdx/4aViz3UzkDJ06qieRuZJWfQ9u2lDxJfqHEVy5IxM5iACagP1XayJiVIN0uFRxNElxTlCMope6ICCOu9fhcGnF15XNw5YpFPYYvph3JU1vC8cYw7ypg8LQGryp1fNM9SXWaGTiV+J/yvsynFiXX6QiyGOwSBIJ9XjZEfRd8i9HaGHw2cw= + secure: >- + LC+sJojSdf4KhjHc/loszfAQmUM/VNHJarmC3sY9Dfa3qUS+2bnXxQmLK+lNw6mlAaoTaz7Y4MQDggAH1pBkP5jKjQrUjArjCNSYIubmfjhFqRYGa1xFrBjEJYjYNEfFzjPx+TUX2+qHKaZ8qp7nxFaPHG4JKuUHZQk7F7J/zs3VufWnYmc+QhOGbWFfcWZwpFly46HNrX78/6Plr84Gsz0Hws3K3GHkyXusX9axlByUpe9VZ+nVcANF6PGzqFwipXEWAe31vYO4MnYuZRotQiWVsaHDb9Ki+OyHVJJ02xp4ooofBsYhgZ8axtWKu8639xtTlOagecjKBenhipOQc6OrVWigyYfARVUDY5bBWQlyyOKh5TJkrTScLf5P6MKQ+Pgj3hkzyDELusgxd7Jkb/CN3GraX7U0808x5TiOcm7/3BO+eR3+mP54n6qAyHB+ckOQzWRHMeGPjOy2eIR3VkVcFzJCpIJwtArjWVzO5KFBzPYdxgz2IVBhZRyg66AlQ+GHFp2sI6rZXOqQnJWZOL+RZe/xqircgwUQQm2MGjwW05K5WT2WEwuGkmRnFwSdcKv+PSrmCIyXoy3neo9u9rPbrwBfBIbPj3MmE51edUy2rS/qw7jLUG683RNXcx/LTXAtd7SZgaWVHnvyukBPi5akyGeV0Pd00Th3tkqYBto= # Although Travis CI instructs `setup.py` to build source distribution, # which is default value for distribution option (`distribution: sdist`), # it will also upload all wheels we've previously built in manylinux1 From de9b2946ac4d5141eb2f989090d232807e84846d Mon Sep 17 00:00:00 2001 From: Sviatoslav Sydorenko Date: Tue, 10 Sep 2019 14:47:24 +0200 Subject: [PATCH 0008/1511] Use PyPI access token in AppVeyor Closes #3941 --- 
.appveyor.yml | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/.appveyor.yml b/.appveyor.yml index 42a82ff4ad6..410cff2b6ca 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -1,6 +1,10 @@ environment: + PYPI_USER: __token__ PYPI_PASSWD: - secure: HrwOVde4wZoYHJf9bZ5AsQ== + secure: >- + E3o5zJvWwZ08zASfNAFzB8nRC8DMNhl2V5B+cH6fpvqt2qBsOeoDhbmSVlxNSdYLhbohI3MGLPAEQ3U8TheQ5UypPx6VRTQDyOkeRC1goENfwHTz3fdWn8eBo3TCSTss5pKK92jWDLNkYirXIyZoz/ukAS2mfxYHMA0GoTiVc46xJzRvuiHjJgUgtsJOX3/rjrw2eFJXyZGyug4Zbvj64PIzzDNPm7umEDnpJzIvG8/CZuIEemZpNiA4FOehIcvMEXR9giox40/EN8Gwn4Jl3Q== + PYTHONIOENCODING: "utf8:backslashreplace" + PYTHONLEGACYWINDOWSSTDIO: "1" matrix: - PYTHON: "C:\\Python35" - PYTHON: "C:\\Python35-x64" @@ -35,7 +39,7 @@ artifacts: deploy_script: - ps: >- if($env:appveyor_repo_tag -eq 'True') { - Invoke-Expression "$env:PYTHON\\python.exe -m twine upload dist/* --username aio-libs-bot --password $env:PYPI_PASSWD --skip-existing" + Invoke-Expression "$env:PYTHON\\python.exe -m twine upload dist/* --username $env:PYPI_USER --password $env:PYPI_PASSWD --skip-existing" } #notifications: From b1bf885645f8520b88a53090a4e4c8e3a878ff71 Mon Sep 17 00:00:00 2001 From: Sviatoslav Sydorenko Date: Mon, 23 Mar 2020 00:36:57 +0100 Subject: [PATCH 0009/1511] Enable strict xfail mode in pytest by default https://pganssle-talks.github.io/xfail-lightning --- pytest.ci.ini | 1 + pytest.ini | 1 + 2 files changed, 2 insertions(+) diff --git a/pytest.ci.ini b/pytest.ci.ini index b61a40a74b8..17345743609 100644 --- a/pytest.ci.ini +++ b/pytest.ci.ini @@ -7,3 +7,4 @@ junit_suite_name = aiohttp_test_suite norecursedirs = dist docs build .tox .eggs minversion = 3.8.2 testpaths = tests/ +xfail_strict = true diff --git a/pytest.ini b/pytest.ini index bb280dcd3a4..ddde78bc6a8 100644 --- a/pytest.ini +++ b/pytest.ini @@ -7,3 +7,4 @@ junit_suite_name = aiohttp_test_suite norecursedirs = dist docs build .tox .eggs minversion = 3.8.2 testpaths = tests/ +xfail_strict = true From 
deba4b733c44cd1d3740df8f17d65c96b6bb6d97 Mon Sep 17 00:00:00 2001 From: Sviatoslav Sydorenko Date: Mon, 23 Mar 2020 00:54:25 +0100 Subject: [PATCH 0010/1511] Revert "Mark test_handle_uncompleted_pipe as xfail" This reverts commit 47a3e4c1cf1db1212449b381083e2484f83972ba. --- tests/test_web_protocol.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/tests/test_web_protocol.py b/tests/test_web_protocol.py index f1b5ea51e8e..4d901894250 100644 --- a/tests/test_web_protocol.py +++ b/tests/test_web_protocol.py @@ -1,7 +1,6 @@ # Tests for aiohttp/server.py import asyncio -import platform import socket from functools import partial from unittest import mock @@ -10,8 +9,6 @@ from aiohttp import helpers, http, streams, web -IS_MACOS = platform.system() == 'Darwin' - @pytest.fixture def make_srv(loop, manager): @@ -341,11 +338,6 @@ def close(): "Error handling request", exc_info=mock.ANY) -@pytest.mark.xfail( - IS_MACOS, - raises=TypeError, - reason='Intermittently fails on macOS', -) async def test_handle_uncompleted_pipe( make_srv, transport, request_handler, handle_with_error): closed = False From 1c302e985dd7e6b6c5717cd6b7d3e3f8959dc1aa Mon Sep 17 00:00:00 2001 From: "Paul \"TBBle\" Hampson" Date: Sat, 25 Apr 2020 07:04:39 +1000 Subject: [PATCH 0011/1511] [3.6] Bump sphinxcontrib-blockdiag from 1.5.5 to 2.0.0 (#4428) (#4576) --- .azure-pipelines/stage-test.yml | 1 + requirements/doc.txt | 2 +- tests/test_proxy_functional.py | 3 ++- 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/.azure-pipelines/stage-test.yml b/.azure-pipelines/stage-test.yml index 6b9735f56f9..da6eb60e0d4 100644 --- a/.azure-pipelines/stage-test.yml +++ b/.azure-pipelines/stage-test.yml @@ -86,6 +86,7 @@ stages: displayName: 'Cythonize' - script: | + pip install wheel pip install -r requirements/dev.txt displayName: 'Install dependencies' env: diff --git a/requirements/doc.txt b/requirements/doc.txt index 7721def5697..bb3d651f4d9 100644 --- a/requirements/doc.txt +++ 
b/requirements/doc.txt @@ -2,4 +2,4 @@ sphinx==2.2.0 sphinxcontrib-asyncio==0.2.0 pygments==2.4.2 aiohttp-theme==0.1.6 -sphinxcontrib-blockdiag==1.5.5 +sphinxcontrib-blockdiag==2.0.0 diff --git a/tests/test_proxy_functional.py b/tests/test_proxy_functional.py index 59a5ad78e9f..6e03a69ea8c 100644 --- a/tests/test_proxy_functional.py +++ b/tests/test_proxy_functional.py @@ -343,7 +343,8 @@ async def test_proxy_https_bad_response(proxy_test_server, assert len(proxy.requests_list) == 1 assert proxy.request.method == 'CONNECT' - assert proxy.request.path == 'secure.aiohttp.io:443' + # The following check fails on MacOS + # assert proxy.request.path == 'secure.aiohttp.io:443' @pytest.mark.xfail From 5fb01796182bd5b56f8823fe9605f494be99042b Mon Sep 17 00:00:00 2001 From: Andrew Svetlov Date: Wed, 20 Nov 2019 20:04:24 +0200 Subject: [PATCH 0012/1511] Drop obsolete setup.py check -rms from Makefile --- Makefile | 1 - 1 file changed, 1 deletion(-) diff --git a/Makefile b/Makefile index c8339c4f081..b1f0b25bcb2 100644 --- a/Makefile +++ b/Makefile @@ -28,7 +28,6 @@ flake: .flake $(shell find tests -type f) \ $(shell find examples -type f) flake8 aiohttp examples tests - python setup.py check -rms @if ! isort -c -rc aiohttp tests examples; then \ echo "Import sort errors, run 'make isort' to fix them!!!"; \ isort --diff -rc aiohttp tests examples; \ From 1dda663caf32f261f0088da23a93f6bf4dc88c26 Mon Sep 17 00:00:00 2001 From: "Paul \"TBBle\" Hampson" Date: Sat, 25 Apr 2020 07:50:21 +1000 Subject: [PATCH 0013/1511] [3.6] Fix python 3.8 warnings (#4264). 
(#4570) --- aiohttp/connector.py | 2 +- aiohttp/locks.py | 2 +- aiohttp/web_server.py | 2 +- tests/test_loop.py | 2 +- tests/test_web_protocol.py | 5 +---- 5 files changed, 5 insertions(+), 8 deletions(-) diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 75cd288b93c..3e8f4932cc8 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -964,7 +964,7 @@ async def _create_direct_connection( hosts = await asyncio.shield(self._resolve_host( host, port, - traces=traces), loop=self._loop) + traces=traces)) except OSError as exc: # in case of proxy it is not ClientProxyConnectionError # it is problem of resolving proxy ip itself diff --git a/aiohttp/locks.py b/aiohttp/locks.py index ed41f979589..88b9d3e36ac 100644 --- a/aiohttp/locks.py +++ b/aiohttp/locks.py @@ -18,7 +18,7 @@ class EventResultOrError: def __init__(self, loop: asyncio.AbstractEventLoop) -> None: self._loop = loop self._exc = None # type: Optional[BaseException] - self._event = asyncio.Event(loop=loop) + self._event = asyncio.Event() self._waiters = collections.deque() # type: Deque[asyncio.Future[Any]] def set(self, exc: Optional[BaseException]=None) -> None: diff --git a/aiohttp/web_server.py b/aiohttp/web_server.py index ad746ed0b4b..9bfd0eda8dc 100644 --- a/aiohttp/web_server.py +++ b/aiohttp/web_server.py @@ -50,7 +50,7 @@ def _make_request(self, message: RawRequestMessage, async def shutdown(self, timeout: Optional[float]=None) -> None: coros = [conn.shutdown(timeout) for conn in self._connections] - await asyncio.gather(*coros, loop=self._loop) + await asyncio.gather(*coros) self._connections.clear() def __call__(self) -> RequestHandler: diff --git a/tests/test_loop.py b/tests/test_loop.py index 25d36c706e1..7609e4100c1 100644 --- a/tests/test_loop.py +++ b/tests/test_loop.py @@ -13,7 +13,7 @@ async def test_subprocess_co(loop) -> None: assert isinstance(threading.current_thread(), threading._MainThread) proc = await asyncio.create_subprocess_shell( - "exit 0", loop=loop, 
stdin=asyncio.subprocess.DEVNULL, + "exit 0", stdin=asyncio.subprocess.DEVNULL, stdout=asyncio.subprocess.DEVNULL, stderr=asyncio.subprocess.DEVNULL) await proc.wait() diff --git a/tests/test_web_protocol.py b/tests/test_web_protocol.py index 4d901894250..0b11370df90 100644 --- a/tests/test_web_protocol.py +++ b/tests/test_web_protocol.py @@ -835,10 +835,7 @@ async def handler(request): app.router.add_route('POST', '/', handler) server = await aiohttp_server(app, logger=logger) - if helpers.PY_38: - writer = await asyncio.connect('127.0.0.1', server.port) - else: - _, writer = await asyncio.open_connection('127.0.0.1', server.port) + _, writer = await asyncio.open_connection('127.0.0.1', server.port) writer.write("""POST / HTTP/1.1\r Connection: keep-alive\r Content-Length: 10\r From a22acb38520cb5befb2e989c3ab6eb4aa264e7b4 Mon Sep 17 00:00:00 2001 From: jack1142 <6032823+jack1142@users.noreply.github.com> Date: Sun, 26 Apr 2020 01:29:10 +0200 Subject: [PATCH 0014/1511] [3.6] switch noop from function to class (#4322) (#4712) --- CHANGES/4282.bugfix | 1 + aiohttp/helpers.py | 8 ++++---- tests/test_web_protocol.py | 15 --------------- 3 files changed, 5 insertions(+), 19 deletions(-) create mode 100644 CHANGES/4282.bugfix diff --git a/CHANGES/4282.bugfix b/CHANGES/4282.bugfix new file mode 100644 index 00000000000..27062bb91bb --- /dev/null +++ b/CHANGES/4282.bugfix @@ -0,0 +1 @@ +Remove warning messages from noop. diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index 8405d3a7ea8..1fdfac5e8aa 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -24,6 +24,7 @@ Any, Callable, Dict, + Generator, Iterable, Iterator, List, @@ -99,12 +100,11 @@ def all_tasks( coroutines = asyncio.coroutines old_debug = coroutines._DEBUG # type: ignore -# prevent "coroutine noop was never awaited" warning. 
-coroutines._DEBUG = False # type: ignore +class noop: + def __await__(self) -> Generator[None, None, None]: + yield -async def noop(*args: Any, **kwargs: Any) -> None: - return noop2 = noop diff --git a/tests/test_web_protocol.py b/tests/test_web_protocol.py index 0b11370df90..181e10a1611 100644 --- a/tests/test_web_protocol.py +++ b/tests/test_web_protocol.py @@ -252,21 +252,6 @@ async def test_bad_method(srv, buf) -> None: assert buf.startswith(b'HTTP/1.0 400 Bad Request\r\n') -async def test_data_received_error(srv, buf) -> None: - transport = srv.transport - srv._request_parser = mock.Mock() - srv._request_parser.feed_data.side_effect = TypeError - - srv.data_received( - b'!@#$ / HTTP/1.0\r\n' - b'Host: example.com\r\n\r\n') - - await asyncio.sleep(0) - assert buf.startswith(b'HTTP/1.0 500 Internal Server Error\r\n') - assert transport.close.called - assert srv._error_handler is None - - async def test_line_too_long(srv, buf) -> None: srv.data_received(b''.join([b'a' for _ in range(10000)]) + b'\r\n\r\n') From 4d440d1275da8f259bfb27d61f3b2ad73891c007 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sat, 18 Nov 2023 15:08:58 +0000 Subject: [PATCH 0015/1511] Bump to 3.10.0.dev0 --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 538ce0a1038..701d913bef4 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.9.0" +__version__ = "3.10.0.dev0" from typing import TYPE_CHECKING, Tuple From 8254482f2f82b133190a3ca7ca74a559cc9c2532 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 19 Nov 2023 14:38:11 +0000 Subject: [PATCH 0016/1511] [PR #7850/22170b21 backport][3.10] Fix import under PyPy 3.8/3.9 on Windows (#7855) **This is a backport of PR #7850 as merged into master (22170b21064be8fdf75b947d9c2930df7b2518e1).** Fixes #7848. 
Co-authored-by: Jelle Zijlstra --- CHANGES/7848.bugfix | 1 + aiohttp/cookiejar.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 CHANGES/7848.bugfix diff --git a/CHANGES/7848.bugfix b/CHANGES/7848.bugfix new file mode 100644 index 00000000000..13a29e2a226 --- /dev/null +++ b/CHANGES/7848.bugfix @@ -0,0 +1 @@ +Fix importing aiohttp under PyPy 3.8 and 3.9 on Windows. diff --git a/aiohttp/cookiejar.py b/aiohttp/cookiejar.py index 15dd982c960..a348f112cb5 100644 --- a/aiohttp/cookiejar.py +++ b/aiohttp/cookiejar.py @@ -62,9 +62,10 @@ class CookieJar(AbstractCookieJar): ) try: calendar.timegm(time.gmtime(MAX_TIME)) - except OSError: + except (OSError, ValueError): # Hit the maximum representable time on Windows # https://learn.microsoft.com/en-us/cpp/c-runtime-library/reference/localtime-localtime32-localtime64 + # Throws ValueError on PyPy 3.8 and 3.9, OSError elsewhere MAX_TIME = calendar.timegm((3000, 12, 31, 23, 59, 59, -1, -1, -1)) except OverflowError: # #4515: datetime.max may not be representable on 32-bit platforms From 572e5fcc865e256e3a1daa3f67408f7fcdc04e5c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 19 Nov 2023 15:59:05 +0000 Subject: [PATCH 0017/1511] [PR #7852/122597fc backport][3.10] Add requirement files to sdist (#7857) **This is a backport of PR #7852 as merged into master (122597fce2cf9a1d4c0b2ebaa0a8be567f19816c).** Co-authored-by: Marcel Telka --- MANIFEST.in | 1 + 1 file changed, 1 insertion(+) diff --git a/MANIFEST.in b/MANIFEST.in index 05084efddb9..d7c5cef6aad 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -7,6 +7,7 @@ graft aiohttp graft docs graft examples graft tests +graft requirements recursive-include vendor * global-include aiohttp *.pyi global-exclude *.pyc From 3c63ca0ba6e5558de896ee9de4f2dea6968a9655 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 22 Nov 2023 19:15:00 +0000 Subject: 
[PATCH 0018/1511] [PR #7863/3a21134a backport][3.10] remove tests/__init__.py (#7871) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **This is a backport of PR #7863 as merged into master (3a21134a0e3e8a163faa7436383e92da08415f13).** ## What do these changes do? fixes https://github.com/aio-libs/aiohttp/issues/7858 ## Are there changes in behavior for the user? ## Related issue number https://github.com/aio-libs/aiohttp/issues/7858 ## Checklist - [ ] I think the code is well written - [ ] Unit tests for the changes exist - [ ] Documentation reflects the changes - [ ] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. - [ ] Add a new news fragment into the `CHANGES` folder * name it `.` for example (588.bugfix) * if you don't have an `issue_id` change it to the pr id after creating the pr * ensure type is one of the following: * `.feature`: Signifying a new feature. * `.bugfix`: Signifying a bug fix. * `.doc`: Signifying a documentation improvement. * `.removal`: Signifying a deprecation or removal of public API. * `.misc`: A ticket has been closed, but it is not of interest to users. * Make sure to use full sentences with correct case and punctuation, for example: "Fix issue with non-ascii contents in doctest text files." Co-authored-by: Robert Schütz --- tests/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 tests/__init__.py diff --git a/tests/__init__.py b/tests/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 From 45147c4e18d20ee7a4007fb3ee399487e319d9ad Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 23 Nov 2023 11:52:16 +0100 Subject: [PATCH 0019/1511] Fix ClientResponse.close releasing the connection instead of closing (#7869) (#7874) (cherry picked from commit 25ef450238864d06ba8a44227080e4a7b354ba76) --- CHANGES/7869.bugfix | 1 + aiohttp/client_reqrep.py | 4 +++- tests/test_client_functional.py | 36 ++++++++++++++++++++++++++++++++- 3 files changed, 39 insertions(+), 2 deletions(-) create mode 100644 CHANGES/7869.bugfix diff --git a/CHANGES/7869.bugfix b/CHANGES/7869.bugfix new file mode 100644 index 00000000000..23282fc3bb4 --- /dev/null +++ b/CHANGES/7869.bugfix @@ -0,0 +1 @@ +Fix ClientResponse.close releasing the connection instead of closing diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 4cea7466d8d..0ab84743658 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -1033,7 +1033,9 @@ def close(self) -> None: return self._cleanup_writer() - self._release_connection() + if self._connection is not None: + self._connection.close() + self._connection = None def release(self) -> Any: if not self._released: diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index c5379e74a4b..6698ac6ef52 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -19,6 +19,7 @@ from aiohttp import Fingerprint, ServerFingerprintMismatch, hdrs, web from aiohttp.abc import AbstractResolver from aiohttp.client_exceptions import TooManyRedirects +from aiohttp.pytest_plugin import AiohttpClient, TestClient from aiohttp.test_utils import unused_port @@ -3186,7 +3187,40 @@ async def handler(request): await client.get("/") -async def test_read_timeout_on_prepared_response(aiohttp_client) -> None: +async def test_read_timeout_closes_connection(aiohttp_client: AiohttpClient) -> None: + request_count = 0 + + async def handler(request): + nonlocal request_count + request_count += 1 + if request_count < 3: + await asyncio.sleep(0.5) + return 
web.Response(body=f"request:{request_count}") + + app = web.Application() + app.add_routes([web.get("/", handler)]) + + timeout = aiohttp.ClientTimeout(total=0.1) + client: TestClient = await aiohttp_client(app, timeout=timeout) + with pytest.raises(asyncio.TimeoutError): + await client.get("/") + + # Make sure its really closed + assert not client.session.connector._conns + + with pytest.raises(asyncio.TimeoutError): + await client.get("/") + + # Make sure its really closed + assert not client.session.connector._conns + result = await client.get("/") + assert await result.read() == b"request:3" + + # Make sure its not closed + assert client.session.connector._conns + + +async def test_read_timeout_on_prepared_response(aiohttp_client: Any) -> None: async def handler(request): resp = aiohttp.web.StreamResponse() await resp.prepare(request) From a5e8aba14868a8b020d3693f8a9a1e7b25f4b14b Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 23 Nov 2023 14:31:15 +0100 Subject: [PATCH 0020/1511] Fix usage of proxy.py in test_proxy_functional (#7773) (#7877) (cherry picked from commit 4d9fc636dbad45678330f17b7d82b75cf91247bf) --- tests/test_proxy_functional.py | 76 ++++++++++++++-------------------- 1 file changed, 30 insertions(+), 46 deletions(-) diff --git a/tests/test_proxy_functional.py b/tests/test_proxy_functional.py index 61e30841cc1..de5eeb258ff 100644 --- a/tests/test_proxy_functional.py +++ b/tests/test_proxy_functional.py @@ -1,8 +1,8 @@ import asyncio -import functools import os import pathlib -import platform +import ssl +import sys from re import match as match_regex from unittest import mock from uuid import uuid4 @@ -13,8 +13,8 @@ import aiohttp from aiohttp import web -from aiohttp.client_exceptions import ClientConnectionError, ClientProxyConnectionError -from aiohttp.helpers import IS_MACOS, IS_WINDOWS, PY_310 +from aiohttp.client_exceptions import ClientConnectionError +from aiohttp.helpers import IS_MACOS, IS_WINDOWS pytestmark = [ 
pytest.mark.filterwarnings( @@ -28,20 +28,7 @@ ] -secure_proxy_xfail_under_py310_linux = functools.partial( - pytest.mark.xfail, - PY_310 and platform.system() == "Linux", - reason=( - "The secure proxy fixture does not seem to work " - "under Python 3.10 on Linux. " - "See https://github.com/abhinavsingh/proxy.py/issues/622." - ), -) - -ASYNCIO_SUPPORTS_TLS_IN_TLS = hasattr( - asyncio.sslproto._SSLProtocolTransport, - "_start_tls_compatible", -) +ASYNCIO_SUPPORTS_TLS_IN_TLS = sys.version_info >= (3, 11) @pytest.fixture @@ -51,6 +38,9 @@ def secure_proxy_url(tls_certificate_pem_path): This fixture also spawns that instance and tears it down after the test. """ proxypy_args = [ + # --threadless does not work on windows, see + # https://github.com/abhinavsingh/proxy.py/issues/492 + "--threaded" if os.name == "nt" else "--threadless", "--num-workers", "1", # the tests only send one query anyway "--hostname", @@ -112,32 +102,20 @@ async def handler(*args, **kwargs): ) -@pytest.fixture -def _pretend_asyncio_supports_tls_in_tls( - monkeypatch, - web_server_endpoint_type, -): - if web_server_endpoint_type != "https" or ASYNCIO_SUPPORTS_TLS_IN_TLS: - return - - # for https://github.com/python/cpython/pull/28073 - # and https://bugs.python.org/issue37179 - monkeypatch.setattr( - asyncio.sslproto._SSLProtocolTransport, - "_start_tls_compatible", - True, - raising=False, - ) - - -@secure_proxy_xfail_under_py310_linux(raises=ClientProxyConnectionError) +@pytest.mark.skipif( + not ASYNCIO_SUPPORTS_TLS_IN_TLS, + reason="asyncio on this python does not support TLS in TLS", +) @pytest.mark.parametrize("web_server_endpoint_type", ("http", "https")) -@pytest.mark.usefixtures("_pretend_asyncio_supports_tls_in_tls", "loop") +@pytest.mark.filterwarnings(r"ignore:.*ssl.OP_NO_SSL*") +# Filter out the warning from +# https://github.com/abhinavsingh/proxy.py/blob/30574fd0414005dfa8792a6e797023e862bdcf43/proxy/common/utils.py#L226 +# otherwise this test will fail because the proxy will die 
with an error. async def test_secure_https_proxy_absolute_path( - client_ssl_ctx, - secure_proxy_url, - web_server_endpoint_url, - web_server_endpoint_payload, + client_ssl_ctx: ssl.SSLContext, + secure_proxy_url: URL, + web_server_endpoint_url: str, + web_server_endpoint_payload: str, ) -> None: """Ensure HTTP(S) sites are accessible through a secure proxy.""" conn = aiohttp.TCPConnector() @@ -160,13 +138,19 @@ async def test_secure_https_proxy_absolute_path( await asyncio.sleep(0.1) -@secure_proxy_xfail_under_py310_linux(raises=AssertionError) @pytest.mark.parametrize("web_server_endpoint_type", ("https",)) @pytest.mark.usefixtures("loop") +@pytest.mark.skipif( + ASYNCIO_SUPPORTS_TLS_IN_TLS, reason="asyncio on this python supports TLS in TLS" +) +@pytest.mark.filterwarnings(r"ignore:.*ssl.OP_NO_SSL*") +# Filter out the warning from +# https://github.com/abhinavsingh/proxy.py/blob/30574fd0414005dfa8792a6e797023e862bdcf43/proxy/common/utils.py#L226 +# otherwise this test will fail because the proxy will die with an error. async def test_https_proxy_unsupported_tls_in_tls( - client_ssl_ctx, - secure_proxy_url, - web_server_endpoint_type, + client_ssl_ctx: ssl.SSLContext, + secure_proxy_url: URL, + web_server_endpoint_type: str, ) -> None: """Ensure connecting to TLS endpoints w/ HTTPS proxy needs patching. 
From fc9e7a2ca2d5530a3b7e7348b27c229cd61c38f5 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Fri, 24 Nov 2023 19:00:57 +0000 Subject: [PATCH 0021/1511] Update dependabot.yml (#7884) Sync from master --- .github/dependabot.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index cd8b2782b43..3b392a34b3b 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -18,22 +18,22 @@ updates: interval: "daily" open-pull-requests-limit: 10 - # Maintain dependencies for GitHub Actions aiohttp 3.8 + # Maintain dependencies for GitHub Actions aiohttp 3.9 - package-ecosystem: "github-actions" directory: "/" labels: - dependencies - target-branch: "3.8" + target-branch: "3.9" schedule: interval: "daily" open-pull-requests-limit: 10 - # Maintain dependencies for Python aiohttp 3.8 + # Maintain dependencies for Python aiohttp 3.10 - package-ecosystem: "pip" directory: "/" labels: - dependencies - target-branch: "3.8" + target-branch: "3.10" schedule: interval: "daily" open-pull-requests-limit: 10 From 82c93ff3b074b44313e8366b66597846fc5f55b1 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 24 Nov 2023 13:50:46 -0600 Subject: [PATCH 0022/1511] Restore async concurrency safety to websocket compressor (#7865) (#7890) Fixes #7859 (cherry picked from commit 86a23961531103ccc34853f67321c7d0f63797f5) --- CHANGES/7865.bugfix | 1 + aiohttp/compression_utils.py | 22 +++++++---- aiohttp/http_websocket.py | 26 ++++++++----- tests/test_websocket_writer.py | 67 +++++++++++++++++++++++++++++++++- 4 files changed, 97 insertions(+), 19 deletions(-) create mode 100644 CHANGES/7865.bugfix diff --git a/CHANGES/7865.bugfix b/CHANGES/7865.bugfix new file mode 100644 index 00000000000..9a46e124486 --- /dev/null +++ b/CHANGES/7865.bugfix @@ -0,0 +1 @@ +Restore async concurrency safety to websocket compressor diff --git a/aiohttp/compression_utils.py b/aiohttp/compression_utils.py index 52791fe5015..9631d377e9a 100644 --- a/aiohttp/compression_utils.py +++ b/aiohttp/compression_utils.py @@ -62,19 +62,25 @@ def __init__( self._compressor = zlib.compressobj( wbits=self._mode, strategy=strategy, level=level ) + self._compress_lock = asyncio.Lock() def compress_sync(self, data: bytes) -> bytes: return self._compressor.compress(data) async def compress(self, data: bytes) -> bytes: - if ( - self._max_sync_chunk_size is not None - and len(data) > self._max_sync_chunk_size - ): - return await asyncio.get_event_loop().run_in_executor( - self._executor, self.compress_sync, data - ) - return self.compress_sync(data) + async with self._compress_lock: + # To ensure the stream is consistent in the event + # there are multiple writers, we need to lock + # the compressor so that only one writer can + # compress at a time. 
+ if ( + self._max_sync_chunk_size is not None + and len(data) > self._max_sync_chunk_size + ): + return await asyncio.get_event_loop().run_in_executor( + self._executor, self.compress_sync, data + ) + return self.compress_sync(data) def flush(self, mode: int = zlib.Z_FINISH) -> bytes: return self._compressor.flush(mode) diff --git a/aiohttp/http_websocket.py b/aiohttp/http_websocket.py index a94ac2a73dd..f395a27614a 100644 --- a/aiohttp/http_websocket.py +++ b/aiohttp/http_websocket.py @@ -635,21 +635,17 @@ async def _send_frame( if (compress or self.compress) and opcode < 8: if compress: # Do not set self._compress if compressing is for this frame - compressobj = ZLibCompressor( - level=zlib.Z_BEST_SPEED, - wbits=-compress, - max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE, - ) + compressobj = self._make_compress_obj(compress) else: # self.compress if not self._compressobj: - self._compressobj = ZLibCompressor( - level=zlib.Z_BEST_SPEED, - wbits=-self.compress, - max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE, - ) + self._compressobj = self._make_compress_obj(self.compress) compressobj = self._compressobj message = await compressobj.compress(message) + # Its critical that we do not return control to the event + # loop until we have finished sending all the compressed + # data. Otherwise we could end up mixing compressed frames + # if there are multiple coroutines compressing data. message += compressobj.flush( zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH ) @@ -687,10 +683,20 @@ async def _send_frame( self._output_size += len(header) + len(message) + # It is safe to return control to the event loop when using compression + # after this point as we have already sent or buffered all the data. 
+ if self._output_size > self._limit: self._output_size = 0 await self.protocol._drain_helper() + def _make_compress_obj(self, compress: int) -> ZLibCompressor: + return ZLibCompressor( + level=zlib.Z_BEST_SPEED, + wbits=-compress, + max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE, + ) + def _write(self, data: bytes) -> None: if self.transport is None or self.transport.is_closing(): raise ConnectionResetError("Cannot write to closing transport") diff --git a/tests/test_websocket_writer.py b/tests/test_websocket_writer.py index fce3c330d27..8dbbc815fb7 100644 --- a/tests/test_websocket_writer.py +++ b/tests/test_websocket_writer.py @@ -1,9 +1,12 @@ +import asyncio import random +from typing import Any, Callable from unittest import mock import pytest -from aiohttp.http import WebSocketWriter +from aiohttp import DataQueue, WSMessage +from aiohttp.http import WebSocketReader, WebSocketWriter from aiohttp.test_utils import make_mocked_coro @@ -104,3 +107,65 @@ async def test_send_compress_text_per_message(protocol, transport) -> None: writer.transport.write.assert_called_with(b"\x81\x04text") await writer.send(b"text", compress=15) writer.transport.write.assert_called_with(b"\xc1\x06*I\xad(\x01\x00") + + +@pytest.mark.parametrize( + ("max_sync_chunk_size", "payload_point_generator"), + ( + (16, lambda count: count), + (4096, lambda count: count), + (32, lambda count: 64 + count if count % 2 else count), + ), +) +async def test_concurrent_messages( + protocol: Any, + transport: Any, + max_sync_chunk_size: int, + payload_point_generator: Callable[[int], int], +) -> None: + """Ensure messages are compressed correctly when there are multiple concurrent writers. 
+ + This test generates is parametrized to + + - Generate messages that are larger than patch + WEBSOCKET_MAX_SYNC_CHUNK_SIZE of 16 + where compression will run in the executor + + - Generate messages that are smaller than patch + WEBSOCKET_MAX_SYNC_CHUNK_SIZE of 4096 + where compression will run in the event loop + + - Interleave generated messages with a + WEBSOCKET_MAX_SYNC_CHUNK_SIZE of 32 + where compression will run in the event loop + and in the executor + """ + with mock.patch( + "aiohttp.http_websocket.WEBSOCKET_MAX_SYNC_CHUNK_SIZE", max_sync_chunk_size + ): + writer = WebSocketWriter(protocol, transport, compress=15) + queue: DataQueue[WSMessage] = DataQueue(asyncio.get_running_loop()) + reader = WebSocketReader(queue, 50000) + writers = [] + payloads = [] + for count in range(1, 64 + 1): + point = payload_point_generator(count) + payload = bytes((point,)) * point + payloads.append(payload) + writers.append(writer.send(payload, binary=True)) + await asyncio.gather(*writers) + + for call in writer.transport.write.call_args_list: + call_bytes = call[0][0] + result, _ = reader.feed_data(call_bytes) + assert result is False + msg = await queue.read() + bytes_data: bytes = msg.data + first_char = bytes_data[0:1] + char_val = ord(first_char) + assert len(bytes_data) == char_val + # If we have a concurrency problem, the data + # tends to get mixed up between messages so + # we want to validate that all the bytes are + # the same value + assert bytes_data == bytes_data[0:1] * char_val From 45c11bac35b01373414235ceb88a051aa250dea7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 24 Nov 2023 21:48:52 +0000 Subject: [PATCH 0023/1511] Bump typing-extensions from 4.7.1 to 4.8.0 (#7892) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [typing-extensions](https://github.com/python/typing_extensions) from 4.7.1 to 4.8.0.
Release notes

Sourced from typing-extensions's releases.

4.8.0

Changes since 4.7.1:

  • Add typing_extensions.Doc, as proposed by PEP 727. Patch by Sebastián Ramírez.
  • Drop support for Python 3.7 (including PyPy-3.7). Patch by Alex Waygood.
  • Fix bug where get_original_bases() would return incorrect results when called on a concrete subclass of a generic class. Patch by Alex Waygood (backporting python/cpython#107584, by James Hilton-Balfe).
  • Fix bug where ParamSpec(default=...) would raise a TypeError on Python versions <3.11. Patch by James Hilton-Balfe

No changes since 4.8.0rc1.

4.8.0rc1

  • Add typing_extensions.Doc, as proposed by PEP 727. Patch by Sebastián Ramírez.
  • Drop support for Python 3.7 (including PyPy-3.7). Patch by Alex Waygood.
  • Fix bug where get_original_bases() would return incorrect results when called on a concrete subclass of a generic class. Patch by Alex Waygood (backporting python/cpython#107584, by James Hilton-Balfe).
  • Fix bug where ParamSpec(default=...) would raise a TypeError on Python versions <3.11. Patch by James Hilton-Balfe
Changelog

Sourced from typing-extensions's changelog.

Release 4.8.0 (September 17, 2023)

No changes since 4.8.0rc1.

Release 4.8.0rc1 (September 7, 2023)

  • Add typing_extensions.Doc, as proposed by PEP 727. Patch by Sebastián Ramírez.
  • Drop support for Python 3.7 (including PyPy-3.7). Patch by Alex Waygood.
  • Fix bug where get_original_bases() would return incorrect results when called on a concrete subclass of a generic class. Patch by Alex Waygood (backporting python/cpython#107584, by James Hilton-Balfe).
  • Fix bug where ParamSpec(default=...) would raise a TypeError on Python versions <3.11. Patch by James Hilton-Balfe
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=typing-extensions&package-manager=pip&previous-version=4.7.1&new-version=4.8.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 4 +--- requirements/cython.txt | 2 +- requirements/dev.txt | 7 ++----- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- requirements/typing-extensions.txt | 2 +- 7 files changed, 8 insertions(+), 13 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 89d5aec9195..3cbb45e6799 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -34,7 +34,7 @@ pycares==4.3.0 # via aiodns pycparser==2.21 # via cffi -typing-extensions==4.7.1 +typing-extensions==4.8.0 # via -r requirements/typing-extensions.in uvloop==0.19.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index df021579ea0..c294ed018d9 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -188,7 +188,6 @@ setuptools-git==1.2 six==1.16.0 # via # python-dateutil - # sphinx # virtualenv slotscheck==0.17.1 # via -r requirements/lint.in @@ -241,7 +240,7 @@ trustme==1.1.0 ; platform_machine != "i686" # via -r requirements/test.in typer==0.6.1 # via python-on-whales -typing-extensions==4.7.1 +typing-extensions==4.8.0 # via # -r requirements/typing-extensions.in # aioredis @@ -280,4 +279,3 @@ setuptools==68.0.0 # via # blockdiag # pip-tools - # sphinx diff --git a/requirements/cython.txt b/requirements/cython.txt index 5851f1d8b48..f60bbf53da1 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -8,5 +8,5 @@ cython==3.0.5 # via -r requirements/cython.in multidict==6.0.4 # via -r requirements/multidict.in -typing-extensions==4.7.1 +typing-extensions==4.8.0 # via -r requirements/typing-extensions.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 605327c4e94..7cc11281ed1 100644 --- a/requirements/dev.txt +++ 
b/requirements/dev.txt @@ -177,9 +177,7 @@ requests==2.31.0 setuptools-git==1.2 # via -r requirements/test.in six==1.16.0 - # via - # python-dateutil - # sphinx + # via python-dateutil slotscheck==0.17.1 # via -r requirements/lint.in snowballstemmer==2.2.0 @@ -228,7 +226,7 @@ trustme==1.1.0 ; platform_machine != "i686" # via -r requirements/test.in typer==0.9.0 # via python-on-whales -typing-extensions==4.7.1 +typing-extensions==4.8.0 # via # -r requirements/typing-extensions.in # aioredis @@ -269,4 +267,3 @@ setuptools==68.0.0 # blockdiag # nodeenv # pip-tools - # sphinx diff --git a/requirements/lint.txt b/requirements/lint.txt index e90f801862d..3f5486da172 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -47,7 +47,7 @@ tomli==2.0.1 # mypy # pytest # slotscheck -typing-extensions==4.7.1 +typing-extensions==4.8.0 # via # -r requirements/typing-extensions.in # aioredis diff --git a/requirements/test.txt b/requirements/test.txt index 0f8a7ef8ee1..25950f4c430 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -109,7 +109,7 @@ trustme==1.1.0 ; platform_machine != "i686" # via -r requirements/test.in typer==0.9.0 # via python-on-whales -typing-extensions==4.7.1 +typing-extensions==4.8.0 # via # -r requirements/typing-extensions.in # annotated-types diff --git a/requirements/typing-extensions.txt b/requirements/typing-extensions.txt index c45af7262f7..043deab49a9 100644 --- a/requirements/typing-extensions.txt +++ b/requirements/typing-extensions.txt @@ -4,5 +4,5 @@ # # pip-compile --allow-unsafe --output-file=requirements/typing-extensions.txt --resolver=backtracking --strip-extras requirements/typing-extensions.in # -typing-extensions==4.7.1 +typing-extensions==4.8.0 # via -r requirements/typing-extensions.in From 4ca503331239f309d044c8ca8539ae8fa347659a Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sat, 25 Nov 2023 20:15:42 +0000 Subject: [PATCH 0024/1511] Message is not upgraded if Upgrade header is missing (#7895) (#7897) (cherry 
picked from commit fde031fe7b0d6060eab4ff13d588a882bb7a1ddb) --- CHANGES/7895.bugfix | 1 + aiohttp/http_parser.py | 3 ++- tests/test_http_parser.py | 9 +++++++++ 3 files changed, 12 insertions(+), 1 deletion(-) create mode 100644 CHANGES/7895.bugfix diff --git a/CHANGES/7895.bugfix b/CHANGES/7895.bugfix new file mode 100644 index 00000000000..557df294d71 --- /dev/null +++ b/CHANGES/7895.bugfix @@ -0,0 +1 @@ +Fixed messages being reported as upgraded without an Upgrade header in Python parser. -- by :user:`Dreamsorcerer` diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index b435096c5c7..85499177701 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -510,7 +510,8 @@ def parse_headers( close_conn = True elif v == "keep-alive": close_conn = False - elif v == "upgrade": + # https://www.rfc-editor.org/rfc/rfc9110.html#name-101-switching-protocols + elif v == "upgrade" and headers.get(hdrs.UPGRADE): upgrade = True # encoding diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index a6b4988c452..820a76cb821 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -474,6 +474,15 @@ def test_conn_upgrade(parser: Any) -> None: assert upgrade +def test_bad_upgrade(parser) -> None: + """Test not upgraded if missing Upgrade header.""" + text = b"GET /test HTTP/1.1\r\nconnection: upgrade\r\n\r\n" + messages, upgrade, tail = parser.feed_data(text) + msg = messages[0][0] + assert not msg.upgrade + assert not upgrade + + def test_compression_empty(parser) -> None: text = b"GET /test HTTP/1.1\r\n" b"content-encoding: \r\n\r\n" messages, upgrade, tail = parser.feed_data(text) From a3fa775aed25b645fa49ce38e41d3dd15e9bd89f Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 25 Nov 2023 21:45:33 +0000 Subject: [PATCH 0025/1511] [PR #7896/9a7cfe77 backport][3.10] Fix some flaky tests (#7901) **This is a backport of PR #7896 as merged into master 
(9a7cfe77623b9a61e4e58f425fff99529de2f795).** Co-authored-by: Sam Bull --- tests/test_web_server.py | 7 +++++-- tests/test_web_urldispatcher.py | 12 ++++++------ 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/tests/test_web_server.py b/tests/test_web_server.py index 73e69831991..d0fd95acdb4 100644 --- a/tests/test_web_server.py +++ b/tests/test_web_server.py @@ -219,9 +219,11 @@ async def test_no_handler_cancellation(aiohttp_unused_port) -> None: timeout_event = asyncio.Event() done_event = asyncio.Event() port = aiohttp_unused_port() + started = False async def on_request(_: web.Request) -> web.Response: - nonlocal done_event, timeout_event + nonlocal done_event, started, timeout_event + started = True await asyncio.wait_for(timeout_event.wait(), timeout=5) done_event.set() return web.Response() @@ -238,7 +240,7 @@ async def on_request(_: web.Request) -> web.Response: try: async with client.ClientSession( - timeout=client.ClientTimeout(total=0.1) + timeout=client.ClientTimeout(total=0.2) ) as sess: with pytest.raises(asyncio.TimeoutError): await sess.get(f"http://localhost:{port}/") @@ -247,6 +249,7 @@ async def on_request(_: web.Request) -> web.Response: with suppress(asyncio.TimeoutError): await asyncio.wait_for(done_event.wait(), timeout=1) + assert started assert done_event.is_set() finally: await asyncio.gather(runner.shutdown(), site.stop()) diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 4fbf5b02ecc..8ca8dcd7b99 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -93,13 +93,13 @@ async def test_access_root_of_static_handler( client = await aiohttp_client(app) # Request the root of the static directory. 
- r = await client.get(prefix) - assert r.status == status + async with await client.get(prefix) as r: + assert r.status == status - if data: - assert r.headers["Content-Type"] == "text/html; charset=utf-8" - read_ = await r.read() - assert read_ == data + if data: + assert r.headers["Content-Type"] == "text/html; charset=utf-8" + read_ = await r.read() + assert read_ == data async def test_follow_symlink( From 31d0834e34d7c122918a1061ed9222963e1135be Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sat, 25 Nov 2023 23:20:07 +0000 Subject: [PATCH 0026/1511] Fix flaky websocket test (#7902) (#7903) (cherry picked from commit 28d0b06c267335555f46569d0fd8803b74b1a5a4) --- tests/test_client_ws_functional.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/tests/test_client_ws_functional.py b/tests/test_client_ws_functional.py index 87e4162c04f..6270675276e 100644 --- a/tests/test_client_ws_functional.py +++ b/tests/test_client_ws_functional.py @@ -5,6 +5,7 @@ import aiohttp from aiohttp import hdrs, web +from aiohttp.http import WSCloseCode if sys.version_info >= (3, 11): import asyncio as async_timeout @@ -581,12 +582,12 @@ async def handler(request): app.router.add_route("GET", "/", handler) client = await aiohttp_client(app) - resp = await client.ws_connect("/", heartbeat=0.05) - - await resp.receive() - await resp.receive() + resp = await client.ws_connect("/", heartbeat=0.1) + # Connection should be closed roughly after 1.5x heartbeat. + await asyncio.sleep(0.2) assert ping_received + assert resp.close_code is WSCloseCode.ABNORMAL_CLOSURE async def test_send_recv_compress(aiohttp_client) -> None: From 566f3467bff9356efd257cbe817658676eb335f5 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 26 Nov 2023 15:00:28 +0000 Subject: [PATCH 0027/1511] Fix regression with connection upgrade (#7879) (#7907) Fixes #7867. 
(cherry picked from commit 48b15583305e692ce997ec6f5a6a2f88f23ace71) --- CHANGES/7879.bugfix | 1 + aiohttp/client_reqrep.py | 19 ++++++++----------- aiohttp/connector.py | 4 ++++ tests/test_client_functional.py | 19 +++++++++++++++++++ 4 files changed, 32 insertions(+), 11 deletions(-) create mode 100644 CHANGES/7879.bugfix diff --git a/CHANGES/7879.bugfix b/CHANGES/7879.bugfix new file mode 100644 index 00000000000..08baf85be42 --- /dev/null +++ b/CHANGES/7879.bugfix @@ -0,0 +1 @@ +Fixed a regression where connection may get closed during upgrade. -- by :user:`Dreamsorcerer` diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 0ab84743658..1d946aea320 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -1006,19 +1006,14 @@ def _response_eof(self) -> None: if self._closed: return - if self._connection is not None: - # websocket, protocol could be None because - # connection could be detached - if ( - self._connection.protocol is not None - and self._connection.protocol.upgraded - ): - return - - self._release_connection() + # protocol could be None because connection could be detached + protocol = self._connection and self._connection.protocol + if protocol is not None and protocol.upgraded: + return self._closed = True self._cleanup_writer() + self._release_connection() @property def closed(self) -> bool: @@ -1113,7 +1108,9 @@ async def read(self) -> bytes: elif self._released: # Response explicitly released raise ClientConnectionError("Connection closed") - await self._wait_released() # Underlying connection released + protocol = self._connection and self._connection.protocol + if protocol is None or not protocol.upgraded: + await self._wait_released() # Underlying connection released return self._body # type: ignore[no-any-return] def get_encoding(self) -> str: diff --git a/aiohttp/connector.py b/aiohttp/connector.py index d85679f8bca..61c26430860 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -127,6 +127,10 
@@ def __del__(self, _warnings: Any = warnings) -> None: context["source_traceback"] = self._source_traceback self._loop.call_exception_handler(context) + def __bool__(self) -> Literal[True]: + """Force subclasses to not be falsy, to make checks simpler.""" + return True + @property def loop(self) -> asyncio.AbstractEventLoop: warnings.warn( diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 6698ac6ef52..8a9a4e184be 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -173,6 +173,25 @@ async def handler(request): assert 1 == len(client._session.connector._conns) +async def test_upgrade_connection_not_released_after_read(aiohttp_client) -> None: + async def handler(request: web.Request) -> web.Response: + body = await request.read() + assert b"" == body + return web.Response( + status=101, headers={"Connection": "Upgrade", "Upgrade": "tcp"} + ) + + app = web.Application() + app.router.add_route("GET", "/", handler) + + client = await aiohttp_client(app) + + resp = await client.get("/") + await resp.read() + assert resp.connection is not None + assert not resp.closed + + async def test_keepalive_server_force_close_connection(aiohttp_client) -> None: async def handler(request): body = await request.read() From aa7b1213c9685bb9e9b754efeafebabee19bf4e7 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 26 Nov 2023 16:06:44 +0000 Subject: [PATCH 0028/1511] [PR #7673/aa7d1a8f backport][3.10] Document release process (#7910) **This is a backport of PR #7673 as merged into master (aa7d1a8fcad4ac4f1f0eae577f4c1947ebc1acf3).** Co-authored-by: Sam Bull --- docs/contributing-admins.rst | 57 ++++++++++++++++++++++++++++++++++++ docs/contributing.rst | 2 ++ docs/spelling_wordlist.txt | 3 +- 3 files changed, 61 insertions(+), 1 deletion(-) create mode 100644 docs/contributing-admins.rst diff --git a/docs/contributing-admins.rst 
b/docs/contributing-admins.rst new file mode 100644 index 00000000000..488953c6cc5 --- /dev/null +++ b/docs/contributing-admins.rst @@ -0,0 +1,57 @@ +:orphan: + +Instructions for aiohttp admins +=============================== + +This page is intended to document certain processes for admins of the aiohttp repository. +For regular contributors, return to :doc:`contributing`. + +.. contents:: + :local: + +Creating a new release +---------------------- + +.. note:: The example commands assume that ``origin`` refers to the ``aio-libs`` repository. + +To create a new release: + +#. Start on the branch for the release you are planning (e.g. ``3.8`` for v3.8.6): ``git checkout 3.8 && git pull`` +#. Update the version number in ``__init__.py``. +#. Run ``towncrier``. +#. Check and cleanup the changes in ``CHANGES.rst``. +#. Checkout a new branch: e.g. ``git checkout -b release/v3.8.6`` +#. Commit and create a PR. Once PR is merged, continue. +#. Go back to the release branch: e.g. ``git checkout 3.8 && git pull`` +#. Add a tag: e.g. ``git tag -a v3.8.6 -m 'Release 3.8.6'`` +#. Push the tag: e.g. ``git push origin v3.8.6`` +#. Monitor CI to ensure release process completes without errors. + +Once released, we need to complete some cleanup steps (no further steps are needed for +non-stable releases though). If doing a patch release, we need to do the below steps twice, +first merge into the newer release branch (e.g. 3.8 into 3.9) and then to master +(e.g. 3.9 into master). If a new minor release, then just merge to master. + +#. Switch to target branch: e.g. ``git checkout 3.9 && git pull`` +#. Start a merge: e.g. ``git merge 3.8 --no-commit --no-ff --gpg-sign`` +#. Carefully review the changes and revert anything that should not be included (most + things outside the changelog). +#. To ensure change fragments are cleaned up properly, run: ``python tools/cleanup_changes.py`` +#. Commit the merge (must be a normal merge commit, not squashed). +#. 
Push the branch directly to Github (because a PR would get squashed). When pushing, + you may get a rejected message. Follow these steps to resolve: + + #. Checkout to a new branch and push: e.g. ``git checkout -b do-not-merge && git push`` + #. Open a *draft* PR with a title of 'DO NOT MERGE'. + #. Once the CI has completed on that branch, you should be able to switch back and push + the target branch (as tests have passed on the merge commit now). + #. This should automatically consider the PR merged and delete the temporary branch. + +Back on the original release branch, append ``.dev0`` to the version number in ``__init__.py``. + +If doing a minor release: + +#. Create a new release branch for future features to go to: e.g. ``git checkout -b 3.10 3.9 && git push`` +#. Update ``target-branch`` for Dependabot to reference the new branch name in ``.github/dependabot.yml``. +#. Delete the older backport label (e.g. backport-3.8): https://github.com/aio-libs/aiohttp/labels +#. Add a new backport label (e.g. backport-3.10). 
diff --git a/docs/contributing.rst b/docs/contributing.rst index 1b826eb0a9f..5263f4a3f47 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -3,6 +3,8 @@ Contributing ============ +(:doc:`contributing-admins`) + Instructions for contributors ----------------------------- diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index 84cb5cd8131..1523ccd2a65 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -53,6 +53,7 @@ canonicalize cchardet cChardet ceil +changelog Changelog chardet Chardet @@ -88,7 +89,7 @@ Cythonize cythonized de deduplicate -# de-facto: +Dependabot deprecations DER dev From 20eda12bfb77a22a8bfe727244e3baed859f88d7 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 26 Nov 2023 18:15:55 +0000 Subject: [PATCH 0029/1511] Merge 3.9 --- CHANGES.rst | 31 +++++++++++++++++++++++++++++++ CHANGES/7848.bugfix | 1 - CHANGES/7865.bugfix | 1 - CHANGES/7869.bugfix | 1 - CHANGES/7879.bugfix | 1 - CHANGES/7895.bugfix | 1 - requirements/base.txt | 2 +- requirements/constraints.txt | 4 ++-- requirements/dev.txt | 4 ++-- requirements/lint.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 4 ++-- 12 files changed, 40 insertions(+), 14 deletions(-) delete mode 100644 CHANGES/7848.bugfix delete mode 100644 CHANGES/7865.bugfix delete mode 100644 CHANGES/7869.bugfix delete mode 100644 CHANGES/7879.bugfix delete mode 100644 CHANGES/7895.bugfix diff --git a/CHANGES.rst b/CHANGES.rst index fcfd111b8a4..8c2a2707408 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,37 @@ .. towncrier release notes start +3.9.1 (2023-11-26) +================== + +Bugfixes +-------- + +- Fixed importing aiohttp under PyPy on Windows. + + `#7848 `_ + +- Fixed async concurrency safety in websocket compressor. + + `#7865 `_ + +- Fixed ``ClientResponse.close()`` releasing the connection instead of closing. + + `#7869 `_ + +- Fixed a regression where connection may get closed during upgrade. 
-- by :user:`Dreamsorcerer` + + `#7879 `_ + +- Fixed messages being reported as upgraded without an Upgrade header in Python parser. -- by :user:`Dreamsorcerer` + + `#7895 `_ + + + +---- + + 3.9.0 (2023-11-18) ================== diff --git a/CHANGES/7848.bugfix b/CHANGES/7848.bugfix deleted file mode 100644 index 13a29e2a226..00000000000 --- a/CHANGES/7848.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix importing aiohttp under PyPy 3.8 and 3.9 on Windows. diff --git a/CHANGES/7865.bugfix b/CHANGES/7865.bugfix deleted file mode 100644 index 9a46e124486..00000000000 --- a/CHANGES/7865.bugfix +++ /dev/null @@ -1 +0,0 @@ -Restore async concurrency safety to websocket compressor diff --git a/CHANGES/7869.bugfix b/CHANGES/7869.bugfix deleted file mode 100644 index 23282fc3bb4..00000000000 --- a/CHANGES/7869.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix ClientResponse.close releasing the connection instead of closing diff --git a/CHANGES/7879.bugfix b/CHANGES/7879.bugfix deleted file mode 100644 index 08baf85be42..00000000000 --- a/CHANGES/7879.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fixed a regression where connection may get closed during upgrade. -- by :user:`Dreamsorcerer` diff --git a/CHANGES/7895.bugfix b/CHANGES/7895.bugfix deleted file mode 100644 index 557df294d71..00000000000 --- a/CHANGES/7895.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fixed messages being reported as upgraded without an Upgrade header in Python parser. 
-- by :user:`Dreamsorcerer` diff --git a/requirements/base.txt b/requirements/base.txt index 3cbb45e6799..daccca54d86 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -38,5 +38,5 @@ typing-extensions==4.8.0 # via -r requirements/typing-extensions.in uvloop==0.19.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in -yarl==1.9.2 +yarl==1.9.3 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index c294ed018d9..79d079e4d6a 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -108,7 +108,7 @@ multidict==6.0.4 # -r requirements/multidict.in # -r requirements/runtime-deps.in # yarl -mypy==1.7.0 ; implementation_name == "cpython" +mypy==1.7.1 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/test.in @@ -265,7 +265,7 @@ webcolors==1.11.1 # via blockdiag wheel==0.37.0 # via pip-tools -yarl==1.9.2 +yarl==1.9.3 # via -r requirements/runtime-deps.in zipp==3.17.0 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 7cc11281ed1..84c14387d33 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -103,7 +103,7 @@ multidict==6.0.4 # via # -r requirements/runtime-deps.in # yarl -mypy==1.7.0 ; implementation_name == "cpython" +mypy==1.7.1 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/test.in @@ -252,7 +252,7 @@ webcolors==1.13 # via blockdiag wheel==0.41.0 # via pip-tools -yarl==1.9.2 +yarl==1.9.3 # via -r requirements/runtime-deps.in zipp==3.17.0 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index 3f5486da172..9b6101de5d8 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -22,7 +22,7 @@ identify==2.5.26 # via pre-commit iniconfig==2.0.0 # via pytest -mypy==1.7.0 ; implementation_name == "cpython" +mypy==1.7.1 ; implementation_name == "cpython" # via -r requirements/lint.in mypy-extensions==1.0.0 # via 
mypy diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 9d1e47b2e54..a0f2aa861f7 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -30,5 +30,5 @@ pycares==4.3.0 # via aiodns pycparser==2.21 # via cffi -yarl==1.9.2 +yarl==1.9.3 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 25950f4c430..bd2f951b0a4 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -53,7 +53,7 @@ multidict==6.0.4 # via # -r requirements/runtime-deps.in # yarl -mypy==1.7.0 ; implementation_name == "cpython" +mypy==1.7.1 ; implementation_name == "cpython" # via -r requirements/test.in mypy-extensions==1.0.0 # via mypy @@ -124,5 +124,5 @@ uvloop==0.19.0 ; platform_system != "Windows" and implementation_name == "cpytho # via -r requirements/base.in wait-for-it==2.2.2 # via -r requirements/test.in -yarl==1.9.2 +yarl==1.9.3 # via -r requirements/runtime-deps.in From c2a53158a62770c9d7a66e97804840defb05616e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 Nov 2023 10:39:14 +0000 Subject: [PATCH 0030/1511] Bump cython from 3.0.5 to 3.0.6 (#7913) Bumps [cython](https://github.com/cython/cython) from 3.0.5 to 3.0.6.
Changelog

Sourced from cython's changelog.

3.0.6 (2023-11-26)

Features added

  • Fused def function dispatch is a bit faster.

  • Declarations for the wchar PyUnicode API were added. (Github issue :issue:5836)

  • The Python "nogil" fork is now also detected with the new Py_GIL_DISABLED macro. Patch by Hugo van Kemenade (Github issue :issue:583652)

Bugs fixed

  • Comparing dataclasses could give different results than Python. (Github issue :issue:5857)

  • float(std::string) generated invalid C code. (Github issue :issue:5818)

  • Using cpdef functions with cimport_from_pyx failed. (Github issue :issue:5795)

  • A crash was fixed when string-formatting a Python value fails. (Github issue :issue:5787)

  • On item access, Cython could try the sequence protocol before the mapping protocol in some cases if an object supports both. (Github issue :issue:5776)

  • A C compiler warning was resolved. (Github issue :issue:5794)

  • Complex numbers failed to compile in MSVC with C11. Patch by Lysandros Nikolaou. (Github issue :issue:5809)

  • Some issues with the Limited API and with PyPy were resolved. (Github issues :issue:5695, :issue:5696)

  • A C++ issue in Python 3.13 was resolved. (Github issue :issue:5790)

  • Several directives are now also available (as no-ops) in Python code. (Github issue :issue:5803)

  • An error message was corrected. Patch by Mads Ynddal. (Github issue :issue:5805)

Commits
  • 2486558 Prepare release of 3.0.6.
  • cb1d78b Update changelog.
  • 7f6577a Fix dataclass comparison operators and enable tests (#5857)
  • f258b67 PEP 703: Accept new Py_GIL_DISABLED macro in addition to PY_NOGIL (GH-5852)
  • df2bffa Partially disable trashcan test in PyPy (#5832)
  • a4bb6dc Add missing unicode C API functions and Py_UNICODE warning (#5836)
  • 16d31ef Disable failing Windows CyCache test (#5826)
  • 4bbefa3 Simplify pstats test
  • f71205d Remove allowed_failure from Python 3.12 (#5815)
  • 7683cb1 Fix "float(std::string)" and other non-PyObject arguments to float().
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=cython&package-manager=pip&previous-version=3.0.5&new-version=3.0.6)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/cython.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 79d079e4d6a..7d3de754a4f 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -60,7 +60,7 @@ cryptography==41.0.2 # via # pyjwt # trustme -cython==3.0.5 +cython==3.0.6 # via -r requirements/cython.in distlib==0.3.3 # via virtualenv diff --git a/requirements/cython.txt b/requirements/cython.txt index f60bbf53da1..ee1b2f15393 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/cython.txt --resolver=backtracking --strip-extras requirements/cython.in # -cython==3.0.5 +cython==3.0.6 # via -r requirements/cython.in multidict==6.0.4 # via -r requirements/multidict.in From 0a75ef09f72eb5242fda176d262c1de70bf45d19 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 27 Nov 2023 19:34:26 +0000 Subject: [PATCH 0031/1511] [PR #7829/2a3eaa11 backport][3.10] Index resources in the UrlDispatcher to avoid linear search for most cases (#7917) **This is a backport of PR #7829 as merged into master (2a3eaa11dc2f8e6150eede9337182f273e14c20a).** --------- Co-authored-by: J. 
Nick Koston Co-authored-by: Sam Bull --- CHANGES/7829.misc | 3 ++ aiohttp/web_urldispatcher.py | 79 +++++++++++++++++++++++++----- docs/web_reference.rst | 39 +++++++++++---- tests/test_urldispatch.py | 7 +++ tests/test_web_urldispatcher.py | 86 ++++++++++++++++++++++++++++++++- 5 files changed, 190 insertions(+), 24 deletions(-) create mode 100644 CHANGES/7829.misc diff --git a/CHANGES/7829.misc b/CHANGES/7829.misc new file mode 100644 index 00000000000..9eb060f4713 --- /dev/null +++ b/CHANGES/7829.misc @@ -0,0 +1,3 @@ +Improved URL handler resolution time by indexing resources in the UrlDispatcher. +For applications with a large number of handlers, this should increase performance significantly. +-- by :user:`bdraco` diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index ddb6ede0dd1..e398a698818 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -738,13 +738,20 @@ class PrefixedSubAppResource(PrefixResource): def __init__(self, prefix: str, app: "Application") -> None: super().__init__(prefix) self._app = app - for resource in app.router.resources(): - resource.add_prefix(prefix) + self._add_prefix_to_resources(prefix) def add_prefix(self, prefix: str) -> None: super().add_prefix(prefix) - for resource in self._app.router.resources(): + self._add_prefix_to_resources(prefix) + + def _add_prefix_to_resources(self, prefix: str) -> None: + router = self._app.router + for resource in router.resources(): + # Since the canonical path of a resource is about + # to change, we need to unindex it and then reindex + router.unindex_resource(resource) resource.add_prefix(prefix) + router.index_resource(resource) def url_for(self, *args: str, **kwargs: str) -> URL: raise RuntimeError(".url_for() is not supported " "by sub-application root") @@ -753,11 +760,6 @@ def get_info(self) -> _InfoDict: return {"app": self._app, "prefix": self._prefix} async def resolve(self, request: Request) -> _Resolve: - if ( - not 
request.url.raw_path.startswith(self._prefix2) - and request.url.raw_path != self._prefix - ): - return None, set() match_info = await self._app.router.resolve(request) match_info.add_app(self._app) if isinstance(match_info.http_exception, HTTPMethodNotAllowed): @@ -1003,12 +1005,39 @@ def __init__(self) -> None: super().__init__() self._resources: List[AbstractResource] = [] self._named_resources: Dict[str, AbstractResource] = {} + self._resource_index: dict[str, list[AbstractResource]] = {} + self._matched_sub_app_resources: List[MatchedSubAppResource] = [] async def resolve(self, request: Request) -> UrlMappingMatchInfo: - method = request.method + resource_index = self._resource_index allowed_methods: Set[str] = set() - for resource in self._resources: + # Walk the url parts looking for candidates. We walk the url backwards + # to ensure the most explicit match is found first. If there are multiple + # candidates for a given url part because there are multiple resources + # registered for the same canonical path, we resolve them in a linear + # fashion to ensure registration order is respected. + url_part = request.rel_url.raw_path + while url_part: + for candidate in resource_index.get(url_part, ()): + match_dict, allowed = await candidate.resolve(request) + if match_dict is not None: + return match_dict + else: + allowed_methods |= allowed + if url_part == "/": + break + url_part = url_part.rpartition("/")[0] or "/" + + # + # We didn't find any candidates, so we'll try the matched sub-app + # resources which we have to walk in a linear fashion because they + # have regex/wildcard match rules and we cannot index them. 
+ # + # For most cases we do not expect there to be many of these since + # currently they are only added by `add_domain` + # + for resource in self._matched_sub_app_resources: match_dict, allowed = await resource.resolve(request) if match_dict is not None: return match_dict @@ -1016,9 +1045,9 @@ async def resolve(self, request: Request) -> UrlMappingMatchInfo: allowed_methods |= allowed if allowed_methods: - return MatchInfoError(HTTPMethodNotAllowed(method, allowed_methods)) - else: - return MatchInfoError(HTTPNotFound()) + return MatchInfoError(HTTPMethodNotAllowed(request.method, allowed_methods)) + + return MatchInfoError(HTTPNotFound()) def __iter__(self) -> Iterator[str]: return iter(self._named_resources) @@ -1074,6 +1103,30 @@ def register_resource(self, resource: AbstractResource) -> None: self._named_resources[name] = resource self._resources.append(resource) + if isinstance(resource, MatchedSubAppResource): + # We cannot index match sub-app resources because they have match rules + self._matched_sub_app_resources.append(resource) + else: + self.index_resource(resource) + + def _get_resource_index_key(self, resource: AbstractResource) -> str: + """Return a key to index the resource in the resource index.""" + # strip at the first { to allow for variables + return resource.canonical.partition("{")[0].rstrip("/") or "/" + + def index_resource(self, resource: AbstractResource) -> None: + """Add a resource to the resource index.""" + resource_key = self._get_resource_index_key(resource) + # There may be multiple resources for a canonical path + # so we keep them in a list to ensure that registration + # order is respected. 
+ self._resource_index.setdefault(resource_key, []).append(resource) + + def unindex_resource(self, resource: AbstractResource) -> None: + """Remove a resource from the resource index.""" + resource_key = self._get_resource_index_key(resource) + self._resource_index[resource_key].remove(resource) + def add_resource(self, path: str, *, name: Optional[str] = None) -> Resource: if path and not path.startswith("/"): raise ValueError("path should be started with / or be empty") diff --git a/docs/web_reference.rst b/docs/web_reference.rst index 1351e76d25d..874b15bd8e3 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -1956,20 +1956,38 @@ unique *name* and at least one :term:`route`. :term:`web-handler` lookup is performed in the following way: -1. Router iterates over *resources* one-by-one. -2. If *resource* matches to requested URL the resource iterates over - own *routes*. -3. If route matches to requested HTTP method (or ``'*'`` wildcard) the - route's handler is used as found :term:`web-handler`. The lookup is - finished. -4. Otherwise router tries next resource from the *routing table*. -5. If the end of *routing table* is reached and no *resource* / - *route* pair found the *router* returns special :class:`~aiohttp.abc.AbstractMatchInfo` +1. The router splits the URL and checks the index from longest to shortest. + For example, '/one/two/three' will first check the index for + '/one/two/three', then '/one/two' and finally '/'. +2. If the URL part is found in the index, the list of routes for + that URL part is iterated over. If a route matches to requested HTTP + method (or ``'*'`` wildcard) the route's handler is used as the chosen + :term:`web-handler`. The lookup is finished. +3. 
If the route is not found in the index, the router tries to find + the route in the list of :class:`~aiohttp.web.MatchedSubAppResource`, + (current only created from :meth:`~aiohttp.web.Application.add_domain`), + and will iterate over the list of + :class:`~aiohttp.web.MatchedSubAppResource` in a linear fashion + until a match is found. +4. If no *resource* / *route* pair was found, the *router* + returns the special :class:`~aiohttp.abc.AbstractMatchInfo` instance with :attr:`aiohttp.abc.AbstractMatchInfo.http_exception` is not ``None`` but :exc:`HTTPException` with either *HTTP 404 Not Found* or *HTTP 405 Method Not Allowed* status code. Registered :meth:`~aiohttp.abc.AbstractMatchInfo.handler` raises this exception on call. +Fixed paths are preferred over variable paths. For example, +if you have two routes ``/a/b`` and ``/a/{name}``, then the first +route will always be preferred over the second one. + +If there are multiple dynamic paths with the same fixed prefix, +they will be resolved in order of registration. + +For example, if you have two dynamic routes that are prefixed +with the fixed ``/users`` path such as ``/users/{x}/{y}/z`` and +``/users/{x}/y/z``, the first one will be preferred over the +second one. + User should never instantiate resource classes but give it by :meth:`UrlDispatcher.add_resource` call. @@ -1991,7 +2009,10 @@ Resource classes hierarchy:: Resource PlainResource DynamicResource + PrefixResource StaticResource + PrefixedSubAppResource + MatchedSubAppResource .. 
class:: AbstractResource diff --git a/tests/test_urldispatch.py b/tests/test_urldispatch.py index 588daed8d40..bf15588bb13 100644 --- a/tests/test_urldispatch.py +++ b/tests/test_urldispatch.py @@ -1258,10 +1258,17 @@ async def test_prefixed_subapp_overlap(app) -> None: subapp2.router.add_get("/b", handler2) app.add_subapp("/ss", subapp2) + subapp3 = web.Application() + handler3 = make_handler() + subapp3.router.add_get("/c", handler3) + app.add_subapp("/s/s", subapp3) + match_info = await app.router.resolve(make_mocked_request("GET", "/s/a")) assert match_info.route.handler is handler1 match_info = await app.router.resolve(make_mocked_request("GET", "/ss/b")) assert match_info.route.handler is handler2 + match_info = await app.router.resolve(make_mocked_request("GET", "/s/s/c")) + assert match_info.route.handler is handler3 async def test_prefixed_subapp_empty_route(app) -> None: diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 8ca8dcd7b99..3b92a10896c 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -13,7 +13,7 @@ from aiohttp import abc, web from aiohttp.pytest_plugin import AiohttpClient -from aiohttp.web_urldispatcher import SystemRoute +from aiohttp.web_urldispatcher import Resource, SystemRoute @pytest.fixture(scope="function") @@ -166,7 +166,6 @@ async def test_access_to_the_file_with_spaces( r = await client.get(url) assert r.status == 200 assert (await r.text()) == data - await r.release() async def test_access_non_existing_resource( @@ -588,3 +587,86 @@ async def handler(request: web.Request) -> web.Response: r = await client.get(yarl.URL(urlencoded_path, encoded=True)) assert r.status == expected_http_resp_status await r.release() + + +async def test_order_is_preserved(aiohttp_client: AiohttpClient) -> None: + """Test route order is preserved. + + Note that fixed/static paths are always preferred over a regex path. 
+ """ + app = web.Application() + + async def handler(request: web.Request) -> web.Response: + assert isinstance(request.match_info._route.resource, Resource) + return web.Response(text=request.match_info._route.resource.canonical) + + app.router.add_get("/first/x/{b}/", handler) + app.router.add_get(r"/first/{x:.*/b}", handler) + + app.router.add_get(r"/second/{user}/info", handler) + app.router.add_get("/second/bob/info", handler) + + app.router.add_get("/third/bob/info", handler) + app.router.add_get(r"/third/{user}/info", handler) + + app.router.add_get(r"/forth/{name:\d+}", handler) + app.router.add_get("/forth/42", handler) + + app.router.add_get("/fifth/42", handler) + app.router.add_get(r"/fifth/{name:\d+}", handler) + + client = await aiohttp_client(app) + + r = await client.get("/first/x/b/") + assert r.status == 200 + assert await r.text() == "/first/x/{b}/" + + r = await client.get("/second/frank/info") + assert r.status == 200 + assert await r.text() == "/second/{user}/info" + + # Fixed/static paths are always preferred over regex paths + r = await client.get("/second/bob/info") + assert r.status == 200 + assert await r.text() == "/second/bob/info" + + r = await client.get("/third/bob/info") + assert r.status == 200 + assert await r.text() == "/third/bob/info" + + r = await client.get("/third/frank/info") + assert r.status == 200 + assert await r.text() == "/third/{user}/info" + + r = await client.get("/forth/21") + assert r.status == 200 + assert await r.text() == "/forth/{name}" + + # Fixed/static paths are always preferred over regex paths + r = await client.get("/forth/42") + assert r.status == 200 + assert await r.text() == "/forth/42" + + r = await client.get("/fifth/21") + assert r.status == 200 + assert await r.text() == "/fifth/{name}" + + r = await client.get("/fifth/42") + assert r.status == 200 + assert await r.text() == "/fifth/42" + + +async def test_url_with_many_slashes(aiohttp_client: AiohttpClient) -> None: + app = web.Application() + 
+ class MyView(web.View): + async def get(self) -> web.Response: + return web.Response() + + app.router.add_routes([web.view("/a", MyView)]) + + client = await aiohttp_client(app) + + r = await client.get("///a") + assert r.status == 200 + await r.release() From 1b4efffeb2fc539503f62e5d93b7a243d6a29787 Mon Sep 17 00:00:00 2001 From: Alex <52292902+alexrudd2@users.noreply.github.com> Date: Wed, 29 Nov 2023 16:46:20 -0600 Subject: [PATCH 0032/1511] [PR #7808/213d1b2 backport][3.10] Restore requirements-txt-fixer in pre-commit (#7921) Backport of https://github.com/aio-libs/aiohttp/pull/7808 cherry picked from commit 213d1b22d42ce8efc54d7858b490e920fcdb4f0a --- .pre-commit-config.yaml | 3 +-- Makefile | 5 +---- requirements/runtime-deps.in | 12 ++++++------ requirements/sync-direct-runtime-deps.py | 16 ++++++++++++++++ requirements/test.in | 2 +- setup.cfg | 6 +++--- 6 files changed, 28 insertions(+), 16 deletions(-) create mode 100755 requirements/sync-direct-runtime-deps.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ffa16b6fb36..587c46e991d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -45,8 +45,7 @@ repos: exclude: >- ^docs/[^/]*\.svg$ - id: requirements-txt-fixer - exclude: >- - ^requirements/.*\.txt$ + files: requirements/.*\.in$ - id: trailing-whitespace - id: file-contents-sorter args: ['--ignore-case'] diff --git a/Makefile b/Makefile index cdeb0ad0ed9..e3ec98c7ce8 100644 --- a/Makefile +++ b/Makefile @@ -182,7 +182,4 @@ install-dev: .develop .PHONY: sync-direct-runtime-deps sync-direct-runtime-deps: @echo Updating 'requirements/runtime-deps.in' from 'setup.cfg'... 
>&2 - @echo '# Extracted from `setup.cfg` via `make sync-direct-runtime-deps`' > requirements/runtime-deps.in - @echo >> requirements/runtime-deps.in - @python -c 'from configparser import ConfigParser; from itertools import chain; from pathlib import Path; cfg = ConfigParser(); cfg.read_string(Path("setup.cfg").read_text()); print("\n".join(line.strip() for line in chain(cfg["options"].get("install_requires").splitlines(), "\n".join(cfg["options.extras_require"].values()).splitlines()) if line.strip()))' \ - >> requirements/runtime-deps.in + @python requirements/sync-direct-runtime-deps.py diff --git a/requirements/runtime-deps.in b/requirements/runtime-deps.in index 2bfb21ecd18..b2df16f1680 100644 --- a/requirements/runtime-deps.in +++ b/requirements/runtime-deps.in @@ -1,11 +1,11 @@ # Extracted from `setup.cfg` via `make sync-direct-runtime-deps` -attrs >= 17.3.0 -multidict >=4.5, < 7.0 -async-timeout >= 4.0, < 5.0 ; python_version < "3.11" -yarl >= 1.0, < 2.0 -frozenlist >= 1.1.1 -aiosignal >= 1.1.2 aiodns; sys_platform=="linux" or sys_platform=="darwin" +aiosignal >= 1.1.2 +async-timeout >= 4.0, < 5.0 ; python_version < "3.11" +attrs >= 17.3.0 Brotli; platform_python_implementation == 'CPython' brotlicffi; platform_python_implementation != 'CPython' +frozenlist >= 1.1.1 +multidict >=4.5, < 7.0 +yarl >= 1.0, < 2.0 diff --git a/requirements/sync-direct-runtime-deps.py b/requirements/sync-direct-runtime-deps.py new file mode 100755 index 00000000000..adc28bdd287 --- /dev/null +++ b/requirements/sync-direct-runtime-deps.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python +"""Sync direct runtime dependencies from setup.cfg to runtime-deps.in.""" + +from configparser import ConfigParser +from pathlib import Path + +cfg = ConfigParser() +cfg.read(Path("setup.cfg")) +reqs = cfg["options"]["install_requires"] + cfg.items("options.extras_require")[0][1] +reqs = sorted(reqs.split("\n"), key=str.casefold) +reqs.remove("") + +with open(Path("requirements", "runtime-deps.in"), "w") 
as outfile: + header = "# Extracted from `setup.cfg` via `make sync-direct-runtime-deps`\n\n" + outfile.write(header) + outfile.write("\n".join(reqs) + "\n") diff --git a/requirements/test.in b/requirements/test.in index 417d45959be..5c1edf5dabe 100644 --- a/requirements/test.in +++ b/requirements/test.in @@ -1,5 +1,5 @@ --r base.in -c broken-projects.in +-r base.in coverage mypy; implementation_name == "cpython" diff --git a/setup.cfg b/setup.cfg index 8026c34cbab..331c80e154a 100644 --- a/setup.cfg +++ b/setup.cfg @@ -47,12 +47,12 @@ zip_safe = False include_package_data = True install_requires = + aiosignal >= 1.1.2 + async-timeout >= 4.0, < 5.0 ; python_version < "3.11" attrs >= 17.3.0 + frozenlist >= 1.1.1 multidict >=4.5, < 7.0 - async-timeout >= 4.0, < 5.0 ; python_version < "3.11" yarl >= 1.0, < 2.0 - frozenlist >= 1.1.1 - aiosignal >= 1.1.2 [options.exclude_package_data] * = From 4d03b3b6c6921ac0a92ce9db32ec6529dd7ed1ad Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 2 Dec 2023 17:01:46 +0000 Subject: [PATCH 0033/1511] [PR #7930/895afa83 backport][3.10] Use .coveragerc for TYPE_CHECKING conditions (#7932) **This is a backport of PR #7930 as merged into master (895afa83027cb88b5a99d521cff48b549d325990).** Co-authored-by: Sam Bull --- .coveragerc | 4 ++++ aiohttp/__init__.py | 2 +- aiohttp/abc.py | 4 ++-- aiohttp/client_exceptions.py | 2 +- aiohttp/client_reqrep.py | 2 +- aiohttp/connector.py | 2 +- aiohttp/multipart.py | 2 +- aiohttp/payload.py | 4 ++-- aiohttp/test_utils.py | 2 +- aiohttp/tracing.py | 2 +- aiohttp/typedefs.py | 2 +- aiohttp/web_app.py | 4 ++-- aiohttp/web_fileresponse.py | 2 +- aiohttp/web_middlewares.py | 2 +- aiohttp/web_protocol.py | 2 +- aiohttp/web_request.py | 2 +- aiohttp/web_response.py | 2 +- aiohttp/web_routedef.py | 2 +- aiohttp/web_urldispatcher.py | 2 +- 19 files changed, 25 insertions(+), 21 deletions(-) diff --git a/.coveragerc b/.coveragerc index 
61cb5ad822d..0b5d5bf0ad4 100644 --- a/.coveragerc +++ b/.coveragerc @@ -2,3 +2,7 @@ branch = True source = aiohttp, tests omit = site-packages + +[report] +exclude_also = + if TYPE_CHECKING diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 701d913bef4..855dce8ef2c 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -104,7 +104,7 @@ TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams, ) -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: # At runtime these are lazy-loaded at the bottom of the file. from .worker import ( GunicornUVLoopWebWorker as GunicornUVLoopWebWorker, diff --git a/aiohttp/abc.py b/aiohttp/abc.py index ceb4490019a..ee838998997 100644 --- a/aiohttp/abc.py +++ b/aiohttp/abc.py @@ -22,7 +22,7 @@ from .helpers import get_running_loop from .typedefs import LooseCookies -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from .web_app import Application from .web_exceptions import HTTPException from .web_request import BaseRequest, Request @@ -131,7 +131,7 @@ async def close(self) -> None: """Release resolver""" -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: IterableBase = Iterable[Morsel[str]] else: IterableBase = Iterable diff --git a/aiohttp/client_exceptions.py b/aiohttp/client_exceptions.py index 588ffbf6ec6..9aae12a84e8 100644 --- a/aiohttp/client_exceptions.py +++ b/aiohttp/client_exceptions.py @@ -15,7 +15,7 @@ ssl = SSLContext = None # type: ignore[assignment] -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo else: RequestInfo = ClientResponse = ConnectionKey = None diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 1d946aea320..4ae0ecbcdfb 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -81,7 +81,7 @@ __all__ = ("ClientRequest", "ClientResponse", "RequestInfo", "Fingerprint") -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from .client import 
ClientSession from .connector import Connection from .tracing import Trace diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 61c26430860..73f58b1a451 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -63,7 +63,7 @@ __all__ = ("BaseConnector", "TCPConnector", "UnixConnector", "NamedPipeConnector") -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from .client import ClientTimeout from .client_reqrep import ConnectionKey from .tracing import Trace diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index 3a8793398d5..602a6b67457 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -59,7 +59,7 @@ ) -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from .client_reqrep import ClientResponse diff --git a/aiohttp/payload.py b/aiohttp/payload.py index ba856693eed..6593b05c6f7 100644 --- a/aiohttp/payload.py +++ b/aiohttp/payload.py @@ -54,7 +54,7 @@ TOO_LARGE_BYTES_BODY: Final[int] = 2**20 # 1 MB -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from typing import List @@ -401,7 +401,7 @@ def __init__( ) -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from typing import AsyncIterable, AsyncIterator _AsyncIterator = AsyncIterator[bytes] diff --git a/aiohttp/test_utils.py b/aiohttp/test_utils.py index 2a026fe704f..b5821a7fb84 100644 --- a/aiohttp/test_utils.py +++ b/aiohttp/test_utils.py @@ -50,7 +50,7 @@ ) from .web_protocol import _RequestHandler -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from ssl import SSLContext else: SSLContext = None diff --git a/aiohttp/tracing.py b/aiohttp/tracing.py index 70e2a62ec1d..62847a0bf7c 100644 --- a/aiohttp/tracing.py +++ b/aiohttp/tracing.py @@ -8,7 +8,7 @@ from .client_reqrep import ClientResponse -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from .client import ClientSession _ParamT_contra = TypeVar("_ParamT_contra", contravariant=True) diff --git a/aiohttp/typedefs.py b/aiohttp/typedefs.py index 57d95b384f2..5e963e1a10e 100644 --- 
a/aiohttp/typedefs.py +++ b/aiohttp/typedefs.py @@ -17,7 +17,7 @@ DEFAULT_JSON_ENCODER = json.dumps DEFAULT_JSON_DECODER = json.loads -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: _CIMultiDict = CIMultiDict[str] _CIMultiDictProxy = CIMultiDictProxy[str] _MultiDict = MultiDict[str] diff --git a/aiohttp/web_app.py b/aiohttp/web_app.py index 6e822b80225..91bf5fdac61 100644 --- a/aiohttp/web_app.py +++ b/aiohttp/web_app.py @@ -60,7 +60,7 @@ __all__ = ("Application", "CleanupError") -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: _AppSignal = Signal[Callable[["Application"], Awaitable[None]]] _RespPrepareSignal = Signal[Callable[[Request, StreamResponse], Awaitable[None]]] _Middlewares = FrozenList[Middleware] @@ -561,7 +561,7 @@ def exceptions(self) -> List[BaseException]: return cast(List[BaseException], self.args[1]) -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: _CleanupContextBase = FrozenList[Callable[[Application], AsyncIterator[None]]] else: _CleanupContextBase = FrozenList diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index 0f39c70dcb8..eb7a6a31d39 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -31,7 +31,7 @@ __all__ = ("FileResponse",) -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from .web_request import BaseRequest diff --git a/aiohttp/web_middlewares.py b/aiohttp/web_middlewares.py index cb24eec9107..5da1533c0df 100644 --- a/aiohttp/web_middlewares.py +++ b/aiohttp/web_middlewares.py @@ -12,7 +12,7 @@ "normalize_path_middleware", ) -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from .web_app import Application _Func = TypeVar("_Func") diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index d0ed0591c17..ec5856a0a22 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -44,7 +44,7 @@ __all__ = ("RequestHandler", "RequestPayloadError", "PayloadAccessError") -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from 
.web_server import Server diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index a7e32ca6c79..61fc831b032 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -66,7 +66,7 @@ __all__ = ("BaseRequest", "FileField", "Request") -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from .web_app import Application from .web_protocol import RequestHandler from .web_urldispatcher import UrlMappingMatchInfo diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index e089c60ee4c..b6a4ba9b31e 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -44,7 +44,7 @@ __all__ = ("ContentCoding", "StreamResponse", "Response", "json_response") -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from .web_request import BaseRequest BaseClass = MutableMapping[str, Any] diff --git a/aiohttp/web_routedef.py b/aiohttp/web_routedef.py index a1eb0a76549..d79cd32a14a 100644 --- a/aiohttp/web_routedef.py +++ b/aiohttp/web_routedef.py @@ -20,7 +20,7 @@ from .abc import AbstractView from .typedefs import Handler, PathLike -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from .web_request import Request from .web_response import StreamResponse from .web_urldispatcher import AbstractRoute, UrlDispatcher diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index e398a698818..ea28f15e2b9 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -69,7 +69,7 @@ ) -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from .web_app import Application BaseDict = Dict[str, str] From cbca8f025155c9c0a306018dfb544509cf15d6e0 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 3 Dec 2023 06:46:05 +0000 Subject: [PATCH 0034/1511] [PR #7927/da2e349a backport][3.10] Pin proxy.py to 2.4.4rc4 for testing (#7935) Co-authored-by: J. 
Nick Koston --- requirements/dev.txt | 2 +- requirements/test.in | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/dev.txt b/requirements/dev.txt index 84c14387d33..35840675928 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -129,7 +129,7 @@ pluggy==1.2.0 # via pytest pre-commit==3.5.0 # via -r requirements/lint.in -proxy-py==2.4.3 +proxy-py==2.4.4rc4 # via -r requirements/test.in pycares==4.3.0 # via aiodns diff --git a/requirements/test.in b/requirements/test.in index 5c1edf5dabe..3a82a00818a 100644 --- a/requirements/test.in +++ b/requirements/test.in @@ -3,7 +3,7 @@ coverage mypy; implementation_name == "cpython" -proxy.py +proxy.py >= 2.4.4rc4 pytest pytest-cov pytest-mock From 835fedbeb82954913da9c4fbcbabdf7312fe64f2 Mon Sep 17 00:00:00 2001 From: atbuy <56309299+atbuy@users.noreply.github.com> Date: Sun, 3 Dec 2023 16:38:54 +0200 Subject: [PATCH 0035/1511] Patchback 3.10 #7818 (#7936) --- CHANGES/7801.feature | 1 + aiohttp/__init__.py | 66 +++++++++++++++++---------------- aiohttp/client.py | 46 ++++++++++++----------- aiohttp/client_exceptions.py | 10 +++++ aiohttp/client_proto.py | 4 +- docs/client_reference.rst | 15 ++++++++ tests/test_client_functional.py | 17 ++++++++- 7 files changed, 104 insertions(+), 55 deletions(-) create mode 100644 CHANGES/7801.feature diff --git a/CHANGES/7801.feature b/CHANGES/7801.feature new file mode 100644 index 00000000000..a6fb4d8a58e --- /dev/null +++ b/CHANGES/7801.feature @@ -0,0 +1 @@ +Separated connection and socket timeout errors, from ServerTimeoutError. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 855dce8ef2c..3f8b2728863 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -4,37 +4,39 @@ from . 
import hdrs as hdrs from .client import ( - BaseConnector as BaseConnector, - ClientConnectionError as ClientConnectionError, - ClientConnectorCertificateError as ClientConnectorCertificateError, - ClientConnectorError as ClientConnectorError, - ClientConnectorSSLError as ClientConnectorSSLError, - ClientError as ClientError, - ClientHttpProxyError as ClientHttpProxyError, - ClientOSError as ClientOSError, - ClientPayloadError as ClientPayloadError, - ClientProxyConnectionError as ClientProxyConnectionError, - ClientRequest as ClientRequest, - ClientResponse as ClientResponse, - ClientResponseError as ClientResponseError, - ClientSession as ClientSession, - ClientSSLError as ClientSSLError, - ClientTimeout as ClientTimeout, - ClientWebSocketResponse as ClientWebSocketResponse, - ContentTypeError as ContentTypeError, - Fingerprint as Fingerprint, - InvalidURL as InvalidURL, - NamedPipeConnector as NamedPipeConnector, - RequestInfo as RequestInfo, - ServerConnectionError as ServerConnectionError, - ServerDisconnectedError as ServerDisconnectedError, - ServerFingerprintMismatch as ServerFingerprintMismatch, - ServerTimeoutError as ServerTimeoutError, - TCPConnector as TCPConnector, - TooManyRedirects as TooManyRedirects, - UnixConnector as UnixConnector, - WSServerHandshakeError as WSServerHandshakeError, - request as request, + BaseConnector, + ClientConnectionError, + ClientConnectorCertificateError, + ClientConnectorError, + ClientConnectorSSLError, + ClientError, + ClientHttpProxyError, + ClientOSError, + ClientPayloadError, + ClientProxyConnectionError, + ClientRequest, + ClientResponse, + ClientResponseError, + ClientSession, + ClientSSLError, + ClientTimeout, + ClientWebSocketResponse, + ConnectionTimeoutError, + ContentTypeError, + Fingerprint, + InvalidURL, + NamedPipeConnector, + RequestInfo, + ServerConnectionError, + ServerDisconnectedError, + ServerFingerprintMismatch, + ServerTimeoutError, + SocketTimeoutError, + TCPConnector, + TooManyRedirects, + 
UnixConnector, + WSServerHandshakeError, + request, ) from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar from .formdata import FormData as FormData @@ -131,6 +133,7 @@ "ClientSession", "ClientTimeout", "ClientWebSocketResponse", + "ConnectionTimeoutError", "ContentTypeError", "Fingerprint", "InvalidURL", @@ -139,6 +142,7 @@ "ServerDisconnectedError", "ServerFingerprintMismatch", "ServerTimeoutError", + "SocketTimeoutError", "TCPConnector", "TooManyRedirects", "UnixConnector", diff --git a/aiohttp/client.py b/aiohttp/client.py index 83ef1ba586a..2750d5e2e86 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -39,25 +39,27 @@ from . import hdrs, http, payload from .abc import AbstractCookieJar from .client_exceptions import ( - ClientConnectionError as ClientConnectionError, - ClientConnectorCertificateError as ClientConnectorCertificateError, - ClientConnectorError as ClientConnectorError, - ClientConnectorSSLError as ClientConnectorSSLError, - ClientError as ClientError, - ClientHttpProxyError as ClientHttpProxyError, - ClientOSError as ClientOSError, - ClientPayloadError as ClientPayloadError, - ClientProxyConnectionError as ClientProxyConnectionError, - ClientResponseError as ClientResponseError, - ClientSSLError as ClientSSLError, - ContentTypeError as ContentTypeError, - InvalidURL as InvalidURL, - ServerConnectionError as ServerConnectionError, - ServerDisconnectedError as ServerDisconnectedError, - ServerFingerprintMismatch as ServerFingerprintMismatch, - ServerTimeoutError as ServerTimeoutError, - TooManyRedirects as TooManyRedirects, - WSServerHandshakeError as WSServerHandshakeError, + ClientConnectionError, + ClientConnectorCertificateError, + ClientConnectorError, + ClientConnectorSSLError, + ClientError, + ClientHttpProxyError, + ClientOSError, + ClientPayloadError, + ClientProxyConnectionError, + ClientResponseError, + ClientSSLError, + ConnectionTimeoutError, + ContentTypeError, + InvalidURL, + ServerConnectionError, 
+ ServerDisconnectedError, + ServerFingerprintMismatch, + ServerTimeoutError, + SocketTimeoutError, + TooManyRedirects, + WSServerHandshakeError, ) from .client_reqrep import ( ClientRequest as ClientRequest, @@ -105,12 +107,14 @@ "ClientProxyConnectionError", "ClientResponseError", "ClientSSLError", + "ConnectionTimeoutError", "ContentTypeError", "InvalidURL", "ServerConnectionError", "ServerDisconnectedError", "ServerFingerprintMismatch", "ServerTimeoutError", + "SocketTimeoutError", "TooManyRedirects", "WSServerHandshakeError", # client_reqrep @@ -575,8 +579,8 @@ async def _request( req, traces=traces, timeout=real_timeout ) except asyncio.TimeoutError as exc: - raise ServerTimeoutError( - "Connection timeout " "to host {}".format(url) + raise ConnectionTimeoutError( + f"Connection timeout to host {url}" ) from exc assert conn.transport is not None diff --git a/aiohttp/client_exceptions.py b/aiohttp/client_exceptions.py index 9aae12a84e8..d70988f6ede 100644 --- a/aiohttp/client_exceptions.py +++ b/aiohttp/client_exceptions.py @@ -29,6 +29,8 @@ "ClientSSLError", "ClientConnectorSSLError", "ClientConnectorCertificateError", + "ConnectionTimeoutError", + "SocketTimeoutError", "ServerConnectionError", "ServerTimeoutError", "ServerDisconnectedError", @@ -242,6 +244,14 @@ class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError): """Server timeout error.""" +class ConnectionTimeoutError(ServerTimeoutError): + """Connection timeout error.""" + + +class SocketTimeoutError(ServerTimeoutError): + """Socket timeout error.""" + + class ServerFingerprintMismatch(ServerConnectionError): """SSL certificate does not match expected fingerprint.""" diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index 6225b33667c..321860a9fe5 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -7,7 +7,7 @@ ClientOSError, ClientPayloadError, ServerDisconnectedError, - ServerTimeoutError, + SocketTimeoutError, ) from .helpers import BaseTimerContext, 
status_code_must_be_empty_body from .http import HttpResponseParser, RawResponseMessage @@ -193,7 +193,7 @@ def start_timeout(self) -> None: self._reschedule_timeout() def _on_read_timeout(self) -> None: - exc = ServerTimeoutError("Timeout on reading data from socket") + exc = SocketTimeoutError("Timeout on reading data from socket") self.set_exception(exc) if self._payload is not None: self._payload.set_exception(exc) diff --git a/docs/client_reference.rst b/docs/client_reference.rst index d0348d70ca8..57e96f2a070 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -2254,6 +2254,17 @@ Connection errors Derived from :exc:`ServerConnectionError` and :exc:`asyncio.TimeoutError` +.. class:: ConnectionTimeoutError + + Connection timeout on request: e.g. read timeout. + + Derived from :exc:`ServerTimeoutError` + +.. class:: SocketTimeoutError + + Reading from socket timeout. + + Derived from :exc:`ServerTimeoutError` Hierarchy of exceptions ^^^^^^^^^^^^^^^^^^^^^^^ @@ -2284,6 +2295,10 @@ Hierarchy of exceptions * :exc:`ServerTimeoutError` + * :exc:`ConnectionTimeoutError` + + * :exc:`SocketTimeoutError` + * :exc:`ClientPayloadError` * :exc:`ClientResponseError` diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 8a9a4e184be..585085127db 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -18,7 +18,7 @@ import aiohttp from aiohttp import Fingerprint, ServerFingerprintMismatch, hdrs, web from aiohttp.abc import AbstractResolver -from aiohttp.client_exceptions import TooManyRedirects +from aiohttp.client_exceptions import SocketTimeoutError, TooManyRedirects from aiohttp.pytest_plugin import AiohttpClient, TestClient from aiohttp.test_utils import unused_port @@ -3206,6 +3206,21 @@ async def handler(request): await client.get("/") +async def test_socket_timeout(aiohttp_client: Any) -> None: + async def handler(request): + await asyncio.sleep(5) + return web.Response() + + app = 
web.Application() + app.add_routes([web.get("/", handler)]) + + timeout = aiohttp.ClientTimeout(sock_read=0.1) + client = await aiohttp_client(app, timeout=timeout) + + with pytest.raises(SocketTimeoutError): + await client.get("/") + + async def test_read_timeout_closes_connection(aiohttp_client: AiohttpClient) -> None: request_count = 0 From 9ebbddb646c2cc6ce857cdc44470bb0322d4550a Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 4 Dec 2023 00:20:43 +0000 Subject: [PATCH 0036/1511] Revert time-machine back to freezegun (#7937) (#7938) (cherry picked from commit 43f92fae09bcc9692ee96ac1413eda884afa2f63) --- requirements/constraints.txt | 20 +++++--------------- requirements/dev.txt | 16 +++------------- requirements/test.in | 2 +- requirements/test.txt | 6 +++--- tests/test_cookiejar.py | 27 +++++---------------------- 5 files changed, 17 insertions(+), 54 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 7d3de754a4f..cc6c2d175b8 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -70,6 +70,8 @@ exceptiongroup==1.1.2 # via pytest filelock==3.3.2 # via virtualenv +freezegun==1.3.0 + # via -r requirements/test.in frozenlist==1.4.0 # via # -r requirements/runtime-deps.in @@ -89,11 +91,7 @@ idna==3.3 # yarl imagesize==1.3.0 # via sphinx -importlib-metadata==6.8.0 - # via sphinx -importlib-resources==6.1.0 - # via towncrier -incremental==21.3.0 +incremental==22.10.0 # via towncrier iniconfig==1.1.1 # via pytest @@ -167,11 +165,9 @@ pytest-cov==4.1.0 pytest-mock==3.12.0 # via -r requirements/test.in python-dateutil==2.8.2 - # via time-machine + # via freezegun python-on-whales==0.67.0 # via -r requirements/test.in -pytz==2021.3 - # via babel pyyaml==6.0.1 # via pre-commit re-assert==1.1.0 @@ -217,9 +213,7 @@ sphinxcontrib-spelling==8.0.0 ; platform_system != "Windows" # via -r requirements/doc-spelling.in sphinxcontrib-towncrier==0.4.0a0 # via -r requirements/doc.in -time-machine==2.13.0 ; 
implementation_name == "cpython" - # via -r requirements/test.in -tomli==1.2.2 +tomli==2.0.1 # via # build # cherry-picker @@ -267,10 +261,6 @@ wheel==0.37.0 # via pip-tools yarl==1.9.3 # via -r requirements/runtime-deps.in -zipp==3.17.0 - # via - # importlib-metadata - # importlib-resources # The following packages are considered to be unsafe in a requirements file: pip==23.2.1 diff --git a/requirements/dev.txt b/requirements/dev.txt index 35840675928..7329cc7bf8d 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -66,6 +66,8 @@ exceptiongroup==1.1.2 # via pytest filelock==3.12.2 # via virtualenv +freezegun==1.3.0 + # via -r requirements/test.in frozenlist==1.4.0 # via # -r requirements/runtime-deps.in @@ -85,10 +87,6 @@ idna==3.4 # yarl imagesize==1.4.1 # via sphinx -importlib-metadata==6.8.0 - # via sphinx -importlib-resources==6.1.0 - # via towncrier incremental==22.10.0 # via towncrier iniconfig==2.0.0 @@ -158,11 +156,9 @@ pytest-cov==4.1.0 pytest-mock==3.12.0 # via -r requirements/test.in python-dateutil==2.8.2 - # via time-machine + # via freezegun python-on-whales==0.67.0 # via -r requirements/test.in -pytz==2023.3.post1 - # via babel pyyaml==6.0.1 # via pre-commit re-assert==1.1.0 @@ -203,8 +199,6 @@ sphinxcontrib-serializinghtml==1.1.5 # via sphinx sphinxcontrib-towncrier==0.4.0a0 # via -r requirements/doc.in -time-machine==2.13.0 ; implementation_name == "cpython" - # via -r requirements/test.in tomli==2.0.1 # via # build @@ -254,10 +248,6 @@ wheel==0.41.0 # via pip-tools yarl==1.9.3 # via -r requirements/runtime-deps.in -zipp==3.17.0 - # via - # importlib-metadata - # importlib-resources # The following packages are considered to be unsafe in a requirements file: pip==23.2.1 diff --git a/requirements/test.in b/requirements/test.in index 3a82a00818a..686cd6dbf2e 100644 --- a/requirements/test.in +++ b/requirements/test.in @@ -2,6 +2,7 @@ -r base.in coverage +freezegun mypy; implementation_name == "cpython" proxy.py >= 2.4.4rc4 pytest @@ 
-10,6 +11,5 @@ pytest-mock python-on-whales re-assert setuptools-git -time-machine; implementation_name == "cpython" trustme; platform_machine != "i686" # no 32-bit wheels wait-for-it diff --git a/requirements/test.txt b/requirements/test.txt index bd2f951b0a4..d3d053b7c06 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -36,6 +36,8 @@ cryptography==41.0.2 # via trustme exceptiongroup==1.1.2 # via pytest +freezegun==1.3.0 + # via -r requirements/test.in frozenlist==1.4.0 # via # -r requirements/runtime-deps.in @@ -83,7 +85,7 @@ pytest-cov==4.1.0 pytest-mock==3.12.0 # via -r requirements/test.in python-dateutil==2.8.2 - # via time-machine + # via freezegun python-on-whales==0.67.0 # via -r requirements/test.in re-assert==1.1.0 @@ -96,8 +98,6 @@ setuptools-git==1.2 # via -r requirements/test.in six==1.16.0 # via python-dateutil -time-machine==2.13.0 ; implementation_name == "cpython" - # via -r requirements/test.in tomli==2.0.1 # via # coverage diff --git a/tests/test_cookiejar.py b/tests/test_cookiejar.py index 522dcc8e1b6..cffca3a4b59 100644 --- a/tests/test_cookiejar.py +++ b/tests/test_cookiejar.py @@ -3,22 +3,17 @@ import itertools import os import pickle -import sys import tempfile import unittest from http.cookies import BaseCookie, Morsel, SimpleCookie from unittest import mock import pytest +from freezegun import freeze_time from yarl import URL from aiohttp import CookieJar, DummyCookieJar -try: - from time_machine import travel -except ImportError: - travel = None # type: ignore[assignment] - def dump_cookiejar() -> bytes: # pragma: no cover """Create pickled data for test_pickle_format().""" @@ -412,10 +407,10 @@ def timed_request(self, url, update_time, send_time): elif isinstance(send_time, float): send_time = datetime.datetime.fromtimestamp(send_time) - with travel(update_time, tick=False): + with freeze_time(update_time): self.jar.update_cookies(self.cookies_to_send) - with travel(send_time, tick=False): + with freeze_time(send_time): 
cookies_sent = self.jar.filter_cookies(URL(url)) self.jar.clear() @@ -607,10 +602,6 @@ def test_path_value(self) -> None: self.assertEqual(cookies_received["path-cookie"]["path"], "/somepath") self.assertEqual(cookies_received["wrong-path-cookie"]["path"], "/") - @unittest.skipIf( - sys.implementation.name != "cpython", - reason="time_machine leverages CPython specific pointers https://github.com/adamchainz/time-machine/issues/305", - ) def test_expires(self) -> None: ts_before = datetime.datetime( 1975, 1, 1, tzinfo=datetime.timezone.utc @@ -632,10 +623,6 @@ def test_expires(self) -> None: self.assertEqual(set(cookies_sent.keys()), {"shared-cookie"}) - @unittest.skipIf( - sys.implementation.name != "cpython", - reason="time_machine leverages CPython specific pointers https://github.com/adamchainz/time-machine/issues/305", - ) def test_max_age(self) -> None: cookies_sent = self.timed_request("http://maxagetest.com/", 1000, 1000) @@ -783,10 +770,6 @@ async def test_cookie_jar_clear_all(): assert len(sut) == 0 -@pytest.mark.skipif( - sys.implementation.name != "cpython", - reason="time_machine leverages CPython specific pointers https://github.com/adamchainz/time-machine/issues/305", -) async def test_cookie_jar_clear_expired(): sut = CookieJar() @@ -795,11 +778,11 @@ async def test_cookie_jar_clear_expired(): cookie["foo"] = "bar" cookie["foo"]["expires"] = "Tue, 1 Jan 1990 12:00:00 GMT" - with travel("1980-01-01", tick=False): + with freeze_time("1980-01-01"): sut.update_cookies(cookie) sut.clear(lambda x: False) - with travel("1980-01-01", tick=False): + with freeze_time("1980-01-01"): assert len(sut) == 0 From fa3cd69b474f913ef25ce520a959c88610d45b3c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 5 Dec 2023 11:08:55 +0000 Subject: [PATCH 0037/1511] Bump freezegun from 1.3.0 to 1.3.1 (#7946) Bumps [freezegun](https://github.com/spulec/freezegun) from 1.3.0 to 1.3.1.
Changelog

Sourced from freezegun's changelog.

Freezegun Changelog

Commits
  • 9152728 Increase version number
  • ff98d1f Admin: Ensure version is correct in tagged release
  • d50e415 Admin: Add requirements.txt and tox.ini to sdist
  • 67b18bb Admin: Switch to pytest-cov
  • d310546 Add requirements.txt and tox.ini to sdist
  • 86e9124 Switch to pytest-cov
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=freezegun&package-manager=pip&previous-version=1.3.0&new-version=1.3.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 12 +++++++++++- requirements/dev.txt | 12 +++++++++++- requirements/test.txt | 2 +- 3 files changed, 23 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index cc6c2d175b8..afbbb521397 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -70,7 +70,7 @@ exceptiongroup==1.1.2 # via pytest filelock==3.3.2 # via virtualenv -freezegun==1.3.0 +freezegun==1.3.1 # via -r requirements/test.in frozenlist==1.4.0 # via @@ -91,6 +91,10 @@ idna==3.3 # yarl imagesize==1.3.0 # via sphinx +importlib-metadata==7.0.0 + # via sphinx +importlib-resources==6.1.1 + # via towncrier incremental==22.10.0 # via towncrier iniconfig==1.1.1 @@ -168,6 +172,8 @@ python-dateutil==2.8.2 # via freezegun python-on-whales==0.67.0 # via -r requirements/test.in +pytz==2023.3.post1 + # via babel pyyaml==6.0.1 # via pre-commit re-assert==1.1.0 @@ -261,6 +267,10 @@ wheel==0.37.0 # via pip-tools yarl==1.9.3 # via -r requirements/runtime-deps.in +zipp==3.17.0 + # via + # importlib-metadata + # importlib-resources # The following packages are considered to be unsafe in a requirements file: pip==23.2.1 diff --git a/requirements/dev.txt b/requirements/dev.txt index 7329cc7bf8d..fcb6d571b68 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -66,7 +66,7 @@ exceptiongroup==1.1.2 # via pytest filelock==3.12.2 # via virtualenv -freezegun==1.3.0 +freezegun==1.3.1 # via -r requirements/test.in frozenlist==1.4.0 # via @@ -87,6 +87,10 @@ idna==3.4 # yarl imagesize==1.4.1 # via sphinx +importlib-metadata==7.0.0 + # via sphinx +importlib-resources==6.1.1 + # via towncrier incremental==22.10.0 # via towncrier iniconfig==2.0.0 @@ -159,6 +163,8 @@ python-dateutil==2.8.2 # via freezegun python-on-whales==0.67.0 # via -r requirements/test.in +pytz==2023.3.post1 + # via babel pyyaml==6.0.1 
# via pre-commit re-assert==1.1.0 @@ -248,6 +254,10 @@ wheel==0.41.0 # via pip-tools yarl==1.9.3 # via -r requirements/runtime-deps.in +zipp==3.17.0 + # via + # importlib-metadata + # importlib-resources # The following packages are considered to be unsafe in a requirements file: pip==23.2.1 diff --git a/requirements/test.txt b/requirements/test.txt index d3d053b7c06..d7bc3fb7b70 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -36,7 +36,7 @@ cryptography==41.0.2 # via trustme exceptiongroup==1.1.2 # via pytest -freezegun==1.3.0 +freezegun==1.3.1 # via -r requirements/test.in frozenlist==1.4.0 # via From 2e673d00f9565034990eed76e77cd980bf38510f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 7 Dec 2023 11:30:51 +0000 Subject: [PATCH 0038/1511] Bump yarl from 1.9.3 to 1.9.4 (#7952) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [yarl](https://github.com/aio-libs/yarl) from 1.9.3 to 1.9.4.
Release notes

Sourced from yarl's releases.

1.9.4

Bug fixes

  • Started raising :py:exc:TypeError when a string value is passed into :py:meth:~yarl.URL.build as the port argument -- by :user:commonism.

    Previously the empty string as port would create malformed URLs when rendered as string representations. (#883)

Packaging updates and notes for downstreams

  • The leading -- has been dropped from the :pep:517 in-tree build backend config setting names. --pure-python is now just pure-python -- by :user:webknjaz.

    The usage now looks as follows:

    .. code-block:: console

    $ python -m build \
        --config-setting=pure-python=true \
        --config-setting=with-cython-tracing=true
    

    (#963)

Contributor-facing changes

  • A step-by-step :doc:Release Guide <contributing/release_guide> guide has been added, describing how to release yarl -- by :user:webknjaz.

    This is primarily targeting maintainers. (#960)

  • Coverage collection has been implemented for the Cython modules -- by :user:webknjaz.

    It will also be reported to Codecov from any non-release CI jobs.

    To measure coverage in a development environment, yarl can be installed in editable mode, which requires an environment variable YARL_CYTHON_TRACING=1 to be set:

    .. code-block:: console

    $ YARL_CYTHON_TRACING=1 python -Im pip install -e .
    

    Editable install produces C-files required for the Cython coverage plugin to map the measurements back to the PYX-files. (#961)

... (truncated)

Changelog

Sourced from yarl's changelog.

1.9.4 (2023-12-06)

Bug fixes

  • Started raising :py:exc:TypeError when a string value is passed into :py:meth:~yarl.URL.build as the port argument -- by :user:commonism.

    Previously the empty string as port would create malformed URLs when rendered as string representations. (:issue:883)

Packaging updates and notes for downstreams

  • The leading -- has been dropped from the :pep:517 in-tree build backend config setting names. --pure-python is now just pure-python -- by :user:webknjaz.

    The usage now looks as follows:

    .. code-block:: console

    $ python -m build \
        --config-setting=pure-python=true \
        --config-setting=with-cython-tracing=true
    

    (:issue:963)

Contributor-facing changes

  • A step-by-step :doc:Release Guide <contributing/release_guide> guide has been added, describing how to release yarl -- by :user:webknjaz.

    This is primarily targeting maintainers. (:issue:960)

  • Coverage collection has been implemented for the Cython modules -- by :user:webknjaz.

    It will also be reported to Codecov from any non-release CI jobs.

    To measure coverage in a development environment, yarl can be installed in editable mode, which requires an environment variable YARL_CYTHON_TRACING=1 to be set:

    .. code-block:: console

    $ YARL_CYTHON_TRACING=1 python -Im pip install -e .
    

... (truncated)

Commits
  • 6362ff1 ⇪📦 Release yarl v1.9.4
  • 241e5df 📝 Add a dedicated Towncrier template
  • f384fef 🎨 Sort coverage report config settings
  • 04399eb Exterminate offensive references from the project
  • 6e61b44 🎨Move tomllib access to _compat @ packaging
  • b3a5a71 Raise TypeError when a string is passed for port to URL.build()
  • e8cc8ab 🧪 Determine and use exact pure dist names @ GHA
  • cc8f29e 🧪 Skip setting up Python @ cibuildwheel jobs
  • d86eb20 Bump cython from 3.0.5 to 3.0.6 (#966)
  • d3f762d Bump idna from 3.4 to 3.6 (#965)
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=yarl&package-manager=pip&previous-version=1.9.3&new-version=1.9.4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index daccca54d86..7a5839543fb 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -38,5 +38,5 @@ typing-extensions==4.8.0 # via -r requirements/typing-extensions.in uvloop==0.19.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in -yarl==1.9.3 +yarl==1.9.4 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index afbbb521397..ec03365ea7c 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -265,7 +265,7 @@ webcolors==1.11.1 # via blockdiag wheel==0.37.0 # via pip-tools -yarl==1.9.3 +yarl==1.9.4 # via -r requirements/runtime-deps.in zipp==3.17.0 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index fcb6d571b68..b1e507ec939 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -252,7 +252,7 @@ webcolors==1.13 # via blockdiag wheel==0.41.0 # via pip-tools -yarl==1.9.3 +yarl==1.9.4 # via -r requirements/runtime-deps.in zipp==3.17.0 # via diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index a0f2aa861f7..b41bfc64fb5 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -30,5 +30,5 @@ pycares==4.3.0 # via aiodns pycparser==2.21 # via cffi -yarl==1.9.3 +yarl==1.9.4 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index d7bc3fb7b70..541eb1824bc 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -124,5 +124,5 @@ uvloop==0.19.0 ; platform_system != "Windows" and implementation_name == "cpytho # via -r 
requirements/base.in wait-for-it==2.2.2 # via -r requirements/test.in -yarl==1.9.3 +yarl==1.9.4 # via -r requirements/runtime-deps.in From cc2252ace048b2da1f0773582b21888c36da309c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Dec 2023 11:32:28 +0000 Subject: [PATCH 0039/1511] Bump typing-extensions from 4.8.0 to 4.9.0 (#7958) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [typing-extensions](https://github.com/python/typing_extensions) from 4.8.0 to 4.9.0.
Release notes

Sourced from typing-extensions's releases.

4.9.0

This feature release adds typing_extensions.ReadOnly, as specified by PEP 705, and makes various other improvements, especially to @typing_extensions.deprecated().

There are no changes since 4.9.0rc1.

4.9.0rc1

  • Add support for PEP 705, adding typing_extensions.ReadOnly. Patch by Jelle Zijlstra.
  • All parameters on NewType.__call__ are now positional-only. This means that the signature of typing_extensions.NewType.__call__ now exactly matches the signature of typing.NewType.__call__. Patch by Alex Waygood.
  • Fix bug with using @deprecated on a mixin class. Inheriting from a deprecated class now raises a DeprecationWarning. Patch by Jelle Zijlstra.
  • @deprecated now gives a better error message if you pass a non-str argument to the msg parameter. Patch by Alex Waygood.
  • @deprecated is now implemented as a class for better introspectability. Patch by Jelle Zijlstra.
  • Exclude __match_args__ from Protocol members. Backport of python/cpython#110683 by Nikita Sobolev.
  • When creating a typing_extensions.NamedTuple class, ensure __set_name__ is called on all objects that define __set_name__ and exist in the values of the NamedTuple class's class dictionary. Patch by Alex Waygood, backporting python/cpython#111876.
  • Improve the error message when trying to call issubclass() against a Protocol that has non-method members. Patch by Alex Waygood (backporting python/cpython#112344, by Randolph Scholz).
Changelog

Sourced from typing-extensions's changelog.

Release 4.9.0 (December 9, 2023)

This feature release adds typing_extensions.ReadOnly, as specified by PEP 705, and makes various other improvements, especially to @typing_extensions.deprecated().

There are no changes since 4.9.0rc1.

Release 4.9.0rc1 (November 29, 2023)

  • Add support for PEP 705, adding typing_extensions.ReadOnly. Patch by Jelle Zijlstra.
  • All parameters on NewType.__call__ are now positional-only. This means that the signature of typing_extensions.NewType.__call__ now exactly matches the signature of typing.NewType.__call__. Patch by Alex Waygood.
  • Fix bug with using @deprecated on a mixin class. Inheriting from a deprecated class now raises a DeprecationWarning. Patch by Jelle Zijlstra.
  • @deprecated now gives a better error message if you pass a non-str argument to the msg parameter. Patch by Alex Waygood.
  • @deprecated is now implemented as a class for better introspectability. Patch by Jelle Zijlstra.
  • Exclude __match_args__ from Protocol members. Backport of python/cpython#110683 by Nikita Sobolev.
  • When creating a typing_extensions.NamedTuple class, ensure __set_name__ is called on all objects that define __set_name__ and exist in the values of the NamedTuple class's class dictionary. Patch by Alex Waygood, backporting python/cpython#111876.
  • Improve the error message when trying to call issubclass() against a Protocol that has non-method members. Patch by Alex Waygood (backporting python/cpython#112344, by Randolph Scholz).
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=typing-extensions&package-manager=pip&previous-version=4.8.0&new-version=4.9.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/cython.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- requirements/typing-extensions.txt | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 7a5839543fb..d5b635e2a20 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -34,7 +34,7 @@ pycares==4.3.0 # via aiodns pycparser==2.21 # via cffi -typing-extensions==4.8.0 +typing-extensions==4.9.0 # via -r requirements/typing-extensions.in uvloop==0.19.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index ec03365ea7c..1f4ada43bed 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -240,7 +240,7 @@ trustme==1.1.0 ; platform_machine != "i686" # via -r requirements/test.in typer==0.6.1 # via python-on-whales -typing-extensions==4.8.0 +typing-extensions==4.9.0 # via # -r requirements/typing-extensions.in # aioredis diff --git a/requirements/cython.txt b/requirements/cython.txt index ee1b2f15393..352b7eb7273 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -8,5 +8,5 @@ cython==3.0.6 # via -r requirements/cython.in multidict==6.0.4 # via -r requirements/multidict.in -typing-extensions==4.8.0 +typing-extensions==4.9.0 # via -r requirements/typing-extensions.in diff --git a/requirements/dev.txt b/requirements/dev.txt index b1e507ec939..c6c6b5609a2 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -226,7 +226,7 @@ trustme==1.1.0 ; platform_machine != "i686" # via -r requirements/test.in typer==0.9.0 # via python-on-whales -typing-extensions==4.8.0 +typing-extensions==4.9.0 # via # -r requirements/typing-extensions.in # 
aioredis diff --git a/requirements/lint.txt b/requirements/lint.txt index 9b6101de5d8..97a089e632a 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -47,7 +47,7 @@ tomli==2.0.1 # mypy # pytest # slotscheck -typing-extensions==4.8.0 +typing-extensions==4.9.0 # via # -r requirements/typing-extensions.in # aioredis diff --git a/requirements/test.txt b/requirements/test.txt index 541eb1824bc..2b6b0bfa988 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -109,7 +109,7 @@ trustme==1.1.0 ; platform_machine != "i686" # via -r requirements/test.in typer==0.9.0 # via python-on-whales -typing-extensions==4.8.0 +typing-extensions==4.9.0 # via # -r requirements/typing-extensions.in # annotated-types diff --git a/requirements/typing-extensions.txt b/requirements/typing-extensions.txt index 043deab49a9..8ea8d0d4d08 100644 --- a/requirements/typing-extensions.txt +++ b/requirements/typing-extensions.txt @@ -4,5 +4,5 @@ # # pip-compile --allow-unsafe --output-file=requirements/typing-extensions.txt --resolver=backtracking --strip-extras requirements/typing-extensions.in # -typing-extensions==4.8.0 +typing-extensions==4.9.0 # via -r requirements/typing-extensions.in From 926367ff26b34303e945cdf4fba83b4b38945651 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 14 Dec 2023 11:25:03 +0000 Subject: [PATCH 0040/1511] Bump python-on-whales from 0.67.0 to 0.68.0 (#7967) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [python-on-whales](https://github.com/gabrieldemarmiesse/python-on-whales) from 0.67.0 to 0.68.0.
Release notes

Sourced from python-on-whales's releases.

v0.68.0

What's Changed

Full Changelog: https://github.com/gabrieldemarmiesse/python-on-whales/compare/v0.67.0...v0.68.0

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=python-on-whales&package-manager=pip&previous-version=0.67.0&new-version=0.68.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 1f4ada43bed..d5c3c7a7c8c 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -170,7 +170,7 @@ pytest-mock==3.12.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun -python-on-whales==0.67.0 +python-on-whales==0.68.0 # via -r requirements/test.in pytz==2023.3.post1 # via babel diff --git a/requirements/dev.txt b/requirements/dev.txt index c6c6b5609a2..e86d52c3d1b 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -161,7 +161,7 @@ pytest-mock==3.12.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun -python-on-whales==0.67.0 +python-on-whales==0.68.0 # via -r requirements/test.in pytz==2023.3.post1 # via babel diff --git a/requirements/test.txt b/requirements/test.txt index 2b6b0bfa988..e5dcc1bb60b 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -86,7 +86,7 @@ pytest-mock==3.12.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun -python-on-whales==0.67.0 +python-on-whales==0.68.0 # via -r requirements/test.in re-assert==1.1.0 # via -r requirements/test.in From 8222403016b0da8c5d4850de63e87eafe5c56cb1 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 14 Dec 2023 16:20:10 +0000 Subject: [PATCH 0041/1511] [PR #7961/5e44ba46 backport][3.10] Add reminder to use 'fixes' in PRs (#7969) **This is a backport of PR #7961 as merged into master (5e44ba465c2168c6e1842df441828ec5759093e1).** Co-authored-by: Sam Bull --- .github/PULL_REQUEST_TEMPLATE.md | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 
237c61a659f..3ac54a518b5 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -11,6 +11,7 @@ ## Related issue number + ## Checklist From f1cee99b1765b5580329e1061ee3a6d5f58243c5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 15 Dec 2023 11:46:54 +0000 Subject: [PATCH 0042/1511] Bump coverage from 7.3.2 to 7.3.3 (#7977) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.3.2 to 7.3.3.
Changelog

Sourced from coverage's changelog.

Version 7.3.3 — 2023-12-14

  • Fix: function definitions with multi-line signatures can now be excluded by matching any of the lines, closing issue 684. Thanks, Jan Rusak, Maciej Kowalczyk and Joanna Ejzel (pull 1705_).

  • Fix: XML reports could fail with a TypeError if files had numeric components that were duplicates except for leading zeroes, like file1.py and file001.py. Fixes issue 1709_.

  • The coverage annotate command used to announce that it would be removed in a future version. Enough people got in touch to say that they use it, so it will stay. Don't expect it to keep up with other new features though.

  • Added new :ref:debug options <cmd_run_debug>:

    • pytest writes the pytest test name into the debug output.

    • dataop2 writes the full data being added to CoverageData objects.

.. _issue 684: nedbat/coveragepy#684 .. _pull 1705: nedbat/coveragepy#1705 .. _issue 1709: nedbat/coveragepy#1709

.. _changes_7-3-2:

Commits
  • 7270c53 docs: sample HTML for 7.3.3
  • 3d4c940 docs: prep for 7.3.3
  • be28121 fix: leading zeros can confuse human sorting. #1709
  • 1adda03 docs: a little more in the quick start about what will be measured. #1707
  • 6a9766e docs: remove deprecation notice from annotate docs
  • e0a550b fix: undeprecate coverage annotate
  • 05b47a3 build(deps): bump actions/setup-python from 4 to 5
  • 470e086 docs: two more contributors from #1705
  • 4c4644e build: add last commit to .git-blame-ignore-revs
  • 5d0b5d4 style: check_coverage close parens should be on their own line
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=coverage&package-manager=pip&previous-version=7.3.2&new-version=7.3.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index d5c3c7a7c8c..8a3a30ca72e 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -52,7 +52,7 @@ click==8.0.3 # towncrier # typer # wait-for-it -coverage==7.3.2 +coverage==7.3.3 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/dev.txt b/requirements/dev.txt index e86d52c3d1b..c361457daad 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -50,7 +50,7 @@ click==8.1.6 # towncrier # typer # wait-for-it -coverage==7.3.2 +coverage==7.3.3 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/test.txt b/requirements/test.txt index e5dcc1bb60b..c82ae784803 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -28,7 +28,7 @@ click==8.1.6 # via # typer # wait-for-it -coverage==7.3.2 +coverage==7.3.3 # via # -r requirements/test.in # pytest-cov From 477b23700586e90bffc3d76e149f3625e87f7d94 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 18 Dec 2023 14:34:28 -1000 Subject: [PATCH 0043/1511] Fix websocket connection leak (#7978) (#7980) --- CHANGES/7978.bugfix | 1 + aiohttp/web_ws.py | 94 +++++++++++++++++++++---------------- docs/web_reference.rst | 12 ++++- tests/test_web_websocket.py | 27 +++++++++++ 4 files changed, 93 insertions(+), 41 deletions(-) create mode 100644 CHANGES/7978.bugfix diff --git a/CHANGES/7978.bugfix b/CHANGES/7978.bugfix new file mode 100644 index 00000000000..3c7dc096ca7 --- /dev/null +++ b/CHANGES/7978.bugfix @@ -0,0 +1 @@ +Fix websocket connection leak diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index 4e57bca4f69..783377716f5 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -140,9 +140,8 @@ def _send_heartbeat(self) -> None: def _pong_not_received(self) -> None: if self._req is not None and self._req.transport is not None: self._closed = True - self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) self._exception = asyncio.TimeoutError() - self._req.transport.close() async def prepare(self, request: BaseRequest) -> AbstractStreamWriter: # make pre-check to don't hide it by do_handshake() exceptions @@ -360,7 +359,10 @@ async def write_eof(self) -> None: # type: ignore[override] await self.close() self._eof_sent = True - async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool: + async def close( + self, *, code: int = WSCloseCode.OK, message: bytes = b"", drain: bool = True + ) -> bool: + """Close websocket connection.""" if self._writer is None: raise RuntimeError("Call .prepare() first") @@ -374,46 +376,53 @@ async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bo reader.feed_data(WS_CLOSING_MESSAGE, 0) await self._waiting - if not self._closed: - self._closed = True - try: - await self._writer.close(code, message) - writer = self._payload_writer - assert writer is not None - await writer.drain() - except 
(asyncio.CancelledError, asyncio.TimeoutError): - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - raise - except Exception as exc: - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - self._exception = exc - return True + if self._closed: + return False - if self._closing: - return True + self._closed = True + try: + await self._writer.close(code, message) + writer = self._payload_writer + assert writer is not None + if drain: + await writer.drain() + except (asyncio.CancelledError, asyncio.TimeoutError): + self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) + raise + except Exception as exc: + self._exception = exc + self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) + return True - reader = self._reader - assert reader is not None - try: - async with async_timeout.timeout(self._timeout): - msg = await reader.read() - except asyncio.CancelledError: - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - raise - except Exception as exc: - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - self._exception = exc - return True + if self._closing: + return True - if msg.type == WSMsgType.CLOSE: - self._close_code = msg.data - return True + reader = self._reader + assert reader is not None + try: + async with async_timeout.timeout(self._timeout): + msg = await reader.read() + except asyncio.CancelledError: + self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) + raise + except Exception as exc: + self._exception = exc + self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) + return True - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - self._exception = asyncio.TimeoutError() + if msg.type == WSMsgType.CLOSE: + self._set_code_close_transport(msg.data) return True - else: - return False + + self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) + self._exception = asyncio.TimeoutError() + return True + + def _set_code_close_transport(self, code: WSCloseCode) -> None: + """Set the close code and close the transport.""" + 
self._close_code = code + if self._req is not None and self._req.transport is not None: + self._req.transport.close() async def receive(self, timeout: Optional[float] = None) -> WSMessage: if self._reader is None: @@ -444,7 +453,7 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: set_result(waiter, True) self._waiting = None except (asyncio.CancelledError, asyncio.TimeoutError): - self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) raise except EofStream: self._close_code = WSCloseCode.OK @@ -464,8 +473,13 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: if msg.type == WSMsgType.CLOSE: self._closing = True self._close_code = msg.data + # Could be closed while awaiting reader. if not self._closed and self._autoclose: - await self.close() + # The client is likely going to close the + # connection out from under us so we do not + # want to drain any pending writes as it will + # likely result writing to a broken pipe. + await self.close(drain=False) elif msg.type == WSMsgType.CLOSING: self._closing = True elif msg.type == WSMsgType.PING and self._autoping: diff --git a/docs/web_reference.rst b/docs/web_reference.rst index 874b15bd8e3..e38fb82863f 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -970,6 +970,14 @@ and :ref:`aiohttp-web-signals` handlers:: .. versionadded:: 3.3 + :param bool autoclose: Close connection when the client sends + a :const:`~aiohttp.WSMsgType.CLOSE` message, + ``True`` by default. If set to ``False``, + the connection is not closed and the + caller is responsible for calling + ``request.transport.close()`` to avoid + leaking resources. + The class supports ``async for`` statement for iterating over incoming messages:: @@ -1146,7 +1154,7 @@ and :ref:`aiohttp-web-signals` handlers:: The method is converted into :term:`coroutine`, *compress* parameter added. - .. method:: close(*, code=WSCloseCode.OK, message=b'') + .. 
method:: close(*, code=WSCloseCode.OK, message=b'', drain=True) :async: A :ref:`coroutine` that initiates closing @@ -1160,6 +1168,8 @@ and :ref:`aiohttp-web-signals` handlers:: :class:`str` (converted to *UTF-8* encoded bytes) or :class:`bytes`. + :param bool drain: drain outgoing buffer before closing connection. + :raise RuntimeError: if connection is not started .. method:: receive(timeout=None) diff --git a/tests/test_web_websocket.py b/tests/test_web_websocket.py index 0ec1b5a7d84..d0aca0c019a 100644 --- a/tests/test_web_websocket.py +++ b/tests/test_web_websocket.py @@ -1,4 +1,6 @@ import asyncio +import time +from typing import Any from unittest import mock import aiosignal @@ -165,6 +167,20 @@ async def test_write_non_prepared() -> None: await ws.write(b"data") +async def test_heartbeat_timeout(make_request: Any) -> None: + """Verify the transport is closed when the heartbeat timeout is reached.""" + loop = asyncio.get_running_loop() + future = loop.create_future() + req = make_request("GET", "/") + lowest_time = time.get_clock_info("monotonic").resolution + req._protocol._timeout_ceil_threshold = lowest_time + ws = WebSocketResponse(heartbeat=lowest_time, timeout=lowest_time) + await ws.prepare(req) + ws._req.transport.close.side_effect = lambda: future.set_result(None) + await future + assert ws.closed + + def test_websocket_ready() -> None: websocket_ready = WebSocketReady(True, "chat") assert websocket_ready.ok is True @@ -233,6 +249,7 @@ async def test_send_str_closed(make_request) -> None: await ws.prepare(req) ws._reader.feed_data(WS_CLOSED_MESSAGE, 0) await ws.close() + assert len(ws._req.transport.close.mock_calls) == 1 with pytest.raises(ConnectionError): await ws.send_str("string") @@ -289,6 +306,8 @@ async def test_close_idempotent(make_request) -> None: ws._reader.feed_data(WS_CLOSED_MESSAGE, 0) assert await ws.close(code=1, message="message1") assert ws.closed + assert len(ws._req.transport.close.mock_calls) == 1 + assert not (await 
ws.close(code=2, message="message2")) @@ -322,12 +341,15 @@ async def test_write_eof_idempotent(make_request) -> None: req = make_request("GET", "/") ws = WebSocketResponse() await ws.prepare(req) + assert len(ws._req.transport.close.mock_calls) == 0 + ws._reader.feed_data(WS_CLOSED_MESSAGE, 0) await ws.close() await ws.write_eof() await ws.write_eof() await ws.write_eof() + assert len(ws._req.transport.close.mock_calls) == 1 async def test_receive_eofstream_in_reader(make_request, loop) -> None: @@ -353,6 +375,7 @@ async def test_receive_timeouterror(make_request, loop) -> None: req = make_request("GET", "/") ws = WebSocketResponse() await ws.prepare(req) + assert len(ws._req.transport.close.mock_calls) == 0 ws._reader = mock.Mock() res = loop.create_future() @@ -362,6 +385,8 @@ async def test_receive_timeouterror(make_request, loop) -> None: with pytest.raises(asyncio.TimeoutError): await ws.receive() + assert len(ws._req.transport.close.mock_calls) == 1 + async def test_multiple_receive_on_close_connection(make_request) -> None: req = make_request("GET", "/") @@ -394,6 +419,7 @@ async def test_close_exc(make_request) -> None: req = make_request("GET", "/") ws = WebSocketResponse() await ws.prepare(req) + assert len(ws._req.transport.close.mock_calls) == 0 exc = ValueError() ws._writer = mock.Mock() @@ -401,6 +427,7 @@ async def test_close_exc(make_request) -> None: await ws.close() assert ws.closed assert ws.exception() is exc + assert len(ws._req.transport.close.mock_calls) == 1 ws._closed = False ws._writer.close.side_effect = asyncio.CancelledError() From a5230ee00cbc2d62f6dd7d03008d3da91f5ec372 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 19 Dec 2023 11:06:09 +0000 Subject: [PATCH 0044/1511] Bump freezegun from 1.3.1 to 1.4.0 (#7981) Bumps [freezegun](https://github.com/spulec/freezegun) from 1.3.1 to 1.4.0.
Changelog

Sourced from freezegun's changelog.

1.4.0

  • asyncio-support from 1.3.x introduced quite a few bugs, so that functionality is now hidden behind a flag: with freeze_time('1970-01-02', real_asyncio=True):
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=freezegun&package-manager=pip&previous-version=1.3.1&new-version=1.4.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 8a3a30ca72e..a1fd8bfef9a 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -70,7 +70,7 @@ exceptiongroup==1.1.2 # via pytest filelock==3.3.2 # via virtualenv -freezegun==1.3.1 +freezegun==1.4.0 # via -r requirements/test.in frozenlist==1.4.0 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index c361457daad..275c99949ef 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -66,7 +66,7 @@ exceptiongroup==1.1.2 # via pytest filelock==3.12.2 # via virtualenv -freezegun==1.3.1 +freezegun==1.4.0 # via -r requirements/test.in frozenlist==1.4.0 # via diff --git a/requirements/test.txt b/requirements/test.txt index c82ae784803..7aa918f937a 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -36,7 +36,7 @@ cryptography==41.0.2 # via trustme exceptiongroup==1.1.2 # via pytest -freezegun==1.3.1 +freezegun==1.4.0 # via -r requirements/test.in frozenlist==1.4.0 # via From d402dbecf4fd91ca3026d9810380680c10e09ba3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 19 Dec 2023 11:26:09 +0000 Subject: [PATCH 0045/1511] Bump frozenlist from 1.4.0 to 1.4.1 (#7976) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [frozenlist](https://github.com/aio-libs/frozenlist) from 1.4.0 to 1.4.1.
Release notes

Sourced from frozenlist's releases.

1.4.1

Packaging updates and notes for downstreams

  • Declared Python 3.12 and PyPy 3.8-3.10 supported officially in the distribution package metadata.

    Related issues and pull requests on GitHub: #553.

  • The packaging is replaced from an old-fashioned :file:setup.py to an in-tree :pep:517 build backend -- by :user:webknjaz.

    Whenever the end-users or downstream packagers need to build frozenlist from source (a Git checkout or an sdist), they may pass a config_settings flag pure-python. If this flag is not set, a C-extension will be built and included into the distribution.

    Here is how this can be done with pip:

    .. code-block:: console

    $ python3 -m pip install . --config-settings=pure-python=
    

    This will also work with -e | --editable.

    The same can be achieved via pypa/build:

    .. code-block:: console

    $ python3 -m build --config-setting=pure-python=
    

    Adding -w | --wheel can force pypa/build produce a wheel from source directly, as opposed to building an sdist and then building from it.

    Related issues and pull requests on GitHub: #560.

Contributor-facing changes

  • It is now possible to request line tracing in Cython builds using the with-cython-tracing :pep:517 config setting -- :user:webknjaz.

    This can be used in CI and development environment to measure coverage on Cython modules, but is not normally useful to the end-users or

... (truncated)

Changelog

Sourced from frozenlist's changelog.

1.4.1 (2023-12-15)

Packaging updates and notes for downstreams

  • Declared Python 3.12 and PyPy 3.8-3.10 supported officially in the distribution package metadata.

    Related issues and pull requests on GitHub: :issue:553.

  • The packaging is replaced from an old-fashioned :file:setup.py to an in-tree :pep:517 build backend -- by :user:webknjaz.

    Whenever the end-users or downstream packagers need to build frozenlist from source (a Git checkout or an sdist), they may pass a config_settings flag pure-python. If this flag is not set, a C-extension will be built and included into the distribution.

    Here is how this can be done with pip:

    .. code-block:: console

    $ python3 -m pip install . --config-settings=pure-python=
    

    This will also work with -e | --editable.

    The same can be achieved via pypa/build:

    .. code-block:: console

    $ python3 -m build --config-setting=pure-python=
    

    Adding -w | --wheel can force pypa/build produce a wheel from source directly, as opposed to building an sdist and then building from it.

    Related issues and pull requests on GitHub: :issue:560.

Contributor-facing changes

  • It is now possible to request line tracing in Cython builds using the with-cython-tracing :pep:517 config setting -- :user:webknjaz.

... (truncated)

Commits
  • 457b28e ⇪📦 Bump to v1.4.1
  • 4998859 📝 Mark "dev" as a known word
  • 3d740da ⇪📦 Bump to v1.4.1.dev0
  • d92751c 🧪 Cache pre-commit.com virtualenvs @ CI
  • 7394415 🧪🐛 List explicit MyPy coverage.xml paths @ CI
  • a26ba84 🧪 Update codecov action input to plural files
  • e12ecf6 🧪 Upload MyPy coverage to Codecov
  • 59b9a74 🐛🧪 Fix the operator in test_lt
  • 01720b2 🧪 Keep building macosx_x86_64 wheels for tests
  • d9f5e0c 🧪💅 Add a reusable project name var to CI/CD
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=frozenlist&package-manager=pip&previous-version=1.4.0&new-version=1.4.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index d5b635e2a20..4640010dca2 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -16,7 +16,7 @@ brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in cffi==1.15.1 # via pycares -frozenlist==1.4.0 +frozenlist==1.4.1 # via # -r requirements/runtime-deps.in # aiosignal diff --git a/requirements/constraints.txt b/requirements/constraints.txt index a1fd8bfef9a..1db8f62d002 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -72,7 +72,7 @@ filelock==3.3.2 # via virtualenv freezegun==1.4.0 # via -r requirements/test.in -frozenlist==1.4.0 +frozenlist==1.4.1 # via # -r requirements/runtime-deps.in # aiosignal diff --git a/requirements/dev.txt b/requirements/dev.txt index 275c99949ef..13911df77f0 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -68,7 +68,7 @@ filelock==3.12.2 # via virtualenv freezegun==1.4.0 # via -r requirements/test.in -frozenlist==1.4.0 +frozenlist==1.4.1 # via # -r requirements/runtime-deps.in # aiosignal diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index b41bfc64fb5..2d4df7df38c 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -16,7 +16,7 @@ brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in cffi==1.15.1 # via pycares -frozenlist==1.4.0 +frozenlist==1.4.1 # via # -r requirements/runtime-deps.in # aiosignal diff --git a/requirements/test.txt b/requirements/test.txt index 7aa918f937a..dc24d3b3ad1 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -38,7 
+38,7 @@ exceptiongroup==1.1.2 # via pytest freezegun==1.4.0 # via -r requirements/test.in -frozenlist==1.4.0 +frozenlist==1.4.1 # via # -r requirements/runtime-deps.in # aiosignal From 620badb02dbe3816aa83871d2327082f352c54fc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 20 Dec 2023 10:54:16 +0000 Subject: [PATCH 0046/1511] Bump cython from 3.0.6 to 3.0.7 (#7985) Bumps [cython](https://github.com/cython/cython) from 3.0.6 to 3.0.7.
Changelog

Sourced from cython's changelog.

3.0.7 (2023-12-19)

Bugs fixed

  • In the iterator of generator expressions, await and yield were not correctly analysed. (Github issue :issue:5851)

  • cpdef enums with the same name cimported from different modules could lead to invalid C code. (Github issue :issue:5887)

  • Some declarations in cpython.unicode were fixed and extended. (Github issue :issue:5902)

  • Compiling fused types used in pxd files could crash Cython in Python 3.11+. (Github issues :issue:5894, :issue:5588)

  • Source files with non-ASCII file names could crash Cython. (Github issue :issue:5873)

  • Includes all bug-fixes and features from the 0.29 maintenance branch up to the :ref:0.29.37 release.

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=cython&package-manager=pip&previous-version=3.0.6&new-version=3.0.7)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/cython.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 1db8f62d002..9b2a27ebb5c 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -60,7 +60,7 @@ cryptography==41.0.2 # via # pyjwt # trustme -cython==3.0.6 +cython==3.0.7 # via -r requirements/cython.in distlib==0.3.3 # via virtualenv diff --git a/requirements/cython.txt b/requirements/cython.txt index 352b7eb7273..6cb581d1c5d 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/cython.txt --resolver=backtracking --strip-extras requirements/cython.in # -cython==3.0.6 +cython==3.0.7 # via -r requirements/cython.in multidict==6.0.4 # via -r requirements/multidict.in From e7bfb6ec7968f2a97bcf4ee6c4f50931dd938bf6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 21 Dec 2023 11:25:00 +0000 Subject: [PATCH 0047/1511] Bump coverage from 7.3.3 to 7.3.4 (#7989) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.3.3 to 7.3.4.
Changelog

Sourced from coverage's changelog.

Version 7.3.4 — 2023-12-20

  • Fix: the change for multi-line signature exclusions in 7.3.3 broke other forms of nested clauses being excluded properly. This is now fixed, closing issue 1713_.

  • Fix: in the HTML report, selecting code for copying won't select the line numbers also. Thanks, Robert Harris <pull 1717_>_.

.. _issue 1713: nedbat/coveragepy#1713 .. _pull 1717: nedbat/coveragepy#1717

.. _changes_7-3-3:

Commits
  • 87e406b docs: sample HTML for 7.3.4
  • 27a3392 docs: prep for 7.3.4
  • 9bda95d build: artifact@4, with required immutability changes
  • 390fa6c build: use best pip syntax in cheats
  • 19db169 refactor(test): move parser tests from check_coverage to parse
  • 538ca96 refactor(test): reorg the parser tests in prep for moving more here
  • 24df7e9 refactor: tweak up and type-hint the token parsing
  • 07b76b2 fix: some clause exclusions were broken #1713
  • 962429c refactor(test): use more uniform version-checking skips
  • dafebf1 refactor: no need for our own AST dump function
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=coverage&package-manager=pip&previous-version=7.3.3&new-version=7.3.4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 9b2a27ebb5c..1f9c32c95e1 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -52,7 +52,7 @@ click==8.0.3 # towncrier # typer # wait-for-it -coverage==7.3.3 +coverage==7.3.4 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/dev.txt b/requirements/dev.txt index 13911df77f0..53c98962d44 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -50,7 +50,7 @@ click==8.1.6 # towncrier # typer # wait-for-it -coverage==7.3.3 +coverage==7.3.4 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/test.txt b/requirements/test.txt index dc24d3b3ad1..26eb5b69034 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -28,7 +28,7 @@ click==8.1.6 # via # typer # wait-for-it -coverage==7.3.3 +coverage==7.3.4 # via # -r requirements/test.in # pytest-cov From bbf8f3c61c06fdef955623e2bec832cd1ca157ee Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 22 Dec 2023 13:31:03 +0000 Subject: [PATCH 0048/1511] Bump mypy from 1.7.1 to 1.8.0 (#7992) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [mypy](https://github.com/python/mypy) from 1.7.1 to 1.8.0.
Changelog

Sourced from mypy's changelog.

Mypy Release Notes

Next release

Mypy 1.8

We’ve just uploaded mypy 1.8 to the Python Package Index (PyPI). Mypy is a static type checker for Python. This release includes new features, performance improvements and bug fixes. You can install it as follows:

python3 -m pip install -U mypy

You can read the full documentation for this release on Read the Docs.

Type-checking Improvements

  • Do not intersect types in isinstance checks if at least one is final (Christoph Tyralla, PR 16330)
  • Detect that @final class without __bool__ cannot have falsey instances (Ilya Priven, PR 16566)
  • Do not allow TypedDict classes with extra keywords (Nikita Sobolev, PR 16438)
  • Do not allow class-level keywords for NamedTuple (Nikita Sobolev, PR 16526)
  • Make imprecise constraints handling more robust (Ivan Levkivskyi, PR 16502)
  • Fix strict-optional in extending generic TypedDict (Ivan Levkivskyi, PR 16398)
  • Allow type ignores of PEP 695 constructs (Shantanu, PR 16608)
  • Enable type_check_only support for TypedDict and NamedTuple (Nikita Sobolev, PR 16469)

Performance Improvements

  • Add fast path to analyzing special form assignments (Jukka Lehtosalo, PR 16561)

Improvements to Error Reporting

  • Don't show documentation links for plugin error codes (Ivan Levkivskyi, PR 16383)
  • Improve error messages for super checks and add more tests (Nikita Sobolev, PR 16393)
  • Add error code for mutable covariant override (Ivan Levkivskyi, PR 16399)

Stubgen Improvements

  • Preserve simple defaults in function signatures (Ali Hamdan, PR 15355)
  • Include __all__ in output (Jelle Zijlstra, PR 16356)
  • Fix stubgen regressions with pybind11 and mypy 1.7 (Chad Dombrova, PR 16504)

Stubtest Improvements

  • Improve handling of unrepresentable defaults (Jelle Zijlstra, PR 16433)
  • Print more helpful errors if a function is missing from stub (Alex Waygood, PR 16517)
  • Support @type_check_only decorator (Nikita Sobolev, PR 16422)
  • Warn about missing __del__ (Shantanu, PR 16456)
  • Fix crashes with some uses of final and deprecated (Shantanu, PR 16457)

Fixes to Crashes

  • Fix crash with type alias to Callable[[Unpack[Tuple[Any, ...]]], Any] (Alex Waygood, PR 16541)
  • Fix crash on TypeGuard in __call__ (Ivan Levkivskyi, PR 16516)
  • Fix crash on invalid enum in method (Ivan Levkivskyi, PR 16511)
  • Fix crash on unimported Any in TypedDict (Ivan Levkivskyi, PR 16510)

Documentation Updates

  • Update soft-error-limit default value to -1 (Sveinung Gundersen, PR 16542)

... (truncated)

Commits
  • 3b46750 remove +dev suffix from version
  • c9bc833 Fix tests broken by hatchling (#16655)
  • 60d30e3 Fix crash with type alias to Callable[[Unpack[Tuple[Any, ...]]], Any] (#16541)
  • f53f422 Allow type ignores of PEP 695 constructs (#16608)
  • 7c33e7c @​final class without bool cannot have falsey instances (#16566)
  • c224da5 Do not intersect types in isinstance checks if at least one is final (#16330)
  • d54cc35 Change example in test cases with no stubs available (#16513)
  • eb1ee97 Update hashes in sync-typeshed.py following recent typeshed sync (#16600)
  • 344298e Revert use of ParamSpec for functools.wraps
  • 3e5d813 Revert typeshed ctypes change
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=mypy&package-manager=pip&previous-version=1.7.1&new-version=1.8.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 1f9c32c95e1..c24d57abb44 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -110,7 +110,7 @@ multidict==6.0.4 # -r requirements/multidict.in # -r requirements/runtime-deps.in # yarl -mypy==1.7.1 ; implementation_name == "cpython" +mypy==1.8.0 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 53c98962d44..84b74e3ed0e 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -105,7 +105,7 @@ multidict==6.0.4 # via # -r requirements/runtime-deps.in # yarl -mypy==1.7.1 ; implementation_name == "cpython" +mypy==1.8.0 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 97a089e632a..01ddeb9ae1e 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -22,7 +22,7 @@ identify==2.5.26 # via pre-commit iniconfig==2.0.0 # via pytest -mypy==1.7.1 ; implementation_name == "cpython" +mypy==1.8.0 ; implementation_name == "cpython" # via -r requirements/lint.in mypy-extensions==1.0.0 # via mypy diff --git a/requirements/test.txt b/requirements/test.txt index 26eb5b69034..0746cc6027a 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -55,7 +55,7 @@ multidict==6.0.4 # via # -r requirements/runtime-deps.in # yarl -mypy==1.7.1 ; implementation_name == "cpython" +mypy==1.8.0 ; implementation_name == "cpython" # via -r requirements/test.in mypy-extensions==1.0.0 # via mypy From d381fce6e485bee2290d7000ce49626a54fd05f9 Mon Sep 17 00:00:00 2001 From: 
"dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 28 Dec 2023 10:53:40 +0000 Subject: [PATCH 0049/1511] Bump coverage from 7.3.4 to 7.4.0 (#7997) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.3.4 to 7.4.0.
Changelog

Sourced from coverage's changelog.

Version 7.4.0 — 2023-12-27

  • In Python 3.12 and above, you can try an experimental core based on the new :mod:sys.monitoring <python:sys.monitoring> module by defining a COVERAGE_CORE=sysmon environment variable. This should be faster, though plugins and dynamic contexts are not yet supported with it. I am very interested to hear how it works (or doesn't!) for you.

.. _changes_7-3-4:

Commits
  • 23a015c docs: sample HTML for 7.4.0
  • 4f020d4 docs: prep for 7.4.0
  • 5bb88c3 build: temporarily disable metacov, it's flaky now with sysmon support
  • 3879b97 docs: mention sys.monitoring support
  • 9a84eeb style: environment variable names should be monospace
  • 7882b8c refactor: clean lint and mypy for sysmon et al
  • 5dad1a1 test: test which core we get
  • b7e0c34 fix: don't default to sysmon yet
  • e5babcf docs: explain the COVERAGE_*_CORE testing variables
  • ebdc277 fix: use core more consistently than tracer
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=coverage&package-manager=pip&previous-version=7.3.4&new-version=7.4.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index c24d57abb44..c1d9f7d046c 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -52,7 +52,7 @@ click==8.0.3 # towncrier # typer # wait-for-it -coverage==7.3.4 +coverage==7.4.0 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/dev.txt b/requirements/dev.txt index 84b74e3ed0e..412a1312dd8 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -50,7 +50,7 @@ click==8.1.6 # towncrier # typer # wait-for-it -coverage==7.3.4 +coverage==7.4.0 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/test.txt b/requirements/test.txt index 0746cc6027a..bf90681ec83 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -28,7 +28,7 @@ click==8.1.6 # via # typer # wait-for-it -coverage==7.3.4 +coverage==7.4.0 # via # -r requirements/test.in # pytest-cov From 292d8a9257ee13b9930f234ad73b7d3c6aa01edd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Jan 2024 11:38:54 +0000 Subject: [PATCH 0050/1511] Bump pytest from 7.4.3 to 7.4.4 (#8001) Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.4.3 to 7.4.4.
Release notes

Sourced from pytest's releases.

pytest 7.4.4 (2023-12-31)

Bug Fixes

  • #11140: Fix non-string constants at the top of file being detected as docstrings on Python>=3.8.
  • #11572: Handle an edge case where sys.stderr and sys.__stderr__ might already be closed when faulthandler is tearing down.
  • #11710: Fixed tracebacks from collection errors not getting pruned.
  • #7966: Removed unhelpful error message from assertion rewrite mechanism when exceptions are raised in __iter__ methods. Now they are treated as un-iterable instead.

Improved Documentation

  • #11091: Updated documentation to refer to hyphenated options: replaced --junitxml with --junit-xml and --collectonly with --collect-only.
Commits
  • 33f694f Prepare release version 7.4.4
  • 76c107c Merge pull request #11751 from bluetech/backport-11143-to-7.4.x
  • 531d76d [7.4.x] Improve reporting from iter exceptions (#11749)
  • a0f58fa Merge pull request #11143 from tushar-deepsource/patch-1
  • b1f3387 [7.4.x] #11091: documentation should use hypthonated properties (#11750)
  • 2cdd619 Merge pull request #11747 from pytest-dev/backport-11711-to-7.4.x
  • d06c05b [7.4.x] nodes: fix tracebacks from collection errors are not getting pruned
  • 5582bfc [7.4.x] Improves clarity in Sphinx documentation for function signature. (#11...
  • 13024ef [7.4.x] Fix for operation on closed file in faulthandler teardown (#11631)
  • a40dacf [7.4.x] XFAIL TestLocalPath.test_make_numbered_dir_multiprocess_safe (#11616)
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pytest&package-manager=pip&previous-version=7.4.3&new-version=7.4.4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index c1d9f7d046c..c177e50bbd9 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -158,7 +158,7 @@ pyjwt==2.3.0 # pyjwt pyparsing==2.4.7 # via packaging -pytest==7.4.3 +pytest==7.4.4 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 412a1312dd8..5939bcd8fef 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -149,7 +149,7 @@ pyjwt==2.8.0 # pyjwt pyproject-hooks==1.0.0 # via build -pytest==7.4.3 +pytest==7.4.4 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 01ddeb9ae1e..68000cbcc85 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -36,7 +36,7 @@ pluggy==1.2.0 # via pytest pre-commit==3.5.0 # via -r requirements/lint.in -pytest==7.4.3 +pytest==7.4.4 # via -r requirements/lint.in pyyaml==6.0.1 # via pre-commit diff --git a/requirements/test.txt b/requirements/test.txt index bf90681ec83..f08c7fd1788 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -75,7 +75,7 @@ pydantic==2.2.0 # via python-on-whales pydantic-core==2.6.0 # via pydantic -pytest==7.4.3 +pytest==7.4.4 # via # -r requirements/test.in # pytest-cov From 1700e9d62e38591f0d094a8ebb8b54d1f22b9044 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 4 Jan 2024 16:53:36 -1000 Subject: [PATCH 0051/1511] Implement happy eyeballs (RFC 8305) (#7954) (#8005) (cherry picked from commit c4ec3f1) --- CHANGES/7954.feature | 1 + aiohttp/connector.py | 88 +++++++++-- docs/client_reference.rst | 21 ++- requirements/base.txt | 2 + requirements/constraints.txt | 2 + requirements/dev.txt | 2 + requirements/runtime-deps.in | 1 + requirements/runtime-deps.txt | 2 + requirements/test.txt | 2 + setup.cfg | 1 + tests/conftest.py | 11 ++ tests/test_connector.py | 268 ++++++++++++++++++++++++++++++++-- tests/test_proxy.py | 148 ++++++++++++++++--- 13 files changed, 504 insertions(+), 45 deletions(-) create mode 100644 CHANGES/7954.feature diff --git a/CHANGES/7954.feature b/CHANGES/7954.feature new file mode 100644 index 00000000000..e536ee4b1c4 --- /dev/null +++ b/CHANGES/7954.feature @@ -0,0 +1 @@ +Implement happy eyeballs diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 73f58b1a451..baa3a7170f6 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -1,6 +1,7 @@ import asyncio import functools import random +import socket import sys import traceback import warnings @@ -29,6 +30,7 @@ cast, ) +import aiohappyeyeballs import attr from . import hdrs, helpers @@ -750,6 +752,10 @@ class TCPConnector(BaseConnector): limit_per_host - Number of simultaneous connections to one host. enable_cleanup_closed - Enables clean-up closed ssl transports. Disabled by default. + happy_eyeballs_delay - This is the “Connection Attempt Delay” + as defined in RFC 8305. To disable + the happy eyeballs algorithm, set to None. + interleave - “First Address Family Count” as defined in RFC 8305 loop - Optional event loop. 
""" @@ -772,6 +778,8 @@ def __init__( enable_cleanup_closed: bool = False, loop: Optional[asyncio.AbstractEventLoop] = None, timeout_ceil_threshold: float = 5, + happy_eyeballs_delay: Optional[float] = 0.25, + interleave: Optional[int] = None, ): super().__init__( keepalive_timeout=keepalive_timeout, @@ -792,7 +800,9 @@ def __init__( self._cached_hosts = _DNSCacheTable(ttl=ttl_dns_cache) self._throttle_dns_events: Dict[Tuple[str, int], EventResultOrError] = {} self._family = family - self._local_addr = local_addr + self._local_addr_infos = aiohappyeyeballs.addr_to_addr_infos(local_addr) + self._happy_eyeballs_delay = happy_eyeballs_delay + self._interleave = interleave def close(self) -> Awaitable[None]: """Close all ongoing DNS calls.""" @@ -978,6 +988,36 @@ def _get_fingerprint(self, req: ClientRequest) -> Optional["Fingerprint"]: return None async def _wrap_create_connection( + self, + *args: Any, + addr_infos: List[aiohappyeyeballs.AddrInfoType], + req: ClientRequest, + timeout: "ClientTimeout", + client_error: Type[Exception] = ClientConnectorError, + **kwargs: Any, + ) -> Tuple[asyncio.Transport, ResponseHandler]: + try: + async with ceil_timeout( + timeout.sock_connect, ceil_threshold=timeout.ceil_threshold + ): + sock = await aiohappyeyeballs.start_connection( + addr_infos=addr_infos, + local_addr_infos=self._local_addr_infos, + happy_eyeballs_delay=self._happy_eyeballs_delay, + interleave=self._interleave, + loop=self._loop, + ) + return await self._loop.create_connection(*args, **kwargs, sock=sock) + except cert_errors as exc: + raise ClientConnectorCertificateError(req.connection_key, exc) from exc + except ssl_errors as exc: + raise ClientConnectorSSLError(req.connection_key, exc) from exc + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + raise + raise client_error(req.connection_key, exc) from exc + + async def _wrap_existing_connection( self, *args: Any, req: ClientRequest, @@ -1143,6 +1183,27 @@ async def 
_start_tls_connection( return tls_transport, tls_proto + def _convert_hosts_to_addr_infos( + self, hosts: List[Dict[str, Any]] + ) -> List[aiohappyeyeballs.AddrInfoType]: + """Converts the list of hosts to a list of addr_infos. + + The list of hosts is the result of a DNS lookup. The list of + addr_infos is the result of a call to `socket.getaddrinfo()`. + """ + addr_infos: List[aiohappyeyeballs.AddrInfoType] = [] + for hinfo in hosts: + host = hinfo["host"] + is_ipv6 = ":" in host + family = socket.AF_INET6 if is_ipv6 else socket.AF_INET + if self._family and self._family != family: + continue + addr = (host, hinfo["port"], 0, 0) if is_ipv6 else (host, hinfo["port"]) + addr_infos.append( + (family, socket.SOCK_STREAM, socket.IPPROTO_TCP, "", addr) + ) + return addr_infos + async def _create_direct_connection( self, req: ClientRequest, @@ -1187,36 +1248,27 @@ def drop_exception(fut: "asyncio.Future[List[Dict[str, Any]]]") -> None: raise ClientConnectorError(req.connection_key, exc) from exc last_exc: Optional[Exception] = None - - for hinfo in hosts: - host = hinfo["host"] - port = hinfo["port"] - + addr_infos = self._convert_hosts_to_addr_infos(hosts) + while addr_infos: # Strip trailing dots, certificates contain FQDN without dots. 
# See https://github.com/aio-libs/aiohttp/issues/3636 server_hostname = ( - (req.server_hostname or hinfo["hostname"]).rstrip(".") - if sslcontext - else None + (req.server_hostname or host).rstrip(".") if sslcontext else None ) try: transp, proto = await self._wrap_create_connection( self._factory, - host, - port, timeout=timeout, ssl=sslcontext, - family=hinfo["family"], - proto=hinfo["proto"], - flags=hinfo["flags"], + addr_infos=addr_infos, server_hostname=server_hostname, - local_addr=self._local_addr, req=req, client_error=client_error, ) except ClientConnectorError as exc: last_exc = exc + aiohappyeyeballs.pop_addr_infos_interleave(addr_infos, self._interleave) continue if req.is_ssl() and fingerprint: @@ -1227,6 +1279,10 @@ def drop_exception(fut: "asyncio.Future[List[Dict[str, Any]]]") -> None: if not self._cleanup_closed_disabled: self._cleanup_closed_transports.append(transp) last_exc = exc + # Remove the bad peer from the list of addr_infos + sock: socket.socket = transp.get_extra_info("socket") + bad_peer = sock.getpeername() + aiohappyeyeballs.remove_addr_infos(addr_infos, bad_peer) continue return transp, proto @@ -1345,7 +1401,7 @@ async def _create_proxy_connection( if not runtime_has_start_tls: # HTTP proxy with support for upgrade to HTTPS sslcontext = self._get_ssl_context(req) - return await self._wrap_create_connection( + return await self._wrap_existing_connection( self._factory, timeout=timeout, ssl=sslcontext, diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 57e96f2a070..93b3459ba7c 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -1071,7 +1071,8 @@ is controlled by *force_close* constructor's parameter). 
family=0, ssl_context=None, local_addr=None, \ resolver=None, keepalive_timeout=sentinel, \ force_close=False, limit=100, limit_per_host=0, \ - enable_cleanup_closed=False, loop=None) + enable_cleanup_closed=False, timeout_ceil_threshold=5, \ + happy_eyeballs_delay=0.25, interleave=None, loop=None) Connector for working with *HTTP* and *HTTPS* via *TCP* sockets. @@ -1174,6 +1175,24 @@ is controlled by *force_close* constructor's parameter). If this parameter is set to True, aiohttp additionally aborts underlining transport after 2 seconds. It is off by default. + :param float happy_eyeballs_delay: The amount of time in seconds to wait for a + connection attempt to complete, before starting the next attempt in parallel. + This is the “Connection Attempt Delay” as defined in RFC 8305. To disable + Happy Eyeballs, set this to ``None``. The default value recommended by the + RFC is 0.25 (250 milliseconds). + + .. versionadded:: 3.10 + + :param int interleave: controls address reordering when a host name resolves + to multiple IP addresses. If ``0`` or unspecified, no reordering is done, and + addresses are tried in the order returned by the resolver. If a positive + integer is specified, the addresses are interleaved by address family, and + the given integer is interpreted as “First Address Family Count” as defined + in RFC 8305. The default is ``0`` if happy_eyeballs_delay is not specified, and + ``1`` if it is. + + .. versionadded:: 3.10 + .. attribute:: family *TCP* socket family e.g. 
:data:`socket.AF_INET` or diff --git a/requirements/base.txt b/requirements/base.txt index 4640010dca2..99f1b5ab9d7 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -6,6 +6,8 @@ # aiodns==3.1.1 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in +aiohappyeyeballs==2.3.0 + # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in async-timeout==4.0.3 ; python_version < "3.11" diff --git a/requirements/constraints.txt b/requirements/constraints.txt index c177e50bbd9..b6d150fa076 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -6,6 +6,8 @@ # aiodns==3.1.1 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in +aiohappyeyeballs==2.3.0 + # via -r requirements/runtime-deps.in aiohttp-theme==0.1.6 # via -r requirements/doc.in aioredis==2.0.1 diff --git a/requirements/dev.txt b/requirements/dev.txt index 5939bcd8fef..f88c11d3033 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -6,6 +6,8 @@ # aiodns==3.1.1 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in +aiohappyeyeballs==2.3.0 + # via -r requirements/runtime-deps.in aiohttp-theme==0.1.6 # via -r requirements/doc.in aioredis==2.0.1 diff --git a/requirements/runtime-deps.in b/requirements/runtime-deps.in index b2df16f1680..70bd75bd99d 100644 --- a/requirements/runtime-deps.in +++ b/requirements/runtime-deps.in @@ -1,6 +1,7 @@ # Extracted from `setup.cfg` via `make sync-direct-runtime-deps` aiodns; sys_platform=="linux" or sys_platform=="darwin" +aiohappyeyeballs >= 2.3.0 aiosignal >= 1.1.2 async-timeout >= 4.0, < 5.0 ; python_version < "3.11" attrs >= 17.3.0 diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 2d4df7df38c..6c1e407eec3 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -6,6 +6,8 @@ # aiodns==3.1.1 ; sys_platform == "linux" or 
sys_platform == "darwin" # via -r requirements/runtime-deps.in +aiohappyeyeballs==2.3.0 + # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in async-timeout==4.0.3 ; python_version < "3.11" diff --git a/requirements/test.txt b/requirements/test.txt index f08c7fd1788..72fb6a40e56 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -6,6 +6,8 @@ # aiodns==3.1.1 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in +aiohappyeyeballs==2.3.0 + # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in annotated-types==0.5.0 diff --git a/setup.cfg b/setup.cfg index 331c80e154a..71dc26c9789 100644 --- a/setup.cfg +++ b/setup.cfg @@ -47,6 +47,7 @@ zip_safe = False include_package_data = True install_requires = + aiohappyeyeballs >= 2.3.0 aiosignal >= 1.1.2 async-timeout >= 4.0, < 5.0 ; python_version < "3.11" attrs >= 17.3.0 diff --git a/tests/conftest.py b/tests/conftest.py index 44e5fb7285c..fcdb482a59f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -6,6 +6,7 @@ from hashlib import md5, sha256 from pathlib import Path from tempfile import TemporaryDirectory +from unittest import mock from uuid import uuid4 import pytest @@ -197,3 +198,13 @@ def netrc_contents( monkeypatch.setenv("NETRC", str(netrc_file_path)) return netrc_file_path + + +@pytest.fixture +def start_connection(): + with mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) as start_connection_mock: + yield start_connection_mock diff --git a/tests/test_connector.py b/tests/test_connector.py index f27d4131049..1faec002487 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -10,9 +10,11 @@ import uuid from collections import deque from contextlib import closing +from typing import Any, List, Optional from unittest import mock import pytest +from aiohappyeyeballs import AddrInfoType from yarl import URL 
import aiohttp @@ -539,7 +541,9 @@ async def test__drop_acquire_per_host3(loop) -> None: assert conn._acquired_per_host[123] == {789} -async def test_tcp_connector_certificate_error(loop) -> None: +async def test_tcp_connector_certificate_error( + loop: Any, start_connection: mock.AsyncMock +) -> None: req = ClientRequest("GET", URL("https://127.0.0.1:443"), loop=loop) async def certificate_error(*args, **kwargs): @@ -556,8 +560,10 @@ async def certificate_error(*args, **kwargs): assert isinstance(ctx.value, aiohttp.ClientSSLError) -async def test_tcp_connector_server_hostname_default(loop) -> None: - conn = aiohttp.TCPConnector(loop=loop) +async def test_tcp_connector_server_hostname_default( + loop: Any, start_connection: mock.AsyncMock +) -> None: + conn = aiohttp.TCPConnector() with mock.patch.object( conn._loop, "create_connection", autospec=True, spec_set=True @@ -570,8 +576,10 @@ async def test_tcp_connector_server_hostname_default(loop) -> None: assert create_connection.call_args.kwargs["server_hostname"] == "127.0.0.1" -async def test_tcp_connector_server_hostname_override(loop) -> None: - conn = aiohttp.TCPConnector(loop=loop) +async def test_tcp_connector_server_hostname_override( + loop: Any, start_connection: mock.AsyncMock +) -> None: + conn = aiohttp.TCPConnector() with mock.patch.object( conn._loop, "create_connection", autospec=True, spec_set=True @@ -595,6 +603,7 @@ async def test_tcp_connector_multiple_hosts_errors(loop) -> None: ip4 = "192.168.1.4" ip5 = "192.168.1.5" ips = [ip1, ip2, ip3, ip4, ip5] + addrs_tried = [] ips_tried = [] fingerprint = hashlib.sha256(b"foo").digest() @@ -624,11 +633,24 @@ async def _resolve_host(host, port, traces=None): os_error = certificate_error = ssl_error = fingerprint_error = False connected = False + async def start_connection(*args, **kwargs): + addr_infos: List[AddrInfoType] = kwargs["addr_infos"] + + first_addr_info = addr_infos[0] + first_addr_info_addr = first_addr_info[-1] + 
addrs_tried.append(first_addr_info_addr) + + mock_socket = mock.create_autospec(socket.socket, spec_set=True, instance=True) + mock_socket.getpeername.return_value = first_addr_info_addr + return mock_socket + async def create_connection(*args, **kwargs): nonlocal os_error, certificate_error, ssl_error, fingerprint_error nonlocal connected - ip = args[1] + sock = kwargs["sock"] + addr_info = sock.getpeername() + ip = addr_info[0] ips_tried.append(ip) @@ -645,6 +667,12 @@ async def create_connection(*args, **kwargs): raise ssl.SSLError if ip == ip4: + sock: socket.socket = kwargs["sock"] + + # Close the socket since we are not actually connecting + # and we don't want to leak it. + sock.close() + fingerprint_error = True tr, pr = mock.Mock(), mock.Mock() @@ -660,12 +688,21 @@ def get_extra_info(param): if param == "peername": return ("192.168.1.5", 12345) + if param == "socket": + return sock + assert False, param tr.get_extra_info = get_extra_info return tr, pr if ip == ip5: + sock: socket.socket = kwargs["sock"] + + # Close the socket since we are not actually connecting + # and we don't want to leak it. 
+ sock.close() + connected = True tr, pr = mock.Mock(), mock.Mock() @@ -687,8 +724,13 @@ def get_extra_info(param): conn._loop.create_connection = create_connection - established_connection = await conn.connect(req, [], ClientTimeout()) - assert ips == ips_tried + with mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", start_connection + ): + established_connection = await conn.connect(req, [], ClientTimeout()) + + assert ips_tried == ips + assert addrs_tried == [(ip, 443) for ip in ips] assert os_error assert certificate_error @@ -699,8 +741,214 @@ def get_extra_info(param): established_connection.close() -async def test_tcp_connector_resolve_host(loop) -> None: - conn = aiohttp.TCPConnector(loop=loop, use_dns_cache=True) +@pytest.mark.parametrize( + ("happy_eyeballs_delay"), + [0.1, 0.25, None], +) +async def test_tcp_connector_happy_eyeballs( + loop: Any, happy_eyeballs_delay: Optional[float] +) -> None: + conn = aiohttp.TCPConnector(happy_eyeballs_delay=happy_eyeballs_delay) + + ip1 = "dead::beef::" + ip2 = "192.168.1.1" + ips = [ip1, ip2] + addrs_tried = [] + + req = ClientRequest( + "GET", + URL("https://mocked.host"), + loop=loop, + ) + + async def _resolve_host(host, port, traces=None): + return [ + { + "hostname": host, + "host": ip, + "port": port, + "family": socket.AF_INET6 if ":" in ip else socket.AF_INET, + "proto": 0, + "flags": socket.AI_NUMERICHOST, + } + for ip in ips + ] + + conn._resolve_host = _resolve_host + + os_error = False + connected = False + + async def sock_connect(*args, **kwargs): + addr = args[1] + nonlocal os_error + + addrs_tried.append(addr) + + if addr[0] == ip1: + os_error = True + raise OSError + + async def create_connection(*args, **kwargs): + sock: socket.socket = kwargs["sock"] + + # Close the socket since we are not actually connecting + # and we don't want to leak it. 
+ sock.close() + + nonlocal connected + connected = True + tr = create_mocked_conn(loop) + pr = create_mocked_conn(loop) + return tr, pr + + conn._loop.sock_connect = sock_connect + conn._loop.create_connection = create_connection + + established_connection = await conn.connect(req, [], ClientTimeout()) + + assert addrs_tried == [(ip1, 443, 0, 0), (ip2, 443)] + + assert os_error + assert connected + + established_connection.close() + + +async def test_tcp_connector_interleave(loop: Any) -> None: + conn = aiohttp.TCPConnector(interleave=2) + + ip1 = "192.168.1.1" + ip2 = "192.168.1.2" + ip3 = "dead::beef::" + ip4 = "aaaa::beef::" + ip5 = "192.168.1.5" + ips = [ip1, ip2, ip3, ip4, ip5] + success_ips = [] + interleave = None + + req = ClientRequest( + "GET", + URL("https://mocked.host"), + loop=loop, + ) + + async def _resolve_host(host, port, traces=None): + return [ + { + "hostname": host, + "host": ip, + "port": port, + "family": socket.AF_INET6 if ":" in ip else socket.AF_INET, + "proto": 0, + "flags": socket.AI_NUMERICHOST, + } + for ip in ips + ] + + conn._resolve_host = _resolve_host + + async def start_connection(*args, **kwargs): + nonlocal interleave + addr_infos: List[AddrInfoType] = kwargs["addr_infos"] + interleave = kwargs["interleave"] + # Mock the 4th host connecting successfully + fourth_addr_info = addr_infos[3] + fourth_addr_info_addr = fourth_addr_info[-1] + mock_socket = mock.create_autospec(socket.socket, spec_set=True, instance=True) + mock_socket.getpeername.return_value = fourth_addr_info_addr + return mock_socket + + async def create_connection(*args, **kwargs): + sock = kwargs["sock"] + addr_info = sock.getpeername() + ip = addr_info[0] + + success_ips.append(ip) + + sock: socket.socket = kwargs["sock"] + # Close the socket since we are not actually connecting + # and we don't want to leak it. 
+ sock.close() + tr = create_mocked_conn(loop) + pr = create_mocked_conn(loop) + return tr, pr + + conn._loop.create_connection = create_connection + + with mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", start_connection + ): + established_connection = await conn.connect(req, [], ClientTimeout()) + + assert success_ips == [ip4] + assert interleave == 2 + established_connection.close() + + +async def test_tcp_connector_family_is_respected(loop: Any) -> None: + conn = aiohttp.TCPConnector(family=socket.AF_INET) + + ip1 = "dead::beef::" + ip2 = "192.168.1.1" + ips = [ip1, ip2] + addrs_tried = [] + + req = ClientRequest( + "GET", + URL("https://mocked.host"), + loop=loop, + ) + + async def _resolve_host(host, port, traces=None): + return [ + { + "hostname": host, + "host": ip, + "port": port, + "family": socket.AF_INET6 if ":" in ip else socket.AF_INET, + "proto": 0, + "flags": socket.AI_NUMERICHOST, + } + for ip in ips + ] + + conn._resolve_host = _resolve_host + connected = False + + async def sock_connect(*args, **kwargs): + addr = args[1] + addrs_tried.append(addr) + + async def create_connection(*args, **kwargs): + sock: socket.socket = kwargs["sock"] + + # Close the socket since we are not actually connecting + # and we don't want to leak it. 
+ sock.close() + + nonlocal connected + connected = True + tr = create_mocked_conn(loop) + pr = create_mocked_conn(loop) + return tr, pr + + conn._loop.sock_connect = sock_connect + conn._loop.create_connection = create_connection + + established_connection = await conn.connect(req, [], ClientTimeout()) + + # We should only try the IPv4 address since we specified + # the family to be AF_INET + assert addrs_tried == [(ip2, 443)] + + assert connected + + established_connection.close() + + +async def test_tcp_connector_resolve_host(loop: Any) -> None: + conn = aiohttp.TCPConnector(use_dns_cache=True) res = await conn._resolve_host("localhost", 8080) assert res diff --git a/tests/test_proxy.py b/tests/test_proxy.py index 1ff53e3f899..2a8643f5047 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -4,6 +4,7 @@ import ssl import sys import unittest +from typing import Any from unittest import mock import pytest @@ -40,7 +41,12 @@ def tearDown(self): gc.collect() @mock.patch("aiohttp.connector.ClientRequest") - def test_connect(self, ClientRequestMock) -> None: + @mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) + def test_connect(self, start_connection: Any, ClientRequestMock: Any) -> None: req = ClientRequest( "GET", URL("http://www.python.org"), @@ -54,7 +60,18 @@ async def make_conn(): return aiohttp.TCPConnector() connector = self.loop.run_until_complete(make_conn()) - connector._resolve_host = make_mocked_coro([mock.MagicMock()]) + connector._resolve_host = make_mocked_coro( + [ + { + "hostname": "hostname", + "host": "127.0.0.1", + "port": 80, + "family": socket.AF_INET, + "proto": 0, + "flags": 0, + } + ] + ) proto = mock.Mock( **{ @@ -81,7 +98,12 @@ async def make_conn(): conn.close() @mock.patch("aiohttp.connector.ClientRequest") - def test_proxy_headers(self, ClientRequestMock) -> None: + @mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) 
+ def test_proxy_headers(self, start_connection: Any, ClientRequestMock: Any) -> None: req = ClientRequest( "GET", URL("http://www.python.org"), @@ -96,7 +118,18 @@ async def make_conn(): return aiohttp.TCPConnector() connector = self.loop.run_until_complete(make_conn()) - connector._resolve_host = make_mocked_coro([mock.MagicMock()]) + connector._resolve_host = make_mocked_coro( + [ + { + "hostname": "hostname", + "host": "127.0.0.1", + "port": 80, + "family": socket.AF_INET, + "proto": 0, + "flags": 0, + } + ] + ) proto = mock.Mock( **{ @@ -122,7 +155,12 @@ async def make_conn(): conn.close() - def test_proxy_auth(self) -> None: + @mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) + def test_proxy_auth(self, start_connection: Any) -> None: with self.assertRaises(ValueError) as ctx: ClientRequest( "GET", @@ -136,11 +174,16 @@ def test_proxy_auth(self) -> None: "proxy_auth must be None or BasicAuth() tuple", ) - def test_proxy_dns_error(self) -> None: + @mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) + def test_proxy_dns_error(self, start_connection: Any) -> None: async def make_conn(): return aiohttp.TCPConnector() - connector = self.loop.run_until_complete(make_conn()) + connector: aiohttp.TCPConnector = self.loop.run_until_complete(make_conn()) connector._resolve_host = make_mocked_coro( raise_exception=OSError("dont take it serious") ) @@ -159,7 +202,12 @@ async def make_conn(): self.assertEqual(req.url.path, "/") self.assertEqual(dict(req.headers), expected_headers) - def test_proxy_connection_error(self) -> None: + @mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) + def test_proxy_connection_error(self, start_connection: Any) -> None: async def make_conn(): return aiohttp.TCPConnector() @@ -192,7 +240,14 @@ async def make_conn(): ) @mock.patch("aiohttp.connector.ClientRequest") - def 
test_proxy_server_hostname_default(self, ClientRequestMock) -> None: + @mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) + def test_proxy_server_hostname_default( + self, start_connection: Any, ClientRequestMock: Any + ) -> None: proxy_req = ClientRequest( "GET", URL("http://proxy.example.com"), loop=self.loop ) @@ -252,7 +307,14 @@ async def make_conn(): self.loop.run_until_complete(req.close()) @mock.patch("aiohttp.connector.ClientRequest") - def test_proxy_server_hostname_override(self, ClientRequestMock) -> None: + @mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) + def test_proxy_server_hostname_override( + self, start_connection: Any, ClientRequestMock: Any + ) -> None: proxy_req = ClientRequest( "GET", URL("http://proxy.example.com"), @@ -316,7 +378,12 @@ async def make_conn(): self.loop.run_until_complete(req.close()) @mock.patch("aiohttp.connector.ClientRequest") - def test_https_connect(self, ClientRequestMock) -> None: + @mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) + def test_https_connect(self, start_connection: Any, ClientRequestMock: Any) -> None: proxy_req = ClientRequest( "GET", URL("http://proxy.example.com"), loop=self.loop ) @@ -376,7 +443,14 @@ async def make_conn(): self.loop.run_until_complete(req.close()) @mock.patch("aiohttp.connector.ClientRequest") - def test_https_connect_certificate_error(self, ClientRequestMock) -> None: + @mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) + def test_https_connect_certificate_error( + self, start_connection: Any, ClientRequestMock: Any + ) -> None: proxy_req = ClientRequest( "GET", URL("http://proxy.example.com"), loop=self.loop ) @@ -430,7 +504,14 @@ async def make_conn(): ) @mock.patch("aiohttp.connector.ClientRequest") - def test_https_connect_ssl_error(self, 
ClientRequestMock) -> None: + @mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) + def test_https_connect_ssl_error( + self, start_connection: Any, ClientRequestMock: Any + ) -> None: proxy_req = ClientRequest( "GET", URL("http://proxy.example.com"), loop=self.loop ) @@ -486,7 +567,14 @@ async def make_conn(): ) @mock.patch("aiohttp.connector.ClientRequest") - def test_https_connect_http_proxy_error(self, ClientRequestMock) -> None: + @mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) + def test_https_connect_http_proxy_error( + self, start_connection: Any, ClientRequestMock: Any + ) -> None: proxy_req = ClientRequest( "GET", URL("http://proxy.example.com"), loop=self.loop ) @@ -545,7 +633,14 @@ async def make_conn(): self.loop.run_until_complete(req.close()) @mock.patch("aiohttp.connector.ClientRequest") - def test_https_connect_resp_start_error(self, ClientRequestMock) -> None: + @mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) + def test_https_connect_resp_start_error( + self, start_connection: Any, ClientRequestMock: Any + ) -> None: proxy_req = ClientRequest( "GET", URL("http://proxy.example.com"), loop=self.loop ) @@ -598,7 +693,12 @@ async def make_conn(): ) @mock.patch("aiohttp.connector.ClientRequest") - def test_request_port(self, ClientRequestMock) -> None: + @mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) + def test_request_port(self, start_connection: Any, ClientRequestMock: Any) -> None: proxy_req = ClientRequest( "GET", URL("http://proxy.example.com"), loop=self.loop ) @@ -656,7 +756,14 @@ def test_proxy_auth_property_default(self) -> None: self.assertIsNone(req.proxy_auth) @mock.patch("aiohttp.connector.ClientRequest") - def test_https_connect_pass_ssl_context(self, ClientRequestMock) -> None: + @mock.patch( + 
"aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) + def test_https_connect_pass_ssl_context( + self, start_connection: Any, ClientRequestMock: Any + ) -> None: proxy_req = ClientRequest( "GET", URL("http://proxy.example.com"), loop=self.loop ) @@ -724,7 +831,12 @@ async def make_conn(): self.loop.run_until_complete(req.close()) @mock.patch("aiohttp.connector.ClientRequest") - def test_https_auth(self, ClientRequestMock) -> None: + @mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) + def test_https_auth(self, start_connection: Any, ClientRequestMock: Any) -> None: proxy_req = ClientRequest( "GET", URL("http://proxy.example.com"), From 6114b08c60c682b28e21e84b1d4a6246243bfe15 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 5 Jan 2024 03:19:44 +0000 Subject: [PATCH 0052/1511] Bump attrs from 23.1.0 to 23.2.0 (#8002) Bumps [attrs](https://github.com/sponsors/hynek) from 23.1.0 to 23.2.0.
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=attrs&package-manager=pip&previous-version=23.1.0&new-version=23.2.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 99f1b5ab9d7..0a491124e81 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -12,7 +12,7 @@ aiosignal==1.3.1 # via -r requirements/runtime-deps.in async-timeout==4.0.3 ; python_version < "3.11" # via -r requirements/runtime-deps.in -attrs==23.1.0 +attrs==23.2.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index b6d150fa076..fda286b5272 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -22,7 +22,7 @@ async-timeout==4.0.3 ; python_version < "3.11" # via # -r requirements/runtime-deps.in # aioredis -attrs==23.1.0 +attrs==23.2.0 # via -r requirements/runtime-deps.in babel==2.9.1 # via sphinx diff --git a/requirements/dev.txt b/requirements/dev.txt index f88c11d3033..80620d85403 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -22,7 +22,7 @@ async-timeout==4.0.3 ; python_version < "3.11" # via # -r requirements/runtime-deps.in # aioredis -attrs==23.1.0 +attrs==23.2.0 # via -r requirements/runtime-deps.in babel==2.12.1 # via sphinx diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 6c1e407eec3..6020bcb2767 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -12,7 +12,7 @@ aiosignal==1.3.1 # via -r requirements/runtime-deps.in async-timeout==4.0.3 ; python_version < "3.11" # via -r requirements/runtime-deps.in -attrs==23.1.0 +attrs==23.2.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; 
platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 72fb6a40e56..549eb0473d9 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -14,7 +14,7 @@ annotated-types==0.5.0 # via pydantic async-timeout==4.0.3 ; python_version < "3.11" # via -r requirements/runtime-deps.in -attrs==23.1.0 +attrs==23.2.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in From 7aa951b747b9b5e4c0cefbae05430f60902051a3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 5 Jan 2024 11:25:05 +0000 Subject: [PATCH 0053/1511] Bump aiohappyeyeballs from 2.3.0 to 2.3.1 (#8007) Bumps [aiohappyeyeballs](https://github.com/bdraco/aiohappyeyeballs) from 2.3.0 to 2.3.1.
Release notes

Sourced from aiohappyeyeballs's releases.

v2.3.1 (2023-12-14)

Fix

  • fix: remove test import from tests (#31) (c529b15)
Changelog

Sourced from aiohappyeyeballs's changelog.

v2.3.1 (2023-12-14)

Fix

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=aiohappyeyeballs&package-manager=pip&previous-version=2.3.0&new-version=2.3.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 0a491124e81..7cfea4bd0ad 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -6,7 +6,7 @@ # aiodns==3.1.1 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.0 +aiohappyeyeballs==2.3.1 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index fda286b5272..e540ac2a0a4 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -6,7 +6,7 @@ # aiodns==3.1.1 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.0 +aiohappyeyeballs==2.3.1 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.6 # via -r requirements/doc.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 80620d85403..57558be22b1 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -6,7 +6,7 @@ # aiodns==3.1.1 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.0 +aiohappyeyeballs==2.3.1 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.6 # via -r requirements/doc.in diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 6020bcb2767..36d2b5e6a4a 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -6,7 +6,7 @@ # aiodns==3.1.1 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.0 +aiohappyeyeballs==2.3.1 # via -r requirements/runtime-deps.in 
aiosignal==1.3.1 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 549eb0473d9..40991c884c9 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -6,7 +6,7 @@ # aiodns==3.1.1 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.0 +aiohappyeyeballs==2.3.1 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in From e643510f8531724aad12b92cde1eac0c91ddf427 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Jan 2024 11:10:20 +0000 Subject: [PATCH 0054/1511] Bump aiohappyeyeballs from 2.3.1 to 2.3.2 (#8009) Bumps [aiohappyeyeballs](https://github.com/aio-libs/aiohappyeyeballs) from 2.3.1 to 2.3.2.
Release notes

Sourced from aiohappyeyeballs's releases.

v2.3.2 (2024-01-06)

Fix

  • fix: update urls for the new home for this library (#43) (c6d4358)
Changelog

Sourced from aiohappyeyeballs's changelog.

v2.3.2 (2024-01-06)

Fix

  • Update urls for the new home for this library (#43) (c6d4358)
Commits
  • f5afb7f 2.3.2
  • c6d4358 fix: update urls for the new home for this library (#43)
  • 9d70884 chore: fix workflow file name for readme badges (#42)
  • b9167b9 chore: adjust syntax for check_if_should_release workflow (#40)
  • dca4295 chore: avoid release workflow on chore push events as well (#39)
  • e254c16 chore: do not run release workflow on chore prs (#38)
  • c422737 chore: adjust workflow to accomodate env pypi settings change (#37)
  • affbe01 chore: update urls to point to aio-libs (#35)
  • 2938678 chore: remove FUNDING.yaml (#36)
  • e1d1747 chore: remove workflows that need GH_PAT (#34)
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=aiohappyeyeballs&package-manager=pip&previous-version=2.3.1&new-version=2.3.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 7cfea4bd0ad..33495ca2ba3 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -6,7 +6,7 @@ # aiodns==3.1.1 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.1 +aiohappyeyeballs==2.3.2 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index e540ac2a0a4..ec6837ad545 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -6,7 +6,7 @@ # aiodns==3.1.1 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.1 +aiohappyeyeballs==2.3.2 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.6 # via -r requirements/doc.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 57558be22b1..048eac2728a 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -6,7 +6,7 @@ # aiodns==3.1.1 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.1 +aiohappyeyeballs==2.3.2 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.6 # via -r requirements/doc.in diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 36d2b5e6a4a..2263f16bcfa 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -6,7 +6,7 @@ # aiodns==3.1.1 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.1 +aiohappyeyeballs==2.3.2 # via -r requirements/runtime-deps.in 
aiosignal==1.3.1 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 40991c884c9..15542e77ca1 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -6,7 +6,7 @@ # aiodns==3.1.1 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.1 +aiohappyeyeballs==2.3.2 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in From 8de41885d3bf5e296b77f574163bc4e07ae32287 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 10 Jan 2024 14:56:23 -1000 Subject: [PATCH 0055/1511] [PR #8012/5f699bbb backport][3.10] Fix FileResponse doing blocking I/O in the event loop (#8016) Co-authored-by: J. Nick Koston --- CHANGES/8012.bugfix | 1 + aiohttp/web_fileresponse.py | 28 ++++++++++++++++++++-------- tests/test_web_sendfile.py | 8 ++++---- 3 files changed, 25 insertions(+), 12 deletions(-) create mode 100644 CHANGES/8012.bugfix diff --git a/CHANGES/8012.bugfix b/CHANGES/8012.bugfix new file mode 100644 index 00000000000..f5187075f3f --- /dev/null +++ b/CHANGES/8012.bugfix @@ -0,0 +1 @@ +Fix `web.FileResponse` doing blocking I/O in the event loop diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index eb7a6a31d39..c3b3814974e 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -124,19 +124,31 @@ async def _precondition_failed( self.content_length = 0 return await super().prepare(request) - async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: + def _get_file_path_stat_and_gzip( + self, check_for_gzipped_file: bool + ) -> Tuple[pathlib.Path, os.stat_result, bool]: + """Return the file path, stat result, and gzip status. + + This method should be called from a thread executor + since it calls os.stat which may block. 
+ """ filepath = self._path - - gzip = False - if "gzip" in request.headers.get(hdrs.ACCEPT_ENCODING, ""): + if check_for_gzipped_file: gzip_path = filepath.with_name(filepath.name + ".gz") + try: + return gzip_path, gzip_path.stat(), True + except OSError: + # Fall through and try the non-gzipped file + pass - if gzip_path.is_file(): - filepath = gzip_path - gzip = True + return filepath, filepath.stat(), False + async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: loop = asyncio.get_event_loop() - st: os.stat_result = await loop.run_in_executor(None, filepath.stat) + check_for_gzipped_file = "gzip" in request.headers.get(hdrs.ACCEPT_ENCODING, "") + filepath, st, gzip = await loop.run_in_executor( + None, self._get_file_path_stat_and_gzip, check_for_gzipped_file + ) etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}" last_modified = st.st_mtime diff --git a/tests/test_web_sendfile.py b/tests/test_web_sendfile.py index 0258def090f..2817e085a6f 100644 --- a/tests/test_web_sendfile.py +++ b/tests/test_web_sendfile.py @@ -12,7 +12,6 @@ def test_using_gzip_if_header_present_and_file_available(loop) -> None: ) gz_filepath = mock.create_autospec(Path, spec_set=True) - gz_filepath.is_file.return_value = True gz_filepath.stat.return_value.st_size = 1024 gz_filepath.stat.return_value.st_mtime_ns = 1603733507222449291 @@ -34,7 +33,8 @@ def test_gzip_if_header_not_present_and_file_available(loop) -> None: request = make_mocked_request("GET", "http://python.org/logo.png", headers={}) gz_filepath = mock.create_autospec(Path, spec_set=True) - gz_filepath.is_file.return_value = True + gz_filepath.stat.return_value.st_size = 1024 + gz_filepath.stat.return_value.st_mtime_ns = 1603733507222449291 filepath = mock.create_autospec(Path, spec_set=True) filepath.name = "logo.png" @@ -56,7 +56,7 @@ def test_gzip_if_header_not_present_and_file_not_available(loop) -> None: request = make_mocked_request("GET", "http://python.org/logo.png", headers={}) gz_filepath 
= mock.create_autospec(Path, spec_set=True) - gz_filepath.is_file.return_value = False + gz_filepath.stat.side_effect = OSError(2, "No such file or directory") filepath = mock.create_autospec(Path, spec_set=True) filepath.name = "logo.png" @@ -80,7 +80,7 @@ def test_gzip_if_header_present_and_file_not_available(loop) -> None: ) gz_filepath = mock.create_autospec(Path, spec_set=True) - gz_filepath.is_file.return_value = False + gz_filepath.stat.side_effect = OSError(2, "No such file or directory") filepath = mock.create_autospec(Path, spec_set=True) filepath.name = "logo.png" From d9d0b95d9db6a10f10743b189bfd8312552fc134 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 10 Jan 2024 15:51:49 -1000 Subject: [PATCH 0056/1511] Fix double compress when compression enabled and compressed file exists (#8014) (#8018) (cherry picked from commit 92655a5) --- CHANGES/8014.bugfix | 1 + aiohttp/web_fileresponse.py | 4 ++++ tests/test_web_sendfile_functional.py | 30 ++++++++++++++++++++++++++- 3 files changed, 34 insertions(+), 1 deletion(-) create mode 100644 CHANGES/8014.bugfix diff --git a/CHANGES/8014.bugfix b/CHANGES/8014.bugfix new file mode 100644 index 00000000000..681bb5966ae --- /dev/null +++ b/CHANGES/8014.bugfix @@ -0,0 +1 @@ +Fix double compress when compression enabled and compressed file exists diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index c3b3814974e..6496ffaf317 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -267,6 +267,10 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter self.headers[hdrs.CONTENT_ENCODING] = encoding if gzip: self.headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING + # Disable compression if we are already sending + # a compressed file since we don't want to double + # compress. 
+ self._compression = False self.etag = etag_value # type: ignore[assignment] self.last_modified = st.st_mtime # type: ignore[assignment] diff --git a/tests/test_web_sendfile_functional.py b/tests/test_web_sendfile_functional.py index 3f4f13354ec..31f22892f66 100644 --- a/tests/test_web_sendfile_functional.py +++ b/tests/test_web_sendfile_functional.py @@ -250,7 +250,35 @@ async def handler(request): await client.close() -async def test_static_file_with_content_encoding(aiohttp_client, sender) -> None: +async def test_static_file_with_gziped_counter_part_enable_compression( + aiohttp_client: Any, sender: Any +): + """Test that enable_compression does not double compress when a .gz file is also present.""" + filepath = pathlib.Path(__file__).parent / "hello.txt" + + async def handler(request): + resp = sender(filepath) + resp.enable_compression() + return resp + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + resp = await client.get("/") + assert resp.status == 200 + body = await resp.read() + assert body == b"hello aiohttp\n" + assert resp.headers["Content-Type"] == "text/plain" + assert resp.headers.get("Content-Encoding") == "gzip" + resp.close() + await resp.release() + await client.close() + + +async def test_static_file_with_content_encoding( + aiohttp_client: Any, sender: Any +) -> None: filepath = pathlib.Path(__file__).parent / "hello.txt.gz" async def handler(request): From ed98a5eb2db664ec010ce2cbe75d1b59270503de Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 11 Jan 2024 10:34:12 +0000 Subject: [PATCH 0057/1511] Bump cython from 3.0.7 to 3.0.8 (#8020) Bumps [cython](https://github.com/cython/cython) from 3.0.7 to 3.0.8.
Changelog

Sourced from cython's changelog.

3.0.8 (2024-01-10)

Bugs fixed

  • Using const together with defined fused types could fail to compile. (Github issue :issue:5230)

  • A "use after free" bug was fixed in parallel sections. (Github issue :issue:5922)

  • Several types were not available as cython.* types in pure Python code.

  • The generated code is now correct C89 again, removing some C++ style // comments and C99-style declaration-after-code code ordering. This is still relevant for some old C compilers, specifically ones that match old Python 2.7 installations.

Commits
  • a1b79a6 Prepare release of 3.0.8.
  • b9bfa7f Fix parsing of ptrdiff_t in PyrexTypes and add another "all types in Shadow.p...
  • f974ec1 Update changelog.
  • ffe6fa7 Avoid C99-isms.
  • 356495b Avoid C99-ism.
  • b85be7e Avoid C99-ism.
  • 30a6534 Fix some C99-isms in 3.0.x branch.
  • 9866ce4 Avoid C99-ism.
  • 6990d6e Use Py3.6 instead of Py3.7/8/9 for the C89 build since CPython switched to C9...
  • d3b92b0 Use Py3.7 instead of Py3.9 for the C89 build since CPython switched to C99 at...
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=cython&package-manager=pip&previous-version=3.0.7&new-version=3.0.8)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/cython.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index ec6837ad545..4d955baf48e 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -62,7 +62,7 @@ cryptography==41.0.2 # via # pyjwt # trustme -cython==3.0.7 +cython==3.0.8 # via -r requirements/cython.in distlib==0.3.3 # via virtualenv diff --git a/requirements/cython.txt b/requirements/cython.txt index 6cb581d1c5d..f6b3175f34a 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/cython.txt --resolver=backtracking --strip-extras requirements/cython.in # -cython==3.0.7 +cython==3.0.8 # via -r requirements/cython.in multidict==6.0.4 # via -r requirements/multidict.in From af647c3cfc86920aa8303e1357a5dc21b88b7f9f Mon Sep 17 00:00:00 2001 From: igorvoltaic Date: Fri, 12 Jan 2024 22:41:25 +0300 Subject: [PATCH 0058/1511] Backport to 3.10: Add runtime type check for `ClientSession` `timeout` param (#8022) (#8026) --- CHANGES/8021.bugfix | 1 + CONTRIBUTORS.txt | 1 + aiohttp/client.py | 9 +++++++-- tests/test_client_session.py | 13 +++++++------ 4 files changed, 16 insertions(+), 8 deletions(-) create mode 100644 CHANGES/8021.bugfix diff --git a/CHANGES/8021.bugfix b/CHANGES/8021.bugfix new file mode 100644 index 00000000000..f43843a587f --- /dev/null +++ b/CHANGES/8021.bugfix @@ -0,0 +1 @@ +Add runtime type check for ``ClientSession`` ``timeout`` parameter. 
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 931d5c5b9aa..e94381dcf28 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -143,6 +143,7 @@ Hugo Hromic Hugo van Kemenade Hynek Schlawack Igor Alexandrov +Igor Bolshakov Igor Davydenko Igor Mozharovsky Igor Pavlov diff --git a/aiohttp/client.py b/aiohttp/client.py index 2750d5e2e86..f4323a9935b 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -276,7 +276,7 @@ def __init__( self._default_auth = auth self._version = version self._json_serialize = json_serialize - if timeout is sentinel: + if timeout is sentinel or timeout is None: self._timeout = DEFAULT_TIMEOUT if read_timeout is not sentinel: warnings.warn( @@ -293,7 +293,12 @@ def __init__( stacklevel=2, ) else: - self._timeout = timeout # type: ignore[assignment] + if not isinstance(timeout, ClientTimeout): + raise ValueError( + f"timeout parameter cannot be of {type(timeout)} type, " + "please use 'timeout=ClientTimeout(...)'", + ) + self._timeout = timeout if read_timeout is not sentinel: raise ValueError( "read_timeout and timeout parameters " diff --git a/tests/test_client_session.py b/tests/test_client_session.py index 2823fc46244..a0654ed8ccd 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -814,12 +814,6 @@ async def test_client_session_timeout_default_args(loop) -> None: await session1.close() -async def test_client_session_timeout_argument() -> None: - session = ClientSession(timeout=500) - assert session.timeout == 500 - await session.close() - - async def test_client_session_timeout_zero() -> None: timeout = client.ClientTimeout(total=10, connect=0, sock_connect=0, sock_read=0) try: @@ -829,6 +823,13 @@ async def test_client_session_timeout_zero() -> None: pytest.fail("0 should disable timeout.") +async def test_client_session_timeout_bad_argument() -> None: + with pytest.raises(ValueError): + ClientSession(timeout="test_bad_argumnet") + with pytest.raises(ValueError): + ClientSession(timeout=100) + + 
async def test_requote_redirect_url_default() -> None: session = ClientSession() assert session.requote_redirect_url From c465e850f67f95150d474c3313082b2b47abd490 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 19 Jan 2024 14:53:54 -1000 Subject: [PATCH 0059/1511] [PR #8033/5424c534 backport][3.10] Small cleanups to WebSocketWriter (#8037) --- aiohttp/http_websocket.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/aiohttp/http_websocket.py b/aiohttp/http_websocket.py index f395a27614a..b63453f99e5 100644 --- a/aiohttp/http_websocket.py +++ b/aiohttp/http_websocket.py @@ -605,7 +605,7 @@ def __init__( *, use_mask: bool = False, limit: int = DEFAULT_LIMIT, - random: Any = random.Random(), + random: random.Random = random.Random(), compress: int = 0, notakeover: bool = False, ) -> None: @@ -668,20 +668,20 @@ async def _send_frame( else: header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length) if use_mask: - mask = self.randrange(0, 0xFFFFFFFF) - mask = mask.to_bytes(4, "big") + mask_int = self.randrange(0, 0xFFFFFFFF) + mask = mask_int.to_bytes(4, "big") message = bytearray(message) _websocket_mask(mask, message) self._write(header + mask + message) - self._output_size += len(header) + len(mask) + len(message) + self._output_size += len(header) + len(mask) + msg_length else: - if len(message) > MSG_SIZE: + if msg_length > MSG_SIZE: self._write(header) self._write(message) else: self._write(header + message) - self._output_size += len(header) + len(message) + self._output_size += len(header) + msg_length # It is safe to return control to the event loop when using compression # after this point as we have already sent or buffered all the data. 
From 6e3e53c67b7c875cf5f1baf362a7c65138803d98 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sat, 20 Jan 2024 14:45:44 +0000 Subject: [PATCH 0060/1511] Retry connection (#7363) (#8038) Fixes #7297 (cherry picked from commit be9a3cc9b3e617cb146093df6e05a73258aedb39) --- CHANGES/7297.feature | 1 + aiohttp/client.py | 10 ++++ tests/test_client_functional.py | 86 ++++++++++++++++++++++++++++----- 3 files changed, 85 insertions(+), 12 deletions(-) create mode 100644 CHANGES/7297.feature diff --git a/CHANGES/7297.feature b/CHANGES/7297.feature new file mode 100644 index 00000000000..91d769a4b32 --- /dev/null +++ b/CHANGES/7297.feature @@ -0,0 +1 @@ +Added a feature to retry closed connections automatically for idempotent methods. -- by :user:`Dreamsorcerer` diff --git a/aiohttp/client.py b/aiohttp/client.py index f4323a9935b..d08211bd00e 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -167,6 +167,9 @@ class ClientTimeout: # 5 Minute default read timeout DEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60) +# https://www.rfc-editor.org/rfc/rfc9110#section-9.2.2 +IDEMPOTENT_METHODS = frozenset({"GET", "HEAD", "OPTIONS", "TRACE", "PUT", "DELETE"}) + _RetType = TypeVar("_RetType") _CharsetResolver = Callable[[ClientResponse, bytes], str] @@ -507,6 +510,8 @@ async def _request( timer = tm.timer() try: with timer: + # https://www.rfc-editor.org/rfc/rfc9112.html#name-retrying-requests + retry_persistent_connection = method in IDEMPOTENT_METHODS while True: url, auth_from_url = strip_auth_from_url(url) if auth and auth_from_url: @@ -614,6 +619,11 @@ async def _request( except BaseException: conn.close() raise + except (ClientOSError, ServerDisconnectedError): + if retry_persistent_connection: + retry_persistent_connection = False + continue + raise except ClientError: raise except OSError as exc: diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 585085127db..654788afa72 100644 --- a/tests/test_client_functional.py +++ 
b/tests/test_client_functional.py @@ -8,6 +8,8 @@ import pathlib import socket import ssl +import sys +import time from typing import Any, AsyncIterator from unittest import mock @@ -214,6 +216,67 @@ async def handler(request): assert 0 == len(client._session.connector._conns) +async def test_keepalive_timeout_async_sleep() -> None: + async def handler(request): + body = await request.read() + assert b"" == body + return web.Response(body=b"OK") + + app = web.Application() + app.router.add_route("GET", "/", handler) + + runner = web.AppRunner(app, tcp_keepalive=True, keepalive_timeout=0.001) + await runner.setup() + + port = unused_port() + site = web.TCPSite(runner, host="localhost", port=port) + await site.start() + + try: + async with aiohttp.client.ClientSession() as sess: + resp1 = await sess.get(f"http://localhost:{port}/") + await resp1.read() + # wait for server keepalive_timeout + await asyncio.sleep(0.01) + resp2 = await sess.get(f"http://localhost:{port}/") + await resp2.read() + finally: + await asyncio.gather(runner.shutdown(), site.stop()) + + +@pytest.mark.skipif( + sys.version_info[:2] == (3, 11), + reason="https://github.com/pytest-dev/pytest/issues/10763", +) +async def test_keepalive_timeout_sync_sleep() -> None: + async def handler(request): + body = await request.read() + assert b"" == body + return web.Response(body=b"OK") + + app = web.Application() + app.router.add_route("GET", "/", handler) + + runner = web.AppRunner(app, tcp_keepalive=True, keepalive_timeout=0.001) + await runner.setup() + + port = unused_port() + site = web.TCPSite(runner, host="localhost", port=port) + await site.start() + + try: + async with aiohttp.client.ClientSession() as sess: + resp1 = await sess.get(f"http://localhost:{port}/") + await resp1.read() + # wait for server keepalive_timeout + # time.sleep is a more challenging scenario than asyncio.sleep + time.sleep(0.01) + resp2 = await sess.get(f"http://localhost:{port}/") + await resp2.read() + finally: + await 
asyncio.gather(runner.shutdown(), site.stop()) + + async def test_release_early(aiohttp_client) -> None: async def handler(request): await request.read() @@ -3043,21 +3106,20 @@ def connection_lost(self, exc): addr = server.sockets[0].getsockname() - connector = aiohttp.TCPConnector(limit=1) - session = aiohttp.ClientSession(connector=connector) + async with aiohttp.TCPConnector(limit=1) as connector: + async with aiohttp.ClientSession(connector=connector) as session: + url = "http://{}:{}/".format(*addr) - url = "http://{}:{}/".format(*addr) + r = await session.request("GET", url) + await r.read() + assert 1 == len(connector._conns) + closed_conn = next(iter(connector._conns.values())) - r = await session.request("GET", url) - await r.read() - assert 1 == len(connector._conns) + await session.request("GET", url) + assert 1 == len(connector._conns) + new_conn = next(iter(connector._conns.values())) + assert closed_conn is not new_conn - with pytest.raises(aiohttp.ClientConnectionError): - await session.request("GET", url) - assert 0 == len(connector._conns) - - await session.close() - await connector.close() server.close() await server.wait_closed() From 641f4ae70acbe837285282ade1f76f91630369ce Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 20 Jan 2024 15:42:04 +0000 Subject: [PATCH 0061/1511] [PR #8010/2670e7b0 backport][3.10] Fix the Towncrier philosophy link (#8040) **This is a backport of PR #8010 as merged into master (2670e7b08da179e74a643dca8d795fd23fcd282e).** --- CHANGES/8010.doc | 2 ++ CHANGES/README.rst | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 CHANGES/8010.doc diff --git a/CHANGES/8010.doc b/CHANGES/8010.doc new file mode 100644 index 00000000000..db1b0aa3225 --- /dev/null +++ b/CHANGES/8010.doc @@ -0,0 +1,2 @@ +On the `CHANGES/README.rst `_ page, +a link to the ``Towncrier philosophy`` has been fixed. 
diff --git a/CHANGES/README.rst b/CHANGES/README.rst index c6b5153913a..9f619296351 100644 --- a/CHANGES/README.rst +++ b/CHANGES/README.rst @@ -92,4 +92,4 @@ File :file:`CHANGES/4594.feature.rst`: (``tool.towncrier.type``). .. _Towncrier philosophy: - https://towncrier.readthedocs.io/en/actual-freaking-docs/#philosophy + https://towncrier.readthedocs.io/en/stable/#philosophy From 4b91b530e851acec62c7e9db4cf5c086bf153340 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sat, 20 Jan 2024 22:50:32 +0000 Subject: [PATCH 0062/1511] Tightening the runtime type check for ssl (#7698) (#8042) Currently, the valid types of ssl parameter are SSLContext, Literal[False], Fingerprint or None. If user sets ssl = False, we disable ssl certificate validation which makes total sense. But if user set ssl = True by mistake, instead of enabling ssl certificate validation or raising errors, we silently disable the validation too which is a little subtle but weird. In this PR, we added a check that if user sets ssl=True, we enable certificate validation by treating it as using Default SSL Context. --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Sviatoslav Sydorenko Co-authored-by: Sam Bull Co-authored-by: J. 
Nick Koston Co-authored-by: Sam Bull (cherry picked from commit 9e14ea19b5a48bb26797babc32202605066cb5f5) --- CHANGES/7698.feature | 1 + aiohttp/client.py | 17 ++++++++++++----- aiohttp/client_exceptions.py | 6 +++--- aiohttp/client_reqrep.py | 21 ++++++++++----------- aiohttp/connector.py | 6 +++--- tests/test_client_exceptions.py | 10 +++++----- tests/test_client_fingerprint.py | 6 +++--- tests/test_client_request.py | 4 ++-- tests/test_connector.py | 16 ++++++++-------- tests/test_proxy.py | 4 ++-- 10 files changed, 49 insertions(+), 42 deletions(-) create mode 100644 CHANGES/7698.feature diff --git a/CHANGES/7698.feature b/CHANGES/7698.feature new file mode 100644 index 00000000000..e8c4b3fb452 --- /dev/null +++ b/CHANGES/7698.feature @@ -0,0 +1 @@ +Added support for passing `True` to `ssl` while deprecating `None`. -- by :user:`xiangyan99` diff --git a/aiohttp/client.py b/aiohttp/client.py index d08211bd00e..36dbf6a7119 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -22,7 +22,6 @@ Generic, Iterable, List, - Literal, Mapping, Optional, Set, @@ -415,7 +414,7 @@ async def _request( verify_ssl: Optional[bool] = None, fingerprint: Optional[bytes] = None, ssl_context: Optional[SSLContext] = None, - ssl: Optional[Union[SSLContext, Literal[False], Fingerprint]] = None, + ssl: Union[SSLContext, bool, Fingerprint] = True, server_hostname: Optional[str] = None, proxy_headers: Optional[LooseHeaders] = None, trace_request_ctx: Optional[SimpleNamespace] = None, @@ -571,7 +570,7 @@ async def _request( proxy_auth=proxy_auth, timer=timer, session=self, - ssl=ssl, + ssl=ssl if ssl is not None else True, server_hostname=server_hostname, proxy_headers=proxy_headers, traces=traces, @@ -752,7 +751,7 @@ def ws_connect( headers: Optional[LooseHeaders] = None, proxy: Optional[StrOrURL] = None, proxy_auth: Optional[BasicAuth] = None, - ssl: Union[SSLContext, Literal[False], None, Fingerprint] = None, + ssl: Union[SSLContext, bool, Fingerprint] = True, verify_ssl: 
Optional[bool] = None, fingerprint: Optional[bytes] = None, ssl_context: Optional[SSLContext] = None, @@ -804,7 +803,7 @@ async def _ws_connect( headers: Optional[LooseHeaders] = None, proxy: Optional[StrOrURL] = None, proxy_auth: Optional[BasicAuth] = None, - ssl: Union[SSLContext, Literal[False], None, Fingerprint] = None, + ssl: Union[SSLContext, bool, Fingerprint] = True, verify_ssl: Optional[bool] = None, fingerprint: Optional[bytes] = None, ssl_context: Optional[SSLContext] = None, @@ -838,6 +837,14 @@ async def _ws_connect( extstr = ws_ext_gen(compress=compress) real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr + # For the sake of backward compatibility, if user passes in None, convert it to True + if ssl is None: + warnings.warn( + "ssl=None is deprecated, please use ssl=True", + DeprecationWarning, + stacklevel=2, + ) + ssl = True ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) # send request diff --git a/aiohttp/client_exceptions.py b/aiohttp/client_exceptions.py index d70988f6ede..60bf058e887 100644 --- a/aiohttp/client_exceptions.py +++ b/aiohttp/client_exceptions.py @@ -182,12 +182,12 @@ def port(self) -> Optional[int]: return self._conn_key.port @property - def ssl(self) -> Union[SSLContext, None, bool, "Fingerprint"]: + def ssl(self) -> Union[SSLContext, bool, "Fingerprint"]: return self._conn_key.ssl def __str__(self) -> str: return "Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]".format( - self, self.ssl if self.ssl is not None else "default", self.strerror + self, "default" if self.ssl is True else self.ssl, self.strerror ) # OSError.__reduce__ does too much black magick @@ -221,7 +221,7 @@ def path(self) -> str: def __str__(self) -> str: return "Cannot connect to unix socket {0.path} ssl:{1} [{2}]".format( - self, self.ssl if self.ssl is not None else "default", self.strerror + self, "default" if self.ssl is True else self.ssl, self.strerror ) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 
4ae0ecbcdfb..bb43ae9318d 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -17,7 +17,6 @@ Dict, Iterable, List, - Literal, Mapping, Optional, Tuple, @@ -151,22 +150,22 @@ def check(self, transport: asyncio.Transport) -> None: if ssl is not None: SSL_ALLOWED_TYPES = (ssl.SSLContext, bool, Fingerprint, type(None)) else: # pragma: no cover - SSL_ALLOWED_TYPES = type(None) + SSL_ALLOWED_TYPES = (bool, type(None)) def _merge_ssl_params( - ssl: Union["SSLContext", Literal[False], Fingerprint, None], + ssl: Union["SSLContext", bool, Fingerprint], verify_ssl: Optional[bool], ssl_context: Optional["SSLContext"], fingerprint: Optional[bytes], -) -> Union["SSLContext", Literal[False], Fingerprint, None]: +) -> Union["SSLContext", bool, Fingerprint]: if verify_ssl is not None and not verify_ssl: warnings.warn( "verify_ssl is deprecated, use ssl=False instead", DeprecationWarning, stacklevel=3, ) - if ssl is not None: + if ssl is not True: raise ValueError( "verify_ssl, ssl_context, fingerprint and ssl " "parameters are mutually exclusive" @@ -179,7 +178,7 @@ def _merge_ssl_params( DeprecationWarning, stacklevel=3, ) - if ssl is not None: + if ssl is not True: raise ValueError( "verify_ssl, ssl_context, fingerprint and ssl " "parameters are mutually exclusive" @@ -192,7 +191,7 @@ def _merge_ssl_params( DeprecationWarning, stacklevel=3, ) - if ssl is not None: + if ssl is not True: raise ValueError( "verify_ssl, ssl_context, fingerprint and ssl " "parameters are mutually exclusive" @@ -214,7 +213,7 @@ class ConnectionKey: host: str port: Optional[int] is_ssl: bool - ssl: Union[SSLContext, None, Literal[False], Fingerprint] + ssl: Union[SSLContext, bool, Fingerprint] proxy: Optional[URL] proxy_auth: Optional[BasicAuth] proxy_headers_hash: Optional[int] # hash(CIMultiDict) @@ -276,7 +275,7 @@ def __init__( proxy_auth: Optional[BasicAuth] = None, timer: Optional[BaseTimerContext] = None, session: Optional["ClientSession"] = None, - ssl: Union[SSLContext, 
Literal[False], Fingerprint, None] = None, + ssl: Union[SSLContext, bool, Fingerprint] = True, proxy_headers: Optional[LooseHeaders] = None, traces: Optional[List["Trace"]] = None, trust_env: bool = False, @@ -315,7 +314,7 @@ def __init__( real_response_class = response_class self.response_class: Type[ClientResponse] = real_response_class self._timer = timer if timer is not None else TimerNoop() - self._ssl = ssl + self._ssl = ssl if ssl is not None else True self.server_hostname = server_hostname if loop.get_debug(): @@ -357,7 +356,7 @@ def is_ssl(self) -> bool: return self.url.scheme in ("https", "wss") @property - def ssl(self) -> Union["SSLContext", None, Literal[False], Fingerprint]: + def ssl(self) -> Union["SSLContext", bool, Fingerprint]: return self._ssl @property diff --git a/aiohttp/connector.py b/aiohttp/connector.py index baa3a7170f6..d0954355244 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -768,7 +768,7 @@ def __init__( ttl_dns_cache: Optional[int] = 10, family: int = 0, ssl_context: Optional[SSLContext] = None, - ssl: Union[None, Literal[False], Fingerprint, SSLContext] = None, + ssl: Union[bool, Fingerprint, SSLContext] = True, local_addr: Optional[Tuple[str, int]] = None, resolver: Optional[AbstractResolver] = None, keepalive_timeout: Union[None, float, object] = sentinel, @@ -965,13 +965,13 @@ def _get_ssl_context(self, req: ClientRequest) -> Optional[SSLContext]: sslcontext = req.ssl if isinstance(sslcontext, ssl.SSLContext): return sslcontext - if sslcontext is not None: + if sslcontext is not True: # not verified or fingerprinted return self._make_ssl_context(False) sslcontext = self._ssl if isinstance(sslcontext, ssl.SSLContext): return sslcontext - if sslcontext is not None: + if sslcontext is not True: # not verified or fingerprinted return self._make_ssl_context(False) return self._make_ssl_context(True) diff --git a/tests/test_client_exceptions.py b/tests/test_client_exceptions.py index 8f34e4cc73c..f70ba5d09a6 100644 
--- a/tests/test_client_exceptions.py +++ b/tests/test_client_exceptions.py @@ -119,7 +119,7 @@ class TestClientConnectorError: host="example.com", port=8080, is_ssl=False, - ssl=None, + ssl=True, proxy=None, proxy_auth=None, proxy_headers_hash=None, @@ -136,7 +136,7 @@ def test_ctor(self) -> None: assert err.os_error.strerror == "No such file" assert err.host == "example.com" assert err.port == 8080 - assert err.ssl is None + assert err.ssl is True def test_pickle(self) -> None: err = client.ClientConnectorError( @@ -153,7 +153,7 @@ def test_pickle(self) -> None: assert err2.os_error.strerror == "No such file" assert err2.host == "example.com" assert err2.port == 8080 - assert err2.ssl is None + assert err2.ssl is True assert err2.foo == "bar" def test_repr(self) -> None: @@ -171,7 +171,7 @@ def test_str(self) -> None: os_error=OSError(errno.ENOENT, "No such file"), ) assert str(err) == ( - "Cannot connect to host example.com:8080 ssl:" "default [No such file]" + "Cannot connect to host example.com:8080 ssl:default [No such file]" ) @@ -180,7 +180,7 @@ class TestClientConnectorCertificateError: host="example.com", port=8080, is_ssl=False, - ssl=None, + ssl=True, proxy=None, proxy_auth=None, proxy_headers_hash=None, diff --git a/tests/test_client_fingerprint.py b/tests/test_client_fingerprint.py index b1ae3cae36e..68dd528e0a2 100644 --- a/tests/test_client_fingerprint.py +++ b/tests/test_client_fingerprint.py @@ -37,7 +37,7 @@ def test_fingerprint_check_no_ssl() -> None: def test__merge_ssl_params_verify_ssl() -> None: with pytest.warns(DeprecationWarning): - assert _merge_ssl_params(None, False, None, None) is False + assert _merge_ssl_params(True, False, None, None) is False def test__merge_ssl_params_verify_ssl_conflict() -> None: @@ -50,7 +50,7 @@ def test__merge_ssl_params_verify_ssl_conflict() -> None: def test__merge_ssl_params_ssl_context() -> None: ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) with pytest.warns(DeprecationWarning): - assert 
_merge_ssl_params(None, None, ctx, None) is ctx + assert _merge_ssl_params(True, None, ctx, None) is ctx def test__merge_ssl_params_ssl_context_conflict() -> None: @@ -64,7 +64,7 @@ def test__merge_ssl_params_ssl_context_conflict() -> None: def test__merge_ssl_params_fingerprint() -> None: digest = hashlib.sha256(b"123").digest() with pytest.warns(DeprecationWarning): - ret = _merge_ssl_params(None, None, None, digest) + ret = _merge_ssl_params(True, None, None, digest) assert ret.fingerprint == digest diff --git a/tests/test_client_request.py b/tests/test_client_request.py index c8ce98d4034..6521b70ad55 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -166,7 +166,7 @@ def test_host_port_default_http(make_request) -> None: req = make_request("get", "http://python.org/") assert req.host == "python.org" assert req.port == 80 - assert not req.ssl + assert not req.is_ssl() def test_host_port_default_https(make_request) -> None: @@ -400,7 +400,7 @@ def test_ipv6_default_http_port(make_request) -> None: req = make_request("get", "http://[2001:db8::1]/") assert req.host == "2001:db8::1" assert req.port == 80 - assert not req.ssl + assert not req.is_ssl() def test_ipv6_default_https_port(make_request) -> None: diff --git a/tests/test_connector.py b/tests/test_connector.py index 1faec002487..84c03fc6fb5 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -30,19 +30,19 @@ @pytest.fixture() def key(): # Connection key - return ConnectionKey("localhost", 80, False, None, None, None, None) + return ConnectionKey("localhost", 80, False, True, None, None, None) @pytest.fixture def key2(): # Connection key - return ConnectionKey("localhost", 80, False, None, None, None, None) + return ConnectionKey("localhost", 80, False, True, None, None, None) @pytest.fixture def ssl_key(): # Connection key - return ConnectionKey("localhost", 80, True, None, None, None, None) + return ConnectionKey("localhost", 80, True, True, None, None, None) 
@pytest.fixture @@ -1467,9 +1467,9 @@ async def test_cleanup_closed_disabled(loop, mocker) -> None: assert not conn._cleanup_closed_transports -async def test_tcp_connector_ctor(loop) -> None: - conn = aiohttp.TCPConnector(loop=loop) - assert conn._ssl is None +async def test_tcp_connector_ctor() -> None: + conn = aiohttp.TCPConnector() + assert conn._ssl is True assert conn.use_dns_cache assert conn.family == 0 @@ -1555,7 +1555,7 @@ async def test___get_ssl_context3(loop) -> None: conn = aiohttp.TCPConnector(loop=loop, ssl=ctx) req = mock.Mock() req.is_ssl.return_value = True - req.ssl = None + req.ssl = True assert conn._get_ssl_context(req) is ctx @@ -1581,7 +1581,7 @@ async def test___get_ssl_context6(loop) -> None: conn = aiohttp.TCPConnector(loop=loop) req = mock.Mock() req.is_ssl.return_value = True - req.ssl = None + req.ssl = True assert conn._get_ssl_context(req) is conn._make_ssl_context(True) diff --git a/tests/test_proxy.py b/tests/test_proxy.py index 2a8643f5047..f335e42c254 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -92,7 +92,7 @@ async def make_conn(): auth=None, headers={"Host": "www.python.org"}, loop=self.loop, - ssl=None, + ssl=True, ) conn.close() @@ -150,7 +150,7 @@ async def make_conn(): auth=None, headers={"Host": "www.python.org", "Foo": "Bar"}, loop=self.loop, - ssl=None, + ssl=True, ) conn.close() From f47fa392eb23db29f2076d24a780a5de437fb7c8 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 21 Jan 2024 01:36:01 +0000 Subject: [PATCH 0063/1511] [PR #7995/43a5bc50 backport][3.10] Fix examples of `fallback_charset_resolver` function in client_advanced documentation (#8045) **This is a backport of PR #7995 as merged into master (43a5bc5097be31a25037fbfdbe39e86138a29cbd).** Co-authored-by: OMOTO Tsukasa --- CHANGES/7995.doc | 1 + docs/client_advanced.rst | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 CHANGES/7995.doc diff --git 
a/CHANGES/7995.doc b/CHANGES/7995.doc new file mode 100644 index 00000000000..70e3dfa5469 --- /dev/null +++ b/CHANGES/7995.doc @@ -0,0 +1 @@ +Fix examples of `fallback_charset_resolver` function in client_advanced documentation. -- by :user:`henry0312` diff --git a/docs/client_advanced.rst b/docs/client_advanced.rst index 037e50a9363..958e31dcc7c 100644 --- a/docs/client_advanced.rst +++ b/docs/client_advanced.rst @@ -674,7 +674,7 @@ example, this can be used with the ``chardetng_py`` library.:: def charset_resolver(resp: ClientResponse, body: bytes) -> str: tld = resp.url.host.rsplit(".", maxsplit=1)[-1] - return detect(body, allow_utf8=True, tld=tld) + return detect(body, allow_utf8=True, tld=tld.encode()) ClientSession(fallback_charset_resolver=charset_resolver) @@ -682,4 +682,4 @@ Or, if ``chardetng_py`` doesn't work for you, then ``charset-normalizer`` is ano from charset_normalizer import detect - ClientSession(fallback_charset_resolver=lamba r, b: detect(b)["encoding"] or "utf-8") + ClientSession(fallback_charset_resolver=lambda r, b: detect(b)["encoding"] or "utf-8") From a42ed204b47b26718f175ea9af14ce645ed02739 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 21 Jan 2024 01:51:36 +0000 Subject: [PATCH 0064/1511] Implement filter_cookies() with domain-matching and path-matching (#8046) (#7944) --------- Co-authored-by: Sam Bull Co-authored-by: J. 
Nick Koston (cherry picked from commit 54ceb6fd48ff4c4add2107a460376fc72c2b76e9) --------- Co-authored-by: xiangxli --- CHANGES/7583.feature | 14 +++++ CONTRIBUTORS.txt | 1 + aiohttp/cookiejar.py | 62 ++++++++++---------- tests/test_cookiejar.py | 126 +++++++++++++++++++++++++++++++--------- 4 files changed, 142 insertions(+), 61 deletions(-) create mode 100644 CHANGES/7583.feature diff --git a/CHANGES/7583.feature b/CHANGES/7583.feature new file mode 100644 index 00000000000..c05e64be780 --- /dev/null +++ b/CHANGES/7583.feature @@ -0,0 +1,14 @@ +Implement filter_cookies() with domain-matching and path-matching on the keys, instead of testing every single cookie. +This may break existing cookies that have been saved with `CookieJar.save()`. Cookies can be migrated with this script:: + + import pickle + with file_path.open("rb") as f: + cookies = pickle.load(f) + + morsels = [(name, m) for c in cookies.values() for name, m in c.items()] + cookies.clear() + for name, m in morsels: + cookies[(m["domain"], m["path"].rstrip("/"))][name] = m + + with file_path.open("wb") as f: + pickle.dump(cookies, f, pickle.HIGHEST_PROTOCOL) diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index e94381dcf28..270900207e1 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -346,6 +346,7 @@ William Grzybowski William S. 
Wilson Ong wouter bolsterlee +Xiang Li Yang Zhou Yannick Koechlin Yannick Péroux diff --git a/aiohttp/cookiejar.py b/aiohttp/cookiejar.py index a348f112cb5..87f05a0de3c 100644 --- a/aiohttp/cookiejar.py +++ b/aiohttp/cookiejar.py @@ -2,6 +2,7 @@ import calendar import contextlib import datetime +import itertools import os # noqa import pathlib import pickle @@ -10,7 +11,7 @@ from collections import defaultdict from http.cookies import BaseCookie, Morsel, SimpleCookie from math import ceil -from typing import ( # noqa +from typing import ( DefaultDict, Dict, Iterable, @@ -211,6 +212,7 @@ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> No # Cut everything from the last slash to the end path = "/" + path[1 : path.rfind("/")] cookie["path"] = path + path = path.rstrip("/") max_age = cookie["max-age"] if max_age: @@ -256,26 +258,41 @@ def filter_cookies(self, request_url: URL = URL()) -> "BaseCookie[str]": request_origin = request_url.origin() is_not_secure = request_origin not in self._treat_as_secure_origin + # Send shared cookie + for c in self._cookies[("", "")].values(): + filtered[c.key] = c.value + + if is_ip_address(hostname): + if not self._unsafe: + return filtered + domains: Iterable[str] = (hostname,) + else: + # Get all the subdomains that might match a cookie (e.g. "foo.bar.com", "bar.com", "com") + domains = itertools.accumulate( + reversed(hostname.split(".")), lambda x, y: f"{y}.{x}" + ) + # Get all the path prefixes that might match a cookie (e.g. "", "/foo", "/foo/bar") + paths = itertools.accumulate( + request_url.path.split("/"), lambda x, y: f"{x}/{y}" + ) + # Create every combination of (domain, path) pairs. 
+ pairs = itertools.product(domains, paths) + # Point 2: https://www.rfc-editor.org/rfc/rfc6265.html#section-5.4 - for cookie in sorted(self, key=lambda c: len(c["path"])): + cookies = itertools.chain.from_iterable( + self._cookies[p].values() for p in pairs + ) + path_len = len(request_url.path) + for cookie in cookies: name = cookie.key domain = cookie["domain"] - # Send shared cookies - if not domain: - filtered[name] = cookie.value - continue - - if not self._unsafe and is_ip_address(hostname): - continue - if (domain, name) in self._host_only_cookies: if domain != hostname: continue - elif not self._is_domain_match(domain, hostname): - continue - if not self._is_path_match(request_url.path, cookie["path"]): + # Skip edge case when the cookie has a trailing slash but request doesn't. + if len(cookie["path"]) > path_len: continue if is_not_secure and cookie["secure"]: @@ -305,25 +322,6 @@ def _is_domain_match(domain: str, hostname: str) -> bool: return not is_ip_address(hostname) - @staticmethod - def _is_path_match(req_path: str, cookie_path: str) -> bool: - """Implements path matching adhering to RFC 6265.""" - if not req_path.startswith("/"): - req_path = "/" - - if req_path == cookie_path: - return True - - if not req_path.startswith(cookie_path): - return False - - if cookie_path.endswith("/"): - return True - - non_matching = req_path[len(cookie_path) :] - - return non_matching.startswith("/") - @classmethod def _parse_date(cls, date_str: str) -> Optional[int]: """Implements date string parsing adhering to RFC 6265.""" diff --git a/tests/test_cookiejar.py b/tests/test_cookiejar.py index cffca3a4b59..70ab0a4864e 100644 --- a/tests/test_cookiejar.py +++ b/tests/test_cookiejar.py @@ -154,28 +154,6 @@ def test_domain_matching() -> None: assert not test_func("test.com", "127.0.0.1") -def test_path_matching() -> None: - test_func = CookieJar._is_path_match - - assert test_func("/", "") - assert test_func("", "/") - assert test_func("/file", "") - assert 
test_func("/folder/file", "") - assert test_func("/", "/") - assert test_func("/file", "/") - assert test_func("/file", "/file") - assert test_func("/folder/", "/folder/") - assert test_func("/folder/", "/") - assert test_func("/folder/file", "/") - - assert not test_func("/", "/file") - assert not test_func("/", "/folder/") - assert not test_func("/file", "/folder/file") - assert not test_func("/folder/", "/folder/file") - assert not test_func("/different-file", "/file") - assert not test_func("/different-folder/", "/folder/") - - async def test_constructor(loop, cookies_to_send, cookies_to_receive) -> None: jar = CookieJar(loop=loop) jar.update_cookies(cookies_to_send) @@ -245,8 +223,98 @@ async def test_filter_cookie_with_unicode_domain(loop) -> None: assert len(jar.filter_cookies(URL("http://xn--9caa.com"))) == 1 -async def test_domain_filter_ip_cookie_send(loop) -> None: - jar = CookieJar(loop=loop) +@pytest.mark.parametrize( + ("url", "expected_cookies"), + ( + ( + "http://pathtest.com/one/two/", + ( + "no-path-cookie", + "path1-cookie", + "path2-cookie", + "shared-cookie", + "path3-cookie", + "path4-cookie", + ), + ), + ( + "http://pathtest.com/one/two", + ( + "no-path-cookie", + "path1-cookie", + "path2-cookie", + "shared-cookie", + "path3-cookie", + ), + ), + ( + "http://pathtest.com/one/two/three/", + ( + "no-path-cookie", + "path1-cookie", + "path2-cookie", + "shared-cookie", + "path3-cookie", + "path4-cookie", + ), + ), + ( + "http://test1.example.com/", + ( + "shared-cookie", + "domain-cookie", + "subdomain1-cookie", + "dotted-domain-cookie", + ), + ), + ( + "http://pathtest.com/", + ( + "shared-cookie", + "no-path-cookie", + "path1-cookie", + ), + ), + ), +) +async def test_filter_cookies_with_domain_path_lookup_multilevelpath( + loop, + url, + expected_cookies, +) -> None: + jar = CookieJar() + cookies = SimpleCookie( + "shared-cookie=first; " + "domain-cookie=second; Domain=example.com; " + "subdomain1-cookie=third; Domain=test1.example.com; " + 
"subdomain2-cookie=fourth; Domain=test2.example.com; " + "dotted-domain-cookie=fifth; Domain=.example.com; " + "different-domain-cookie=sixth; Domain=different.org; " + "secure-cookie=seventh; Domain=secure.com; Secure; " + "no-path-cookie=eighth; Domain=pathtest.com; " + "path1-cookie=ninth; Domain=pathtest.com; Path=/; " + "path2-cookie=tenth; Domain=pathtest.com; Path=/one; " + "path3-cookie=eleventh; Domain=pathtest.com; Path=/one/two; " + "path4-cookie=twelfth; Domain=pathtest.com; Path=/one/two/; " + "expires-cookie=thirteenth; Domain=expirestest.com; Path=/;" + " Expires=Tue, 1 Jan 1980 12:00:00 GMT; " + "max-age-cookie=fourteenth; Domain=maxagetest.com; Path=/;" + " Max-Age=60; " + "invalid-max-age-cookie=fifteenth; Domain=invalid-values.com; " + " Max-Age=string; " + "invalid-expires-cookie=sixteenth; Domain=invalid-values.com; " + " Expires=string;" + ) + jar.update_cookies(cookies) + cookies = jar.filter_cookies(URL(url)) + + assert len(cookies) == len(expected_cookies) + for c in cookies: + assert c in expected_cookies + + +async def test_domain_filter_ip_cookie_send() -> None: + jar = CookieJar() cookies = SimpleCookie( "shared-cookie=first; " "domain-cookie=second; Domain=example.com; " @@ -488,11 +556,11 @@ def test_domain_filter_diff_host(self) -> None: def test_domain_filter_host_only(self) -> None: self.jar.update_cookies(self.cookies_to_receive, URL("http://example.com/")) + sub_cookie = SimpleCookie("subdomain=spam; Path=/;") + self.jar.update_cookies(sub_cookie, URL("http://foo.example.com/")) - cookies_sent = self.jar.filter_cookies(URL("http://example.com/")) - self.assertIn("unconstrained-cookie", set(cookies_sent.keys())) - - cookies_sent = self.jar.filter_cookies(URL("http://different.org/")) + cookies_sent = self.jar.filter_cookies(URL("http://foo.example.com/")) + self.assertIn("subdomain", set(cookies_sent.keys())) self.assertNotIn("unconstrained-cookie", set(cookies_sent.keys())) def test_secure_filter(self) -> None: @@ -827,7 +895,7 
@@ async def test_pickle_format(cookies_to_send) -> None: with file_path.open("wb") as f: pickle.dump(cookies, f, pickle.HIGHEST_PROTOCOL) """ - pickled = b"\x80\x05\x95\xc5\x07\x00\x00\x00\x00\x00\x00\x8c\x0bcollections\x94\x8c\x0bdefaultdict\x94\x93\x94\x8c\x0chttp.cookies\x94\x8c\x0cSimpleCookie\x94\x93\x94\x85\x94R\x94(\x8c\x00\x94\x8c\x01/\x94\x86\x94h\x05)\x81\x94\x8c\rshared-cookie\x94h\x03\x8c\x06Morsel\x94\x93\x94)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\t\x8c\x07comment\x94h\x08\x8c\x06domain\x94h\x08\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(\x8c\x03key\x94h\x0c\x8c\x05value\x94\x8c\x05first\x94\x8c\x0bcoded_value\x94h\x1cubs\x8c\x0bexample.com\x94h\t\x86\x94h\x05)\x81\x94(\x8c\rdomain-cookie\x94h\x0e)\x81\x94(h\x10h\x08h\x11h\th\x12h\x08h\x13h\x1eh\x14h\x08h\x15h\x08h\x16h\x08h\x17h\x08h\x18h\x08u}\x94(h\x1ah!h\x1b\x8c\x06second\x94h\x1dh$ub\x8c\x14dotted-domain-cookie\x94h\x0e)\x81\x94(h\x10h\x08h\x11h\th\x12h\x08h\x13\x8c\x0bexample.com\x94h\x14h\x08h\x15h\x08h\x16h\x08h\x17h\x08h\x18h\x08u}\x94(h\x1ah%h\x1b\x8c\x05fifth\x94h\x1dh)ubu\x8c\x11test1.example.com\x94h\t\x86\x94h\x05)\x81\x94\x8c\x11subdomain1-cookie\x94h\x0e)\x81\x94(h\x10h\x08h\x11h\th\x12h\x08h\x13h*h\x14h\x08h\x15h\x08h\x16h\x08h\x17h\x08h\x18h\x08u}\x94(h\x1ah-h\x1b\x8c\x05third\x94h\x1dh0ubs\x8c\x11test2.example.com\x94h\t\x86\x94h\x05)\x81\x94\x8c\x11subdomain2-cookie\x94h\x0e)\x81\x94(h\x10h\x08h\x11h\th\x12h\x08h\x13h1h\x14h\x08h\x15h\x08h\x16h\x08h\x17h\x08h\x18h\x08u}\x94(h\x1ah4h\x1b\x8c\x06fourth\x94h\x1dh7ubs\x8c\rdifferent.org\x94h\t\x86\x94h\x05)\x81\x94\x8c\x17different-domain-cookie\x94h\x0e)\x81\x94(h\x10h\x08h\x11h\th\x12h\x08h\x13h8h\x14h\x08h\x15h\x08h\x16h\x08h\x17h\x08h\x18h\x08u}\x94(h\x1ah;h\x1b\x8c\x05sixth\x94h\x1dh>ubs\x8c\nsecure.com\x94h\t\x86\x94h\x05)\x81\x94\x8c\rsecure-cookie\x94h\x0e)\x81\x94(h\x10h\x08h\x11h\th\x12h\x08h\x13h?h\x14h\x08h\x15\x88h\x16h\x08h\x17h
\x08h\x18h\x08u}\x94(h\x1ahBh\x1b\x8c\x07seventh\x94h\x1dhEubs\x8c\x0cpathtest.com\x94h\t\x86\x94h\x05)\x81\x94(\x8c\x0eno-path-cookie\x94h\x0e)\x81\x94(h\x10h\x08h\x11h\th\x12h\x08h\x13hFh\x14h\x08h\x15h\x08h\x16h\x08h\x17h\x08h\x18h\x08u}\x94(h\x1ahIh\x1b\x8c\x06eighth\x94h\x1dhLub\x8c\x0cpath1-cookie\x94h\x0e)\x81\x94(h\x10h\x08h\x11h\th\x12h\x08h\x13\x8c\x0cpathtest.com\x94h\x14h\x08h\x15h\x08h\x16h\x08h\x17h\x08h\x18h\x08u}\x94(h\x1ahMh\x1b\x8c\x05ninth\x94h\x1dhQubu\x8c\x0cpathtest.com\x94\x8c\x04/one\x94\x86\x94h\x05)\x81\x94\x8c\x0cpath2-cookie\x94h\x0e)\x81\x94(h\x10h\x08h\x11hSh\x12h\x08h\x13hRh\x14h\x08h\x15h\x08h\x16h\x08h\x17h\x08h\x18h\x08u}\x94(h\x1ahVh\x1b\x8c\x05tenth\x94h\x1dhYubs\x8c\x0cpathtest.com\x94\x8c\x08/one/two\x94\x86\x94h\x05)\x81\x94\x8c\x0cpath3-cookie\x94h\x0e)\x81\x94(h\x10h\x08h\x11h[h\x12h\x08h\x13hZh\x14h\x08h\x15h\x08h\x16h\x08h\x17h\x08h\x18h\x08u}\x94(h\x1ah^h\x1b\x8c\x08eleventh\x94h\x1dhaubs\x8c\x0cpathtest.com\x94\x8c\t/one/two/\x94\x86\x94h\x05)\x81\x94\x8c\x0cpath4-cookie\x94h\x0e)\x81\x94(h\x10h\x08h\x11hch\x12h\x08h\x13hbh\x14h\x08h\x15h\x08h\x16h\x08h\x17h\x08h\x18h\x08u}\x94(h\x1ahfh\x1b\x8c\x07twelfth\x94h\x1dhiubs\x8c\x0fexpirestest.com\x94h\t\x86\x94h\x05)\x81\x94\x8c\x0eexpires-cookie\x94h\x0e)\x81\x94(h\x10\x8c\x1cTue, 1 Jan 2999 12:00:00 
GMT\x94h\x11h\th\x12h\x08h\x13hjh\x14h\x08h\x15h\x08h\x16h\x08h\x17h\x08h\x18h\x08u}\x94(h\x1ahmh\x1b\x8c\nthirteenth\x94h\x1dhqubs\x8c\x0emaxagetest.com\x94h\t\x86\x94h\x05)\x81\x94\x8c\x0emax-age-cookie\x94h\x0e)\x81\x94(h\x10h\x08h\x11h\th\x12h\x08h\x13hrh\x14\x8c\x0260\x94h\x15h\x08h\x16h\x08h\x17h\x08h\x18h\x08u}\x94(h\x1ahuh\x1b\x8c\nfourteenth\x94h\x1dhyubs\x8c\x12invalid-values.com\x94h\t\x86\x94h\x05)\x81\x94(\x8c\x16invalid-max-age-cookie\x94h\x0e)\x81\x94(h\x10h\x08h\x11h\th\x12h\x08h\x13hzh\x14h\x08h\x15h\x08h\x16h\x08h\x17h\x08h\x18h\x08u}\x94(h\x1ah}h\x1b\x8c\tfifteenth\x94h\x1dh\x80ub\x8c\x16invalid-expires-cookie\x94h\x0e)\x81\x94(h\x10h\x08h\x11h\th\x12h\x08h\x13\x8c\x12invalid-values.com\x94h\x14h\x08h\x15h\x08h\x16h\x08h\x17h\x08h\x18h\x08u}\x94(h\x1ah\x81h\x1b\x8c\tsixteenth\x94h\x1dh\x85ubuu." + pickled = b"\x80\x04\x95\xc8\x0b\x00\x00\x00\x00\x00\x00\x8c\x0bcollections\x94\x8c\x0bdefaultdict\x94\x93\x94\x8c\x0chttp.cookies\x94\x8c\x0cSimpleCookie\x94\x93\x94\x85\x94R\x94(\x8c\x00\x94h\x08\x86\x94h\x05)\x81\x94\x8c\rshared-cookie\x94h\x03\x8c\x06Morsel\x94\x93\x94)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94\x8c\x01/\x94\x8c\x07comment\x94h\x08\x8c\x06domain\x94h\x08\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(\x8c\x03key\x94h\x0b\x8c\x05value\x94\x8c\x05first\x94\x8c\x0bcoded_value\x94h\x1cubs\x8c\x0bexample.com\x94h\x08\x86\x94h\x05)\x81\x94(\x8c\rdomain-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94h\x1e\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah!h\x1b\x8c\x06second\x94h\x1dh-ub\x8c\x14dotted-domain-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94\x8c\x0bexample.com\x94\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8
c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah.h\x1b\x8c\x05fifth\x94h\x1dh;ubu\x8c\x11test1.example.com\x94h\x08\x86\x94h\x05)\x81\x94\x8c\x11subdomain1-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94h<\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah?h\x1b\x8c\x05third\x94h\x1dhKubs\x8c\x11test2.example.com\x94h\x08\x86\x94h\x05)\x81\x94\x8c\x11subdomain2-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94hL\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ahOh\x1b\x8c\x06fourth\x94h\x1dh[ubs\x8c\rdifferent.org\x94h\x08\x86\x94h\x05)\x81\x94\x8c\x17different-domain-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94h\\\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah_h\x1b\x8c\x05sixth\x94h\x1dhkubs\x8c\nsecure.com\x94h\x08\x86\x94h\x05)\x81\x94\x8c\rsecure-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94hl\x8c\x07max-age\x94h\x08\x8c\x06secure\x94\x88\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ahoh\x1b\x8c\x07seventh\x94h\x1dh{ubs\x8c\x0cpathtest.com\x94h\x08\x86\x94h\x05)\x81\x94(\x8c\x0eno-path-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94h|\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah\x7fh\x1b\x8c\x06eighth\x94h\x1dh\x8bub\x8c\x0cpath1-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94\x8c\x0cpathtest.com\x94\x8
c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah\x8ch\x1b\x8c\x05ninth\x94h\x1dh\x99ubu\x8c\x0cpathtest.com\x94\x8c\x04/one\x94\x86\x94h\x05)\x81\x94\x8c\x0cpath2-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x9b\x8c\x07comment\x94h\x08\x8c\x06domain\x94h\x9a\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah\x9eh\x1b\x8c\x05tenth\x94h\x1dh\xaaubs\x8c\x0cpathtest.com\x94\x8c\x08/one/two\x94\x86\x94h\x05)\x81\x94(\x8c\x0cpath3-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\xac\x8c\x07comment\x94h\x08\x8c\x06domain\x94h\xab\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah\xafh\x1b\x8c\x08eleventh\x94h\x1dh\xbbub\x8c\x0cpath4-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94\x8c\t/one/two/\x94\x8c\x07comment\x94h\x08\x8c\x06domain\x94\x8c\x0cpathtest.com\x94\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah\xbch\x1b\x8c\x07twelfth\x94h\x1dh\xcaubu\x8c\x0fexpirestest.com\x94h\x08\x86\x94h\x05)\x81\x94\x8c\x0eexpires-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94\x8c\x1cTue, 1 Jan 2999 12:00:00 
GMT\x94\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94h\xcb\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah\xceh\x1b\x8c\nthirteenth\x94h\x1dh\xdbubs\x8c\x0emaxagetest.com\x94h\x08\x86\x94h\x05)\x81\x94\x8c\x0emax-age-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94h\xdc\x8c\x07max-age\x94\x8c\x0260\x94\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah\xdfh\x1b\x8c\nfourteenth\x94h\x1dh\xecubs\x8c\x12invalid-values.com\x94h\x08\x86\x94h\x05)\x81\x94(\x8c\x16invalid-max-age-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94h\xed\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah\xf0h\x1b\x8c\tfifteenth\x94h\x1dh\xfcub\x8c\x16invalid-expires-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94\x8c\x12invalid-values.com\x94\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah\xfdh\x1b\x8c\tsixteenth\x94h\x1dj\n\x01\x00\x00ubuu." cookies = pickle.loads(pickled) cj = CookieJar() From 1a389a53b21305d05cbe9f4fa64b271b5c42015f Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 20 Jan 2024 16:33:07 -1000 Subject: [PATCH 0065/1511] Fix duplicate cookie expiration calls in the CookieJar implementation (#7784) (#8047) Co-authored-by: Sam Bull --- CHANGES/7784.bugfix | 1 + aiohttp/cookiejar.py | 7 ++++++- tests/test_cookiejar.py | 23 ++++++++++++++++++++++- 3 files changed, 29 insertions(+), 2 deletions(-) create mode 100644 CHANGES/7784.bugfix diff --git a/CHANGES/7784.bugfix b/CHANGES/7784.bugfix new file mode 100644 index 00000000000..1f8ba8ddb44 --- /dev/null +++ b/CHANGES/7784.bugfix @@ -0,0 +1 @@ +Fix duplicate cookie expiration calls in the CookieJar implementation diff --git a/aiohttp/cookiejar.py b/aiohttp/cookiejar.py index 87f05a0de3c..3c7629c7f33 100644 --- a/aiohttp/cookiejar.py +++ b/aiohttp/cookiejar.py @@ -154,7 +154,12 @@ def __iter__(self) -> "Iterator[Morsel[str]]": yield from val.values() def __len__(self) -> int: - return sum(1 for i in self) + """Return number of cookies. + + This function does not iterate self to avoid unnecessary expiration + checks. 
+ """ + return sum(len(cookie.values()) for cookie in self._cookies.values()) def _do_expiration(self) -> None: self.clear(lambda x: False) diff --git a/tests/test_cookiejar.py b/tests/test_cookiejar.py index 70ab0a4864e..261dbecd992 100644 --- a/tests/test_cookiejar.py +++ b/tests/test_cookiejar.py @@ -854,7 +854,28 @@ async def test_cookie_jar_clear_expired(): assert len(sut) == 0 -async def test_cookie_jar_clear_domain(): +async def test_cookie_jar_filter_cookies_expires(): + """Test that calling filter_cookies will expire stale cookies.""" + jar = CookieJar() + assert len(jar) == 0 + + cookie = SimpleCookie() + + cookie["foo"] = "bar" + cookie["foo"]["expires"] = "Tue, 1 Jan 1990 12:00:00 GMT" + + with freeze_time("1980-01-01"): + jar.update_cookies(cookie) + + assert len(jar) == 1 + + # filter_cookies should expire stale cookies + jar.filter_cookies(URL("http://any.com/")) + + assert len(jar) == 0 + + +async def test_cookie_jar_clear_domain() -> None: sut = CookieJar() cookie = SimpleCookie() cookie["foo"] = "bar" From eead35ca5b8aeb1a14ee3cefabe5b1abe52088ce Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 21 Jan 2024 21:59:27 +0000 Subject: [PATCH 0066/1511] [PR #8049/a379e634 backport][3.10] Set cause for ClientPayloadError (#8051) **This is a backport of PR #8049 as merged into master (a379e6344432d5c033f78c2733fe69659e3cff50).** --- aiohttp/client_proto.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index 321860a9fe5..1ab8acd27b0 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -81,11 +81,11 @@ def connection_lost(self, exc: Optional[BaseException]) -> None: if self._parser is not None: try: uncompleted = self._parser.feed_eof() - except Exception: + except Exception as e: if self._payload is not None: - self._payload.set_exception( - ClientPayloadError("Response payload is not completed") - ) + 
exc = ClientPayloadError("Response payload is not completed") + exc.__cause__ = e + self._payload.set_exception(exc) if not self.is_eof(): if isinstance(exc, OSError): From 88a19f67be20c6d95d7abaae660926f9c9ad5f7d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sviatoslav=20Sydorenko=20=28=D0=A1=D0=B2=D1=8F=D1=82=D0=BE?= =?UTF-8?q?=D1=81=D0=BB=D0=B0=D0=B2=20=D0=A1=D0=B8=D0=B4=D0=BE=D1=80=D0=B5?= =?UTF-8?q?=D0=BD=D0=BA=D0=BE=29?= Date: Sun, 28 Jan 2024 03:34:34 +0100 Subject: [PATCH 0067/1511] =?UTF-8?q?[PR=20#8066/cba34699=20backport][3.10?= =?UTF-8?q?]=20=F0=9F=92=85=F0=9F=93=9D=20Restructure=20the=20changelog=20?= =?UTF-8?q?for=20clarity=20(#8067)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **This is a backport of PR #8066 as merged into master (cba346995b953b23421079ee0bccdfe85d736e7a).** PR #8066 (cherry picked from commit cba346995b953b23421079ee0bccdfe85d736e7a) ## What do these changes do? ## Are there changes in behavior for the user? ## Related issue number ## Checklist - [ ] I think the code is well written - [ ] Unit tests for the changes exist - [ ] Documentation reflects the changes - [ ] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. - [ ] Add a new news fragment into the `CHANGES/` folder * name it `..rst` (e.g. `588.bugfix.rst`) * if you don't have an issue number, change it to the pull request number after creating the PR * `.bugfix`: A bug fix for something the maintainers deemed an improper undesired behavior that got corrected to match pre-agreed expectations. * `.feature`: A new behavior, public APIs. That sort of stuff. * `.deprecation`: A declaration of future API removals and breaking changes in behavior. * `.breaking`: When something public is removed in a breaking way. Could be deprecated in an earlier release. 
* `.doc`: Notable updates to the documentation structure or build process. * `.packaging`: Notes for downstreams about unobvious side effects and tooling. Changes in the test invocation considerations and runtime assumptions. * `.contrib`: Stuff that affects the contributor experience. e.g. Running tests, building the docs, setting up the development environment. * `.misc`: Changes that are hard to assign to any of the above categories. * Make sure to use full sentences with correct case and punctuation, for example: ```rst Fixed issue with non-ascii contents in doctest text files -- by :user:`contributor-gh-handle`. ``` Use the past tense or the present tense a non-imperative mood, referring to what's changed compared to the last released version of this project. --- .github/PULL_REQUEST_TEMPLATE.md | 42 +++++++++++++----- .github/workflows/ci-cd.yml | 6 ++- .pre-commit-config.yaml | 30 ++++++++++++- CHANGES/.TEMPLATE.rst | 52 +++++++++++++++++++++-- CHANGES/.gitignore | 27 ++++++++++++ CHANGES/8066.contrib.rst | 21 +++++++++ CHANGES/8066.packaging.rst | 1 + CHANGES/README.rst | 25 ++++++++--- docs/spelling_wordlist.txt | 7 +++ pyproject.toml | 73 +++++++++++++++++++++++++++++--- tools/check_changes.py | 17 +++++++- tools/cleanup_changes.py | 24 ++++++++--- 12 files changed, 289 insertions(+), 36 deletions(-) create mode 100644 CHANGES/8066.contrib.rst create mode 120000 CHANGES/8066.packaging.rst diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 3ac54a518b5..686f70cd975 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -21,13 +21,35 @@ - [ ] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. 
-- [ ] Add a new news fragment into the `CHANGES` folder - * name it `.` for example (588.bugfix) - * if you don't have an `issue_id` change it to the pr id after creating the pr - * ensure type is one of the following: - * `.feature`: Signifying a new feature. - * `.bugfix`: Signifying a bug fix. - * `.doc`: Signifying a documentation improvement. - * `.removal`: Signifying a deprecation or removal of public API. - * `.misc`: A ticket has been closed, but it is not of interest to users. - * Make sure to use full sentences with correct case and punctuation, for example: "Fix issue with non-ascii contents in doctest text files." +- [ ] Add a new news fragment into the `CHANGES/` folder + * name it `..rst` (e.g. `588.bugfix.rst`) + * if you don't have an issue number, change it to the pull request + number after creating the PR + * `.bugfix`: A bug fix for something the maintainers deemed an + improper undesired behavior that got corrected to match + pre-agreed expectations. + * `.feature`: A new behavior, public APIs. That sort of stuff. + * `.deprecation`: A declaration of future API removals and breaking + changes in behavior. + * `.breaking`: When something public is removed in a breaking way. + Could be deprecated in an earlier release. + * `.doc`: Notable updates to the documentation structure or build + process. + * `.packaging`: Notes for downstreams about unobvious side effects + and tooling. Changes in the test invocation considerations and + runtime assumptions. + * `.contrib`: Stuff that affects the contributor experience. e.g. + Running tests, building the docs, setting up the development + environment. + * `.misc`: Changes that are hard to assign to any of the above + categories. + * Make sure to use full sentences with correct case and punctuation, + for example: + ```rst + Fixed issue with non-ascii contents in doctest text files + -- by :user:`contributor-gh-handle`. 
+ ``` + + Use the past tense or the present tense a non-imperative mood, + referring to what's changed compared to the last released version + of this project. diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 15a79d52ab5..95c8a985e78 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -414,8 +414,10 @@ jobs: version_file: aiohttp/__init__.py github_token: ${{ secrets.GITHUB_TOKEN }} dist_dir: dist - fix_issue_regex: "`#(\\d+) `_" - fix_issue_repl: "(#\\1)" + fix_issue_regex: >- + :issue:`(\d+)` + fix_issue_repl: >- + #\1 - name: >- Publish 🐍📦 to PyPI diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 587c46e991d..d11ab1bfa32 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -6,9 +6,35 @@ repos: language: fail entry: >- Changelog files must be named - ####.(bugfix|feature|removal|doc|misc)(.#)?(.rst)? + ####.( + bugfix + | feature + | deprecation + | breaking + | doc + | packaging + | contrib + | misc + )(.#)?(.rst)? exclude: >- - ^CHANGES/(\.TEMPLATE\.rst|\.gitignore|\d+\.(bugfix|feature|removal|doc|misc)(\.\d+)?(\.rst)?|README\.rst)$ + (?x) + ^ + CHANGES/( + \.gitignore + |(\d+|[0-9a-f]{8}|[0-9a-f]{7}|[0-9a-f]{40})\.( + bugfix + |feature + |deprecation + |breaking + |doc + |packaging + |contrib + |misc + )(\.\d+)?(\.rst)? 
+ |README\.rst + |\.TEMPLATE\.rst + ) + $ files: ^CHANGES/ - id: changelogs-user-role name: Changelog files should use a non-broken :user:`name` role diff --git a/CHANGES/.TEMPLATE.rst b/CHANGES/.TEMPLATE.rst index a27a1994b53..9334cefd84f 100644 --- a/CHANGES/.TEMPLATE.rst +++ b/CHANGES/.TEMPLATE.rst @@ -11,11 +11,56 @@ {{ underline * definitions[category]['name']|length }} {% if definitions[category]['showcontent'] %} -{% for text, values in sections[section][category].items() %} +{% for text, change_note_refs in sections[section][category].items() %} - {{ text + '\n' }} - {{ values|join(',\n ') + '\n' }} -{% endfor %} + {# + NOTE: Replacing 'e' with 'f' is a hack that prevents Jinja's `int` + NOTE: filter internal implementation from treating the input as an + NOTE: infinite float when it looks like a scientific notation (with a + NOTE: single 'e' char in between digits), raising an `OverflowError`, + NOTE: subsequently. 'f' is still a hex letter so it won't affect the + NOTE: check for whether it's a (short or long) commit hash or not. + Ref: https://github.com/pallets/jinja/issues/1921 + -#} + {%- + set pr_issue_numbers = change_note_refs + | map('lower') + | map('replace', 'e', 'f') + | map('int', default=None) + | select('integer') + | map('string') + | list + -%} + {%- set arbitrary_refs = [] -%} + {%- set commit_refs = [] -%} + {%- with -%} + {%- set commit_ref_candidates = change_note_refs | reject('in', pr_issue_numbers) -%} + {%- for cf in commit_ref_candidates -%} + {%- if cf | length in (7, 8, 40) and cf | int(default=None, base=16) is not none -%} + {%- set _ = commit_refs.append(cf) -%} + {%- else -%} + {%- set _ = arbitrary_refs.append(cf) -%} + {%- endif -%} + {%- endfor -%} + {%- endwith -%} + + {% if pr_issue_numbers -%} + *Related issues and pull requests on GitHub:* + :issue:`{{ pr_issue_numbers | join('`, :issue:`') }}`. 
+ {% endif %} + + {% if commit_refs -%} + *Related commits on GitHub:* + :commit:`{{ commit_refs | join('`, :commit:`') }}`. + {% endif %} + + {% if arbitrary_refs -%} + *Unlinked references:* + {{ arbitrary_refs | join(', ') }}`. + {% endif %} + +{% endfor %} {% else %} - {{ sections[section][category]['']|join(', ') }} @@ -34,3 +79,4 @@ No significant changes. {% endif %} {% endfor %} ---- +{{ '\n' * 2 }} diff --git a/CHANGES/.gitignore b/CHANGES/.gitignore index f935021a8f8..d6409a0dd82 100644 --- a/CHANGES/.gitignore +++ b/CHANGES/.gitignore @@ -1 +1,28 @@ +* +!.TEMPLATE.rst !.gitignore +!README.rst +!*.bugfix +!*.bugfix.rst +!*.bugfix.*.rst +!*.breaking +!*.breaking.rst +!*.breaking.*.rst +!*.contrib +!*.contrib.rst +!*.contrib.*.rst +!*.deprecation +!*.deprecation.rst +!*.deprecation.*.rst +!*.doc +!*.doc.rst +!*.doc.*.rst +!*.feature +!*.feature.rst +!*.feature.*.rst +!*.misc +!*.misc.rst +!*.misc.*.rst +!*.packaging +!*.packaging.rst +!*.packaging.*.rst diff --git a/CHANGES/8066.contrib.rst b/CHANGES/8066.contrib.rst new file mode 100644 index 00000000000..2468018e99b --- /dev/null +++ b/CHANGES/8066.contrib.rst @@ -0,0 +1,21 @@ +The changelog categorization was made clearer. The +contributors can now mark their fragment files more +accurately -- by :user:`webknjaz`. + +The new category tags are: + + * ``bugfix`` + + * ``feature`` + + * ``deprecation`` + + * ``breaking`` (previously, ``removal``) + + * ``doc`` + + * ``packaging`` + + * ``contrib`` + + * ``misc`` diff --git a/CHANGES/8066.packaging.rst b/CHANGES/8066.packaging.rst new file mode 120000 index 00000000000..57cdff225f5 --- /dev/null +++ b/CHANGES/8066.packaging.rst @@ -0,0 +1 @@ +8066.contrib.rst \ No newline at end of file diff --git a/CHANGES/README.rst b/CHANGES/README.rst index 9f619296351..bf467d2bc07 100644 --- a/CHANGES/README.rst +++ b/CHANGES/README.rst @@ -43,7 +43,7 @@ with your own!). 
Finally, name your file following the convention that Towncrier understands: it should start with the number of an issue or a PR followed by a dot, then add a patch type, like ``feature``, -``doc``, ``misc`` etc., and add ``.rst`` as a suffix. If you +``doc``, ``contrib`` etc., and add ``.rst`` as a suffix. If you need to add more than one fragment, you may add an optional sequence number (delimited with another period) between the type and the suffix. @@ -51,11 +51,24 @@ and the suffix. In general the name will follow ``..rst`` pattern, where the categories are: -- ``feature``: Any new feature -- ``bugfix``: A bug fix -- ``doc``: A change to the documentation -- ``misc``: Changes internal to the repo like CI, test and build changes -- ``removal``: For deprecations and removals of an existing feature or behavior +- ``bugfix``: A bug fix for something we deemed an improper undesired + behavior that got corrected in the release to match pre-agreed + expectations. +- ``feature``: A new behavior, public APIs. That sort of stuff. +- ``deprecation``: A declaration of future API removals and breaking + changes in behavior. +- ``breaking``: When something public gets removed in a breaking way. + Could be deprecated in an earlier release. +- ``doc``: Notable updates to the documentation structure or build + process. +- ``packaging``: Notes for downstreams about unobvious side effects + and tooling. Changes in the test invocation considerations and + runtime assumptions. +- ``contrib``: Stuff that affects the contributor experience. e.g. + Running tests, building the docs, setting up the development + environment. +- ``misc``: Changes that are hard to assign to any of the above + categories. 
A pull request may have more than one of these components, for example a code change may introduce a new feature that deprecates an old diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index 1523ccd2a65..768d52cfd05 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -17,6 +17,7 @@ Arsenic async asyncio asyncpg +attrs auth autocalculated autodetection @@ -34,6 +35,7 @@ backports BaseEventLoop basename BasicAuth +behaviour BodyPartReader boolean botocore @@ -89,6 +91,7 @@ Cythonize cythonized de deduplicate +defs Dependabot deprecations DER @@ -104,6 +107,7 @@ DNSResolver docstring docstrings DoS +downstreams Dup elasticsearch encodings @@ -313,6 +317,8 @@ Testsuite Tf timestamps TLS +tmp +tmpdir toolbar toplevel towncrier @@ -329,6 +335,7 @@ Unittest unix unsets unstripped +untyped uppercased upstr url diff --git a/pyproject.toml b/pyproject.toml index 1f590d002ef..85d7c87eb34 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,12 +5,73 @@ requires = [ build-backend = "setuptools.build_meta" [tool.towncrier] -package = "aiohttp" -filename = "CHANGES.rst" -directory = "CHANGES/" -title_format = "{version} ({project_date})" -template = "CHANGES/.TEMPLATE.rst" -issue_format = "`#{issue} `_" + package = "aiohttp" + filename = "CHANGES.rst" + directory = "CHANGES/" + title_format = "{version} ({project_date})" + template = "CHANGES/.TEMPLATE.rst" + issue_format = "{issue}" + + # NOTE: The types are declared because: + # NOTE: - there is no mechanism to override just the value of + # NOTE: `tool.towncrier.type.misc.showcontent`; + # NOTE: - and, we want to declare extra non-default types for + # NOTE: clarity and flexibility. + + [[tool.towncrier.section]] + path = "" + + [[tool.towncrier.type]] + # Something we deemed an improper undesired behavior that got corrected + # in the release to match pre-agreed expectations. 
+ directory = "bugfix" + name = "Bug fixes" + showcontent = true + + [[tool.towncrier.type]] + # New behaviors, public APIs. That sort of stuff. + directory = "feature" + name = "Features" + showcontent = true + + [[tool.towncrier.type]] + # Declarations of future API removals and breaking changes in behavior. + directory = "deprecation" + name = "Deprecations (removal in next major release)" + showcontent = true + + [[tool.towncrier.type]] + # When something public gets removed in a breaking way. Could be + # deprecated in an earlier release. + directory = "breaking" + name = "Removals and backward incompatible breaking changes" + showcontent = true + + [[tool.towncrier.type]] + # Notable updates to the documentation structure or build process. + directory = "doc" + name = "Improved documentation" + showcontent = true + + [[tool.towncrier.type]] + # Notes for downstreams about unobvious side effects and tooling. Changes + # in the test invocation considerations and runtime assumptions. + directory = "packaging" + name = "Packaging updates and notes for downstreams" + showcontent = true + + [[tool.towncrier.type]] + # Stuff that affects the contributor experience. e.g. Running tests, + # building the docs, setting up the development environment. + directory = "contrib" + name = "Contributor-facing changes" + showcontent = true + + [[tool.towncrier.type]] + # Changes that are hard to assign to any of the above categories. 
+ directory = "misc" + name = "Miscellaneous internal changes" + showcontent = true [tool.cibuildwheel] diff --git a/tools/check_changes.py b/tools/check_changes.py index da806e014f3..118d1182b9a 100755 --- a/tools/check_changes.py +++ b/tools/check_changes.py @@ -4,8 +4,21 @@ import sys from pathlib import Path -ALLOWED_SUFFIXES = ["feature", "bugfix", "doc", "removal", "misc"] -PATTERN = re.compile(r"\d+\.(" + "|".join(ALLOWED_SUFFIXES) + r")(\.\d+)?(\.rst)?") +ALLOWED_SUFFIXES = ( + "bugfix", + "feature", + "deprecation", + "breaking", + "doc", + "packaging", + "contrib", + "misc", +) +PATTERN = re.compile( + r"(\d+|[0-9a-f]{8}|[0-9a-f]{7}|[0-9a-f]{40})\.(" + + "|".join(ALLOWED_SUFFIXES) + + r")(\.\d+)?(\.rst)?", +) def get_root(script_path): diff --git a/tools/cleanup_changes.py b/tools/cleanup_changes.py index 673866b8d67..5b931138056 100755 --- a/tools/cleanup_changes.py +++ b/tools/cleanup_changes.py @@ -7,8 +7,21 @@ import subprocess from pathlib import Path -ALLOWED_SUFFIXES = ["feature", "bugfix", "doc", "removal", "misc"] -PATTERN = re.compile(r"(\d+)\.(" + "|".join(ALLOWED_SUFFIXES) + r")(\.\d+)?(\.rst)?") +ALLOWED_SUFFIXES = ( + "bugfix", + "feature", + "deprecation", + "breaking", + "doc", + "packaging", + "contrib", + "misc", +) +PATTERN = re.compile( + r"(\d+|[0-9a-f]{8}|[0-9a-f]{7}|[0-9a-f]{40})\.(" + + "|".join(ALLOWED_SUFFIXES) + + r")(\.\d+)?(\.rst)?", +) def main(): @@ -18,9 +31,10 @@ def main(): for fname in (root / "CHANGES").iterdir(): match = PATTERN.match(fname.name) if match is not None: - num = match.group(1) - tst = f"`#{num} `_" - if tst in changes: + commit_issue_or_pr = match.group(1) + tst_issue_or_pr = f":issue:`{commit_issue_or_pr}`" + tst_commit = f":commit:`{commit_issue_or_pr}`" + if tst_issue_or_pr in changes or tst_commit in changes: subprocess.run(["git", "rm", fname]) delete.append(fname.name) print("Deleted CHANGES records:", " ".join(delete)) From d9c7f56dcbf860b2a06899189f23d2f21149b775 Mon Sep 17 00:00:00 2001 From: 
"patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 28 Jan 2024 04:11:04 +0100 Subject: [PATCH 0068/1511] =?UTF-8?q?[PR=20#8069/69bbe874=20backport][3.10?= =?UTF-8?q?]=20=F0=9F=93=9D=20Only=20show=20changelog=20draft=20for=20non-?= =?UTF-8?q?releases=20(#8071)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **This is a backport of PR #8069 as merged into master (69bbe87400f826595d0f87bb2233cb1dae24ac84).** ## What do these changes do? $sbj. ## Are there changes in behavior for the user? RTD for tagged docs builds won't show the changelog draft (which should be empty in this context). ## Related issue number N/A ## Checklist - [x] I think the code is well written - [x] Unit tests for the changes exist - [x] Documentation reflects the changes - [x] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. - [x] Add a new news fragment into the `CHANGES/` folder * name it `..rst` (e.g. `588.bugfix.rst`) * if you don't have an issue number, change it to the pull request number after creating the PR * `.bugfix`: A bug fix for something the maintainers deemed an improper undesired behavior that got corrected to match pre-agreed expectations. * `.feature`: A new behavior, public APIs. That sort of stuff. * `.deprecation`: A declaration of future API removals and breaking changes in behavior. * `.breaking`: When something public is removed in a breaking way. Could be deprecated in an earlier release. * `.doc`: Notable updates to the documentation structure or build process. * `.packaging`: Notes for downstreams about unobvious side effects and tooling. Changes in the test invocation considerations and runtime assumptions. * `.contrib`: Stuff that affects the contributor experience. e.g. Running tests, building the docs, setting up the development environment. 
* `.misc`: Changes that are hard to assign to any of the above categories. * Make sure to use full sentences with correct case and punctuation, for example: ```rst Fixed issue with non-ascii contents in doctest text files -- by :user:`contributor-gh-handle`. ``` Use the past tense or the present tense a non-imperative mood, referring to what's changed compared to the last released version of this project. Co-authored-by: Sviatoslav Sydorenko --- CHANGES/8067.doc.rst | 3 +++ docs/changes.rst | 13 ++++++++----- docs/conf.py | 6 ++++++ 3 files changed, 17 insertions(+), 5 deletions(-) create mode 100644 CHANGES/8067.doc.rst diff --git a/CHANGES/8067.doc.rst b/CHANGES/8067.doc.rst new file mode 100644 index 00000000000..3206db9ae87 --- /dev/null +++ b/CHANGES/8067.doc.rst @@ -0,0 +1,3 @@ +The Sphinx setup was updated to avoid showing the empty +changelog draft section in the tagged release documentation +builds on Read The Docs -- by :user:`webknjaz`. diff --git a/docs/changes.rst b/docs/changes.rst index 6a61dfbcc1e..089f67235a1 100644 --- a/docs/changes.rst +++ b/docs/changes.rst @@ -4,14 +4,17 @@ Changelog ========= -To be included in v\ |release| (if present) -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +.. only:: not is_release -.. towncrier-draft-entries:: |release| [UNRELEASED DRAFT] + To be included in v\ |release| (if present) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Released versions -^^^^^^^^^^^^^^^^^ + .. towncrier-draft-entries:: |release| [UNRELEASED DRAFT] + + Released versions + ^^^^^^^^^^^^^^^^^ .. include:: ../CHANGES.rst + :start-after: .. towncrier release notes start .. 
include:: ../HISTORY.rst diff --git a/docs/conf.py b/docs/conf.py index d3e04f3d48e..f21366fb488 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -18,6 +18,12 @@ from pathlib import Path PROJECT_ROOT_DIR = Path(__file__).parents[1].resolve() +IS_RELEASE_ON_RTD = ( + os.getenv("READTHEDOCS", "False") == "True" + and os.environ["READTHEDOCS_VERSION_TYPE"] == "tag" +) +if IS_RELEASE_ON_RTD: + tags.add("is_release") _docs_path = os.path.dirname(__file__) _version_path = os.path.abspath( From d445803848a1aeed2f67d2f4530773d34cdab647 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 28 Jan 2024 15:29:48 +0000 Subject: [PATCH 0069/1511] [PR #7916/822fbc74 backport][3.10] Add more information to contributing page (#8073) **This is a backport of PR #7916 as merged into master (822fbc7431f3c5522d3e587ad0b658bef8b6a0ab).** Co-authored-by: Sam Bull --- CHANGES/7916.doc | 1 + CHANGES/README.rst | 29 +- docs/_static/img/contributing-cov-comment.svg | 55 ++ docs/_static/img/contributing-cov-header.svg | 15 + docs/_static/img/contributing-cov-miss.svg | 709 ++++++++++++++++++ docs/_static/img/contributing-cov-partial.svg | 268 +++++++ docs/contributing.rst | 124 +-- docs/spelling_wordlist.txt | 1 + 8 files changed, 1137 insertions(+), 65 deletions(-) create mode 100644 CHANGES/7916.doc create mode 100644 docs/_static/img/contributing-cov-comment.svg create mode 100644 docs/_static/img/contributing-cov-header.svg create mode 100644 docs/_static/img/contributing-cov-miss.svg create mode 100644 docs/_static/img/contributing-cov-partial.svg diff --git a/CHANGES/7916.doc b/CHANGES/7916.doc new file mode 100644 index 00000000000..b616ae85bbe --- /dev/null +++ b/CHANGES/7916.doc @@ -0,0 +1 @@ +Updated :ref:`contributing/Tests coverage ` section to show how we use ``codecov`` -- by :user:`Dreamsorcerer`. 
diff --git a/CHANGES/README.rst b/CHANGES/README.rst index bf467d2bc07..5beb8999226 100644 --- a/CHANGES/README.rst +++ b/CHANGES/README.rst @@ -1,7 +1,15 @@ -.. _Adding change notes with your PRs: +.. _Making a pull request: + +Making a pull request +===================== + +When making a pull request, please include a short summary of the changes +and a reference to any issue tickets that the PR is intended to solve. +All PRs with code changes should include tests. All changes should +include a changelog entry. Adding change notes with your PRs -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +--------------------------------- It is very important to maintain a log for news of how updating to the new version of the software will affect @@ -9,7 +17,7 @@ end-users. This is why we enforce collection of the change fragment files in pull requests as per `Towncrier philosophy`_. The idea is that when somebody makes a change, they must record -the bits that would affect end-users only including information +the bits that would affect end-users, only including information that would be useful to them. Then, when the maintainers publish a new release, they'll automatically use these records to compose a change log for the respective version. It is important to @@ -19,7 +27,7 @@ to the end-users most of the time. And so such details should be recorded in the Git history rather than a changelog. Alright! So how to add a news fragment? -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +--------------------------------------- ``aiohttp`` uses `towncrier `_ for changelog management. @@ -34,11 +42,14 @@ for the users to understand what it means. combined with others, it will be a part of the "news digest" telling the readers **what changed** in a specific version of the library *since the previous version*. 
You should also use -reStructuredText syntax for highlighting code (inline or block), +*reStructuredText* syntax for highlighting code (inline or block), linking parts of the docs or external sites. -If you wish to sign your change, feel free to add ``-- by -:user:`github-username``` at the end (replace ``github-username`` -with your own!). +However, you do not need to reference the issue or PR numbers here +as *towncrier* will automatically add a reference to all of the +affected issues when rendering the news file. +If you wish to sign your change, feel free to add +``-- by :user:`github-username``` at the end (replace +``github-username`` with your own!). Finally, name your file following the convention that Towncrier understands: it should start with the number of an issue or a @@ -77,7 +88,7 @@ necessary to make a separate documentation fragment for documentation changes accompanying the relevant code changes. Examples for adding changelog entries to your Pull Requests -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +----------------------------------------------------------- File :file:`CHANGES/6045.doc.1.rst`: diff --git a/docs/_static/img/contributing-cov-comment.svg b/docs/_static/img/contributing-cov-comment.svg new file mode 100644 index 00000000000..c5ba1005641 --- /dev/null +++ b/docs/_static/img/contributing-cov-comment.svg @@ -0,0 +1,55 @@ + + + + + + + + + + + + + + + + + + + + + Hits 31428 31504 +76 + + + + + + + + + + + + - + + + + Misses 632 633 +1 + + + + + + + + + + + + - + + + + Partials 203 205 +2 + + + diff --git a/docs/_static/img/contributing-cov-header.svg b/docs/_static/img/contributing-cov-header.svg new file mode 100644 index 00000000000..f51c8957cd1 --- /dev/null +++ b/docs/_static/img/contributing-cov-header.svg @@ -0,0 +1,15 @@ + + + + + + + + + Codecov + + + + Report + + diff --git a/docs/_static/img/contributing-cov-miss.svg b/docs/_static/img/contributing-cov-miss.svg new file mode 100644 index 
00000000000..d431cd0f1fc --- /dev/null +++ b/docs/_static/img/contributing-cov-miss.svg @@ -0,0 +1,709 @@ + + + + + + + + + + + + + + 733 + + + + + + + + + + + + + + 740 + + + + + + + + + + + + + + + + + + async + + + + + + + + + + def + + + + + + + + + + resolve + + + + + ( + + + + + self + + + + + , + + + + + request + + + + + : + + + + + Request + + + + + ) + + + + + + + + + + - + + + + + > + + + + + _Resolve + + + + + : + + + + + + + + + + + 15 + + + + + + + + + + + + + + + + + 734 + + + + + + + + + + + + + + 741 + + + + + + + + + + + + + + + + + + + + + + + if + + + + + + + + + + ( + + + + + + + + + ! + + + + + + + + + + + + + + + + + + + + + + 735 + + + + + + + + + + + + + 742 + + + + + + + + + + + + + + + not + + + + + request + + + + + . + + + + + url + + + + + . + + + + + raw_path + + + + + . + + + + + startswith + + + + + ( + + + + + self + + + + + . + + + + + _prefix2 + + + + + ) + + + + + + + + + + + + + + + + + + 736 + + + + + + + + + + + + + 743 + + + + + + + + + + + + + + + and + + + + + request + + + + + . + + + + + url + + + + + . + + + + + raw_path + + + + + != + + + + + self + + + + + . + + + + + _prefix + + + + + + + + + + + + + + + + + 737 + + + + + + + + + + + + + 744 + + + + + + + + + + + + + + + ) + + + + + : + + + + + + + + + + + + + + + + + + + 738 + + + + + + + + + + + + + + 745 + + + + + + + + + + + + + + + + + + + + + + + return + + + + + + + + + + None + + + + + , + + + + + + + + + + set + + + + + ( + + + + + ) + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 739 + + + + + + + + + + + + + + 746 + + + + + + + + + + + + + + + + + + match_info + + + + + = + + + + + + + + + + await + + + + + self + + + + + . + + + + + _app + + + + + . + + + + + router + + + + + . 
+ + + + + resolve + + + + + ( + + + + + request + + + + + ) + + + + + + + + + + + 15 + + + + + + + diff --git a/docs/_static/img/contributing-cov-partial.svg b/docs/_static/img/contributing-cov-partial.svg new file mode 100644 index 00000000000..5eceb26b9eb --- /dev/null +++ b/docs/_static/img/contributing-cov-partial.svg @@ -0,0 +1,268 @@ + + + + + + + + + + + + + + 1001 + + + + + + + + + + + + + + + + + + + + url_part + + + + + = + + + + + request + + + + + . + + + + + rel_url + + + + + . + + + + + raw_path + + + + + + + + + + 15 + + + + + + + + + + + + + + + + + 1002 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + while + + + + + url_part + + + + + : + + + + + + + + + ! + + + + + + + + + + + + + + + + + + + + + + + 1003 + + + + + + + + + + + + + + + + + + + + + + + + + for + + + + + candidate + + + + + in + + + + + resource_index + + + + + . + + + + + get + + + + + ( + + + + + url_part + + + + + , + + + + + + + + + + ( + + + + + ) + + + + + ) + + + + + : + + + + + + + + + + + 15 + + + + + + + diff --git a/docs/contributing.rst b/docs/contributing.rst index 5263f4a3f47..84d223d0e0b 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -1,12 +1,12 @@ .. _aiohttp-contributing: Contributing -============ +************ (:doc:`contributing-admins`) Instructions for contributors ------------------------------ +============================= In order to make a clone of the GitHub_ repo: open the link and press the "Fork" button on the upper-right menu of the web page. @@ -25,7 +25,7 @@ Workflow is pretty straightforward: 4. Make sure all tests passed - 5. Add a file into the ``CHANGES`` folder (see `Changelog update`_ for how). + 5. Add a file into the ``CHANGES`` folder (see `Making a pull request`_ for how). 6. 
Commit changes to your own aiohttp clone @@ -53,7 +53,7 @@ Workflow is pretty straightforward: Preconditions for running aiohttp test suite --------------------------------------------- +============================================ We expect you to use a python virtual environment to run our tests. @@ -116,7 +116,7 @@ Congratulations, you are ready to run the test suite! Run autoformatter ------------------ +================= The project uses black_ + isort_ formatters to keep the source code style. Please run `make fmt` after every change before starting tests. @@ -127,7 +127,7 @@ Please run `make fmt` after every change before starting tests. Run aiohttp test suite ----------------------- +====================== After all the preconditions are met you can run tests typing the next command: @@ -158,35 +158,75 @@ Any extra texts (print statements and so on) should be removed. make test-3.10-no-extensions -Tests coverage --------------- +Code coverage +============= -We are trying hard to have good test coverage; please don't make it worse. +We use *codecov.io* as an indispensable tool for analyzing our coverage +results. Visit https://codecov.io/gh/aio-libs/aiohttp to see coverage +reports for the master branch, history, pull requests etc. -Use: +We'll use an example from a real PR to demonstrate how we use this. +Once the tests run in a PR, you'll see a comment posted by *codecov*. +The most important thing to check here is whether there are any new +missed or partial lines in the report: -.. code-block:: shell +.. image:: _static/img/contributing-cov-comment.svg + +Here, the PR has introduced 1 miss and 2 partials. Now we +click the link in the comment header to open the full report: + +.. image:: _static/img/contributing-cov-header.svg + :alt: Codecov report + +Now, if we look through the diff under 'Files changed' we find one of +our partials: - $ make cov-dev +.. image:: _static/img/contributing-cov-partial.svg + :alt: A while loop with partial coverage. 
-to run test suite and collect coverage information. Once the command -has finished check your coverage at the file that appears in the last -line of the output: -``open file:///.../aiohttp/htmlcov/index.html`` +In this case, the while loop is never skipped in our tests. This is +probably not worth writing a test for (and may be a situation that is +impossible to trigger anyway), so we leave this alone. -Please go to the link and make sure that your code change is covered. +We're still missing a partial and a miss, so we switch to the +'Indirect changes' tab and take a look through the diff there. This +time we find the remaining 2 lines: +.. image:: _static/img/contributing-cov-miss.svg + :alt: An if statement that isn't covered anymore. -The project uses *codecov.io* for storing coverage results. Visit -https://codecov.io/gh/aio-libs/aiohttp for looking on coverage of -master branch, history, pull requests etc. +After reviewing the PR, we find that this code is no longer needed as +the changes mean that this method will never be called under those +conditions. Thanks to this report, we were able to remove some +redundant code from a performance-critical part of our codebase (this +check would have been run, probably multiple times, for every single +incoming request). + +.. tip:: + Sometimes the diff on *codecov.io* doesn't make sense. This is usually + caused by the branch being out of sync with master. Try merging + master into the branch and it will likely fix the issue. Failing + that, try checking coverage locally as described in the next section. + +Other tools +----------- The browser extension https://docs.codecov.io/docs/browser-extension -is highly recommended for analyzing the coverage just in *Files -Changed* tab on *GitHub Pull Request* review page. +is also a useful tool for analyzing the coverage directly from *Files +Changed* tab on the *GitHub Pull Request* review page. 
+ + +You can also produce coverage reports locally with ``make cov-dev`` +or just adding ``--cov-report=html`` to ``pytest``. + +This will run the test suite and collect coverage information. Once +finished, coverage results can be view by opening: +```console +$ python -m webbrowser -n file://"$(pwd)"/htmlcov/index.html +``` Documentation -------------- +============= We encourage documentation improvements. @@ -202,7 +242,7 @@ Once it finishes it will output the index html page Go to the link and make sure your doc changes looks good. Spell checking --------------- +============== We use ``pyenchant`` and ``sphinxcontrib-spelling`` for running spell checker for documentation: @@ -220,47 +260,19 @@ To run spell checker on Linux box you should install it first: $ sudo apt-get install enchant $ pip install sphinxcontrib-spelling -Changelog update ----------------- - -The ``CHANGES.rst`` file is managed using `towncrier -`_ tool and all non trivial -changes must be accompanied by a news entry. -To add an entry to the news file, first you need to have created an -issue describing the change you want to make. A Pull Request itself -*may* function as such, but it is preferred to have a dedicated issue -(for example, in case the PR ends up rejected due to code quality -reasons). - -Once you have an issue or pull request, you take the number and you -create a file inside of the ``CHANGES/`` directory named after that -issue number with an extension of ``.removal``, ``.feature``, -``.bugfix``, or ``.doc``. Thus if your issue or PR number is ``1234`` and -this change is fixing a bug, then you would create a file -``CHANGES/1234.bugfix``. PRs can span multiple categories by creating -multiple files (for instance, if you added a feature and -deprecated/removed the old feature at the same time, you would create -``CHANGES/NNNN.feature`` and ``CHANGES/NNNN.removal``). 
Likewise if a PR touches -multiple issues/PRs you may create a file for each of them with the -exact same contents and *Towncrier* will deduplicate them. - -The contents of this file are *reStructuredText* formatted text that -will be used as the content of the news file entry. You do not need to -reference the issue or PR numbers here as *towncrier* will automatically -add a reference to all of the affected issues when rendering the news -file. +.. include:: ../CHANGES/README.rst Making a Pull Request ---------------------- +===================== After finishing all steps make a GitHub_ Pull Request with *master* base branch. Backporting ------------ +=========== All Pull Requests are created against *master* git branch. @@ -301,7 +313,7 @@ like *needs backport to 3.1*. merging the backport. How to become an aiohttp committer ----------------------------------- +================================== Contribute! diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index 768d52cfd05..514477e8fcb 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -67,6 +67,7 @@ CIMultiDict ClientSession cls cmd +codebase codec Codings committer From a67fb1066f5b8dced690f64bd09f689c032126a6 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 28 Jan 2024 16:58:25 +0000 Subject: [PATCH 0070/1511] Improve validation in HTTP parser (#8074) (#8077) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Paul J. 
Dorn Co-authored-by: Sviatoslav Sydorenko (Святослав Сидоренко) (cherry picked from commit 33ccdfb0a12690af5bb49bda2319ec0907fa7827) --- CHANGES/8074.bugfix.rst | 5 ++ CONTRIBUTORS.txt | 1 + aiohttp/http_parser.py | 32 +++++---- tests/test_http_parser.py | 139 +++++++++++++++++++++++++++++++++++++- 4 files changed, 160 insertions(+), 17 deletions(-) create mode 100644 CHANGES/8074.bugfix.rst diff --git a/CHANGES/8074.bugfix.rst b/CHANGES/8074.bugfix.rst new file mode 100644 index 00000000000..16c71445476 --- /dev/null +++ b/CHANGES/8074.bugfix.rst @@ -0,0 +1,5 @@ +Fixed an unhandled exception in the Python HTTP parser on header lines starting with a colon -- by :user:`pajod`. + +Invalid request lines with anything but a dot between the HTTP major and minor version are now rejected. Invalid header field names containing question mark or slash are now rejected. Such requests are incompatible with :rfc:`9110#section-5.6.2` and are not known to be of any legitimate use. + +(BACKWARD INCOMPATIBLE) diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 270900207e1..08a4c5775a1 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -255,6 +255,7 @@ Pankaj Pandey Parag Jain Pau Freixes Paul Colomiets +Paul J. Dorn Paulius Šileikis Paulus Schoutsen Pavel Kamaev diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index 85499177701..1877f558308 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -69,12 +69,11 @@ # tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." 
/ # "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA # token = 1*tchar -METHRE: Final[Pattern[str]] = re.compile(r"[!#$%&'*+\-.^_`|~0-9A-Za-z]+") -VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d).(\d)") -HDRRE: Final[Pattern[bytes]] = re.compile( - rb"[\x00-\x1F\x7F-\xFF()<>@,;:\[\]={} \t\"\\]" -) -HEXDIGIT = re.compile(rb"[0-9a-fA-F]+") +_TCHAR_SPECIALS: Final[str] = re.escape("!#$%&'*+-.^_`|~") +TOKENRE: Final[Pattern[str]] = re.compile(f"[0-9A-Za-z{_TCHAR_SPECIALS}]+") +VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d)\.(\d)", re.ASCII) +DIGITS: Final[Pattern[str]] = re.compile(r"\d+", re.ASCII) +HEXDIGITS: Final[Pattern[bytes]] = re.compile(rb"[0-9a-fA-F]+") class RawRequestMessage(NamedTuple): @@ -136,6 +135,7 @@ def parse_headers( self, lines: List[bytes] ) -> Tuple["CIMultiDictProxy[str]", RawHeaders]: headers: CIMultiDict[str] = CIMultiDict() + # note: "raw" does not mean inclusion of OWS before/after the field value raw_headers = [] lines_idx = 1 @@ -149,13 +149,14 @@ def parse_headers( except ValueError: raise InvalidHeader(line) from None + if len(bname) == 0: + raise InvalidHeader(bname) + # https://www.rfc-editor.org/rfc/rfc9112.html#section-5.1-2 if {bname[0], bname[-1]} & {32, 9}: # {" ", "\t"} raise InvalidHeader(line) bvalue = bvalue.lstrip(b" \t") - if HDRRE.search(bname): - raise InvalidHeader(bname) if len(bname) > self.max_field_size: raise LineTooLong( "request header name {}".format( @@ -164,6 +165,9 @@ def parse_headers( str(self.max_field_size), str(len(bname)), ) + name = bname.decode("utf-8", "surrogateescape") + if not TOKENRE.fullmatch(name): + raise InvalidHeader(bname) header_length = len(bvalue) @@ -210,7 +214,6 @@ def parse_headers( ) bvalue = bvalue.strip(b" \t") - name = bname.decode("utf-8", "surrogateescape") value = bvalue.decode("utf-8", "surrogateescape") # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-5 @@ -338,7 +341,8 @@ def get_content_length() -> Optional[int]: # Shouldn't allow +/- or other number formats. 
# https://www.rfc-editor.org/rfc/rfc9110#section-8.6-2 - if not length_hdr.strip(" \t").isdecimal(): + # msg.headers is already stripped of leading/trailing wsp + if not DIGITS.fullmatch(length_hdr): raise InvalidHeader(CONTENT_LENGTH) return int(length_hdr) @@ -566,7 +570,7 @@ def parse_message(self, lines: List[bytes]) -> RawRequestMessage: ) # method - if not METHRE.fullmatch(method): + if not TOKENRE.fullmatch(method): raise BadStatusLine(method) # version @@ -683,8 +687,8 @@ def parse_message(self, lines: List[bytes]) -> RawResponseMessage: raise BadStatusLine(line) version_o = HttpVersion(int(match.group(1)), int(match.group(2))) - # The status code is a three-digit number - if len(status) != 3 or not status.isdecimal(): + # The status code is a three-digit ASCII number, no padding + if len(status) != 3 or not DIGITS.fullmatch(status): raise BadStatusLine(line) status_i = int(status) @@ -826,7 +830,7 @@ def feed_data( if self._lax: # Allow whitespace in lax mode. size_b = size_b.strip() - if not re.fullmatch(HEXDIGIT, size_b): + if not re.fullmatch(HEXDIGITS, size_b): exc = TransferEncodingError( chunk[:pos].decode("ascii", "surrogateescape") ) diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index 820a76cb821..b931730529d 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -2,7 +2,8 @@ import asyncio import re -from typing import Any, List +from contextlib import nullcontext +from typing import Any, Dict, List from unittest import mock from urllib.parse import quote @@ -169,11 +170,27 @@ def test_cve_2023_37276(parser: Any) -> None: parser.feed_data(text) +@pytest.mark.parametrize( + "rfc9110_5_6_2_token_delim", + r'"(),/:;<=>?@[\]{}', +) +def test_bad_header_name(parser: Any, rfc9110_5_6_2_token_delim: str) -> None: + text = f"POST / HTTP/1.1\r\nhead{rfc9110_5_6_2_token_delim}er: val\r\n\r\n".encode() + expectation = pytest.raises(http_exceptions.BadHttpMessage) + if rfc9110_5_6_2_token_delim == ":": + # Inserting 
colon into header just splits name/value earlier. + expectation = nullcontext() + with expectation: + parser.feed_data(text) + + @pytest.mark.parametrize( "hdr", ( "Content-Length: -5", # https://www.rfc-editor.org/rfc/rfc9110.html#name-content-length "Content-Length: +256", + "Content-Length: \N{superscript one}", + "Content-Length: \N{mathematical double-struck digit one}", "Foo: abc\rdef", # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-5 "Bar: abc\ndef", "Baz: abc\x00def", @@ -266,6 +283,20 @@ def test_parse_headers_longline(parser: Any) -> None: parser.feed_data(text) +def test_parse_unusual_request_line(parser) -> None: + if not isinstance(response, HttpResponseParserPy): + pytest.xfail("Regression test for Py parser. May match C behaviour later.") + text = b"#smol //a HTTP/1.3\r\n\r\n" + messages, upgrade, tail = parser.feed_data(text) + assert len(messages) == 1 + msg, _ = messages[0] + assert msg.compression is None + assert not msg.upgrade + assert msg.method == "#smol" + assert msg.path == "//a" + assert msg.version == (1, 3) + + def test_parse(parser) -> None: text = b"GET /test HTTP/1.1\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) @@ -568,6 +599,43 @@ def test_headers_content_length_err_2(parser) -> None: parser.feed_data(text) +_pad: Dict[bytes, str] = { + b"": "empty", + # not a typo. 
Python likes triple zero + b"\000": "NUL", + b" ": "SP", + b" ": "SPSP", + # not a typo: both 0xa0 and 0x0a in case of 8-bit fun + b"\n": "LF", + b"\xa0": "NBSP", + b"\t ": "TABSP", +} + + +@pytest.mark.parametrize("hdr", [b"", b"foo"], ids=["name-empty", "with-name"]) +@pytest.mark.parametrize("pad2", _pad.keys(), ids=["post-" + n for n in _pad.values()]) +@pytest.mark.parametrize("pad1", _pad.keys(), ids=["pre-" + n for n in _pad.values()]) +def test_invalid_header_spacing(parser, pad1: bytes, pad2: bytes, hdr: bytes) -> None: + text = b"GET /test HTTP/1.1\r\n" b"%s%s%s: value\r\n\r\n" % (pad1, hdr, pad2) + expectation = pytest.raises(http_exceptions.BadHttpMessage) + if pad1 == pad2 == b"" and hdr != b"": + # one entry in param matrix is correct: non-empty name, not padded + expectation = nullcontext() + if pad1 == pad2 == hdr == b"": + if not isinstance(response, HttpResponseParserPy): + pytest.xfail("Regression test for Py parser. May match C behaviour later.") + with expectation: + parser.feed_data(text) + + +def test_empty_header_name(parser) -> None: + if not isinstance(response, HttpResponseParserPy): + pytest.xfail("Regression test for Py parser. 
May match C behaviour later.") + text = b"GET /test HTTP/1.1\r\n" b":test\r\n\r\n" + with pytest.raises(http_exceptions.BadHttpMessage): + parser.feed_data(text) + + def test_invalid_header(parser) -> None: text = b"GET /test HTTP/1.1\r\n" b"test line\r\n\r\n" with pytest.raises(http_exceptions.BadHttpMessage): @@ -690,6 +758,34 @@ def test_http_request_bad_status_line(parser) -> None: assert r"\n" not in exc_info.value.message +_num: Dict[bytes, str] = { + # dangerous: accepted by Python int() + # unicodedata.category("\U0001D7D9") == 'Nd' + "\N{mathematical double-struck digit one}".encode(): "utf8digit", + # only added for interop tests, refused by Python int() + # unicodedata.category("\U000000B9") == 'No' + "\N{superscript one}".encode(): "utf8number", + "\N{superscript one}".encode("latin-1"): "latin1number", +} + + +@pytest.mark.parametrize("nonascii_digit", _num.keys(), ids=_num.values()) +def test_http_request_bad_status_line_number( + parser: Any, nonascii_digit: bytes +) -> None: + text = b"GET /digit HTTP/1." + nonascii_digit + b"\r\n\r\n" + with pytest.raises(http_exceptions.BadStatusLine): + parser.feed_data(text) + + +def test_http_request_bad_status_line_separator(parser: Any) -> None: + # single code point, old, multibyte NFKC, multibyte NFKD + utf8sep = "\N{arabic ligature sallallahou alayhe wasallam}".encode() + text = b"GET /ligature HTTP/1" + utf8sep + b"1\r\n\r\n" + with pytest.raises(http_exceptions.BadStatusLine): + parser.feed_data(text) + + def test_http_request_bad_status_line_whitespace(parser: Any) -> None: text = b"GET\n/path\fHTTP/1.1\r\n\r\n" with pytest.raises(http_exceptions.BadStatusLine): @@ -711,6 +807,31 @@ def test_http_request_upgrade(parser: Any) -> None: assert tail == b"some raw data" +def test_http_request_parser_utf8_request_line(parser) -> None: + if not isinstance(response, HttpResponseParserPy): + pytest.xfail("Regression test for Py parser. 
May match C behaviour later.") + messages, upgrade, tail = parser.feed_data( + # note the truncated unicode sequence + b"GET /P\xc3\xbcnktchen\xa0\xef\xb7 HTTP/1.1\r\n" + + # for easier grep: ASCII 0xA0 more commonly known as non-breaking space + # note the leading and trailing spaces + "sTeP: \N{latin small letter sharp s}nek\t\N{no-break space} " + "\r\n\r\n".encode() + ) + msg = messages[0][0] + + assert msg.method == "GET" + assert msg.path == "/Pünktchen\udca0\udcef\udcb7" + assert msg.version == (1, 1) + assert msg.headers == CIMultiDict([("STEP", "ßnek\t\xa0")]) + assert msg.raw_headers == ((b"sTeP", "ßnek\t\xa0".encode()),) + assert not msg.should_close + assert msg.compression is None + assert not msg.upgrade + assert not msg.chunked + assert msg.url.path == URL("/P%C3%BCnktchen\udca0\udcef\udcb7").path + + def test_http_request_parser_utf8(parser) -> None: text = "GET /path HTTP/1.1\r\nx-test:тест\r\n\r\n".encode() messages, upgrade, tail = parser.feed_data(text) @@ -760,9 +881,15 @@ def test_http_request_parser_two_slashes(parser) -> None: assert not msg.chunked -def test_http_request_parser_bad_method(parser) -> None: +@pytest.mark.parametrize( + "rfc9110_5_6_2_token_delim", + [bytes([i]) for i in rb'"(),/:;<=>?@[\]{}'], +) +def test_http_request_parser_bad_method( + parser, rfc9110_5_6_2_token_delim: bytes +) -> None: with pytest.raises(http_exceptions.BadStatusLine): - parser.feed_data(b'G=":<>(e),[T];?" 
/get HTTP/1.1\r\n\r\n') + parser.feed_data(rfc9110_5_6_2_token_delim + b'ET" /get HTTP/1.1\r\n\r\n') def test_http_request_parser_bad_version(parser) -> None: @@ -974,6 +1101,12 @@ def test_http_response_parser_code_not_int(response) -> None: response.feed_data(b"HTTP/1.1 ttt test\r\n\r\n") +@pytest.mark.parametrize("nonascii_digit", _num.keys(), ids=_num.values()) +def test_http_response_parser_code_not_ascii(response, nonascii_digit: bytes) -> None: + with pytest.raises(http_exceptions.BadStatusLine): + response.feed_data(b"HTTP/1.1 20" + nonascii_digit + b" test\r\n\r\n") + + def test_http_request_chunked_payload(parser) -> None: text = b"GET /test HTTP/1.1\r\n" b"transfer-encoding: chunked\r\n\r\n" msg, payload = parser.feed_data(text)[0][0] From bb590709f0abec6723be6d9afa6a8d71ff813635 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sviatoslav=20Sydorenko=20=28=D0=A1=D0=B2=D1=8F=D1=82=D0=BE?= =?UTF-8?q?=D1=81=D0=BB=D0=B0=D0=B2=20=D0=A1=D0=B8=D0=B4=D0=BE=D1=80=D0=B5?= =?UTF-8?q?=D0=BD=D0=BA=D0=BE=29?= Date: Sun, 28 Jan 2024 18:22:07 +0100 Subject: [PATCH 0071/1511] [PR #3955/8960063e backport][3.10] Replace all tmpdir fixtures with tmp_path (#3551) (#8075) **This is a backport of PR #3955 as merged into master (8960063ef4137d6c547a687a45ed55b943e9b8d1).** tmp_path is the replacement fixture in pytest for tmpdir; tmp_path uses the builtin pathlib.Path class. As it says on the tin, this commit replaces every instance of tmpdir in the test suite with tmp_path. Aside from s/tmpdir/tmp_path/ this also required changing instances of `tmpdir.join(foo)` to `tmp_path / foo`. This is intended to comprehensively address and close #3551, and should have no side effects. This does not affect end users. 
Co-authored-by: Matt VanEseltine --- CHANGES/3551.misc | 1 + CONTRIBUTORS.txt | 1 + tests/test_client_request.py | 4 ++-- tests/test_proxy_functional.py | 12 ++++++------ tests/test_web_functional.py | 6 +++--- tests/test_web_sendfile_functional.py | 22 +++++++++++----------- tests/test_web_urldispatcher.py | 4 ++-- 7 files changed, 26 insertions(+), 24 deletions(-) create mode 100644 CHANGES/3551.misc diff --git a/CHANGES/3551.misc b/CHANGES/3551.misc new file mode 100644 index 00000000000..63965c14821 --- /dev/null +++ b/CHANGES/3551.misc @@ -0,0 +1 @@ +Replace all tmpdir fixtures with tmp_path in test suite. diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 08a4c5775a1..475ec8604e2 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -225,6 +225,7 @@ Martin Richard Martin Sucha Mathias Fröjdman Mathieu Dugré +Matt VanEseltine Matthias Marquardt Matthieu Hauglustaine Matthieu Rigal diff --git a/tests/test_client_request.py b/tests/test_client_request.py index 6521b70ad55..f8107ffad88 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -762,8 +762,8 @@ async def test_pass_falsy_data(loop) -> None: await req.close() -async def test_pass_falsy_data_file(loop, tmpdir) -> None: - testfile = tmpdir.join("tmpfile").open("w+b") +async def test_pass_falsy_data_file(loop, tmp_path) -> None: + testfile = (tmp_path / "tmpfile").open("w+b") testfile.write(b"data") testfile.seek(0) skip = frozenset([hdrs.CONTENT_TYPE]) diff --git a/tests/test_proxy_functional.py b/tests/test_proxy_functional.py index de5eeb258ff..f199404f159 100644 --- a/tests/test_proxy_functional.py +++ b/tests/test_proxy_functional.py @@ -721,12 +721,12 @@ async def test_proxy_from_env_http_with_auth(proxy_test_server, get_request, moc async def test_proxy_from_env_http_with_auth_from_netrc( - proxy_test_server, get_request, tmpdir, mocker + proxy_test_server, get_request, tmp_path, mocker ): url = "http://aiohttp.io/path" proxy = await proxy_test_server() auth = 
aiohttp.BasicAuth("user", "pass") - netrc_file = tmpdir.join("test_netrc") + netrc_file = tmp_path / "test_netrc" netrc_file_data = "machine 127.0.0.1 login {} password {}".format( auth.login, auth.password, @@ -747,12 +747,12 @@ async def test_proxy_from_env_http_with_auth_from_netrc( async def test_proxy_from_env_http_without_auth_from_netrc( - proxy_test_server, get_request, tmpdir, mocker + proxy_test_server, get_request, tmp_path, mocker ): url = "http://aiohttp.io/path" proxy = await proxy_test_server() auth = aiohttp.BasicAuth("user", "pass") - netrc_file = tmpdir.join("test_netrc") + netrc_file = tmp_path / "test_netrc" netrc_file_data = "machine 127.0.0.2 login {} password {}".format( auth.login, auth.password, @@ -773,12 +773,12 @@ async def test_proxy_from_env_http_without_auth_from_netrc( async def test_proxy_from_env_http_without_auth_from_wrong_netrc( - proxy_test_server, get_request, tmpdir, mocker + proxy_test_server, get_request, tmp_path, mocker ): url = "http://aiohttp.io/path" proxy = await proxy_test_server() auth = aiohttp.BasicAuth("user", "pass") - netrc_file = tmpdir.join("test_netrc") + netrc_file = tmp_path / "test_netrc" invalid_data = f"machine 127.0.0.1 {auth.login} pass {auth.password}" with open(str(netrc_file), "w") as f: f.write(invalid_data) diff --git a/tests/test_web_functional.py b/tests/test_web_functional.py index 28d97d9694c..04fc2e35fd1 100644 --- a/tests/test_web_functional.py +++ b/tests/test_web_functional.py @@ -1786,7 +1786,7 @@ async def handler(request): await resp.release() -async def test_response_with_bodypart_named(aiohttp_client, tmpdir) -> None: +async def test_response_with_bodypart_named(aiohttp_client, tmp_path) -> None: async def handler(request): reader = await request.multipart() part = await reader.next() @@ -1796,9 +1796,9 @@ async def handler(request): app.router.add_post("/", handler) client = await aiohttp_client(app) - f = tmpdir.join("foobar.txt") + f = tmp_path / "foobar.txt" f.write_text("test", 
encoding="utf8") - with open(str(f), "rb") as fd: + with f.open("rb") as fd: data = {"file": fd} resp = await client.post("/", data=data) diff --git a/tests/test_web_sendfile_functional.py b/tests/test_web_sendfile_functional.py index 31f22892f66..b044f29bc81 100644 --- a/tests/test_web_sendfile_functional.py +++ b/tests/test_web_sendfile_functional.py @@ -558,18 +558,18 @@ def test_static_route_path_existence_check() -> None: web.StaticResource("/", nodirectory) -async def test_static_file_huge(aiohttp_client, tmpdir) -> None: +async def test_static_file_huge(aiohttp_client, tmp_path) -> None: filename = "huge_data.unknown_mime_type" # fill 20MB file - with tmpdir.join(filename).open("wb") as f: + with (tmp_path / filename).open("wb") as f: for i in range(1024 * 20): f.write((chr(i % 64 + 0x20) * 1024).encode()) - file_st = os.stat(str(tmpdir.join(filename))) + file_st = os.stat(str(tmp_path / filename)) app = web.Application() - app.router.add_static("/static", str(tmpdir)) + app.router.add_static("/static", str(tmp_path)) client = await aiohttp_client(app) resp = await client.get("/static/" + filename) @@ -579,7 +579,7 @@ async def test_static_file_huge(aiohttp_client, tmpdir) -> None: assert resp.headers.get("CONTENT-ENCODING") is None assert int(resp.headers.get("CONTENT-LENGTH")) == file_st.st_size - f = tmpdir.join(filename).open("rb") + f = (tmp_path / filename).open("rb") off = 0 cnt = 0 while off < file_st.st_size: @@ -988,11 +988,11 @@ async def handler(request): await client.close() -async def test_static_file_huge_cancel(aiohttp_client, tmpdir) -> None: +async def test_static_file_huge_cancel(aiohttp_client, tmp_path) -> None: filename = "huge_data.unknown_mime_type" # fill 100MB file - with tmpdir.join(filename).open("wb") as f: + with (tmp_path / filename).open("wb") as f: for i in range(1024 * 20): f.write((chr(i % 64 + 0x20) * 1024).encode()) @@ -1005,7 +1005,7 @@ async def handler(request): tr = request.transport sock = tr.get_extra_info("socket") 
sock.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 1024) - ret = web.FileResponse(pathlib.Path(str(tmpdir.join(filename)))) + ret = web.FileResponse(pathlib.Path(str(tmp_path / filename))) return ret app = web.Application() @@ -1029,11 +1029,11 @@ async def handler(request): await client.close() -async def test_static_file_huge_error(aiohttp_client, tmpdir) -> None: +async def test_static_file_huge_error(aiohttp_client, tmp_path) -> None: filename = "huge_data.unknown_mime_type" # fill 20MB file - with tmpdir.join(filename).open("wb") as f: + with (tmp_path / filename).open("wb") as f: f.seek(20 * 1024 * 1024) f.write(b"1") @@ -1042,7 +1042,7 @@ async def handler(request): tr = request.transport sock = tr.get_extra_info("socket") sock.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 1024) - ret = web.FileResponse(pathlib.Path(str(tmpdir.join(filename)))) + ret = web.FileResponse(pathlib.Path(str(tmp_path / filename))) return ret app = web.Application() diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 3b92a10896c..264dafd00c0 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -525,13 +525,13 @@ async def post(self) -> web.Response: async def test_static_absolute_url( - aiohttp_client: AiohttpClient, tmpdir: pathlib.Path + aiohttp_client: AiohttpClient, tmp_path: pathlib.Path ) -> None: # requested url is an absolute name like # /static/\\machine_name\c$ or /static/D:\path # where the static dir is totally different app = web.Application() - fname = tmpdir / "file.txt" + fname = tmp_path / "file.txt" fname.write_text("sample text", "ascii") here = pathlib.Path(__file__).parent app.router.add_static("/static", here) From 6018c7f6977d9dc01af86435eb0edfa5fd91578f Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 28 Jan 2024 18:46:19 +0000 Subject: [PATCH 0072/1511] [PR #8079/1c335944 backport][3.10] Validate static paths (#8081) **This is a 
backport of PR #8079 as merged into master (1c335944d6a8b1298baf179b7c0b3069f10c514b).** --- CHANGES/8079.bugfix.rst | 1 + aiohttp/web_urldispatcher.py | 18 +++++-- docs/web_advanced.rst | 16 ++++-- docs/web_reference.rst | 12 +++-- tests/test_web_urldispatcher.py | 91 +++++++++++++++++++++++++++++++++ 5 files changed, 128 insertions(+), 10 deletions(-) create mode 100644 CHANGES/8079.bugfix.rst diff --git a/CHANGES/8079.bugfix.rst b/CHANGES/8079.bugfix.rst new file mode 100644 index 00000000000..57bc8bfebcc --- /dev/null +++ b/CHANGES/8079.bugfix.rst @@ -0,0 +1 @@ +Improved validation of paths for static resources -- by :user:`bdraco`. diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index ea28f15e2b9..cb433e6c857 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -595,9 +595,14 @@ def url_for( # type: ignore[override] url = url / filename if append_version: + unresolved_path = self._directory.joinpath(filename) try: - filepath = self._directory.joinpath(filename).resolve() - if not self._follow_symlinks: + if self._follow_symlinks: + normalized_path = Path(os.path.normpath(unresolved_path)) + normalized_path.relative_to(self._directory) + filepath = normalized_path.resolve() + else: + filepath = unresolved_path.resolve() filepath.relative_to(self._directory) except (ValueError, FileNotFoundError): # ValueError for case when path point to symlink @@ -662,8 +667,13 @@ async def _handle(self, request: Request) -> StreamResponse: # /static/\\machine_name\c$ or /static/D:\path # where the static dir is totally different raise HTTPForbidden() - filepath = self._directory.joinpath(filename).resolve() - if not self._follow_symlinks: + unresolved_path = self._directory.joinpath(filename) + if self._follow_symlinks: + normalized_path = Path(os.path.normpath(unresolved_path)) + normalized_path.relative_to(self._directory) + filepath = normalized_path.resolve() + else: + filepath = unresolved_path.resolve() 
filepath.relative_to(self._directory) except (ValueError, FileNotFoundError) as error: # relatively safe diff --git a/docs/web_advanced.rst b/docs/web_advanced.rst index 33c2ebf0736..3549a5c7e36 100644 --- a/docs/web_advanced.rst +++ b/docs/web_advanced.rst @@ -263,12 +263,22 @@ instead could be enabled with ``show_index`` parameter set to ``True``:: web.static('/prefix', path_to_static_folder, show_index=True) -When a symlink from the static directory is accessed, the server responses to -client with ``HTTP/404 Not Found`` by default. To allow the server to follow -symlinks, parameter ``follow_symlinks`` should be set to ``True``:: +When a symlink that leads outside the static directory is accessed, the server +responds to the client with ``HTTP/404 Not Found`` by default. To allow the server to +follow symlinks that lead outside the static root, the parameter ``follow_symlinks`` +should be set to ``True``:: web.static('/prefix', path_to_static_folder, follow_symlinks=True) +.. caution:: + + Enabling ``follow_symlinks`` can be a security risk, and may lead to + a directory transversal attack. You do NOT need this option to follow symlinks + which point to somewhere else within the static directory, this option is only + used to break out of the security sandbox. Enabling this option is highly + discouraged, and only expected to be used for edge cases in a local + development setting where remote users do not have access to the server. + When you want to enable cache busting, parameter ``append_version`` can be set to ``True`` diff --git a/docs/web_reference.rst b/docs/web_reference.rst index e38fb82863f..e0ebbae1851 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -1875,9 +1875,15 @@ Application and Router by default it's not allowed and HTTP/403 will be returned on directory access. - :param bool follow_symlinks: flag for allowing to follow symlinks from - a directory, by default it's not allowed and - HTTP/404 will be returned on access. 
+ :param bool follow_symlinks: flag for allowing to follow symlinks that lead + outside the static root directory, by default it's not allowed and + HTTP/404 will be returned on access. Enabling ``follow_symlinks`` + can be a security risk, and may lead to a directory transversal attack. + You do NOT need this option to follow symlinks which point to somewhere + else within the static directory, this option is only used to break out + of the security sandbox. Enabling this option is highly discouraged, + and only expected to be used for edge cases in a local development + setting where remote users do not have access to the server. :param bool append_version: flag for adding file version (hash) to the url query string, this value will diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 264dafd00c0..5a24b2ea30c 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -130,6 +130,97 @@ async def test_follow_symlink( assert (await r.text()) == data +async def test_follow_symlink_directory_traversal( + tmp_path: pathlib.Path, aiohttp_client: AiohttpClient +) -> None: + # Tests that follow_symlinks does not allow directory transversal + data = "private" + + private_file = tmp_path / "private_file" + private_file.write_text(data) + + safe_path = tmp_path / "safe_dir" + safe_path.mkdir() + + app = web.Application() + + # Register global static route: + app.router.add_static("/", str(safe_path), follow_symlinks=True) + client = await aiohttp_client(app) + + await client.start_server() + # We need to use a raw socket to test this, as the client will normalize + # the path before sending it to the server. 
+ reader, writer = await asyncio.open_connection(client.host, client.port) + writer.write(b"GET /../private_file HTTP/1.1\r\n\r\n") + response = await reader.readuntil(b"\r\n\r\n") + assert b"404 Not Found" in response + writer.close() + await writer.wait_closed() + await client.close() + + +async def test_follow_symlink_directory_traversal_after_normalization( + tmp_path: pathlib.Path, aiohttp_client: AiohttpClient +) -> None: + # Tests that follow_symlinks does not allow directory transversal + # after normalization + # + # Directory structure + # |-- secret_dir + # | |-- private_file (should never be accessible) + # | |-- symlink_target_dir + # | |-- symlink_target_file (should be accessible via the my_symlink symlink) + # | |-- sandbox_dir + # | |-- my_symlink -> symlink_target_dir + # + secret_path = tmp_path / "secret_dir" + secret_path.mkdir() + + # This file is below the symlink target and should not be reachable + private_file = secret_path / "private_file" + private_file.write_text("private") + + symlink_target_path = secret_path / "symlink_target_dir" + symlink_target_path.mkdir() + + sandbox_path = symlink_target_path / "sandbox_dir" + sandbox_path.mkdir() + + # This file should be reachable via the symlink + symlink_target_file = symlink_target_path / "symlink_target_file" + symlink_target_file.write_text("readable") + + my_symlink_path = sandbox_path / "my_symlink" + pathlib.Path(str(my_symlink_path)).symlink_to(str(symlink_target_path), True) + + app = web.Application() + + # Register global static route: + app.router.add_static("/", str(sandbox_path), follow_symlinks=True) + client = await aiohttp_client(app) + + await client.start_server() + # We need to use a raw socket to test this, as the client will normalize + # the path before sending it to the server. 
+ reader, writer = await asyncio.open_connection(client.host, client.port) + writer.write(b"GET /my_symlink/../private_file HTTP/1.1\r\n\r\n") + response = await reader.readuntil(b"\r\n\r\n") + assert b"404 Not Found" in response + writer.close() + await writer.wait_closed() + + reader, writer = await asyncio.open_connection(client.host, client.port) + writer.write(b"GET /my_symlink/symlink_target_file HTTP/1.1\r\n\r\n") + response = await reader.readuntil(b"\r\n\r\n") + assert b"200 OK" in response + response = await reader.readuntil(b"readable") + assert response == b"readable" + writer.close() + await writer.wait_closed() + await client.close() + + @pytest.mark.parametrize( "dir_name,filename,data", [ From a960eb6ebecd88f866ac0d4907c73271c91e667a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sviatoslav=20Sydorenko=20=28=D0=A1=D0=B2=D1=8F=D1=82=D0=BE?= =?UTF-8?q?=D1=81=D0=BB=D0=B0=D0=B2=20=D0=A1=D0=B8=D0=B4=D0=BE=D1=80=D0=B5?= =?UTF-8?q?=D0=BD=D0=BA=D0=BE=29?= Date: Sun, 28 Jan 2024 22:58:07 +0100 Subject: [PATCH 0073/1511] [PR #3957/79fe2045 backport][3.10] Improve test suite handling of paths, temp files (#8083) **This is a backport of PR #3957 as merged into master (79fe204522ecf91e9c1cf1a3547c03f821106a74).** * Improve test suite handling of paths, temp files This updates most uses of `os.path` to instead use `pathlib.Path`. Relatedly, and following up from #3955 (which replaced pytest's `tmpdir` fixture with `tmp_path`), this removes most ad-hoc tempfile creation in favor of the `tmp_path` fixture. Following conversion, unnecessary `os` and `tempfile` imports were removed. Most pathlib changes involve straightforward changes from `os` functions such as `os.mkdir` or `os.path.abspath` to their equivalent methods in `pathlib.Path`. Changing ad-hoc temporary path to `tmp_path` involved removing the `tmp_dir_path` fixture and replacing its functionality with `tmp_path` in `test_save_load` and `test_guess_filename_with_tempfile`. 
On `test_static_route_user_home` function: * I think that the intention of this test is to ensure that aiohttp correctly expands the home path if passed in a string. I refactored it to `pathlib.Path` and cut out duplication of `relative_to()` calls. But if it's not doing anything but expanding `~`, then it's testing the functionality of `pathlib.Path`, not aiohttp. On `unix_sockname` fixture: This fixture uses `tempfile.TemporaryDirectory`. Because it's a somewhat complicated fixture used across multiple test modules, I left it as-is for now. On `str(tmp_path)` and even `pathlib.Path(str(tmp_path))`: pytest uses `pathlib2` to provide `tmp_path` for Python 3.5 (only). This is mostly fine but it fails on a couple of corner cases, such as `os.symlink()` which blocks all but `str` and `PurePath` via isinstance type checking. In several cases, this requires conversion to string or conversion to string and then into `pathlib.Path` to maintain code compatibility. See: pytest-dev/pytest/issues/5017 * Correct test_guess_filename to use file object * Update symlink in tests; more guess_filename tests (cherry picked from commit 79fe204522ecf91e9c1cf1a3547c03f821106a74) ## What do these changes do? This updates most uses of `os.path` to instead use `pathlib.Path`. Relatedly, and following up from #3955 (which replaced pytest's `tmpdir` fixture with `tmp_path`), this removes most ad-hoc tempfile creation in favor of the `tmp_path` fixture. Following conversion, unnecessary `os` and `tempfile` imports were removed. Most pathlib changes involve straightforward changes from `os` functions such as `os.mkdir` or `os.path.abspath` to their equivalent methods in `pathlib.Path`. Changing ad-hoc temporary path to `tmp_path` involved removing the `tmp_dir_path` fixture and replacing its functionality with `tmp_path` in `test_save_load` and `test_guess_filename_with_tempfile`. 
On `test_static_route_user_home` function: * I think that the intention of this test is to ensure that aiohttp correctly expands the home path if passed in a string. I refactored it to `pathlib.Path` and cut out duplication of `relative_to()` calls. But if it's not doing anything but expanding `~`, then it's testing the functionality of `pathlib.Path`, not aiohttp. On `unix_sockname` fixture: This fixture uses `tempfile.TemporaryDirectory`. Because it's a somewhat complicated fixture used across multiple test modules, I left it as-is for now. On `str(tmp_path)` and even `pathlib.Path(str(tmp_path))`: pytest uses `pathlib2` to provide `tmp_path` for Python 3.5 (only). This is mostly fine but it fails on a couple of corner cases, such as `os.symlink()` which blocks all but `str` and `PurePath` via isinstance type checking. In several cases, this requires conversion to string or conversion to string and then into `pathlib.Path` to maintain code compatibility. See: pytest-dev/pytest/issues/5017 ## Are there changes in behavior for the user? These changes only affect the test suite and have no impact on the end user. ## Related issue number This is intended to address discussion following the simplistic changes from tmpdir to tmp_path of #3955. ## Checklist - [X] I think the code is well written - [X] Unit tests for the changes exist - [X] Documentation reflects the changes - [X] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. - [X] Add a new news fragment into the `CHANGES` folder * name it `.` for example (588.bugfix) * if you don't have an `issue_id` change it to the pr id after creating the pr * ensure type is one of the following: * `.feature`: Signifying a new feature. * `.bugfix`: Signifying a bug fix. * `.doc`: Signifying a documentation improvement. * `.removal`: Signifying a deprecation or removal of public API. 
* `.misc`: A ticket has been closed, but it is not of interest to users. * Make sure to use full sentences with correct case and punctuation, for example: "Fix issue with non-ascii contents in doctest text files." Co-authored-by: Matt VanEseltine --- CHANGES/3957.misc | 1 + tests/test_client_request.py | 21 ++--- tests/test_cookiejar.py | 8 +- tests/test_helpers.py | 15 +++- tests/test_multipart.py | 7 +- tests/test_proxy_functional.py | 6 +- tests/test_urldispatch.py | 70 ++++++++-------- tests/test_web_sendfile_functional.py | 36 ++++---- tests/test_web_urldispatcher.py | 113 ++++++++++---------------- tools/check_changes.py | 2 +- 10 files changed, 130 insertions(+), 149 deletions(-) create mode 100644 CHANGES/3957.misc diff --git a/CHANGES/3957.misc b/CHANGES/3957.misc new file mode 100644 index 00000000000..b4f9f58edb9 --- /dev/null +++ b/CHANGES/3957.misc @@ -0,0 +1 @@ +Improve test suite handling of paths and temp files to consistently use pathlib and pytest fixtures. diff --git a/tests/test_client_request.py b/tests/test_client_request.py index f8107ffad88..c54e1828e34 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -1,7 +1,7 @@ import asyncio import hashlib import io -import os.path +import pathlib import urllib.parse import zlib from http.cookies import BaseCookie, Morsel, SimpleCookie @@ -921,12 +921,11 @@ async def test_chunked_transfer_encoding(loop, conn) -> None: async def test_file_upload_not_chunked(loop) -> None: - here = os.path.dirname(__file__) - fname = os.path.join(here, "aiohttp.png") - with open(fname, "rb") as f: + file_path = pathlib.Path(__file__).parent / "aiohttp.png" + with file_path.open("rb") as f: req = ClientRequest("post", URL("http://python.org/"), data=f, loop=loop) assert not req.chunked - assert req.headers["CONTENT-LENGTH"] == str(os.path.getsize(fname)) + assert req.headers["CONTENT-LENGTH"] == str(file_path.stat().st_size) await req.close() @@ -947,19 +946,17 @@ async def 
test_precompressed_data_stays_intact(loop) -> None: async def test_file_upload_not_chunked_seek(loop) -> None: - here = os.path.dirname(__file__) - fname = os.path.join(here, "aiohttp.png") - with open(fname, "rb") as f: + file_path = pathlib.Path(__file__).parent / "aiohttp.png" + with file_path.open("rb") as f: f.seek(100) req = ClientRequest("post", URL("http://python.org/"), data=f, loop=loop) - assert req.headers["CONTENT-LENGTH"] == str(os.path.getsize(fname) - 100) + assert req.headers["CONTENT-LENGTH"] == str(file_path.stat().st_size - 100) await req.close() async def test_file_upload_force_chunked(loop) -> None: - here = os.path.dirname(__file__) - fname = os.path.join(here, "aiohttp.png") - with open(fname, "rb") as f: + file_path = pathlib.Path(__file__).parent / "aiohttp.png" + with file_path.open("rb") as f: req = ClientRequest( "post", URL("http://python.org/"), data=f, chunked=True, loop=loop ) diff --git a/tests/test_cookiejar.py b/tests/test_cookiejar.py index 261dbecd992..91352f50c3d 100644 --- a/tests/test_cookiejar.py +++ b/tests/test_cookiejar.py @@ -1,9 +1,8 @@ import asyncio import datetime import itertools -import os +import pathlib import pickle -import tempfile import unittest from http.cookies import BaseCookie, Morsel, SimpleCookie from unittest import mock @@ -178,8 +177,8 @@ async def test_constructor_with_expired( assert jar._loop is loop -async def test_save_load(loop, cookies_to_send, cookies_to_receive) -> None: - file_path = tempfile.mkdtemp() + "/aiohttp.test.cookie" +async def test_save_load(tmp_path, loop, cookies_to_send, cookies_to_receive) -> None: + file_path = pathlib.Path(str(tmp_path)) / "aiohttp.test.cookie" # export cookie jar jar_save = CookieJar(loop=loop) @@ -193,7 +192,6 @@ async def test_save_load(loop, cookies_to_send, cookies_to_receive) -> None: for cookie in jar_load: jar_test[cookie.key] = cookie - os.unlink(file_path) assert jar_test == cookies_to_receive diff --git a/tests/test_helpers.py 
b/tests/test_helpers.py index 390d2390065..b59528d3468 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -4,7 +4,6 @@ import gc import platform import sys -import tempfile import weakref from math import ceil, modf from pathlib import Path @@ -73,11 +72,21 @@ def test_parse_mimetype(mimetype, expected) -> None: # ------------------- guess_filename ---------------------------------- -def test_guess_filename_with_tempfile() -> None: - with tempfile.TemporaryFile() as fp: +def test_guess_filename_with_file_object(tmp_path) -> None: + file_path = tmp_path / "test_guess_filename" + with file_path.open("w+b") as fp: assert helpers.guess_filename(fp, "no-throw") is not None +def test_guess_filename_with_path(tmp_path) -> None: + file_path = tmp_path / "test_guess_filename" + assert helpers.guess_filename(file_path, "no-throw") is not None + + +def test_guess_filename_with_default() -> None: + assert helpers.guess_filename(None, "no-throw") == "no-throw" + + # ------------------- BasicAuth ----------------------------------- diff --git a/tests/test_multipart.py b/tests/test_multipart.py index c68ba2dd6ff..f9d130e7949 100644 --- a/tests/test_multipart.py +++ b/tests/test_multipart.py @@ -1,6 +1,7 @@ import asyncio import io import json +import pathlib import zlib from unittest import mock @@ -1270,7 +1271,7 @@ async def test_write_preserves_content_disposition(self, buf, stream) -> None: async def test_preserve_content_disposition_header(self, buf, stream): # https://github.com/aio-libs/aiohttp/pull/3475#issuecomment-451072381 - with open(__file__, "rb") as fobj: + with pathlib.Path(__file__).open("rb") as fobj: with aiohttp.MultipartWriter("form-data", boundary=":") as writer: part = writer.append( fobj, @@ -1297,7 +1298,7 @@ async def test_preserve_content_disposition_header(self, buf, stream): async def test_set_content_disposition_override(self, buf, stream): # https://github.com/aio-libs/aiohttp/pull/3475#issuecomment-451072381 - with open(__file__, 
"rb") as fobj: + with pathlib.Path(__file__).open("rb") as fobj: with aiohttp.MultipartWriter("form-data", boundary=":") as writer: part = writer.append( fobj, @@ -1324,7 +1325,7 @@ async def test_set_content_disposition_override(self, buf, stream): async def test_reset_content_disposition_header(self, buf, stream): # https://github.com/aio-libs/aiohttp/pull/3475#issuecomment-451072381 - with open(__file__, "rb") as fobj: + with pathlib.Path(__file__).open("rb") as fobj: with aiohttp.MultipartWriter("form-data", boundary=":") as writer: part = writer.append( fobj, diff --git a/tests/test_proxy_functional.py b/tests/test_proxy_functional.py index f199404f159..099922ac77f 100644 --- a/tests/test_proxy_functional.py +++ b/tests/test_proxy_functional.py @@ -731,7 +731,7 @@ async def test_proxy_from_env_http_with_auth_from_netrc( auth.login, auth.password, ) - with open(str(netrc_file), "w") as f: + with netrc_file.open("w") as f: f.write(netrc_file_data) mocker.patch.dict( os.environ, {"http_proxy": str(proxy.url), "NETRC": str(netrc_file)} @@ -757,7 +757,7 @@ async def test_proxy_from_env_http_without_auth_from_netrc( auth.login, auth.password, ) - with open(str(netrc_file), "w") as f: + with netrc_file.open("w") as f: f.write(netrc_file_data) mocker.patch.dict( os.environ, {"http_proxy": str(proxy.url), "NETRC": str(netrc_file)} @@ -780,7 +780,7 @@ async def test_proxy_from_env_http_without_auth_from_wrong_netrc( auth = aiohttp.BasicAuth("user", "pass") netrc_file = tmp_path / "test_netrc" invalid_data = f"machine 127.0.0.1 {auth.login} pass {auth.password}" - with open(str(netrc_file), "w") as f: + with netrc_file.open("w") as f: f.write(invalid_data) mocker.patch.dict( diff --git a/tests/test_urldispatch.py b/tests/test_urldispatch.py index bf15588bb13..6a656104fd2 100644 --- a/tests/test_urldispatch.py +++ b/tests/test_urldispatch.py @@ -1,4 +1,3 @@ -import os import pathlib import re from collections.abc import Container, Iterable, Mapping, MutableMapping, Sized 
@@ -49,7 +48,7 @@ def fill_routes(router): def go(): route1 = router.add_route("GET", "/plain", make_handler()) route2 = router.add_route("GET", "/variable/{name}", make_handler()) - resource = router.add_static("/static", os.path.dirname(aiohttp.__file__)) + resource = router.add_static("/static", pathlib.Path(aiohttp.__file__).parent) return [route1, route2] + list(resource) return go @@ -342,7 +341,7 @@ def test_route_dynamic(router) -> None: def test_add_static(router) -> None: resource = router.add_static( - "/st", os.path.dirname(aiohttp.__file__), name="static" + "/st", pathlib.Path(aiohttp.__file__).parent, name="static" ) assert router["static"] is resource url = resource.url_for(filename="/dir/a.txt") @@ -351,7 +350,7 @@ def test_add_static(router) -> None: def test_add_static_append_version(router) -> None: - resource = router.add_static("/st", os.path.dirname(__file__), name="static") + resource = router.add_static("/st", pathlib.Path(__file__).parent, name="static") url = resource.url_for(filename="/data.unknown_mime_type", append_version=True) expect_url = ( "/st/data.unknown_mime_type?" 
"v=aUsn8CHEhhszc81d28QmlcBW0KQpfS2F4trgQKhOYd8%3D" @@ -361,7 +360,7 @@ def test_add_static_append_version(router) -> None: def test_add_static_append_version_set_from_constructor(router) -> None: resource = router.add_static( - "/st", os.path.dirname(__file__), append_version=True, name="static" + "/st", pathlib.Path(__file__).parent, append_version=True, name="static" ) url = resource.url_for(filename="/data.unknown_mime_type") expect_url = ( @@ -372,7 +371,7 @@ def test_add_static_append_version_set_from_constructor(router) -> None: def test_add_static_append_version_override_constructor(router) -> None: resource = router.add_static( - "/st", os.path.dirname(__file__), append_version=True, name="static" + "/st", pathlib.Path(__file__).parent, append_version=True, name="static" ) url = resource.url_for(filename="/data.unknown_mime_type", append_version=False) expect_url = "/st/data.unknown_mime_type" @@ -380,7 +379,7 @@ def test_add_static_append_version_override_constructor(router) -> None: def test_add_static_append_version_filename_without_slash(router) -> None: - resource = router.add_static("/st", os.path.dirname(__file__), name="static") + resource = router.add_static("/st", pathlib.Path(__file__).parent, name="static") url = resource.url_for(filename="data.unknown_mime_type", append_version=True) expect_url = ( "/st/data.unknown_mime_type?" 
"v=aUsn8CHEhhszc81d28QmlcBW0KQpfS2F4trgQKhOYd8%3D" @@ -389,27 +388,26 @@ def test_add_static_append_version_filename_without_slash(router) -> None: def test_add_static_append_version_non_exists_file(router) -> None: - resource = router.add_static("/st", os.path.dirname(__file__), name="static") + resource = router.add_static("/st", pathlib.Path(__file__).parent, name="static") url = resource.url_for(filename="/non_exists_file", append_version=True) assert "/st/non_exists_file" == str(url) def test_add_static_append_version_non_exists_file_without_slash(router) -> None: - resource = router.add_static("/st", os.path.dirname(__file__), name="static") + resource = router.add_static("/st", pathlib.Path(__file__).parent, name="static") url = resource.url_for(filename="non_exists_file", append_version=True) assert "/st/non_exists_file" == str(url) -def test_add_static_append_version_follow_symlink(router, tmpdir) -> None: +def test_add_static_append_version_follow_symlink(router, tmp_path) -> None: # Tests the access to a symlink, in static folder with apeend_version - tmp_dir_path = str(tmpdir) - symlink_path = os.path.join(tmp_dir_path, "append_version_symlink") - symlink_target_path = os.path.dirname(__file__) - os.symlink(symlink_target_path, symlink_path, True) + symlink_path = tmp_path / "append_version_symlink" + symlink_target_path = pathlib.Path(__file__).parent + pathlib.Path(str(symlink_path)).symlink_to(str(symlink_target_path), True) # Register global static route: resource = router.add_static( - "/st", tmp_dir_path, follow_symlinks=True, append_version=True + "/st", str(tmp_path), follow_symlinks=True, append_version=True ) url = resource.url_for(filename="/append_version_symlink/data.unknown_mime_type") @@ -421,16 +419,16 @@ def test_add_static_append_version_follow_symlink(router, tmpdir) -> None: assert expect_url == str(url) -def test_add_static_append_version_not_follow_symlink(router, tmpdir) -> None: +def 
test_add_static_append_version_not_follow_symlink(router, tmp_path) -> None: # Tests the access to a symlink, in static folder with apeend_version - tmp_dir_path = str(tmpdir) - symlink_path = os.path.join(tmp_dir_path, "append_version_symlink") - symlink_target_path = os.path.dirname(__file__) - os.symlink(symlink_target_path, symlink_path, True) + symlink_path = tmp_path / "append_version_symlink" + symlink_target_path = pathlib.Path(__file__).parent + + pathlib.Path(str(symlink_path)).symlink_to(str(symlink_target_path), True) # Register global static route: resource = router.add_static( - "/st", tmp_dir_path, follow_symlinks=False, append_version=True + "/st", str(tmp_path), follow_symlinks=False, append_version=True ) filename = "/append_version_symlink/data.unknown_mime_type" @@ -467,7 +465,7 @@ def test_dynamic_not_match(router) -> None: async def test_static_not_match(router) -> None: - router.add_static("/pre", os.path.dirname(aiohttp.__file__), name="name") + router.add_static("/pre", pathlib.Path(aiohttp.__file__).parent, name="name") resource = router["name"] ret = await resource.resolve(make_mocked_request("GET", "/another/path")) assert (None, set()) == ret @@ -503,17 +501,17 @@ def test_contains(router) -> None: def test_static_repr(router) -> None: - router.add_static("/get", os.path.dirname(aiohttp.__file__), name="name") + router.add_static("/get", pathlib.Path(aiohttp.__file__).parent, name="name") assert Matches(r" None: - route = router.add_static("/prefix", os.path.dirname(aiohttp.__file__)) + route = router.add_static("/prefix", pathlib.Path(aiohttp.__file__).parent) assert "/prefix" == route._prefix def test_static_remove_trailing_slash(router) -> None: - route = router.add_static("/prefix/", os.path.dirname(aiohttp.__file__)) + route = router.add_static("/prefix/", pathlib.Path(aiohttp.__file__).parent) assert "/prefix" == route._prefix @@ -778,7 +776,7 @@ def test_named_resources(router) -> None: route1 = router.add_route("GET", "/plain", 
make_handler(), name="route1") route2 = router.add_route("GET", "/variable/{name}", make_handler(), name="route2") route3 = router.add_static( - "/static", os.path.dirname(aiohttp.__file__), name="route3" + "/static", pathlib.Path(aiohttp.__file__).parent, name="route3" ) names = {route1.name, route2.name, route3.name} @@ -943,11 +941,11 @@ def test_resources_abc(router) -> None: def test_static_route_user_home(router) -> None: here = pathlib.Path(aiohttp.__file__).parent - home = pathlib.Path(os.path.expanduser("~")) - if not str(here).startswith(str(home)): # pragma: no cover + try: + static_dir = pathlib.Path("~") / here.relative_to(pathlib.Path.home()) + except ValueError: # pragma: no cover pytest.skip("aiohttp folder is not placed in user's HOME") - static_dir = "~/" + str(here.relative_to(home)) - route = router.add_static("/st", static_dir) + route = router.add_static("/st", str(static_dir)) assert here == route.get_info()["directory"] @@ -958,13 +956,13 @@ def test_static_route_points_to_file(router) -> None: async def test_404_for_static_resource(router) -> None: - resource = router.add_static("/st", os.path.dirname(aiohttp.__file__)) + resource = router.add_static("/st", pathlib.Path(aiohttp.__file__).parent) ret = await resource.resolve(make_mocked_request("GET", "/unknown/path")) assert (None, set()) == ret async def test_405_for_resource_adapter(router) -> None: - resource = router.add_static("/st", os.path.dirname(aiohttp.__file__)) + resource = router.add_static("/st", pathlib.Path(aiohttp.__file__).parent) ret = await resource.resolve(make_mocked_request("POST", "/st/abc.py")) assert (None, {"HEAD", "GET"}) == ret @@ -979,12 +977,12 @@ async def test_check_allowed_method_for_found_resource(router) -> None: def test_url_for_in_static_resource(router) -> None: - resource = router.add_static("/static", os.path.dirname(aiohttp.__file__)) + resource = router.add_static("/static", pathlib.Path(aiohttp.__file__).parent) assert URL("/static/file.txt") == 
resource.url_for(filename="file.txt") def test_url_for_in_static_resource_pathlib(router) -> None: - resource = router.add_static("/static", os.path.dirname(aiohttp.__file__)) + resource = router.add_static("/static", pathlib.Path(aiohttp.__file__).parent) assert URL("/static/file.txt") == resource.url_for( filename=pathlib.Path("file.txt") ) @@ -1163,7 +1161,7 @@ def test_frozen_app_on_subapp(app) -> None: def test_set_options_route(router) -> None: - resource = router.add_static("/static", os.path.dirname(aiohttp.__file__)) + resource = router.add_static("/static", pathlib.Path(aiohttp.__file__).parent) options = None for route in resource: if route.method == "OPTIONS": @@ -1233,7 +1231,7 @@ def test_dynamic_resource_canonical() -> None: def test_static_resource_canonical() -> None: prefix = "/prefix" - directory = str(os.path.dirname(aiohttp.__file__)) + directory = str(pathlib.Path(aiohttp.__file__).parent) canonical = prefix res = StaticResource(prefix=prefix, directory=directory) assert res.canonical == canonical diff --git a/tests/test_web_sendfile_functional.py b/tests/test_web_sendfile_functional.py index b044f29bc81..d67d67743ba 100644 --- a/tests/test_web_sendfile_functional.py +++ b/tests/test_web_sendfile_functional.py @@ -1,5 +1,4 @@ import asyncio -import os import pathlib import socket import zlib @@ -503,7 +502,7 @@ async def test_static_file_ssl( aiohttp_client, client_ssl_ctx, ) -> None: - dirname = os.path.dirname(__file__) + dirname = pathlib.Path(__file__).parent filename = "data.unknown_mime_type" app = web.Application() app.router.add_static("/static", dirname) @@ -524,9 +523,10 @@ async def test_static_file_ssl( async def test_static_file_directory_traversal_attack(aiohttp_client) -> None: - dirname = os.path.dirname(__file__) + dirname = pathlib.Path(__file__).parent relpath = "../README.rst" - assert os.path.isfile(os.path.join(dirname, relpath)) + full_path = dirname / relpath + assert full_path.is_file() app = web.Application() 
app.router.add_static("/static", dirname) @@ -541,7 +541,7 @@ async def test_static_file_directory_traversal_attack(aiohttp_client) -> None: assert 404 == resp.status await resp.release() - url_abspath = "/static/" + os.path.abspath(os.path.join(dirname, relpath)) + url_abspath = "/static/" + str(full_path.resolve()) resp = await client.get(url_abspath) assert 403 == resp.status await resp.release() @@ -550,36 +550,36 @@ async def test_static_file_directory_traversal_attack(aiohttp_client) -> None: def test_static_route_path_existence_check() -> None: - directory = os.path.dirname(__file__) + directory = pathlib.Path(__file__).parent web.StaticResource("/", directory) - nodirectory = os.path.join(directory, "nonexistent-uPNiOEAg5d") + nodirectory = directory / "nonexistent-uPNiOEAg5d" with pytest.raises(ValueError): web.StaticResource("/", nodirectory) async def test_static_file_huge(aiohttp_client, tmp_path) -> None: - filename = "huge_data.unknown_mime_type" + file_path = tmp_path / "huge_data.unknown_mime_type" # fill 20MB file - with (tmp_path / filename).open("wb") as f: + with file_path.open("wb") as f: for i in range(1024 * 20): f.write((chr(i % 64 + 0x20) * 1024).encode()) - file_st = os.stat(str(tmp_path / filename)) + file_st = file_path.stat() app = web.Application() app.router.add_static("/static", str(tmp_path)) client = await aiohttp_client(app) - resp = await client.get("/static/" + filename) + resp = await client.get("/static/" + file_path.name) assert 200 == resp.status ct = resp.headers["CONTENT-TYPE"] assert "application/octet-stream" == ct assert resp.headers.get("CONTENT-ENCODING") is None assert int(resp.headers.get("CONTENT-LENGTH")) == file_st.st_size - f = (tmp_path / filename).open("rb") + f = file_path.open("rb") off = 0 cnt = 0 while off < file_st.st_size: @@ -989,10 +989,10 @@ async def handler(request): async def test_static_file_huge_cancel(aiohttp_client, tmp_path) -> None: - filename = "huge_data.unknown_mime_type" + file_path = 
tmp_path / "huge_data.unknown_mime_type" # fill 100MB file - with (tmp_path / filename).open("wb") as f: + with file_path.open("wb") as f: for i in range(1024 * 20): f.write((chr(i % 64 + 0x20) * 1024).encode()) @@ -1005,7 +1005,7 @@ async def handler(request): tr = request.transport sock = tr.get_extra_info("socket") sock.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 1024) - ret = web.FileResponse(pathlib.Path(str(tmp_path / filename))) + ret = web.FileResponse(file_path) return ret app = web.Application() @@ -1030,10 +1030,10 @@ async def handler(request): async def test_static_file_huge_error(aiohttp_client, tmp_path) -> None: - filename = "huge_data.unknown_mime_type" + file_path = tmp_path / "huge_data.unknown_mime_type" # fill 20MB file - with (tmp_path / filename).open("wb") as f: + with file_path.open("wb") as f: f.seek(20 * 1024 * 1024) f.write(b"1") @@ -1042,7 +1042,7 @@ async def handler(request): tr = request.transport sock = tr.get_extra_info("socket") sock.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 1024) - ret = web.FileResponse(pathlib.Path(str(tmp_path / filename))) + ret = web.FileResponse(file_path) return ret app = web.Application() diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 5a24b2ea30c..7e8fe53165d 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -1,9 +1,6 @@ import asyncio import functools -import os import pathlib -import shutil -import tempfile from typing import Optional from unittest import mock from unittest.mock import MagicMock @@ -16,24 +13,6 @@ from aiohttp.web_urldispatcher import Resource, SystemRoute -@pytest.fixture(scope="function") -def tmp_dir_path(request): - """ - Give a path for a temporary directory - - The directory is destroyed at the end of the test. - """ - # Temporary directory. 
- tmp_dir = tempfile.mkdtemp() - - def teardown(): - # Delete the whole directory: - shutil.rmtree(tmp_dir) - - request.addfinalizer(teardown) - return tmp_dir - - @pytest.mark.parametrize( "show_index,status,prefix,data", [ @@ -63,7 +42,7 @@ def teardown(): ], ) async def test_access_root_of_static_handler( - tmp_dir_path: pathlib.Path, + tmp_path: pathlib.Path, aiohttp_client: AiohttpClient, show_index: bool, status: int, @@ -74,22 +53,22 @@ async def test_access_root_of_static_handler( # Try to access the root of static file server, and make # sure that correct HTTP statuses are returned depending if we directory # index should be shown or not. - # Put a file inside tmp_dir_path: - my_file_path = os.path.join(tmp_dir_path, "my_file") - with open(my_file_path, "w") as fw: - fw.write("hello") + # Put a file inside tmp_path: + my_file = tmp_path / "my_file" + my_dir = tmp_path / "my_dir" + my_dir.mkdir() + my_file_in_dir = my_dir / "my_file_in_dir" - my_dir_path = os.path.join(tmp_dir_path, "my_dir") - os.mkdir(my_dir_path) + with my_file.open("w") as fw: + fw.write("hello") - my_file_path = os.path.join(my_dir_path, "my_file_in_dir") - with open(my_file_path, "w") as fw: + with my_file_in_dir.open("w") as fw: fw.write("world") app = web.Application() # Register global static route: - app.router.add_static(prefix, tmp_dir_path, show_index=show_index) + app.router.add_static(prefix, str(tmp_path), show_index=show_index) client = await aiohttp_client(app) # Request the root of the static directory. 
@@ -103,25 +82,25 @@ async def test_access_root_of_static_handler( async def test_follow_symlink( - tmp_dir_path: pathlib.Path, aiohttp_client: AiohttpClient + tmp_path: pathlib.Path, aiohttp_client: AiohttpClient ) -> None: # Tests the access to a symlink, in static folder data = "hello world" - my_dir_path = os.path.join(tmp_dir_path, "my_dir") - os.mkdir(my_dir_path) + my_dir_path = tmp_path / "my_dir" + my_dir_path.mkdir() - my_file_path = os.path.join(my_dir_path, "my_file_in_dir") - with open(my_file_path, "w") as fw: + my_file_path = my_dir_path / "my_file_in_dir" + with my_file_path.open("w") as fw: fw.write(data) - my_symlink_path = os.path.join(tmp_dir_path, "my_symlink") - os.symlink(my_dir_path, my_symlink_path) + my_symlink_path = tmp_path / "my_symlink" + pathlib.Path(str(my_symlink_path)).symlink_to(str(my_dir_path), True) app = web.Application() # Register global static route: - app.router.add_static("/", tmp_dir_path, follow_symlinks=True) + app.router.add_static("/", str(tmp_path), follow_symlinks=True) client = await aiohttp_client(app) # Request the root of the static directory. 
@@ -229,7 +208,7 @@ async def test_follow_symlink_directory_traversal_after_normalization( ], ) async def test_access_to_the_file_with_spaces( - tmp_dir_path: pathlib.Path, + tmp_path: pathlib.Path, aiohttp_client: AiohttpClient, dir_name: str, filename: str, @@ -237,21 +216,19 @@ ) -> None: # Checks operation of static files with spaces - my_dir_path = os.path.join(tmp_dir_path, dir_name) - - if dir_name: - os.mkdir(my_dir_path) - - my_file_path = os.path.join(my_dir_path, filename) + my_dir_path = tmp_path / dir_name + if my_dir_path != tmp_path: + my_dir_path.mkdir() - with open(my_file_path, "w") as fw: + my_file_path = my_dir_path / filename + with my_file_path.open("w") as fw: fw.write(data) app = web.Application() - url = os.path.join("/", dir_name, filename) + url = "/" + str(pathlib.Path(dir_name, filename)) - app.router.add_static("/", tmp_dir_path) + app.router.add_static("/", str(tmp_path)) client = await aiohttp_client(app) r = await client.get(url) @@ -260,7 +237,7 @@ async def test_access_to_the_file_with_spaces( async def test_access_non_existing_resource( - tmp_dir_path: pathlib.Path, aiohttp_client: AiohttpClient + tmp_path: pathlib.Path, aiohttp_client: AiohttpClient ) -> None: # Tests accessing non-existing resource # Try to access a non-existing resource and make sure that 404 HTTP status @@ -268,7 +245,7 @@ async def test_access_non_existing_resource( app = web.Application() # Register global static route: - app.router.add_static("/", tmp_dir_path, show_index=True) + app.router.add_static("/", str(tmp_path), show_index=True) client = await aiohttp_client(app) # Request the root of the static directory. @@ -322,13 +299,13 @@ def sync_handler(request): async def test_unauthorized_folder_access( - tmp_dir_path: pathlib.Path, aiohttp_client: AiohttpClient + tmp_path: pathlib.Path, aiohttp_client: AiohttpClient ) -> None: # Tests the unauthorized access to a folder of static file server.
# Try to list a folder content of static file server when server does not # have permissions to do so for the folder. - my_dir_path = os.path.join(tmp_dir_path, "my_dir") - os.mkdir(my_dir_path) + my_dir = tmp_path / "my_dir" + my_dir.mkdir() app = web.Application() @@ -340,34 +317,34 @@ async def test_unauthorized_folder_access( path_constructor.return_value = path # Register global static route: - app.router.add_static("/", tmp_dir_path, show_index=True) + app.router.add_static("/", str(tmp_path), show_index=True) client = await aiohttp_client(app) # Request the root of the static directory. - r = await client.get("/my_dir") + r = await client.get("/" + my_dir.name) assert r.status == 403 async def test_access_symlink_loop( - tmp_dir_path: pathlib.Path, aiohttp_client: AiohttpClient + tmp_path: pathlib.Path, aiohttp_client: AiohttpClient ) -> None: # Tests the access to a looped symlink, which could not be resolved. - my_dir_path = os.path.join(tmp_dir_path, "my_symlink") - os.symlink(my_dir_path, my_dir_path) + my_dir_path = tmp_path / "my_symlink" + pathlib.Path(str(my_dir_path)).symlink_to(str(my_dir_path), True) app = web.Application() # Register global static route: - app.router.add_static("/", tmp_dir_path, show_index=True) + app.router.add_static("/", str(tmp_path), show_index=True) client = await aiohttp_client(app) # Request the root of the static directory. - r = await client.get("/my_symlink") + r = await client.get("/" + my_dir_path.name) assert r.status == 404 async def test_access_special_resource( - tmp_dir_path: pathlib.Path, aiohttp_client: AiohttpClient + tmp_path: pathlib.Path, aiohttp_client: AiohttpClient ) -> None: # Tests the access to a resource that is neither a file nor a directory. # Checks that if a special resource is accessed (f.e. 
named pipe or UNIX @@ -387,7 +364,7 @@ async def test_access_special_resource( path_constructor.return_value = path # Register global static route: - app.router.add_static("/", tmp_dir_path, show_index=True) + app.router.add_static("/", str(tmp_path), show_index=True) client = await aiohttp_client(app) # Request the root of the static directory. @@ -622,20 +599,20 @@ async def test_static_absolute_url( # /static/\\machine_name\c$ or /static/D:\path # where the static dir is totally different app = web.Application() - fname = tmp_path / "file.txt" - fname.write_text("sample text", "ascii") + file_path = tmp_path / "file.txt" + file_path.write_text("sample text", "ascii") here = pathlib.Path(__file__).parent app.router.add_static("/static", here) client = await aiohttp_client(app) - resp = await client.get("/static/" + str(fname)) + resp = await client.get("/static/" + str(file_path.resolve())) assert resp.status == 403 async def test_for_issue_5250( - aiohttp_client: AiohttpClient, tmp_dir_path: pathlib.Path + aiohttp_client: AiohttpClient, tmp_path: pathlib.Path ) -> None: app = web.Application() - app.router.add_static("/foo", tmp_dir_path) + app.router.add_static("/foo", tmp_path) async def get_foobar(request: web.Request) -> web.Response: return web.Response(body="success!") diff --git a/tools/check_changes.py b/tools/check_changes.py index 118d1182b9a..6cc4d050cd8 100755 --- a/tools/check_changes.py +++ b/tools/check_changes.py @@ -22,7 +22,7 @@ def get_root(script_path): - folder = script_path.absolute().parent + folder = script_path.resolve().parent while not (folder / ".git").exists(): folder = folder.parent if folder == folder.anchor: From 0124dc58f4a06a7316925e4c10e00033cabe1d2d Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 28 Jan 2024 22:04:09 +0000 Subject: [PATCH 0074/1511] Merge 3.9 --- CHANGES.rst | 197 +++++++++++++++++++++++++++++++++++++ CHANGES/3551.misc | 1 - CHANGES/7698.feature | 1 - CHANGES/7916.doc | 1 - CHANGES/7978.bugfix | 1 - 
CHANGES/7995.doc | 1 - CHANGES/8012.bugfix | 1 - CHANGES/8014.bugfix | 1 - CHANGES/8021.bugfix | 1 - CHANGES/8066.contrib.rst | 21 ---- CHANGES/8066.packaging.rst | 1 - CHANGES/8067.doc.rst | 3 - CHANGES/8074.bugfix.rst | 5 - CHANGES/8079.bugfix.rst | 1 - 14 files changed, 197 insertions(+), 39 deletions(-) delete mode 100644 CHANGES/3551.misc delete mode 100644 CHANGES/7698.feature delete mode 100644 CHANGES/7916.doc delete mode 100644 CHANGES/7978.bugfix delete mode 100644 CHANGES/7995.doc delete mode 100644 CHANGES/8012.bugfix delete mode 100644 CHANGES/8014.bugfix delete mode 100644 CHANGES/8021.bugfix delete mode 100644 CHANGES/8066.contrib.rst delete mode 120000 CHANGES/8066.packaging.rst delete mode 100644 CHANGES/8067.doc.rst delete mode 100644 CHANGES/8074.bugfix.rst delete mode 100644 CHANGES/8079.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index 8c2a2707408..84b0b5b7a99 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,203 @@ .. towncrier release notes start +======= +3.9.2 (2024-01-28) +================== + +Bug fixes +--------- + +- Fixed server-side websocket connection leak. + + + *Related issues and pull requests on GitHub:* + :issue:`7978`. + + + +- Fixed ``web.FileResponse`` doing blocking I/O in the event loop. + + + *Related issues and pull requests on GitHub:* + :issue:`8012`. + + + +- Fixed double compress when compression enabled and compressed file exists in server file responses. + + + *Related issues and pull requests on GitHub:* + :issue:`8014`. + + + +- Added runtime type check for ``ClientSession`` ``timeout`` parameter. + + + *Related issues and pull requests on GitHub:* + :issue:`8021`. + + + +- Fixed an unhandled exception in the Python HTTP parser on header lines starting with a colon -- by :user:`pajod`. + + Invalid request lines with anything but a dot between the HTTP major and minor version are now rejected. + Invalid header field names containing question mark or slash are now rejected. 
+ Such requests are incompatible with :rfc:`9110#section-5.6.2` and are not known to be of any legitimate use. + + + *Related issues and pull requests on GitHub:* + :issue:`8074`. + + + +- Improved validation of paths for static resources requests to the server -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8079`. + + + + +Features +-------- + +- Added support for passing :py:data:`True` to ``ssl`` parameter in ``ClientSession`` while + deprecating :py:data:`None` -- by :user:`xiangyan99`. + + + *Related issues and pull requests on GitHub:* + :issue:`7698`. + + + +Breaking changes +---------------- + +- Fixed an unhandled exception in the Python HTTP parser on header lines starting with a colon -- by :user:`pajod`. + + Invalid request lines with anything but a dot between the HTTP major and minor version are now rejected. + Invalid header field names containing question mark or slash are now rejected. + Such requests are incompatible with :rfc:`9110#section-5.6.2` and are not known to be of any legitimate use. + + + *Related issues and pull requests on GitHub:* + :issue:`8074`. + + + + +Improved documentation +---------------------- + +- Fixed examples of ``fallback_charset_resolver`` function in the :doc:`client_advanced` document. -- by :user:`henry0312`. + + + *Related issues and pull requests on GitHub:* + :issue:`7995`. + + + +- The Sphinx setup was updated to avoid showing the empty + changelog draft section in the tagged release documentation + builds on Read The Docs -- by :user:`webknjaz`. + + + *Related issues and pull requests on GitHub:* + :issue:`8067`. + + + + +Packaging updates and notes for downstreams +------------------------------------------- + +- The changelog categorization was made clearer. The + contributors can now mark their fragment files more + accurately -- by :user:`webknjaz`. 
+ + The new category tags are: + + * ``bugfix`` + + * ``feature`` + + * ``deprecation`` + + * ``breaking`` (previously, ``removal``) + + * ``doc`` + + * ``packaging`` + + * ``contrib`` + + * ``misc`` + + + *Related issues and pull requests on GitHub:* + :issue:`8066`. + + + + +Contributor-facing changes +-------------------------- + +- Updated :ref:`contributing/Tests coverage ` section to show how we use ``codecov`` -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`7916`. + + + +- The changelog categorization was made clearer. The + contributors can now mark their fragment files more + accurately -- by :user:`webknjaz`. + + The new category tags are: + + * ``bugfix`` + + * ``feature`` + + * ``deprecation`` + + * ``breaking`` (previously, ``removal``) + + * ``doc`` + + * ``packaging`` + + * ``contrib`` + + * ``misc`` + + + *Related issues and pull requests on GitHub:* + :issue:`8066`. + + + + +Miscellaneous internal changes +------------------------------ + +- Replaced all ``tmpdir`` fixtures with ``tmp_path`` in test suite. + + + *Related issues and pull requests on GitHub:* + :issue:`3551`. + + + + +---- + + 3.9.1 (2023-11-26) ================== diff --git a/CHANGES/3551.misc b/CHANGES/3551.misc deleted file mode 100644 index 63965c14821..00000000000 --- a/CHANGES/3551.misc +++ /dev/null @@ -1 +0,0 @@ -Replace all tmpdir fixtures with tmp_path in test suite. diff --git a/CHANGES/7698.feature b/CHANGES/7698.feature deleted file mode 100644 index e8c4b3fb452..00000000000 --- a/CHANGES/7698.feature +++ /dev/null @@ -1 +0,0 @@ -Added support for passing `True` to `ssl` while deprecating `None`. -- by :user:`xiangyan99` diff --git a/CHANGES/7916.doc b/CHANGES/7916.doc deleted file mode 100644 index b616ae85bbe..00000000000 --- a/CHANGES/7916.doc +++ /dev/null @@ -1 +0,0 @@ -Updated :ref:`contributing/Tests coverage ` section to show how we use ``codecov`` -- by :user:`Dreamsorcerer`. 
diff --git a/CHANGES/7978.bugfix b/CHANGES/7978.bugfix deleted file mode 100644 index 3c7dc096ca7..00000000000 --- a/CHANGES/7978.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix websocket connection leak diff --git a/CHANGES/7995.doc b/CHANGES/7995.doc deleted file mode 100644 index 70e3dfa5469..00000000000 --- a/CHANGES/7995.doc +++ /dev/null @@ -1 +0,0 @@ -Fix examples of `fallback_charset_resolver` function in client_advanced documentation. -- by :user:`henry0312` diff --git a/CHANGES/8012.bugfix b/CHANGES/8012.bugfix deleted file mode 100644 index f5187075f3f..00000000000 --- a/CHANGES/8012.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix `web.FileResponse` doing blocking I/O in the event loop diff --git a/CHANGES/8014.bugfix b/CHANGES/8014.bugfix deleted file mode 100644 index 681bb5966ae..00000000000 --- a/CHANGES/8014.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix double compress when compression enabled and compressed file exists diff --git a/CHANGES/8021.bugfix b/CHANGES/8021.bugfix deleted file mode 100644 index f43843a587f..00000000000 --- a/CHANGES/8021.bugfix +++ /dev/null @@ -1 +0,0 @@ -Add runtime type check for ``ClientSession`` ``timeout`` parameter. diff --git a/CHANGES/8066.contrib.rst b/CHANGES/8066.contrib.rst deleted file mode 100644 index 2468018e99b..00000000000 --- a/CHANGES/8066.contrib.rst +++ /dev/null @@ -1,21 +0,0 @@ -The changelog categorization was made clearer. The -contributors can now mark their fragment files more -accurately -- by :user:`webknjaz`. 
- -The new category tags are: - - * ``bugfix`` - - * ``feature`` - - * ``deprecation`` - - * ``breaking`` (previously, ``removal``) - - * ``doc`` - - * ``packaging`` - - * ``contrib`` - - * ``misc`` diff --git a/CHANGES/8066.packaging.rst b/CHANGES/8066.packaging.rst deleted file mode 120000 index 57cdff225f5..00000000000 --- a/CHANGES/8066.packaging.rst +++ /dev/null @@ -1 +0,0 @@ -8066.contrib.rst \ No newline at end of file diff --git a/CHANGES/8067.doc.rst b/CHANGES/8067.doc.rst deleted file mode 100644 index 3206db9ae87..00000000000 --- a/CHANGES/8067.doc.rst +++ /dev/null @@ -1,3 +0,0 @@ -The Sphinx setup was updated to avoid showing the empty -changelog draft section in the tagged release documentation -builds on Read The Docs -- by :user:`webknjaz`. diff --git a/CHANGES/8074.bugfix.rst b/CHANGES/8074.bugfix.rst deleted file mode 100644 index 16c71445476..00000000000 --- a/CHANGES/8074.bugfix.rst +++ /dev/null @@ -1,5 +0,0 @@ -Fixed an unhandled exception in the Python HTTP parser on header lines starting with a colon -- by :user:`pajod`. - -Invalid request lines with anything but a dot between the HTTP major and minor version are now rejected. Invalid header field names containing question mark or slash are now rejected. Such requests are incompatible with :rfc:`9110#section-5.6.2` and are not known to be of any legitimate use. - -(BACKWARD INCOMPATIBLE) diff --git a/CHANGES/8079.bugfix.rst b/CHANGES/8079.bugfix.rst deleted file mode 100644 index 57bc8bfebcc..00000000000 --- a/CHANGES/8079.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Improved validation of paths for static resources -- by :user:`bdraco`. From b58e50367c5187711a15ee8c34f4467d1ee7e2a9 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 28 Jan 2024 23:41:48 +0000 Subject: [PATCH 0075/1511] Fix merge to 3.10 --- CHANGES.rst | 1 - 1 file changed, 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index 84b0b5b7a99..c26cc90e76e 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,7 +10,6 @@ .. 
towncrier release notes start -======= 3.9.2 (2024-01-28) ================== From f3aeb9a0ef35ed10d85643261b7f20fb8f6dc10a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Jan 2024 10:41:22 +0000 Subject: [PATCH 0076/1511] Bump coverage from 7.4.0 to 7.4.1 (#8090) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.4.0 to 7.4.1.
Changelog

Sourced from coverage's changelog.

Version 7.4.1 — 2024-01-26

  • Python 3.13.0a3 is supported.

  • Fix: the JSON report now includes an explicit format version number, closing issue 1732_.

.. _issue 1732: nedbat/coveragepy#1732

.. _changes_7-4-0:

Commits
  • 07588ea test: give hypothesis a little more time
  • 2c96518 build: tags should be signed
  • 8d1857f docs: sample HTML for 7.4.1
  • ddc88f7 docs: prep for 7.4.1
  • 98cd671 docs: correct two library urls
  • 498b8c9 build: coverage runs have to skip windows pypy too
  • 75b22f0 test: ignore color in tracebacks
  • b7c41a2 build: show action environment variables for debugging
  • f8be865 build: run actions on 3.13 since a3 came out.
  • de60a6d build(deps): bump actions/dependency-review-action from 3 to 4
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=coverage&package-manager=pip&previous-version=7.4.0&new-version=7.4.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 4d955baf48e..c9d48ea05d3 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -54,7 +54,7 @@ click==8.0.3 # towncrier # typer # wait-for-it -coverage==7.4.0 +coverage==7.4.1 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/dev.txt b/requirements/dev.txt index 048eac2728a..945a21380d1 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -52,7 +52,7 @@ click==8.1.6 # towncrier # typer # wait-for-it -coverage==7.4.0 +coverage==7.4.1 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/test.txt b/requirements/test.txt index 15542e77ca1..cc531b48df1 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -30,7 +30,7 @@ click==8.1.6 # via # typer # wait-for-it -coverage==7.4.0 +coverage==7.4.1 # via # -r requirements/test.in # pytest-cov From aca206fc27ecec4e0dc14de6ab11816e7f35409c Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 29 Jan 2024 17:29:15 +0000 Subject: [PATCH 0077/1511] Fix backwards compatibility with ssl (#8098) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes #8097. 
--------- Co-authored-by: Sviatoslav Sydorenko (Святослав Сидоренко) --- CHANGES/8097.bugfix.rst | 1 + CHANGES/8098.bugfix.rst | 2 ++ aiohttp/client_reqrep.py | 2 ++ tests/test_connector.py | 5 +++++ 4 files changed, 10 insertions(+) create mode 120000 CHANGES/8097.bugfix.rst create mode 100644 CHANGES/8098.bugfix.rst diff --git a/CHANGES/8097.bugfix.rst b/CHANGES/8097.bugfix.rst new file mode 120000 index 00000000000..c799b052a6c --- /dev/null +++ b/CHANGES/8097.bugfix.rst @@ -0,0 +1 @@ +8098.bugfix.rst \ No newline at end of file diff --git a/CHANGES/8098.bugfix.rst b/CHANGES/8098.bugfix.rst new file mode 100644 index 00000000000..0242fb978e5 --- /dev/null +++ b/CHANGES/8098.bugfix.rst @@ -0,0 +1,2 @@ +Fixed backwards compatibility breakage of ``ssl`` parameter when set outside of +``ClientSession`` (e.g. directly in ``TCPConnector``) -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index bb43ae9318d..e0de951a33a 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -159,6 +159,8 @@ def _merge_ssl_params( ssl_context: Optional["SSLContext"], fingerprint: Optional[bytes], ) -> Union["SSLContext", bool, Fingerprint]: + if ssl is None: + ssl = True # Double check for backwards compatibility if verify_ssl is not None and not verify_ssl: warnings.warn( "verify_ssl is deprecated, use ssl=False instead", diff --git a/tests/test_connector.py b/tests/test_connector.py index 84c03fc6fb5..fe027df896c 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -2249,6 +2249,11 @@ async def test_default_use_dns_cache() -> None: assert conn.use_dns_cache +async def test_ssl_none() -> None: + conn = aiohttp.TCPConnector(ssl=None) + assert conn._ssl is True + + async def test_resolver_not_called_with_address_is_ip(loop) -> None: resolver = mock.MagicMock() connector = aiohttp.TCPConnector(resolver=resolver) From 4a68fa22897ed1530d261a4ad0a1ff3e200cd03a Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 29 
Jan 2024 20:24:22 +0000 Subject: [PATCH 0078/1511] Merge 3.9 --- CHANGES.rst | 31 +++++++++++++++++++++++++++++++ CHANGES/3957.misc | 1 - CHANGES/8097.bugfix.rst | 1 - CHANGES/8098.bugfix.rst | 2 -- 4 files changed, 31 insertions(+), 4 deletions(-) delete mode 100644 CHANGES/3957.misc delete mode 120000 CHANGES/8097.bugfix.rst delete mode 100644 CHANGES/8098.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index c26cc90e76e..64dff9b516d 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,37 @@ .. towncrier release notes start +3.9.3 (2024-01-29) +================== + +Bug fixes +--------- + +- Fixed backwards compatibility breakage (in 3.9.2) of ``ssl`` parameter when set outside + of ``ClientSession`` (e.g. directly in ``TCPConnector``) -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8097`, :issue:`8098`. + + + + +Miscellaneous internal changes +------------------------------ + +- Improved test suite handling of paths and temp files to consistently use pathlib and pytest fixtures. + + + *Related issues and pull requests on GitHub:* + :issue:`3957`. + + + + +---- + + 3.9.2 (2024-01-28) ================== diff --git a/CHANGES/3957.misc b/CHANGES/3957.misc deleted file mode 100644 index b4f9f58edb9..00000000000 --- a/CHANGES/3957.misc +++ /dev/null @@ -1 +0,0 @@ -Improve test suite handling of paths and temp files to consistently use pathlib and pytest fixtures. diff --git a/CHANGES/8097.bugfix.rst b/CHANGES/8097.bugfix.rst deleted file mode 120000 index c799b052a6c..00000000000 --- a/CHANGES/8097.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -8098.bugfix.rst \ No newline at end of file diff --git a/CHANGES/8098.bugfix.rst b/CHANGES/8098.bugfix.rst deleted file mode 100644 index 0242fb978e5..00000000000 --- a/CHANGES/8098.bugfix.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fixed backwards compatibility breakage of ``ssl`` parameter when set outside of -``ClientSession`` (e.g. directly in ``TCPConnector``) -- by :user:`Dreamsorcerer`. 
From 9e938f1ccf78346241342f1aa2fd590eee331a72 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 29 Jan 2024 21:08:36 +0000 Subject: [PATCH 0079/1511] Bump version --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 12209def6c4..6f6ab8e6b99 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.9.3" +__version__ = "3.9.3.dev0" from typing import TYPE_CHECKING, Tuple From 77052043da4c63cb2178523b9b6f62467eec1d9f Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 30 Jan 2024 01:37:02 +0100 Subject: [PATCH 0080/1511] =?UTF-8?q?[PR=20#8099/3aa243ae=20backport][3.10?= =?UTF-8?q?]=20=F0=9F=93=9D=F0=9F=92=85=20Ask=20PR=20submitters=20about=20?= =?UTF-8?q?the=20complexities=20(#8106)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **This is a backport of PR #8099 as merged into master (3aa243ae89c597b5576863a7ac309732f6756151).** I envision this may help the contributors and the maintainers be more mindful regarding what goes into the repository, by illuminating the connected burdens of maintaining the contributions long-term. ## What do these changes do? $sbj. ## Are there changes in behavior for the user? Nope. ## Related issue number N/A ## Checklist - [ ] I think the code is well written - [ ] Unit tests for the changes exist - [x] Documentation reflects the changes - [x] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. - [ ] Add a new news fragment into the `CHANGES/` folder * name it `..rst` (e.g. 
`588.bugfix.rst`) * if you don't have an issue number, change it to the pull request number after creating the PR * `.bugfix`: A bug fix for something the maintainers deemed an improper undesired behavior that got corrected to match pre-agreed expectations. * `.feature`: A new behavior, public APIs. That sort of stuff. * `.deprecation`: A declaration of future API removals and breaking changes in behavior. * `.breaking`: When something public is removed in a breaking way. Could be deprecated in an earlier release. * `.doc`: Notable updates to the documentation structure or build process. * `.packaging`: Notes for downstreams about unobvious side effects and tooling. Changes in the test invocation considerations and runtime assumptions. * `.contrib`: Stuff that affects the contributor experience. e.g. Running tests, building the docs, setting up the development environment. * `.misc`: Changes that are hard to assign to any of the above categories. * Make sure to use full sentences with correct case and punctuation, for example: ```rst Fixed issue with non-ascii contents in doctest text files -- by :user:`contributor-gh-handle`. ``` Use the past tense or the present tense a non-imperative mood, referring to what's changed compared to the last released version of this project. Co-authored-by: Sviatoslav Sydorenko --- .github/PULL_REQUEST_TEMPLATE.md | 14 ++++++++++++++ CHANGES/8099.contrib.rst | 4 ++++ 2 files changed, 18 insertions(+) create mode 100644 CHANGES/8099.contrib.rst diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 686f70cd975..d4b1dba4340 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -8,6 +8,20 @@ +## Is it a substantial burden for the maintainers to support this? 
+ + + ## Related issue number diff --git a/CHANGES/8099.contrib.rst b/CHANGES/8099.contrib.rst new file mode 100644 index 00000000000..827ecfa5827 --- /dev/null +++ b/CHANGES/8099.contrib.rst @@ -0,0 +1,4 @@ +The pull request template is now asking the contributors to +answer a question about the long-term maintenance challenges +they envision as a result of merging their patches +-- by :user:`webknjaz`. From ab2e368f7e5369682af2046f13e3558786782df3 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 30 Jan 2024 01:37:07 +0100 Subject: =?UTF-8?q?[PR=20#8099/3aa243ae=20backport][3.9]?= =?UTF-8?q?=20=F0=9F=93=9D=F0=9F=92=85=20Ask=20PR=20submitters=20about=20t?= =?UTF-8?q?he=20complexities=20(#8105)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **This is a backport of PR #8099 as merged into master (3aa243ae89c597b5576863a7ac309732f6756151).** I envision this may help the contributors and the maintainers be more mindful regarding what goes into the repository, by illuminating the connected burdens of maintaining the contributions long-term. ## What do these changes do? $sbj. ## Are there changes in behavior for the user? Nope. ## Related issue number N/A ## Checklist - [ ] I think the code is well written - [ ] Unit tests for the changes exist - [x] Documentation reflects the changes - [x] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. - [ ] Add a new news fragment into the `CHANGES/` folder * name it `<issue_id>.<type>.rst` (e.g. `588.bugfix.rst`) * if you don't have an issue number, change it to the pull request number after creating the PR * `.bugfix`: A bug fix for something the maintainers deemed an improper undesired behavior that got corrected to match pre-agreed expectations. * `.feature`: A new behavior, public APIs.
That sort of stuff. * `.deprecation`: A declaration of future API removals and breaking changes in behavior. * `.breaking`: When something public is removed in a breaking way. Could be deprecated in an earlier release. * `.doc`: Notable updates to the documentation structure or build process. * `.packaging`: Notes for downstreams about unobvious side effects and tooling. Changes in the test invocation considerations and runtime assumptions. * `.contrib`: Stuff that affects the contributor experience. e.g. Running tests, building the docs, setting up the development environment. * `.misc`: Changes that are hard to assign to any of the above categories. * Make sure to use full sentences with correct case and punctuation, for example: ```rst Fixed issue with non-ascii contents in doctest text files -- by :user:`contributor-gh-handle`. ``` Use the past tense or the present tense a non-imperative mood, referring to what's changed compared to the last released version of this project. Co-authored-by: Sviatoslav Sydorenko --- .github/PULL_REQUEST_TEMPLATE.md | 14 ++++++++++++++ CHANGES/8099.contrib.rst | 4 ++++ 2 files changed, 18 insertions(+) create mode 100644 CHANGES/8099.contrib.rst diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 686f70cd975..d4b1dba4340 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -8,6 +8,20 @@ +## Is it a substantial burden for the maintainers to support this? + + + ## Related issue number diff --git a/CHANGES/8099.contrib.rst b/CHANGES/8099.contrib.rst new file mode 100644 index 00000000000..827ecfa5827 --- /dev/null +++ b/CHANGES/8099.contrib.rst @@ -0,0 +1,4 @@ +The pull request template is now asking the contributors to +answer a question about the long-term maintenance challenges +they envision as a result of merging their patches +-- by :user:`webknjaz`. 
From 2a6e1c316029d713d0f69abb192029620c5ea3e5 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 30 Jan 2024 03:06:37 +0100 Subject: [PATCH 0082/1511] [PR #8107/854e6d87 backport][3.10] Partially revert "Add more information to contributing page (#7916)" (#8109) **This is a backport of PR #8107 as merged into master (854e6d8702c20e725b3e747ab48265ecf0184e20).** This partially reverts commit 822fbc7431f3c5522d3e587ad0b658bef8b6a0ab. In particular, this drops the top level title from README in the `CHANGES/` folder and restores the original label. For the proper Sphinx ToC structuring, a title in the `docs/contributing.rst` document. Co-authored-by: Sviatoslav Sydorenko --- CHANGES/README.rst | 22 +++++++--------------- docs/contributing.rst | 26 ++++++++++++++------------ 2 files changed, 21 insertions(+), 27 deletions(-) diff --git a/CHANGES/README.rst b/CHANGES/README.rst index 5beb8999226..37cd14d2cf8 100644 --- a/CHANGES/README.rst +++ b/CHANGES/README.rst @@ -1,15 +1,7 @@ -.. _Making a pull request: - -Making a pull request -===================== - -When making a pull request, please include a short summary of the changes -and a reference to any issue tickets that the PR is intended to solve. -All PRs with code changes should include tests. All changes should -include a changelog entry. +.. _Adding change notes with your PRs: Adding change notes with your PRs ---------------------------------- +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ It is very important to maintain a log for news of how updating to the new version of the software will affect @@ -27,7 +19,7 @@ to the end-users most of the time. And so such details should be recorded in the Git history rather than a changelog. Alright! So how to add a news fragment? ---------------------------------------- +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ``aiohttp`` uses `towncrier `_ for changelog management. 
@@ -47,9 +39,9 @@ linking parts of the docs or external sites. However, you do not need to reference the issue or PR numbers here as *towncrier* will automatically add a reference to all of the affected issues when rendering the news file. -If you wish to sign your change, feel free to add -``-- by :user:`github-username``` at the end (replace -``github-username`` with your own!). +If you wish to sign your change, feel free to add ``-- by +:user:`github-username``` at the end (replace ``github-username`` +with your own!). Finally, name your file following the convention that Towncrier understands: it should start with the number of an issue or a @@ -88,7 +80,7 @@ necessary to make a separate documentation fragment for documentation changes accompanying the relevant code changes. Examples for adding changelog entries to your Pull Requests ------------------------------------------------------------ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File :file:`CHANGES/6045.doc.1.rst`: diff --git a/docs/contributing.rst b/docs/contributing.rst index 84d223d0e0b..6497212813d 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -1,12 +1,12 @@ .. _aiohttp-contributing: Contributing -************ +============ (:doc:`contributing-admins`) Instructions for contributors -============================= +----------------------------- In order to make a clone of the GitHub_ repo: open the link and press the "Fork" button on the upper-right menu of the web page. @@ -25,7 +25,7 @@ Workflow is pretty straightforward: 4. Make sure all tests passed - 5. Add a file into the ``CHANGES`` folder (see `Making a pull request`_ for how). + 5. Add a file into the ``CHANGES`` folder (see `Changelog update`_ for how). 6. 
Commit changes to your own aiohttp clone @@ -53,7 +53,7 @@ Workflow is pretty straightforward: Preconditions for running aiohttp test suite -============================================ +-------------------------------------------- We expect you to use a python virtual environment to run our tests. @@ -116,7 +116,7 @@ Congratulations, you are ready to run the test suite! Run autoformatter -================= +----------------- The project uses black_ + isort_ formatters to keep the source code style. Please run `make fmt` after every change before starting tests. @@ -127,7 +127,7 @@ Please run `make fmt` after every change before starting tests. Run aiohttp test suite -====================== +---------------------- After all the preconditions are met you can run tests typing the next command: @@ -159,7 +159,7 @@ Any extra texts (print statements and so on) should be removed. make test-3.10-no-extensions Code coverage -============= +------------- We use *codecov.io* as an indispensable tool for analyzing our coverage results. Visit https://codecov.io/gh/aio-libs/aiohttp to see coverage @@ -226,7 +226,7 @@ $ python -m webbrowser -n file://"$(pwd)"/htmlcov/index.html ``` Documentation -============= +------------- We encourage documentation improvements. @@ -242,7 +242,7 @@ Once it finishes it will output the index html page Go to the link and make sure your doc changes looks good. Spell checking -============== +-------------- We use ``pyenchant`` and ``sphinxcontrib-spelling`` for running spell checker for documentation: @@ -261,18 +261,20 @@ To run spell checker on Linux box you should install it first: $ pip install sphinxcontrib-spelling +Changelog update +---------------- .. include:: ../CHANGES/README.rst Making a Pull Request -===================== +--------------------- After finishing all steps make a GitHub_ Pull Request with *master* base branch. Backporting -=========== +----------- All Pull Requests are created against *master* git branch. 
@@ -313,7 +315,7 @@ like *needs backport to 3.1*. merging the backport. How to become an aiohttp committer -================================== +---------------------------------- Contribute! From d4bef7a3e76cc0c7ecd5a2a6e48f1b0b58656e5b Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 30 Jan 2024 03:06:42 +0100 Subject: [PATCH 0083/1511] [PR #8107/854e6d87 backport][3.9] Partially revert "Add more information to contributing page (#7916)" (#8108) **This is a backport of PR #8107 as merged into master (854e6d8702c20e725b3e747ab48265ecf0184e20).** This partially reverts commit 822fbc7431f3c5522d3e587ad0b658bef8b6a0ab. In particular, this drops the top level title from README in the `CHANGES/` folder and restores the original label. For the proper Sphinx ToC structuring, a title in the `docs/contributing.rst` document. Co-authored-by: Sviatoslav Sydorenko --- CHANGES/README.rst | 22 +++++++--------------- docs/contributing.rst | 26 ++++++++++++++------------ 2 files changed, 21 insertions(+), 27 deletions(-) diff --git a/CHANGES/README.rst b/CHANGES/README.rst index 5beb8999226..37cd14d2cf8 100644 --- a/CHANGES/README.rst +++ b/CHANGES/README.rst @@ -1,15 +1,7 @@ -.. _Making a pull request: - -Making a pull request -===================== - -When making a pull request, please include a short summary of the changes -and a reference to any issue tickets that the PR is intended to solve. -All PRs with code changes should include tests. All changes should -include a changelog entry. +.. _Adding change notes with your PRs: Adding change notes with your PRs ---------------------------------- +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ It is very important to maintain a log for news of how updating to the new version of the software will affect @@ -27,7 +19,7 @@ to the end-users most of the time. And so such details should be recorded in the Git history rather than a changelog. Alright! So how to add a news fragment? 
---------------------------------------- +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ``aiohttp`` uses `towncrier `_ for changelog management. @@ -47,9 +39,9 @@ linking parts of the docs or external sites. However, you do not need to reference the issue or PR numbers here as *towncrier* will automatically add a reference to all of the affected issues when rendering the news file. -If you wish to sign your change, feel free to add -``-- by :user:`github-username``` at the end (replace -``github-username`` with your own!). +If you wish to sign your change, feel free to add ``-- by +:user:`github-username``` at the end (replace ``github-username`` +with your own!). Finally, name your file following the convention that Towncrier understands: it should start with the number of an issue or a @@ -88,7 +80,7 @@ necessary to make a separate documentation fragment for documentation changes accompanying the relevant code changes. Examples for adding changelog entries to your Pull Requests ------------------------------------------------------------ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File :file:`CHANGES/6045.doc.1.rst`: diff --git a/docs/contributing.rst b/docs/contributing.rst index 84d223d0e0b..6497212813d 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -1,12 +1,12 @@ .. _aiohttp-contributing: Contributing -************ +============ (:doc:`contributing-admins`) Instructions for contributors -============================= +----------------------------- In order to make a clone of the GitHub_ repo: open the link and press the "Fork" button on the upper-right menu of the web page. @@ -25,7 +25,7 @@ Workflow is pretty straightforward: 4. Make sure all tests passed - 5. Add a file into the ``CHANGES`` folder (see `Making a pull request`_ for how). + 5. Add a file into the ``CHANGES`` folder (see `Changelog update`_ for how). 6. 
Commit changes to your own aiohttp clone @@ -53,7 +53,7 @@ Workflow is pretty straightforward: Preconditions for running aiohttp test suite -============================================ +-------------------------------------------- We expect you to use a python virtual environment to run our tests. @@ -116,7 +116,7 @@ Congratulations, you are ready to run the test suite! Run autoformatter -================= +----------------- The project uses black_ + isort_ formatters to keep the source code style. Please run `make fmt` after every change before starting tests. @@ -127,7 +127,7 @@ Please run `make fmt` after every change before starting tests. Run aiohttp test suite -====================== +---------------------- After all the preconditions are met you can run tests typing the next command: @@ -159,7 +159,7 @@ Any extra texts (print statements and so on) should be removed. make test-3.10-no-extensions Code coverage -============= +------------- We use *codecov.io* as an indispensable tool for analyzing our coverage results. Visit https://codecov.io/gh/aio-libs/aiohttp to see coverage @@ -226,7 +226,7 @@ $ python -m webbrowser -n file://"$(pwd)"/htmlcov/index.html ``` Documentation -============= +------------- We encourage documentation improvements. @@ -242,7 +242,7 @@ Once it finishes it will output the index html page Go to the link and make sure your doc changes looks good. Spell checking -============== +-------------- We use ``pyenchant`` and ``sphinxcontrib-spelling`` for running spell checker for documentation: @@ -261,18 +261,20 @@ To run spell checker on Linux box you should install it first: $ pip install sphinxcontrib-spelling +Changelog update +---------------- .. include:: ../CHANGES/README.rst Making a Pull Request -===================== +--------------------- After finishing all steps make a GitHub_ Pull Request with *master* base branch. Backporting -=========== +----------- All Pull Requests are created against *master* git branch. 
@@ -313,7 +315,7 @@ like *needs backport to 3.1*. merging the backport. How to become an aiohttp committer -================================== +---------------------------------- Contribute! From 9f5537d24ab14ff7839fc5b76ab9b36c20cd3af3 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 30 Jan 2024 03:58:36 +0100 Subject: [PATCH 0084/1511] [PR #8110/2d8ffdf5 backport][3.10] Re-add a PR preparation intro from PR #7916 (#8112) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **This is a backport of PR #8110 as merged into master (2d8ffdf5bdab05b7bf8aa7fc9fc8c4c9eee1d57e).** None Co-authored-by: Sviatoslav Sydorenko (Святослав Сидоренко) --- docs/contributing.rst | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/docs/contributing.rst b/docs/contributing.rst index 6497212813d..9abd367a150 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -261,13 +261,22 @@ To run spell checker on Linux box you should install it first: $ pip install sphinxcontrib-spelling +Preparing a pull request +------------------------ + +When making a pull request, please include a short summary of the changes +and a reference to any issue tickets that the PR is intended to solve. +All PRs with code changes should include tests. All changes should +include a changelog entry. + + Changelog update ---------------- .. include:: ../CHANGES/README.rst -Making a Pull Request +Making a pull request --------------------- After finishing all steps make a GitHub_ Pull Request with *master* base branch. 
From efe3c8895ba44796e031fbb96784d6bc5807072a Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 30 Jan 2024 03:58:47 +0100 Subject: [PATCH 0085/1511] [PR #8110/2d8ffdf5 backport][3.9] Re-add a PR preparation intro from PR #7916 (#8111) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **This is a backport of PR #8110 as merged into master (2d8ffdf5bdab05b7bf8aa7fc9fc8c4c9eee1d57e).** None Co-authored-by: Sviatoslav Sydorenko (Святослав Сидоренко) --- docs/contributing.rst | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/docs/contributing.rst b/docs/contributing.rst index 6497212813d..9abd367a150 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -261,13 +261,22 @@ To run spell checker on Linux box you should install it first: $ pip install sphinxcontrib-spelling +Preparing a pull request +------------------------ + +When making a pull request, please include a short summary of the changes +and a reference to any issue tickets that the PR is intended to solve. +All PRs with code changes should include tests. All changes should +include a changelog entry. + + Changelog update ---------------- .. include:: ../CHANGES/README.rst -Making a Pull Request +Making a pull request --------------------- After finishing all steps make a GitHub_ Pull Request with *master* base branch. 
From 36b6c53ede51c8ce73572b4fe458a96a59aea127 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 31 Jan 2024 02:45:29 +0100 Subject: [PATCH 0086/1511] =?UTF-8?q?[PR=20#8113/8b33fe6f=20backport][3.10?= =?UTF-8?q?]=20=F0=9F=93=9D=20Make=20the=20changelog=20examples=20elaborat?= =?UTF-8?q?e=20(#8115)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **This is a backport of PR #8113 as merged into master (8b33fe6f3cfc0ecf8747586f1cf47542d0a6039b).** This patch shows the use of sentences with full stops and it also includes an excerpt with a paragraph, which demonstrates that it is possible to include details extending shorter sentences. Co-authored-by: Sviatoslav Sydorenko (Святослав Сидоренко) --- CHANGES/README.rst | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/CHANGES/README.rst b/CHANGES/README.rst index 37cd14d2cf8..78d8b2f308f 100644 --- a/CHANGES/README.rst +++ b/CHANGES/README.rst @@ -86,21 +86,28 @@ File :file:`CHANGES/6045.doc.1.rst`: .. code-block:: rst - Added a ``:user:`` role to Sphinx config -- by :user:`webknjaz` + Added a ``:user:`` role to Sphinx config -- by :user:`webknjaz`. -File :file:`CHANGES/4431.bugfix.rst`: +File :file:`CHANGES/8074.bugfix.rst`: .. code-block:: rst - Fixed HTTP client requests to honor ``no_proxy`` environment - variables -- by :user:`scirelli` + Fixed an unhandled exception in the Python HTTP parser on header + lines starting with a colon -- by :user:`pajod`. + + Invalid request lines with anything but a dot between the HTTP + major and minor version are now rejected. Invalid header field + names containing question mark or slash are now rejected. Such + requests are incompatible with :rfc:`9110#section-5.6.2` and are + not known to be of any legitimate use. File :file:`CHANGES/4594.feature.rst`: .. 
code-block:: rst Added support for ``ETag`` to :py:class:`~aiohttp.web.FileResponse` - -- by :user:`greshilov`, :user:`serhiy-storchaka` and :user:`asvetlov` + -- by :user:`greshilov`, :user:`serhiy-storchaka` and + :user:`asvetlov`. .. tip:: From 36952e45fd942492d8ab74fc3b7d480e16ee21e6 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 31 Jan 2024 02:45:38 +0100 Subject: [PATCH 0087/1511] =?UTF-8?q?[PR=20#8113/8b33fe6f=20backport][3.9]?= =?UTF-8?q?=20=F0=9F=93=9D=20Make=20the=20changelog=20examples=20elaborate?= =?UTF-8?q?=20(#8114)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **This is a backport of PR #8113 as merged into master (8b33fe6f3cfc0ecf8747586f1cf47542d0a6039b).** This patch shows the use of sentences with full stops and it also includes an excerpt with a paragraph, which demonstrates that it is possible to include details extending shorter sentences. Co-authored-by: Sviatoslav Sydorenko (Святослав Сидоренко) --- CHANGES/README.rst | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/CHANGES/README.rst b/CHANGES/README.rst index 37cd14d2cf8..78d8b2f308f 100644 --- a/CHANGES/README.rst +++ b/CHANGES/README.rst @@ -86,21 +86,28 @@ File :file:`CHANGES/6045.doc.1.rst`: .. code-block:: rst - Added a ``:user:`` role to Sphinx config -- by :user:`webknjaz` + Added a ``:user:`` role to Sphinx config -- by :user:`webknjaz`. -File :file:`CHANGES/4431.bugfix.rst`: +File :file:`CHANGES/8074.bugfix.rst`: .. code-block:: rst - Fixed HTTP client requests to honor ``no_proxy`` environment - variables -- by :user:`scirelli` + Fixed an unhandled exception in the Python HTTP parser on header + lines starting with a colon -- by :user:`pajod`. + + Invalid request lines with anything but a dot between the HTTP + major and minor version are now rejected. Invalid header field + names containing question mark or slash are now rejected. 
Such + requests are incompatible with :rfc:`9110#section-5.6.2` and are + not known to be of any legitimate use. File :file:`CHANGES/4594.feature.rst`: .. code-block:: rst Added support for ``ETag`` to :py:class:`~aiohttp.web.FileResponse` - -- by :user:`greshilov`, :user:`serhiy-storchaka` and :user:`asvetlov` + -- by :user:`greshilov`, :user:`serhiy-storchaka` and + :user:`asvetlov`. .. tip:: From 814672551f7a8cfd06a42ef3353e9713cb28b0c6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 2 Feb 2024 10:36:23 +0000 Subject: [PATCH 0088/1511] Bump multidict from 6.0.4 to 6.0.5 (#8128) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [multidict](https://github.com/aio-libs/multidict) from 6.0.4 to 6.0.5.
Release notes

Sourced from multidict's releases.

6.0.5

Bug fixes

  • Upgraded the C-API macros that have been deprecated in Python 3.9 and later removed in 3.13 -- by @​iemelyanov💰.

    Related issues and pull requests on GitHub: #862, #864, #868, #898.

  • Reverted to using the public argument parsing API PyArg_ParseTupleAndKeywords() under Python 3.12 -- by @​charles-dyfis-net💰 and @​webknjaz💰.

    The effect is that this change prevents build failures with clang 16.9.6 and gcc-14 reported in #926. It also fixes a segmentation fault crash caused by passing keyword arguments to MultiDict.getall() discovered by @​jonaslb💰 and @​hroncok💰 while examining the problem.

    Related issues and pull requests on GitHub: #862, #909, #926, #929.

  • Fixed a SystemError: null argument to internal routine error on a MultiDict.items().isdisjoint() call when using C Extensions.

    Related issues and pull requests on GitHub: #927.

Improved documentation

  • On the Contributing docs page, a link to the Towncrier philosophy has been fixed.

    Related issues and pull requests on GitHub: #911.

Packaging updates and notes for downstreams

  • Stopped marking all files as installable package data -- by @​webknjaz💰.

    This change helps setuptools understand that C-headers are not to be installed under lib/python3.{x}/site-packages/.

    Related commits on GitHub: 31e1170.

... (truncated)

Changelog

Sourced from multidict's changelog.

6.0.5 (2024-02-01)

Bug fixes

  • Upgraded the C-API macros that have been deprecated in Python 3.9 and later removed in 3.13 -- by :user:iemelyanov.

    Related issues and pull requests on GitHub: :issue:862, :issue:864, :issue:868, :issue:898.

  • Reverted to using the public argument parsing API :c:func:PyArg_ParseTupleAndKeywords under Python 3.12 -- by :user:charles-dyfis-net and :user:webknjaz.

    The effect is that this change prevents build failures with clang 16.9.6 and gcc-14 reported in :issue:926. It also fixes a segmentation fault crash caused by passing keyword arguments to :py:meth:MultiDict.getall() <multidict.MultiDict.getall> discovered by :user:jonaslb and :user:hroncok while examining the problem.

    Related issues and pull requests on GitHub: :issue:862, :issue:909, :issue:926, :issue:929.

  • Fixed a SystemError: null argument to internal routine error on a MultiDict.items().isdisjoint() call when using C Extensions.

    Related issues and pull requests on GitHub: :issue:927.

Improved documentation

  • On the Contributing docs <https://github.com/aio-libs/multidict/blob/master/CHANGES/README.rst>_ page, a link to the Towncrier philosophy has been fixed.

    Related issues and pull requests on GitHub:

... (truncated)

Commits
  • a9b281b ⇪ 📦 Release v6.0.5
  • ed825c8 🧪 Download artifacts to dist/ @ release job
  • 7b04a64 🧪 Normalize issue refs @ release action
  • 74840e8 🧪 Pass Codecov token to reusable linters job
  • 41c133e 🧪 Bump Codecov action to v4
  • adb1976 📝 Fix return type @ Sphinx config
  • 99e435f 📝 Mention bylines in the changelog guidelines
  • 736169e 📝 Clarify need to only ref PR @ change note name
  • 887846f 📝 Highlight the RST term @ changelog guide
  • 8f57f8a 📝 Add a missing comma @ changelog guide
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=multidict&package-manager=pip&previous-version=6.0.4&new-version=6.0.5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/cython.txt | 2 +- requirements/dev.txt | 2 +- requirements/multidict.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 33495ca2ba3..e10f80a9cca 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -26,7 +26,7 @@ gunicorn==21.2.0 # via -r requirements/base.in idna==3.4 # via yarl -multidict==6.0.4 +multidict==6.0.5 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/constraints.txt b/requirements/constraints.txt index c9d48ea05d3..02ccf9390da 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -107,7 +107,7 @@ jinja2==3.0.3 # towncrier markupsafe==2.0.1 # via jinja2 -multidict==6.0.4 +multidict==6.0.5 # via # -r requirements/multidict.in # -r requirements/runtime-deps.in diff --git a/requirements/cython.txt b/requirements/cython.txt index f6b3175f34a..201da88c351 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -6,7 +6,7 @@ # cython==3.0.8 # via -r requirements/cython.in -multidict==6.0.4 +multidict==6.0.5 # via -r requirements/multidict.in typing-extensions==4.9.0 # via -r requirements/typing-extensions.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 945a21380d1..866b00bd9fa 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -103,7 +103,7 @@ jinja2==3.1.2 # towncrier markupsafe==2.1.3 # via jinja2 -multidict==6.0.4 +multidict==6.0.5 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/multidict.txt b/requirements/multidict.txt index 9c4f984cd75..915f9c24dcc 100644 --- a/requirements/multidict.txt +++ b/requirements/multidict.txt @@ -4,5 +4,5 @@ # # pip-compile --allow-unsafe 
--output-file=requirements/multidict.txt --resolver=backtracking --strip-extras requirements/multidict.in # -multidict==6.0.4 +multidict==6.0.5 # via -r requirements/multidict.in diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 2263f16bcfa..4a968058d61 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -24,7 +24,7 @@ frozenlist==1.4.1 # aiosignal idna==3.4 # via yarl -multidict==6.0.4 +multidict==6.0.5 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/test.txt b/requirements/test.txt index cc531b48df1..29021aecde1 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -53,7 +53,7 @@ idna==3.4 # yarl iniconfig==2.0.0 # via pytest -multidict==6.0.4 +multidict==6.0.5 # via # -r requirements/runtime-deps.in # yarl From 9726a679b4f1fe2247065f1a0f9b30dbd2495fda Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 2 Feb 2024 13:23:23 -0600 Subject: [PATCH 0089/1511] [PR #8127/b97d9a97 backport][3.10] Treat Accept-Encoding header as case-insensitive for gzip file check (#8131) Co-authored-by: Steve Repsher This is a backport of PR #8127 as merged into master (b97d9a9). --- CHANGES/8104.bugfix.rst | 1 + CONTRIBUTORS.txt | 1 + aiohttp/web_fileresponse.py | 6 +++++- aiohttp/web_response.py | 2 ++ tests/test_web_sendfile.py | 5 ++++- 5 files changed, 13 insertions(+), 2 deletions(-) create mode 100644 CHANGES/8104.bugfix.rst diff --git a/CHANGES/8104.bugfix.rst b/CHANGES/8104.bugfix.rst new file mode 100644 index 00000000000..1ebe6f06d9d --- /dev/null +++ b/CHANGES/8104.bugfix.rst @@ -0,0 +1 @@ +Treated values of ``Accept-Encoding`` header as case-insensitive when checking for gzip files -- by :user:`steverep`. 
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 475ec8604e2..8df68497dbe 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -299,6 +299,7 @@ Stepan Pletnev Stephan Jaensch Stephen Cirelli Stephen Granade +Steve Repsher Steven Seguin Sunghyun Hwang Sunit Deshpande diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index 6496ffaf317..7dbe50f0a5a 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -145,7 +145,11 @@ def _get_file_path_stat_and_gzip( async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: loop = asyncio.get_event_loop() - check_for_gzipped_file = "gzip" in request.headers.get(hdrs.ACCEPT_ENCODING, "") + # Encoding comparisons should be case-insensitive + # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1 + check_for_gzipped_file = ( + "gzip" in request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() + ) filepath, st, gzip = await loop.run_in_executor( None, self._get_file_path_stat_and_gzip, check_for_gzipped_file ) diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index b6a4ba9b31e..40d6f01ecaa 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -400,6 +400,8 @@ async def _start_compression(self, request: "BaseRequest") -> None: if self._compression_force: await self._do_start_compression(self._compression_force) else: + # Encoding comparisons should be case-insensitive + # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1 accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() for coding in ContentCoding: if coding.value in accept_encoding: diff --git a/tests/test_web_sendfile.py b/tests/test_web_sendfile.py index 2817e085a6f..d472c407b7a 100644 --- a/tests/test_web_sendfile.py +++ b/tests/test_web_sendfile.py @@ -8,7 +8,10 @@ def test_using_gzip_if_header_present_and_file_available(loop) -> None: request = make_mocked_request( - "GET", "http://python.org/logo.png", headers={hdrs.ACCEPT_ENCODING: "gzip"} + "GET", + 
"http://python.org/logo.png", + # Header uses some uppercase to ensure case-insensitive treatment + headers={hdrs.ACCEPT_ENCODING: "GZip"}, ) gz_filepath = mock.create_autospec(Path, spec_set=True) From a4e94ffc341d884ed0345d634b56984e1c460274 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 2 Feb 2024 13:23:38 -0600 Subject: [PATCH 0090/1511] [PR #8127/b97d9a97 backport][3.9] Treat Accept-Encoding header as case-insensitive for gzip file check (#8130) Co-authored-by: Steve Repsher This is a backport of PR #8127 as merged into master (b97d9a9). --- CHANGES/8104.bugfix.rst | 1 + CONTRIBUTORS.txt | 1 + aiohttp/web_fileresponse.py | 6 +++++- aiohttp/web_response.py | 2 ++ tests/test_web_sendfile.py | 5 ++++- 5 files changed, 13 insertions(+), 2 deletions(-) create mode 100644 CHANGES/8104.bugfix.rst diff --git a/CHANGES/8104.bugfix.rst b/CHANGES/8104.bugfix.rst new file mode 100644 index 00000000000..1ebe6f06d9d --- /dev/null +++ b/CHANGES/8104.bugfix.rst @@ -0,0 +1 @@ +Treated values of ``Accept-Encoding`` header as case-insensitive when checking for gzip files -- by :user:`steverep`. 
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 64b6f575f26..be4a3ad48d4 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -299,6 +299,7 @@ Stepan Pletnev Stephan Jaensch Stephen Cirelli Stephen Granade +Steve Repsher Steven Seguin Sunghyun Hwang Sunit Deshpande diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index 6496ffaf317..7dbe50f0a5a 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -145,7 +145,11 @@ def _get_file_path_stat_and_gzip( async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: loop = asyncio.get_event_loop() - check_for_gzipped_file = "gzip" in request.headers.get(hdrs.ACCEPT_ENCODING, "") + # Encoding comparisons should be case-insensitive + # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1 + check_for_gzipped_file = ( + "gzip" in request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() + ) filepath, st, gzip = await loop.run_in_executor( None, self._get_file_path_stat_and_gzip, check_for_gzipped_file ) diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index b6a4ba9b31e..40d6f01ecaa 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -400,6 +400,8 @@ async def _start_compression(self, request: "BaseRequest") -> None: if self._compression_force: await self._do_start_compression(self._compression_force) else: + # Encoding comparisons should be case-insensitive + # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1 accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() for coding in ContentCoding: if coding.value in accept_encoding: diff --git a/tests/test_web_sendfile.py b/tests/test_web_sendfile.py index 2817e085a6f..d472c407b7a 100644 --- a/tests/test_web_sendfile.py +++ b/tests/test_web_sendfile.py @@ -8,7 +8,10 @@ def test_using_gzip_if_header_present_and_file_available(loop) -> None: request = make_mocked_request( - "GET", "http://python.org/logo.png", headers={hdrs.ACCEPT_ENCODING: "gzip"} + "GET", + 
"http://python.org/logo.png", + # Header uses some uppercase to ensure case-insensitive treatment + headers={hdrs.ACCEPT_ENCODING: "GZip"}, ) gz_filepath = mock.create_autospec(Path, spec_set=True) From e56e9b9d9de9344bba572cf8e1627cc40c87938d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 3 Feb 2024 18:12:53 +0000 Subject: [PATCH 0091/1511] Bump pypa/cibuildwheel from 2.16.4 to 2.16.5 (#8120) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.16.4 to 2.16.5.
Release notes

Sourced from pypa/cibuildwheel's releases.

v2.16.5

  • 🐛 Fix an incompatibility with the GitHub Action and new GitHub Runner images for Windows that bundle Powershell 7.3+ (#1741)
  • 🛠 Preliminary support for new macos-14 arm64 runners (#1743)
Changelog

Sourced from pypa/cibuildwheel's changelog.

v2.16.5

30 January 2024

  • 🐛 Fix an incompatibility with the GitHub Action and new GitHub Runner images for Windows that bundle Powershell 7.3+ (#1741)
  • 🛠 Preliminary support for new macos-14 arm64 runners (#1743)
Commits
  • ce3fb78 Bump version: v2.16.5
  • 5b0b458 fix: download pipx for action, allow support for M1 (#1743)
  • a7ea5fb Merge pull request #1739 from henryiii/henryiii/chore/checkschemas
  • bc55e8b Merge pull request #1741 from jborean93/pwsh-7.4
  • c753cd2 Add support for PowerShell 7.4 in GHA
  • 07bd78c chore: check schemas
  • d7db575 docs: add keyvi as an example that combines cibuildwheel with the ccache acti...
  • 7154e18 [Bot] Update dependencies (#1738)
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pypa/cibuildwheel&package-manager=github_actions&previous-version=2.16.4&new-version=2.16.5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index e0680c88f25..f6647f7cd8b 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -369,7 +369,7 @@ jobs: run: | make cythonize - name: Build wheels - uses: pypa/cibuildwheel@v2.16.4 + uses: pypa/cibuildwheel@v2.16.5 env: CIBW_ARCHS_MACOS: x86_64 arm64 universal2 - uses: actions/upload-artifact@v3 From 257a7c4d031061ed28f429222fbb9f7bb4d74753 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Feb 2024 11:29:02 +0000 Subject: [PATCH 0092/1511] bump slotscheck from 0.17.1 to 0.17.3 (#8135) [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=slotscheck&package-manager=pip&previous-version=0.17.1&new-version=0.17.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 02ccf9390da..1e3fb385de6 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -193,7 +193,7 @@ six==1.16.0 # via # python-dateutil # virtualenv -slotscheck==0.17.1 +slotscheck==0.17.3 # via -r requirements/lint.in snowballstemmer==2.1.0 # via sphinx diff --git a/requirements/dev.txt b/requirements/dev.txt index 866b00bd9fa..d9197e86828 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -182,7 +182,7 @@ setuptools-git==1.2 # via -r requirements/test.in six==1.16.0 # via python-dateutil -slotscheck==0.17.1 +slotscheck==0.17.3 # via -r requirements/lint.in snowballstemmer==2.2.0 # via sphinx diff --git a/requirements/lint.txt b/requirements/lint.txt index 68000cbcc85..1976da1d1ba 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -40,7 +40,7 @@ pytest==7.4.4 # via -r requirements/lint.in pyyaml==6.0.1 # via pre-commit -slotscheck==0.17.1 +slotscheck==0.17.3 # via -r requirements/lint.in tomli==2.0.1 # via From 0467c9b43f094197ac2c2a871cbe2884e2ace04f Mon Sep 17 00:00:00 2001 From: "Paul J. 
Dorn" Date: Thu, 8 Feb 2024 00:09:07 +0000 Subject: [PATCH 0093/1511] Backport 3.10: Add tests, accidentally dropped before (#8088) (#8141) Cherry picked from commit 0016004f0e5b861d35afc56a9a59040769af3122 --- CHANGES/8088.contrib.rst | 1 + tests/test_http_parser.py | 55 ++++++++++++++++++++++++++++++++------- 2 files changed, 47 insertions(+), 9 deletions(-) create mode 100644 CHANGES/8088.contrib.rst diff --git a/CHANGES/8088.contrib.rst b/CHANGES/8088.contrib.rst new file mode 100644 index 00000000000..b3aec71bdf7 --- /dev/null +++ b/CHANGES/8088.contrib.rst @@ -0,0 +1 @@ +Enabled HTTP parser tests originally intended for 3.9.2 release -- by :user:`pajod`. diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index b931730529d..3c47231e389 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -283,9 +283,20 @@ def test_parse_headers_longline(parser: Any) -> None: parser.feed_data(text) +@pytest.fixture +def xfail_c_parser_status(request) -> None: + if isinstance(request.getfixturevalue("parser"), HttpRequestParserPy): + return + request.node.add_marker( + pytest.mark.xfail( + reason="Regression test for Py parser. May match C behaviour later.", + raises=http_exceptions.BadStatusLine, + ) + ) + + +@pytest.mark.usefixtures("xfail_c_parser_status") def test_parse_unusual_request_line(parser) -> None: - if not isinstance(response, HttpResponseParserPy): - pytest.xfail("Regression test for Py parser. 
May match C behaviour later.") text = b"#smol //a HTTP/1.3\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) assert len(messages) == 1 @@ -612,24 +623,37 @@ def test_headers_content_length_err_2(parser) -> None: } +@pytest.fixture +def xfail_c_parser_empty_header(request) -> None: + if not all( + (request.getfixturevalue(name) == b"") for name in ("pad1", "pad2", "hdr") + ): + return + if isinstance(request.getfixturevalue("parser"), HttpRequestParserPy): + return + request.node.add_marker( + pytest.mark.xfail( + reason="Regression test for Py parser. May match C behaviour later.", + ) + ) + + @pytest.mark.parametrize("hdr", [b"", b"foo"], ids=["name-empty", "with-name"]) @pytest.mark.parametrize("pad2", _pad.keys(), ids=["post-" + n for n in _pad.values()]) @pytest.mark.parametrize("pad1", _pad.keys(), ids=["pre-" + n for n in _pad.values()]) +@pytest.mark.usefixtures("xfail_c_parser_empty_header") def test_invalid_header_spacing(parser, pad1: bytes, pad2: bytes, hdr: bytes) -> None: text = b"GET /test HTTP/1.1\r\n" b"%s%s%s: value\r\n\r\n" % (pad1, hdr, pad2) expectation = pytest.raises(http_exceptions.BadHttpMessage) if pad1 == pad2 == b"" and hdr != b"": # one entry in param matrix is correct: non-empty name, not padded expectation = nullcontext() - if pad1 == pad2 == hdr == b"": - if not isinstance(response, HttpResponseParserPy): - pytest.xfail("Regression test for Py parser. May match C behaviour later.") with expectation: parser.feed_data(text) def test_empty_header_name(parser) -> None: - if not isinstance(response, HttpResponseParserPy): + if not isinstance(parser, HttpRequestParserPy): pytest.xfail("Regression test for Py parser. 
May match C behaviour later.") text = b"GET /test HTTP/1.1\r\n" b":test\r\n\r\n" with pytest.raises(http_exceptions.BadHttpMessage): @@ -807,9 +831,20 @@ def test_http_request_upgrade(parser: Any) -> None: assert tail == b"some raw data" +@pytest.fixture +def xfail_c_parser_url(request) -> None: + if isinstance(request.getfixturevalue("parser"), HttpRequestParserPy): + return + request.node.add_marker( + pytest.mark.xfail( + reason="Regression test for Py parser. May match C behaviour later.", + raises=http_exceptions.InvalidURLError, + ) + ) + + +@pytest.mark.usefixtures("xfail_c_parser_url") def test_http_request_parser_utf8_request_line(parser) -> None: - if not isinstance(response, HttpResponseParserPy): - pytest.xfail("Regression test for Py parser. May match C behaviour later.") messages, upgrade, tail = parser.feed_data( # note the truncated unicode sequence b"GET /P\xc3\xbcnktchen\xa0\xef\xb7 HTTP/1.1\r\n" + @@ -829,7 +864,9 @@ def test_http_request_parser_utf8_request_line(parser) -> None: assert msg.compression is None assert not msg.upgrade assert not msg.chunked - assert msg.url.path == URL("/P%C3%BCnktchen\udca0\udcef\udcb7").path + # python HTTP parser depends on Cython and CPython URL to match + # .. 
but yarl.URL("/abs") is not equal to URL.build(path="/abs"), see #6409 + assert msg.url == URL.build(path="/Pünktchen\udca0\udcef\udcb7", encoded=True) def test_http_request_parser_utf8(parser) -> None: From a7a72aefc39a83274ed250ea9f6601cbfdb6eebf Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 8 Feb 2024 18:58:08 +0000 Subject: [PATCH 0094/1511] [PR #8143/5df14cf7 backport][3.9] Add CODECOV_TOKEN (#8144) **This is a backport of PR #8143 as merged into master (5df14cf7ede67442e4934b28309674efbfaff2af).** Co-authored-by: Sam Bull --- .github/workflows/ci-cd.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index f6647f7cd8b..9a2b6f4c353 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -252,6 +252,7 @@ jobs: }},Py-${{ steps.python-install.outputs.python-version }} + token: ${{ secrets.CODECOV_TOKEN }} check: # This job does nothing and is only used for the branch protection if: always() From b3e5376a1921170b635e5b93dee53f46130f955d Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 8 Feb 2024 19:04:23 +0000 Subject: [PATCH 0095/1511] [PR #8143/5df14cf7 backport][3.10] Add CODECOV_TOKEN (#8145) **This is a backport of PR #8143 as merged into master (5df14cf7ede67442e4934b28309674efbfaff2af).** Co-authored-by: Sam Bull --- .github/workflows/ci-cd.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 95c8a985e78..8ee338a805c 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -252,6 +252,7 @@ jobs: }},Py-${{ steps.python-install.outputs.python-version }} + token: ${{ secrets.CODECOV_TOKEN }} check: # This job does nothing and is only used for the branch protection if: always() From 1dc8a072c14d817b4b64ccf4a1a72426cec15735 Mon Sep 17 00:00:00 2001 From: Alexander Macdonald 
Date: Thu, 8 Feb 2024 14:51:33 -0800 Subject: [PATCH 0096/1511] rename a shadowed test and re-enable F811 to catch future cases (#8139) (#8148) (cherry picked from commit 3c0f1eb29d3512419ea65e7cdeb61ba3f3496f00) --- CHANGES/8139.contrib.rst | 1 + setup.cfg | 2 +- tests/test_resolver.py | 26 -------------------------- tests/test_urldispatch.py | 16 +++++++++------- 4 files changed, 11 insertions(+), 34 deletions(-) create mode 100644 CHANGES/8139.contrib.rst diff --git a/CHANGES/8139.contrib.rst b/CHANGES/8139.contrib.rst new file mode 100644 index 00000000000..fd743e70f4a --- /dev/null +++ b/CHANGES/8139.contrib.rst @@ -0,0 +1 @@ +Two definitions for "test_invalid_route_name" existed, only one was being run. Refactored them into a single parameterized test. Enabled lint rule to prevent regression. -- by :user:`alexmac`. diff --git a/setup.cfg b/setup.cfg index 71dc26c9789..c291057ec7a 100644 --- a/setup.cfg +++ b/setup.cfg @@ -87,7 +87,7 @@ zip_ok = false [flake8] extend-select = B950 # TODO: don't disable D*, fix up issues instead -ignore = N801,N802,N803,E203,E226,E305,W504,E252,E301,E302,E501,E704,W503,W504,F811,D1,D4 +ignore = N801,N802,N803,E203,E226,E305,W504,E252,E301,E302,E501,E704,W503,W504,D1,D4 max-line-length = 88 per-file-ignores = # I900: Shouldn't appear in requirements for examples. diff --git a/tests/test_resolver.py b/tests/test_resolver.py index 6140e385cc1..1b389f3601b 100644 --- a/tests/test_resolver.py +++ b/tests/test_resolver.py @@ -179,33 +179,7 @@ async def unknown_addrinfo(*args: Any, **kwargs: Any) -> List[Any]: async def test_close_for_threaded_resolver(loop) -> None: - resolver = ThreadedResolver(loop=loop) - await resolver.close() - - -async def test_threaded_negative_lookup_with_unknown_result() -> None: - loop = Mock() - - # If compile CPython with `--disable-ipv6` option, - # we will get an (int, bytes) tuple, instead of a Exception. 
- async def unknown_addrinfo(*args: Any, **kwargs: Any) -> List[Any]: - return [ - ( - socket.AF_INET6, - socket.SOCK_STREAM, - 6, - "", - (10, b"\x01\xbb\x00\x00\x00\x00*\x04NB\x00\x1a\x00\x00"), - ) - ] - - loop.getaddrinfo = unknown_addrinfo resolver = ThreadedResolver() - resolver._loop = loop - with patch("socket.has_ipv6", False): - res = await resolver.resolve("www.python.org") - assert len(res) == 0 - await resolver.close() diff --git a/tests/test_urldispatch.py b/tests/test_urldispatch.py index 6a656104fd2..cbd6395e238 100644 --- a/tests/test_urldispatch.py +++ b/tests/test_urldispatch.py @@ -1130,14 +1130,16 @@ def test_subapp_iter(app) -> None: assert list(resource) == [r1, r2] -def test_invalid_route_name(router) -> None: - with pytest.raises(ValueError): - router.add_get("/", make_handler(), name="invalid name") - - -def test_invalid_route_name(router) -> None: +@pytest.mark.parametrize( + "route_name", + ( + "invalid name", + "class", + ), +) +def test_invalid_route_name(router, route_name: str) -> None: with pytest.raises(ValueError): - router.add_get("/", make_handler(), name="class") # identifier + router.add_get("/", make_handler(), name=route_name) def test_frozen_router(router) -> None: From 2a5dc570d8ef27f8864dfdad04a46bb54bacdb7a Mon Sep 17 00:00:00 2001 From: Alexander Macdonald Date: Thu, 8 Feb 2024 17:38:38 -0800 Subject: [PATCH 0097/1511] rename a shadowed test and re-enable F811 to catch future cases (#8139) (#8147) (cherry picked from commit 3c0f1eb29d3512419ea65e7cdeb61ba3f3496f00) --- CHANGES/8139.contrib.rst | 1 + setup.cfg | 2 +- tests/test_resolver.py | 26 -------------------------- tests/test_urldispatch.py | 16 +++++++++------- 4 files changed, 11 insertions(+), 34 deletions(-) create mode 100644 CHANGES/8139.contrib.rst diff --git a/CHANGES/8139.contrib.rst b/CHANGES/8139.contrib.rst new file mode 100644 index 00000000000..fd743e70f4a --- /dev/null +++ b/CHANGES/8139.contrib.rst @@ -0,0 +1 @@ +Two definitions for 
"test_invalid_route_name" existed, only one was being run. Refactored them into a single parameterized test. Enabled lint rule to prevent regression. -- by :user:`alexmac`. diff --git a/setup.cfg b/setup.cfg index c0515be8eeb..83da3961014 100644 --- a/setup.cfg +++ b/setup.cfg @@ -86,7 +86,7 @@ zip_ok = false [flake8] extend-select = B950 # TODO: don't disable D*, fix up issues instead -ignore = N801,N802,N803,E203,E226,E305,W504,E252,E301,E302,E501,E704,W503,W504,F811,D1,D4 +ignore = N801,N802,N803,E203,E226,E305,W504,E252,E301,E302,E501,E704,W503,W504,D1,D4 max-line-length = 88 per-file-ignores = # I900: Shouldn't appear in requirements for examples. diff --git a/tests/test_resolver.py b/tests/test_resolver.py index 6140e385cc1..1b389f3601b 100644 --- a/tests/test_resolver.py +++ b/tests/test_resolver.py @@ -179,33 +179,7 @@ async def unknown_addrinfo(*args: Any, **kwargs: Any) -> List[Any]: async def test_close_for_threaded_resolver(loop) -> None: - resolver = ThreadedResolver(loop=loop) - await resolver.close() - - -async def test_threaded_negative_lookup_with_unknown_result() -> None: - loop = Mock() - - # If compile CPython with `--disable-ipv6` option, - # we will get an (int, bytes) tuple, instead of a Exception. 
- async def unknown_addrinfo(*args: Any, **kwargs: Any) -> List[Any]: - return [ - ( - socket.AF_INET6, - socket.SOCK_STREAM, - 6, - "", - (10, b"\x01\xbb\x00\x00\x00\x00*\x04NB\x00\x1a\x00\x00"), - ) - ] - - loop.getaddrinfo = unknown_addrinfo resolver = ThreadedResolver() - resolver._loop = loop - with patch("socket.has_ipv6", False): - res = await resolver.resolve("www.python.org") - assert len(res) == 0 - await resolver.close() diff --git a/tests/test_urldispatch.py b/tests/test_urldispatch.py index 7c4941f9b3c..4f3abb8bcd7 100644 --- a/tests/test_urldispatch.py +++ b/tests/test_urldispatch.py @@ -1130,14 +1130,16 @@ def test_subapp_iter(app) -> None: assert list(resource) == [r1, r2] -def test_invalid_route_name(router) -> None: - with pytest.raises(ValueError): - router.add_get("/", make_handler(), name="invalid name") - - -def test_invalid_route_name(router) -> None: +@pytest.mark.parametrize( + "route_name", + ( + "invalid name", + "class", + ), +) +def test_invalid_route_name(router, route_name: str) -> None: with pytest.raises(ValueError): - router.add_get("/", make_handler(), name="class") # identifier + router.add_get("/", make_handler(), name=route_name) def test_frozen_router(router) -> None: From eb397f3cddcd784cd7b834e0876303b0876563f5 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 9 Feb 2024 01:44:52 +0000 Subject: [PATCH 0098/1511] [PR #8140/d2ea8118 backport][3.10] Use NPM clean install and upgrade node to v18 (#8150) **This is a backport of PR #8140 as merged into master (d2ea811853c2cb9305f7afe1a96265d31edb4f3b).** Co-authored-by: Steve Repsher --- .github/workflows/ci-cd.yml | 4 ++-- CHANGES/8116.contrib.rst | 1 + Makefile | 2 +- vendor/README.rst | 2 +- 4 files changed, 5 insertions(+), 4 deletions(-) create mode 100644 CHANGES/8116.contrib.rst diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 8ee338a805c..d78c4b56304 100644 --- 
a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -102,13 +102,13 @@ jobs: uses: actions/cache@v3.3.2 id: cache with: - key: llhttp-${{ hashFiles('vendor/llhttp/package.json', 'vendor/llhttp/src/**/*') }} + key: llhttp-${{ hashFiles('vendor/llhttp/package*.json', 'vendor/llhttp/src/**/*') }} path: vendor/llhttp/build - name: Setup NodeJS if: steps.cache.outputs.cache-hit != 'true' uses: actions/setup-node@v4 with: - node-version: '14' + node-version: 18 - name: Generate llhttp sources if: steps.cache.outputs.cache-hit != 'true' run: | diff --git a/CHANGES/8116.contrib.rst b/CHANGES/8116.contrib.rst new file mode 100644 index 00000000000..c1c7f2ca3bf --- /dev/null +++ b/CHANGES/8116.contrib.rst @@ -0,0 +1 @@ +Updated CI and documentation to use NPM clean install and upgrade node to version 18 -- by :user:`steverep`. diff --git a/Makefile b/Makefile index e3ec98c7ce8..bb2d437a134 100644 --- a/Makefile +++ b/Makefile @@ -61,7 +61,7 @@ aiohttp/%.c: aiohttp/%.pyx $(call to-hash,$(CYS)) aiohttp/_find_header.c cython -3 -o $@ $< -I aiohttp -Werror vendor/llhttp/node_modules: vendor/llhttp/package.json - cd vendor/llhttp; npm install + cd vendor/llhttp; npm ci .llhttp-gen: vendor/llhttp/node_modules $(MAKE) -C vendor/llhttp generate diff --git a/vendor/README.rst b/vendor/README.rst index 6156f37f80e..e653068b897 100644 --- a/vendor/README.rst +++ b/vendor/README.rst @@ -12,7 +12,7 @@ newer release, add ``--remote``):: Then build ``llhttp``:: cd vendor/llhttp/ - npm install + npm ci make Then build our parser:: From 9910f5f3be6f2464e0a6a9c24c0fec07bd36daf5 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 9 Feb 2024 01:53:16 +0000 Subject: [PATCH 0099/1511] [PR #8140/d2ea8118 backport][3.9] Use NPM clean install and upgrade node to v18 (#8149) **This is a backport of PR #8140 as merged into master (d2ea811853c2cb9305f7afe1a96265d31edb4f3b).** Co-authored-by: Steve Repsher --- 
.github/workflows/ci-cd.yml | 4 ++-- CHANGES/8116.contrib.rst | 1 + Makefile | 2 +- vendor/README.rst | 2 +- 4 files changed, 5 insertions(+), 4 deletions(-) create mode 100644 CHANGES/8116.contrib.rst diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 9a2b6f4c353..a0492bccd4a 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -102,13 +102,13 @@ jobs: uses: actions/cache@v4.0.0 id: cache with: - key: llhttp-${{ hashFiles('vendor/llhttp/package.json', 'vendor/llhttp/src/**/*') }} + key: llhttp-${{ hashFiles('vendor/llhttp/package*.json', 'vendor/llhttp/src/**/*') }} path: vendor/llhttp/build - name: Setup NodeJS if: steps.cache.outputs.cache-hit != 'true' uses: actions/setup-node@v4 with: - node-version: '14' + node-version: 18 - name: Generate llhttp sources if: steps.cache.outputs.cache-hit != 'true' run: | diff --git a/CHANGES/8116.contrib.rst b/CHANGES/8116.contrib.rst new file mode 100644 index 00000000000..c1c7f2ca3bf --- /dev/null +++ b/CHANGES/8116.contrib.rst @@ -0,0 +1 @@ +Updated CI and documentation to use NPM clean install and upgrade node to version 18 -- by :user:`steverep`. 
diff --git a/Makefile b/Makefile index e3ec98c7ce8..bb2d437a134 100644 --- a/Makefile +++ b/Makefile @@ -61,7 +61,7 @@ aiohttp/%.c: aiohttp/%.pyx $(call to-hash,$(CYS)) aiohttp/_find_header.c cython -3 -o $@ $< -I aiohttp -Werror vendor/llhttp/node_modules: vendor/llhttp/package.json - cd vendor/llhttp; npm install + cd vendor/llhttp; npm ci .llhttp-gen: vendor/llhttp/node_modules $(MAKE) -C vendor/llhttp generate diff --git a/vendor/README.rst b/vendor/README.rst index 6156f37f80e..e653068b897 100644 --- a/vendor/README.rst +++ b/vendor/README.rst @@ -12,7 +12,7 @@ newer release, add ``--remote``):: Then build ``llhttp``:: cd vendor/llhttp/ - npm install + npm ci make Then build our parser:: From 4682c1c049a2a39c9545490a810338bdcf98f336 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Fri, 9 Feb 2024 12:33:59 +0000 Subject: [PATCH 0100/1511] Upgrade to llhttp 9.2 (#8146) (#8151) (cherry picked from commit 0ec65c0f4dc08d027f659256b09ae9cff10ab404) --- CHANGES/8146.feature.rst | 1 + tests/test_http_parser.py | 18 ------------------ vendor/llhttp | 2 +- 3 files changed, 2 insertions(+), 19 deletions(-) create mode 100644 CHANGES/8146.feature.rst diff --git a/CHANGES/8146.feature.rst b/CHANGES/8146.feature.rst new file mode 100644 index 00000000000..9b0cc54206e --- /dev/null +++ b/CHANGES/8146.feature.rst @@ -0,0 +1 @@ +Upgraded *llhttp* to 9.2 -- by :user:`Dreamsorcerer`. diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index 3c47231e389..d306267c8bb 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -623,25 +623,9 @@ def test_headers_content_length_err_2(parser) -> None: } -@pytest.fixture -def xfail_c_parser_empty_header(request) -> None: - if not all( - (request.getfixturevalue(name) == b"") for name in ("pad1", "pad2", "hdr") - ): - return - if isinstance(request.getfixturevalue("parser"), HttpRequestParserPy): - return - request.node.add_marker( - pytest.mark.xfail( - reason="Regression test for Py parser. 
May match C behaviour later.", - ) - ) - - @pytest.mark.parametrize("hdr", [b"", b"foo"], ids=["name-empty", "with-name"]) @pytest.mark.parametrize("pad2", _pad.keys(), ids=["post-" + n for n in _pad.values()]) @pytest.mark.parametrize("pad1", _pad.keys(), ids=["pre-" + n for n in _pad.values()]) -@pytest.mark.usefixtures("xfail_c_parser_empty_header") def test_invalid_header_spacing(parser, pad1: bytes, pad2: bytes, hdr: bytes) -> None: text = b"GET /test HTTP/1.1\r\n" b"%s%s%s: value\r\n\r\n" % (pad1, hdr, pad2) expectation = pytest.raises(http_exceptions.BadHttpMessage) @@ -653,8 +637,6 @@ def test_invalid_header_spacing(parser, pad1: bytes, pad2: bytes, hdr: bytes) -> def test_empty_header_name(parser) -> None: - if not isinstance(parser, HttpRequestParserPy): - pytest.xfail("Regression test for Py parser. May match C behaviour later.") text = b"GET /test HTTP/1.1\r\n" b":test\r\n\r\n" with pytest.raises(http_exceptions.BadHttpMessage): parser.feed_data(text) diff --git a/vendor/llhttp b/vendor/llhttp index 9ab2afc85b2..533845688d1 160000 --- a/vendor/llhttp +++ b/vendor/llhttp @@ -1 +1 @@ -Subproject commit 9ab2afc85b2880d96a94d38afaee301c6a314049 +Subproject commit 533845688d173561b9cba33269130401add38567 From d00a32b4902ea67a425ba487b9b0f2eacb187a52 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Fri, 9 Feb 2024 12:34:10 +0000 Subject: [PATCH 0101/1511] Upgrade to llhttp 9.2 (#8146) (#8152) (cherry picked from commit 0ec65c0f4dc08d027f659256b09ae9cff10ab404) --- CHANGES/8146.feature.rst | 1 + tests/test_http_parser.py | 2 -- vendor/llhttp | 2 +- 3 files changed, 2 insertions(+), 3 deletions(-) create mode 100644 CHANGES/8146.feature.rst diff --git a/CHANGES/8146.feature.rst b/CHANGES/8146.feature.rst new file mode 100644 index 00000000000..9b0cc54206e --- /dev/null +++ b/CHANGES/8146.feature.rst @@ -0,0 +1 @@ +Upgraded *llhttp* to 9.2 -- by :user:`Dreamsorcerer`. 
diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index b931730529d..3fb0ab77d98 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -629,8 +629,6 @@ def test_invalid_header_spacing(parser, pad1: bytes, pad2: bytes, hdr: bytes) -> def test_empty_header_name(parser) -> None: - if not isinstance(response, HttpResponseParserPy): - pytest.xfail("Regression test for Py parser. May match C behaviour later.") text = b"GET /test HTTP/1.1\r\n" b":test\r\n\r\n" with pytest.raises(http_exceptions.BadHttpMessage): parser.feed_data(text) diff --git a/vendor/llhttp b/vendor/llhttp index 9ab2afc85b2..533845688d1 160000 --- a/vendor/llhttp +++ b/vendor/llhttp @@ -1 +1 @@ -Subproject commit 9ab2afc85b2880d96a94d38afaee301c6a314049 +Subproject commit 533845688d173561b9cba33269130401add38567 From 5e4f0b8cdbdfda71fa039fc6b55fb51cbc735c58 Mon Sep 17 00:00:00 2001 From: Steve Repsher Date: Tue, 13 Feb 2024 19:49:02 -0500 Subject: [PATCH 0102/1511] [3.10] Create hello.txt.gz dynamically and improve related assertions (#8136) (#8156) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Sviatoslav Sydorenko (Святослав Сидоренко) Co-authored-by: J. 
Nick Koston --- .gitattributes | 1 - CHANGES/8136.contrib.rst | 7 ++ tests/hello.txt.gz | Bin 44 -> 0 bytes tests/test_web_sendfile_functional.py | 90 ++++++++++++++++---------- 4 files changed, 63 insertions(+), 35 deletions(-) create mode 100644 CHANGES/8136.contrib.rst delete mode 100644 tests/hello.txt.gz diff --git a/.gitattributes b/.gitattributes index 1fdd659bbc9..3e8722104e7 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,3 +1,2 @@ tests/data.unknown_mime_type binary -tests/hello.txt.gz binary tests/sample.* binary diff --git a/CHANGES/8136.contrib.rst b/CHANGES/8136.contrib.rst new file mode 100644 index 00000000000..69718a4e0ab --- /dev/null +++ b/CHANGES/8136.contrib.rst @@ -0,0 +1,7 @@ +A pytest fixture ``hello_txt`` was introduced to aid +static file serving tests in +:file:`test_web_sendfile_functional.py`. It dynamically +provisions ``hello.txt`` file variants shared across the +tests in the module. + +-- by :user:`steverep` diff --git a/tests/hello.txt.gz b/tests/hello.txt.gz deleted file mode 100644 index 272fee26eb260f3b3c10b8c723e31841c5131253..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 44 zcmb2|=HR$ss2a+^oRON7ldo4&QNnQAOV diff --git a/tests/test_web_sendfile_functional.py b/tests/test_web_sendfile_functional.py index d67d67743ba..57ac0849efa 100644 --- a/tests/test_web_sendfile_functional.py +++ b/tests/test_web_sendfile_functional.py @@ -1,8 +1,9 @@ import asyncio +import gzip import pathlib import socket import zlib -from typing import Any, Iterable +from typing import Any, Iterable, Optional import pytest @@ -15,6 +16,24 @@ ssl = None # type: ignore +HELLO_AIOHTTP = b"Hello aiohttp! :-)\n" + + +@pytest.fixture(scope="module") +def hello_txt(request, tmp_path_factory) -> pathlib.Path: + """Create a temp path with hello.txt and compressed versions. + + The uncompressed text file path is returned by default. 
Alternatively, an + indirect parameter can be passed with an encoding to get a compressed path. + """ + txt = tmp_path_factory.mktemp("hello-") / "hello.txt" + hello = {None: txt, "gzip": txt.with_suffix(f"{txt.suffix}.gz")} + hello[None].write_bytes(HELLO_AIOHTTP) + hello["gzip"].write_bytes(gzip.compress(HELLO_AIOHTTP)) + encoding = getattr(request, "param", None) + return hello[encoding] + + @pytest.fixture def loop_without_sendfile(loop): def sendfile(*args, **kwargs): @@ -201,11 +220,14 @@ async def handler(request): await client.close() -async def test_static_file_custom_content_type(aiohttp_client, sender) -> None: - filepath = pathlib.Path(__file__).parent / "hello.txt.gz" +@pytest.mark.parametrize("hello_txt", ["gzip"], indirect=True) +async def test_static_file_custom_content_type( + hello_txt: pathlib.Path, aiohttp_client: Any, sender: Any +) -> None: + """Test that custom type without encoding is returned for encoded request.""" async def handler(request): - resp = sender(filepath, chunk_size=16) + resp = sender(hello_txt, chunk_size=16) resp.content_type = "application/pdf" return resp @@ -215,22 +237,21 @@ async def handler(request): resp = await client.get("/") assert resp.status == 200 - body = await resp.read() - with filepath.open("rb") as f: - content = f.read() - assert content == body - assert resp.headers["Content-Type"] == "application/pdf" assert resp.headers.get("Content-Encoding") is None + assert resp.headers["Content-Type"] == "application/pdf" + assert await resp.read() == hello_txt.read_bytes() resp.close() await resp.release() await client.close() -async def test_static_file_custom_content_type_compress(aiohttp_client, sender): - filepath = pathlib.Path(__file__).parent / "hello.txt" +async def test_static_file_custom_content_type_compress( + hello_txt: pathlib.Path, aiohttp_client: Any, sender: Any +): + """Test that custom type with encoding is returned for unencoded requests.""" async def handler(request): - resp = sender(filepath, 
chunk_size=16) + resp = sender(hello_txt, chunk_size=16) resp.content_type = "application/pdf" return resp @@ -240,24 +261,26 @@ async def handler(request): resp = await client.get("/") assert resp.status == 200 - body = await resp.read() - assert b"hello aiohttp\n" == body - assert resp.headers["Content-Type"] == "application/pdf" assert resp.headers.get("Content-Encoding") == "gzip" + assert resp.headers["Content-Type"] == "application/pdf" + assert await resp.read() == HELLO_AIOHTTP resp.close() await resp.release() await client.close() -async def test_static_file_with_gziped_counter_part_enable_compression( - aiohttp_client: Any, sender: Any +@pytest.mark.parametrize("forced_compression", [None, web.ContentCoding.gzip]) +async def test_static_file_with_encoding_and_enable_compression( + hello_txt: pathlib.Path, + aiohttp_client: Any, + sender: Any, + forced_compression: Optional[web.ContentCoding], ): - """Test that enable_compression does not double compress when a .gz file is also present.""" - filepath = pathlib.Path(__file__).parent / "hello.txt" + """Test that enable_compression does not double compress when an encoded file is also present.""" async def handler(request): - resp = sender(filepath) - resp.enable_compression() + resp = sender(hello_txt) + resp.enable_compression(forced_compression) return resp app = web.Application() @@ -266,35 +289,34 @@ async def handler(request): resp = await client.get("/") assert resp.status == 200 - body = await resp.read() - assert body == b"hello aiohttp\n" - assert resp.headers["Content-Type"] == "text/plain" assert resp.headers.get("Content-Encoding") == "gzip" + assert resp.headers["Content-Type"] == "text/plain" + assert await resp.read() == HELLO_AIOHTTP resp.close() await resp.release() await client.close() +@pytest.mark.parametrize( + ("hello_txt", "expect_encoding"), [["gzip"] * 2], indirect=["hello_txt"] +) async def test_static_file_with_content_encoding( - aiohttp_client: Any, sender: Any + hello_txt: 
pathlib.Path, aiohttp_client: Any, sender: Any, expect_encoding: str ) -> None: - filepath = pathlib.Path(__file__).parent / "hello.txt.gz" + """Test requesting static compressed files returns the correct content type and encoding.""" async def handler(request): - return sender(filepath) + return sender(hello_txt) app = web.Application() app.router.add_get("/", handler) client = await aiohttp_client(app) resp = await client.get("/") - assert 200 == resp.status - body = await resp.read() - assert b"hello aiohttp\n" == body - ct = resp.headers["CONTENT-TYPE"] - assert "text/plain" == ct - encoding = resp.headers["CONTENT-ENCODING"] - assert "gzip" == encoding + assert resp.status == 200 + assert resp.headers.get("Content-Encoding") == expect_encoding + assert resp.headers["Content-Type"] == "text/plain" + assert await resp.read() == HELLO_AIOHTTP resp.close() await resp.release() From e45da11779a1824b13f2879bb599d5d894d0e452 Mon Sep 17 00:00:00 2001 From: Steve Repsher Date: Tue, 13 Feb 2024 19:49:36 -0500 Subject: [PATCH 0103/1511] [3.9] Create hello.txt.gz dynamically and improve related assertions (#8136) (#8157) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Sviatoslav Sydorenko (Святослав Сидоренко) Co-authored-by: J. 
Nick Koston --- .gitattributes | 1 - CHANGES/8136.contrib.rst | 7 ++ tests/hello.txt.gz | Bin 44 -> 0 bytes tests/test_web_sendfile_functional.py | 90 ++++++++++++++++---------- 4 files changed, 63 insertions(+), 35 deletions(-) create mode 100644 CHANGES/8136.contrib.rst delete mode 100644 tests/hello.txt.gz diff --git a/.gitattributes b/.gitattributes index 1fdd659bbc9..3e8722104e7 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,3 +1,2 @@ tests/data.unknown_mime_type binary -tests/hello.txt.gz binary tests/sample.* binary diff --git a/CHANGES/8136.contrib.rst b/CHANGES/8136.contrib.rst new file mode 100644 index 00000000000..69718a4e0ab --- /dev/null +++ b/CHANGES/8136.contrib.rst @@ -0,0 +1,7 @@ +A pytest fixture ``hello_txt`` was introduced to aid +static file serving tests in +:file:`test_web_sendfile_functional.py`. It dynamically +provisions ``hello.txt`` file variants shared across the +tests in the module. + +-- by :user:`steverep` diff --git a/tests/hello.txt.gz b/tests/hello.txt.gz deleted file mode 100644 index 272fee26eb260f3b3c10b8c723e31841c5131253..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 44 zcmb2|=HR$ss2a+^oRON7ldo4&QNnQAOV diff --git a/tests/test_web_sendfile_functional.py b/tests/test_web_sendfile_functional.py index d67d67743ba..57ac0849efa 100644 --- a/tests/test_web_sendfile_functional.py +++ b/tests/test_web_sendfile_functional.py @@ -1,8 +1,9 @@ import asyncio +import gzip import pathlib import socket import zlib -from typing import Any, Iterable +from typing import Any, Iterable, Optional import pytest @@ -15,6 +16,24 @@ ssl = None # type: ignore +HELLO_AIOHTTP = b"Hello aiohttp! :-)\n" + + +@pytest.fixture(scope="module") +def hello_txt(request, tmp_path_factory) -> pathlib.Path: + """Create a temp path with hello.txt and compressed versions. + + The uncompressed text file path is returned by default. 
Alternatively, an + indirect parameter can be passed with an encoding to get a compressed path. + """ + txt = tmp_path_factory.mktemp("hello-") / "hello.txt" + hello = {None: txt, "gzip": txt.with_suffix(f"{txt.suffix}.gz")} + hello[None].write_bytes(HELLO_AIOHTTP) + hello["gzip"].write_bytes(gzip.compress(HELLO_AIOHTTP)) + encoding = getattr(request, "param", None) + return hello[encoding] + + @pytest.fixture def loop_without_sendfile(loop): def sendfile(*args, **kwargs): @@ -201,11 +220,14 @@ async def handler(request): await client.close() -async def test_static_file_custom_content_type(aiohttp_client, sender) -> None: - filepath = pathlib.Path(__file__).parent / "hello.txt.gz" +@pytest.mark.parametrize("hello_txt", ["gzip"], indirect=True) +async def test_static_file_custom_content_type( + hello_txt: pathlib.Path, aiohttp_client: Any, sender: Any +) -> None: + """Test that custom type without encoding is returned for encoded request.""" async def handler(request): - resp = sender(filepath, chunk_size=16) + resp = sender(hello_txt, chunk_size=16) resp.content_type = "application/pdf" return resp @@ -215,22 +237,21 @@ async def handler(request): resp = await client.get("/") assert resp.status == 200 - body = await resp.read() - with filepath.open("rb") as f: - content = f.read() - assert content == body - assert resp.headers["Content-Type"] == "application/pdf" assert resp.headers.get("Content-Encoding") is None + assert resp.headers["Content-Type"] == "application/pdf" + assert await resp.read() == hello_txt.read_bytes() resp.close() await resp.release() await client.close() -async def test_static_file_custom_content_type_compress(aiohttp_client, sender): - filepath = pathlib.Path(__file__).parent / "hello.txt" +async def test_static_file_custom_content_type_compress( + hello_txt: pathlib.Path, aiohttp_client: Any, sender: Any +): + """Test that custom type with encoding is returned for unencoded requests.""" async def handler(request): - resp = sender(filepath, 
chunk_size=16) + resp = sender(hello_txt, chunk_size=16) resp.content_type = "application/pdf" return resp @@ -240,24 +261,26 @@ async def handler(request): resp = await client.get("/") assert resp.status == 200 - body = await resp.read() - assert b"hello aiohttp\n" == body - assert resp.headers["Content-Type"] == "application/pdf" assert resp.headers.get("Content-Encoding") == "gzip" + assert resp.headers["Content-Type"] == "application/pdf" + assert await resp.read() == HELLO_AIOHTTP resp.close() await resp.release() await client.close() -async def test_static_file_with_gziped_counter_part_enable_compression( - aiohttp_client: Any, sender: Any +@pytest.mark.parametrize("forced_compression", [None, web.ContentCoding.gzip]) +async def test_static_file_with_encoding_and_enable_compression( + hello_txt: pathlib.Path, + aiohttp_client: Any, + sender: Any, + forced_compression: Optional[web.ContentCoding], ): - """Test that enable_compression does not double compress when a .gz file is also present.""" - filepath = pathlib.Path(__file__).parent / "hello.txt" + """Test that enable_compression does not double compress when an encoded file is also present.""" async def handler(request): - resp = sender(filepath) - resp.enable_compression() + resp = sender(hello_txt) + resp.enable_compression(forced_compression) return resp app = web.Application() @@ -266,35 +289,34 @@ async def handler(request): resp = await client.get("/") assert resp.status == 200 - body = await resp.read() - assert body == b"hello aiohttp\n" - assert resp.headers["Content-Type"] == "text/plain" assert resp.headers.get("Content-Encoding") == "gzip" + assert resp.headers["Content-Type"] == "text/plain" + assert await resp.read() == HELLO_AIOHTTP resp.close() await resp.release() await client.close() +@pytest.mark.parametrize( + ("hello_txt", "expect_encoding"), [["gzip"] * 2], indirect=["hello_txt"] +) async def test_static_file_with_content_encoding( - aiohttp_client: Any, sender: Any + hello_txt: 
pathlib.Path, aiohttp_client: Any, sender: Any, expect_encoding: str ) -> None: - filepath = pathlib.Path(__file__).parent / "hello.txt.gz" + """Test requesting static compressed files returns the correct content type and encoding.""" async def handler(request): - return sender(filepath) + return sender(hello_txt) app = web.Application() app.router.add_get("/", handler) client = await aiohttp_client(app) resp = await client.get("/") - assert 200 == resp.status - body = await resp.read() - assert b"hello aiohttp\n" == body - ct = resp.headers["CONTENT-TYPE"] - assert "text/plain" == ct - encoding = resp.headers["CONTENT-ENCODING"] - assert "gzip" == encoding + assert resp.status == 200 + assert resp.headers.get("Content-Encoding") == expect_encoding + assert resp.headers["Content-Type"] == "text/plain" + assert await resp.read() == HELLO_AIOHTTP resp.close() await resp.release() From cda4a8b79e63d07474a9caf78ceb970350f0e09b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=81ukasz=20Setla?= <38295919+setla@users.noreply.github.com> Date: Wed, 14 Feb 2024 13:18:15 +0100 Subject: [PATCH 0104/1511] [PR #6722/fb465e15 backport][3.10] Implement granular URL error hierarchy in the HTTP client (#8158) **This is a backport of PR #6722 as merged into master (fb465e155b872f01489173d11e35f02ccbf3a940).** This patch introduces 5 granular user-facing exceptions that may occur when HTTP requests are made: * `InvalidUrlClientError` * `RedirectClientError` * `NonHttpUrlClientError` * `InvalidUrlRedirectClientError` * `NonHttpUrlRedirectClientError` Previously `ValueError` or `InvalidURL` was raised and screening out was complicated (a valid URL that redirects to invalid one raised the same error as an invalid URL). 
Ref: https://github.com/aio-libs/aiohttp/pull/6722#discussion_r1477103562 PR #6722 Resolves #2507 Resolves #2630 Resolves #3315 Co-authored-by: Sviatoslav Sydorenko (cherry picked from commit fb465e155b872f01489173d11e35f02ccbf3a940) --- CHANGES/2507.feature.rst | 1 + CHANGES/3315.feature.rst | 1 + CHANGES/6722.feature | 12 +++ CONTRIBUTORS.txt | 1 + aiohttp/__init__.py | 10 +++ aiohttp/client.py | 53 +++++++++--- aiohttp/client_exceptions.py | 54 +++++++++++-- docs/client_reference.rst | 49 ++++++++++++ tests/test_client_exceptions.py | 25 +++++- tests/test_client_functional.py | 137 +++++++++++++++++++++++++++++++- 10 files changed, 321 insertions(+), 22 deletions(-) create mode 120000 CHANGES/2507.feature.rst create mode 120000 CHANGES/3315.feature.rst create mode 100644 CHANGES/6722.feature diff --git a/CHANGES/2507.feature.rst b/CHANGES/2507.feature.rst new file mode 120000 index 00000000000..f569cd92882 --- /dev/null +++ b/CHANGES/2507.feature.rst @@ -0,0 +1 @@ +6722.feature \ No newline at end of file diff --git a/CHANGES/3315.feature.rst b/CHANGES/3315.feature.rst new file mode 120000 index 00000000000..f569cd92882 --- /dev/null +++ b/CHANGES/3315.feature.rst @@ -0,0 +1 @@ +6722.feature \ No newline at end of file diff --git a/CHANGES/6722.feature b/CHANGES/6722.feature new file mode 100644 index 00000000000..1dd253a0997 --- /dev/null +++ b/CHANGES/6722.feature @@ -0,0 +1,12 @@ +Added 5 new exceptions: :py:exc:`~aiohttp.InvalidUrlClientError`, :py:exc:`~aiohttp.RedirectClientError`, +:py:exc:`~aiohttp.NonHttpUrlClientError`, :py:exc:`~aiohttp.InvalidUrlRedirectClientError`, +:py:exc:`~aiohttp.NonHttpUrlRedirectClientError` + +:py:exc:`~aiohttp.InvalidUrlRedirectClientError`, :py:exc:`~aiohttp.NonHttpUrlRedirectClientError` +are raised instead of :py:exc:`ValueError` or :py:exc:`~aiohttp.InvalidURL` when the redirect URL is invalid. 
Classes +:py:exc:`~aiohttp.InvalidUrlClientError`, :py:exc:`~aiohttp.RedirectClientError`, +:py:exc:`~aiohttp.NonHttpUrlClientError` are base for them. + +The :py:exc:`~aiohttp.InvalidURL` now exposes a ``description`` property with the text explanation of the error details. + +-- by :user:`setla` diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 8df68497dbe..c7e18d955e5 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -366,5 +366,6 @@ Yuvi Panda Zainab Lawal Zeal Wierslee Zlatan Sičanica +Łukasz Setla Марк Коренберг Семён Марьясин diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 3f8b2728863..5064b043006 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -25,7 +25,12 @@ ContentTypeError, Fingerprint, InvalidURL, + InvalidUrlClientError, + InvalidUrlRedirectClientError, NamedPipeConnector, + NonHttpUrlClientError, + NonHttpUrlRedirectClientError, + RedirectClientError, RequestInfo, ServerConnectionError, ServerDisconnectedError, @@ -137,6 +142,11 @@ "ContentTypeError", "Fingerprint", "InvalidURL", + "InvalidUrlClientError", + "InvalidUrlRedirectClientError", + "NonHttpUrlClientError", + "NonHttpUrlRedirectClientError", + "RedirectClientError", "RequestInfo", "ServerConnectionError", "ServerDisconnectedError", diff --git a/aiohttp/client.py b/aiohttp/client.py index 36dbf6a7119..8d8d13f25f7 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -52,6 +52,11 @@ ConnectionTimeoutError, ContentTypeError, InvalidURL, + InvalidUrlClientError, + InvalidUrlRedirectClientError, + NonHttpUrlClientError, + NonHttpUrlRedirectClientError, + RedirectClientError, ServerConnectionError, ServerDisconnectedError, ServerFingerprintMismatch, @@ -109,6 +114,11 @@ "ConnectionTimeoutError", "ContentTypeError", "InvalidURL", + "InvalidUrlClientError", + "RedirectClientError", + "NonHttpUrlClientError", + "InvalidUrlRedirectClientError", + "NonHttpUrlRedirectClientError", "ServerConnectionError", "ServerDisconnectedError", "ServerFingerprintMismatch", 
@@ -168,6 +178,7 @@ class ClientTimeout: # https://www.rfc-editor.org/rfc/rfc9110#section-9.2.2 IDEMPOTENT_METHODS = frozenset({"GET", "HEAD", "OPTIONS", "TRACE", "PUT", "DELETE"}) +HTTP_SCHEMA_SET = frozenset({"http", "https", ""}) _RetType = TypeVar("_RetType") _CharsetResolver = Callable[[ClientResponse, bytes], str] @@ -455,7 +466,10 @@ async def _request( try: url = self._build_url(str_or_url) except ValueError as e: - raise InvalidURL(str_or_url) from e + raise InvalidUrlClientError(str_or_url) from e + + if url.scheme not in HTTP_SCHEMA_SET: + raise NonHttpUrlClientError(url) skip_headers = set(self._skip_auto_headers) if skip_auto_headers is not None: @@ -513,6 +527,15 @@ async def _request( retry_persistent_connection = method in IDEMPOTENT_METHODS while True: url, auth_from_url = strip_auth_from_url(url) + if not url.raw_host: + # NOTE: Bail early, otherwise, causes `InvalidURL` through + # NOTE: `self._request_class()` below. + err_exc_cls = ( + InvalidUrlRedirectClientError + if redirects + else InvalidUrlClientError + ) + raise err_exc_cls(url) if auth and auth_from_url: raise ValueError( "Cannot combine AUTH argument with " @@ -670,25 +693,35 @@ async def _request( resp.release() try: - parsed_url = URL( + parsed_redirect_url = URL( r_url, encoded=not self._requote_redirect_url ) - except ValueError as e: - raise InvalidURL(r_url) from e + raise InvalidUrlRedirectClientError( + r_url, + "Server attempted redirecting to a location that does not look like a URL", + ) from e - scheme = parsed_url.scheme - if scheme not in ("http", "https", ""): + scheme = parsed_redirect_url.scheme + if scheme not in HTTP_SCHEMA_SET: resp.close() - raise ValueError("Can redirect only to http or https") + raise NonHttpUrlRedirectClientError(r_url) elif not scheme: - parsed_url = url.join(parsed_url) + parsed_redirect_url = url.join(parsed_redirect_url) - if url.origin() != parsed_url.origin(): + try: + redirect_origin = parsed_redirect_url.origin() + except ValueError as 
origin_val_err: + raise InvalidUrlRedirectClientError( + parsed_redirect_url, + "Invalid redirect URL origin", + ) from origin_val_err + + if url.origin() != redirect_origin: auth = None headers.pop(hdrs.AUTHORIZATION, None) - url = parsed_url + url = parsed_redirect_url params = {} resp.release() continue diff --git a/aiohttp/client_exceptions.py b/aiohttp/client_exceptions.py index 60bf058e887..f15a9ee3d3e 100644 --- a/aiohttp/client_exceptions.py +++ b/aiohttp/client_exceptions.py @@ -2,10 +2,10 @@ import asyncio import warnings -from typing import TYPE_CHECKING, Any, Optional, Tuple, Union +from typing import TYPE_CHECKING, Optional, Tuple, Union from .http_parser import RawResponseMessage -from .typedefs import LooseHeaders +from .typedefs import LooseHeaders, StrOrURL try: import ssl @@ -41,6 +41,11 @@ "ContentTypeError", "ClientPayloadError", "InvalidURL", + "InvalidUrlClientError", + "RedirectClientError", + "NonHttpUrlClientError", + "InvalidUrlRedirectClientError", + "NonHttpUrlRedirectClientError", ) @@ -281,17 +286,52 @@ class InvalidURL(ClientError, ValueError): # Derive from ValueError for backward compatibility - def __init__(self, url: Any) -> None: + def __init__(self, url: StrOrURL, description: Union[str, None] = None) -> None: # The type of url is not yarl.URL because the exception can be raised # on URL(url) call - super().__init__(url) + self._url = url + self._description = description + + if description: + super().__init__(url, description) + else: + super().__init__(url) + + @property + def url(self) -> StrOrURL: + return self._url @property - def url(self) -> Any: - return self.args[0] + def description(self) -> "str | None": + return self._description def __repr__(self) -> str: - return f"<{self.__class__.__name__} {self.url}>" + return f"<{self.__class__.__name__} {self}>" + + def __str__(self) -> str: + if self._description: + return f"{self._url} - {self._description}" + return str(self._url) + + +class 
InvalidUrlClientError(InvalidURL): + """Invalid URL client error.""" + + +class RedirectClientError(ClientError): + """Client redirect error.""" + + +class NonHttpUrlClientError(ClientError): + """Non http URL client error.""" + + +class InvalidUrlRedirectClientError(InvalidUrlClientError, RedirectClientError): + """Invalid URL redirect client error.""" + + +class NonHttpUrlRedirectClientError(NonHttpUrlClientError, RedirectClientError): + """Non http URL redirect client error.""" class ClientSSLError(ClientConnectorError): diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 93b3459ba7c..838aee0c7d6 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -2115,6 +2115,41 @@ All exceptions are available as members of *aiohttp* module. Invalid URL, :class:`yarl.URL` instance. + .. attribute:: description + + Invalid URL description, :class:`str` instance or :data:`None`. + +.. exception:: InvalidUrlClientError + + Base class for all errors related to client url. + + Derived from :exc:`InvalidURL` + +.. exception:: RedirectClientError + + Base class for all errors related to client redirects. + + Derived from :exc:`ClientError` + +.. exception:: NonHttpUrlClientError + + Base class for all errors related to non http client urls. + + Derived from :exc:`ClientError` + +.. exception:: InvalidUrlRedirectClientError + + Redirect URL is malformed, e.g. it does not contain host part. + + Derived from :exc:`InvalidUrlClientError` and :exc:`RedirectClientError` + +.. exception:: NonHttpUrlRedirectClientError + + Redirect URL does not contain http schema. + + Derived from :exc:`RedirectClientError` and :exc:`NonHttpUrlClientError` + + .. 
class:: ContentDisposition Represent Content-Disposition header @@ -2331,3 +2366,17 @@ Hierarchy of exceptions * :exc:`WSServerHandshakeError` * :exc:`InvalidURL` + + * :exc:`InvalidUrlClientError` + + * :exc:`InvalidUrlRedirectClientError` + + * :exc:`NonHttpUrlClientError` + + * :exc:`NonHttpUrlRedirectClientError` + + * :exc:`RedirectClientError` + + * :exc:`InvalidUrlRedirectClientError` + + * :exc:`NonHttpUrlRedirectClientError` diff --git a/tests/test_client_exceptions.py b/tests/test_client_exceptions.py index f70ba5d09a6..d863d6674a3 100644 --- a/tests/test_client_exceptions.py +++ b/tests/test_client_exceptions.py @@ -5,6 +5,7 @@ from unittest import mock import pytest +from yarl import URL from aiohttp import client, client_reqrep @@ -298,8 +299,9 @@ def test_repr(self) -> None: class TestInvalidURL: def test_ctor(self) -> None: - err = client.InvalidURL(url=":wrong:url:") + err = client.InvalidURL(url=":wrong:url:", description=":description:") assert err.url == ":wrong:url:" + assert err.description == ":description:" def test_pickle(self) -> None: err = client.InvalidURL(url=":wrong:url:") @@ -310,10 +312,27 @@ def test_pickle(self) -> None: assert err2.url == ":wrong:url:" assert err2.foo == "bar" - def test_repr(self) -> None: + def test_repr_no_description(self) -> None: err = client.InvalidURL(url=":wrong:url:") + assert err.args == (":wrong:url:",) assert repr(err) == "" - def test_str(self) -> None: + def test_repr_yarl_URL(self) -> None: + err = client.InvalidURL(url=URL(":wrong:url:")) + assert repr(err) == "" + + def test_repr_with_description(self) -> None: + err = client.InvalidURL(url=":wrong:url:", description=":description:") + assert repr(err) == "" + + def test_str_no_description(self) -> None: err = client.InvalidURL(url=":wrong:url:") assert str(err) == ":wrong:url:" + + def test_none_description(self) -> None: + err = client.InvalidURL(":wrong:url:") + assert err.description is None + + def test_str_with_description(self) -> None: + 
err = client.InvalidURL(url=":wrong:url:", description=":description:") + assert str(err) == ":wrong:url: - :description:" diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 654788afa72..4d804a31ddc 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -20,7 +20,14 @@ import aiohttp from aiohttp import Fingerprint, ServerFingerprintMismatch, hdrs, web from aiohttp.abc import AbstractResolver -from aiohttp.client_exceptions import SocketTimeoutError, TooManyRedirects +from aiohttp.client_exceptions import ( + InvalidUrlClientError, + InvalidUrlRedirectClientError, + NonHttpUrlClientError, + NonHttpUrlRedirectClientError, + SocketTimeoutError, + TooManyRedirects, +) from aiohttp.pytest_plugin import AiohttpClient, TestClient from aiohttp.test_utils import unused_port @@ -1121,7 +1128,7 @@ async def redirect(request): app.router.add_get("/redirect", redirect) client = await aiohttp_client(app) - with pytest.raises(ValueError): + with pytest.raises(NonHttpUrlRedirectClientError): await client.get("/redirect") @@ -2497,6 +2504,132 @@ async def handler_redirect(request): await client.post("/", chunked=1024) +INVALID_URL_WITH_ERROR_MESSAGE_YARL_NEW = ( + # yarl.URL.__new__ raises ValueError + ("http://:/", "http://:/"), + ("http://example.org:non_int_port/", "http://example.org:non_int_port/"), +) + +INVALID_URL_WITH_ERROR_MESSAGE_YARL_ORIGIN = ( + # # yarl.URL.origin raises ValueError + ("http:/", "http:///"), + ("http:/example.com", "http:///example.com"), + ("http:///example.com", "http:///example.com"), +) + +NON_HTTP_URL_WITH_ERROR_MESSAGE = ( + ("call:+380123456789", r"call:\+380123456789"), + ("skype:handle", "skype:handle"), + ("slack://instance/room", "slack://instance/room"), + ("steam:code", "steam:code"), + ("twitter://handle", "twitter://handle"), + ("bluesky://profile/d:i:d", "bluesky://profile/d:i:d"), +) + + +@pytest.mark.parametrize( + ("url", "error_message_url", 
"expected_exception_class"), + ( + *( + (url, message, InvalidUrlClientError) + for (url, message) in INVALID_URL_WITH_ERROR_MESSAGE_YARL_NEW + ), + *( + (url, message, InvalidUrlClientError) + for (url, message) in INVALID_URL_WITH_ERROR_MESSAGE_YARL_ORIGIN + ), + *( + (url, message, NonHttpUrlClientError) + for (url, message) in NON_HTTP_URL_WITH_ERROR_MESSAGE + ), + ), +) +async def test_invalid_and_non_http_url( + url: Any, error_message_url: Any, expected_exception_class: Any +) -> None: + async with aiohttp.ClientSession() as http_session: + with pytest.raises( + expected_exception_class, match=rf"^{error_message_url}( - [A-Za-z ]+)?" + ): + await http_session.get(url) + + +@pytest.mark.parametrize( + ("invalid_redirect_url", "error_message_url", "expected_exception_class"), + ( + *( + (url, message, InvalidUrlRedirectClientError) + for (url, message) in INVALID_URL_WITH_ERROR_MESSAGE_YARL_ORIGIN + + INVALID_URL_WITH_ERROR_MESSAGE_YARL_NEW + ), + *( + (url, message, NonHttpUrlRedirectClientError) + for (url, message) in NON_HTTP_URL_WITH_ERROR_MESSAGE + ), + ), +) +async def test_invalid_redirect_url( + aiohttp_client: Any, + invalid_redirect_url: Any, + error_message_url: str, + expected_exception_class: Any, +) -> None: + headers = {hdrs.LOCATION: invalid_redirect_url} + + async def generate_redirecting_response(request): + return web.Response(status=301, headers=headers) + + app = web.Application() + app.router.add_get("/redirect", generate_redirecting_response) + client = await aiohttp_client(app) + + with pytest.raises( + expected_exception_class, match=rf"^{error_message_url}( - [A-Za-z ]+)?" 
+ ): + await client.get("/redirect") + + +@pytest.mark.parametrize( + ("invalid_redirect_url", "error_message_url", "expected_exception_class"), + ( + *( + (url, message, InvalidUrlRedirectClientError) + for (url, message) in INVALID_URL_WITH_ERROR_MESSAGE_YARL_ORIGIN + + INVALID_URL_WITH_ERROR_MESSAGE_YARL_NEW + ), + *( + (url, message, NonHttpUrlRedirectClientError) + for (url, message) in NON_HTTP_URL_WITH_ERROR_MESSAGE + ), + ), +) +async def test_invalid_redirect_url_multiple_redirects( + aiohttp_client: Any, + invalid_redirect_url: Any, + error_message_url: str, + expected_exception_class: Any, +) -> None: + app = web.Application() + + for path, location in [ + ("/redirect", "/redirect1"), + ("/redirect1", "/redirect2"), + ("/redirect2", invalid_redirect_url), + ]: + + async def generate_redirecting_response(request): + return web.Response(status=301, headers={hdrs.LOCATION: location}) + + app.router.add_get(path, generate_redirecting_response) + + client = await aiohttp_client(app) + + with pytest.raises( + expected_exception_class, match=rf"^{error_message_url}( - [A-Za-z ]+)?" + ): + await client.get("/redirect") + + @pytest.mark.parametrize( ("status", "expected_ok"), ( From 6cb21d15cce9eb63d7b94be19e63b8061f4f8a05 Mon Sep 17 00:00:00 2001 From: Steve Repsher Date: Wed, 14 Feb 2024 09:02:12 -0500 Subject: [PATCH 0105/1511] [3.10] Add server capability to check for Brotli compressed static files (#8160) Currently server only checks if static routes have a `.gz` extension and serves them with `gzip` encoding. These changes do the same for `.br` files with `br` encoding. Brotli is prioritized over gzip if both exist and are supported by the client, as it should almost always be a smaller content length. I considered making a check for which is smaller if both exist, but figured it wouldn't be worth the extra file system call in the vast majority of cases (at least not for typical web formats). 
Users should simply use gzip if it's smaller than Brotli for any file. Resolves #8062 Co-authored-by: J. Nick Koston Co-authored-by: Sviatoslav Sydorenko (cherry picked from commit dfc92967d10eb83a8d726c02c3de90da15f8335f) --- CHANGES/8062.feature.rst | 1 + aiohttp/web_fileresponse.py | 57 ++++++++++++++++----------- aiohttp/web_response.py | 1 + docs/web_reference.rst | 5 ++- tests/test_web_sendfile.py | 8 ++-- tests/test_web_sendfile_functional.py | 40 +++++++++++++++---- 6 files changed, 76 insertions(+), 36 deletions(-) create mode 100644 CHANGES/8062.feature.rst diff --git a/CHANGES/8062.feature.rst b/CHANGES/8062.feature.rst new file mode 100644 index 00000000000..6e9814f09a0 --- /dev/null +++ b/CHANGES/8062.feature.rst @@ -0,0 +1 @@ +Added server capability to check for static files with Brotli compression via a ``.br`` extension -- by :user:`steverep`. diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index 7dbe50f0a5a..5b03bcc8350 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -2,6 +2,9 @@ import mimetypes import os import pathlib +import sys +from contextlib import suppress +from types import MappingProxyType from typing import ( # noqa IO, TYPE_CHECKING, @@ -40,6 +43,14 @@ NOSENDFILE: Final[bool] = bool(os.environ.get("AIOHTTP_NOSENDFILE")) +if sys.version_info < (3, 9): + mimetypes.encodings_map[".br"] = "br" + +# File extension to IANA encodings map that will be checked in the order defined. +ENCODING_EXTENSIONS = MappingProxyType( + {ext: mimetypes.encodings_map[ext] for ext in (".br", ".gz")} +) + class FileResponse(StreamResponse): """A response object can be used to send files.""" @@ -124,34 +135,36 @@ async def _precondition_failed( self.content_length = 0 return await super().prepare(request) - def _get_file_path_stat_and_gzip( - self, check_for_gzipped_file: bool - ) -> Tuple[pathlib.Path, os.stat_result, bool]: - """Return the file path, stat result, and gzip status. 
+ def _get_file_path_stat_encoding( + self, accept_encoding: str + ) -> Tuple[pathlib.Path, os.stat_result, Optional[str]]: + """Return the file path, stat result, and encoding. + + If an uncompressed file is returned, the encoding is set to + :py:data:`None`. This method should be called from a thread executor since it calls os.stat which may block. """ - filepath = self._path - if check_for_gzipped_file: - gzip_path = filepath.with_name(filepath.name + ".gz") - try: - return gzip_path, gzip_path.stat(), True - except OSError: - # Fall through and try the non-gzipped file - pass + file_path = self._path + for file_extension, file_encoding in ENCODING_EXTENSIONS.items(): + if file_encoding not in accept_encoding: + continue + + compressed_path = file_path.with_suffix(file_path.suffix + file_extension) + with suppress(OSError): + return compressed_path, compressed_path.stat(), file_encoding - return filepath, filepath.stat(), False + # Fallback to the uncompressed file + return file_path, file_path.stat(), None async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: loop = asyncio.get_event_loop() # Encoding comparisons should be case-insensitive # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1 - check_for_gzipped_file = ( - "gzip" in request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() - ) - filepath, st, gzip = await loop.run_in_executor( - None, self._get_file_path_stat_and_gzip, check_for_gzipped_file + accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() + file_path, st, file_encoding = await loop.run_in_executor( + None, self._get_file_path_stat_encoding, accept_encoding ) etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}" @@ -183,12 +196,12 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter return await self._not_modified(request, etag_value, last_modified) if hdrs.CONTENT_TYPE not in self.headers: - ct, encoding = mimetypes.guess_type(str(filepath)) + ct, encoding = 
mimetypes.guess_type(str(file_path)) if not ct: ct = "application/octet-stream" should_set_ct = True else: - encoding = "gzip" if gzip else None + encoding = file_encoding should_set_ct = False status = self._status @@ -269,7 +282,7 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter self.content_type = ct # type: ignore[assignment] if encoding: self.headers[hdrs.CONTENT_ENCODING] = encoding - if gzip: + if file_encoding: self.headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING # Disable compression if we are already sending # a compressed file since we don't want to double @@ -293,7 +306,7 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter if count == 0 or must_be_empty_body(request.method, self.status): return await super().prepare(request) - fobj = await loop.run_in_executor(None, filepath.open, "rb") + fobj = await loop.run_in_executor(None, file_path.open, "rb") if start: # be aware that start could be None or int=0 here. offset = start else: diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 40d6f01ecaa..07030305329 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -52,6 +52,7 @@ BaseClass = collections.abc.MutableMapping +# TODO(py311): Convert to StrEnum for wider use class ContentCoding(enum.Enum): # The content codings that we have support for. # diff --git a/docs/web_reference.rst b/docs/web_reference.rst index e0ebbae1851..05f8085842d 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -1846,8 +1846,9 @@ Application and Router system call even if the platform supports it. This can be accomplished by by setting environment variable ``AIOHTTP_NOSENDFILE=1``. - If a gzip version of the static content exists at file path + ``.gz``, it - will be used for the response. + If a Brotli or gzip compressed version of the static content exists at + the requested path with the ``.br`` or ``.gz`` extension, it will be used + for the response. 
Brotli will be preferred over gzip if both files exist. .. warning:: diff --git a/tests/test_web_sendfile.py b/tests/test_web_sendfile.py index d472c407b7a..ae4434e9ff6 100644 --- a/tests/test_web_sendfile.py +++ b/tests/test_web_sendfile.py @@ -20,7 +20,7 @@ def test_using_gzip_if_header_present_and_file_available(loop) -> None: filepath = mock.create_autospec(Path, spec_set=True) filepath.name = "logo.png" - filepath.with_name.return_value = gz_filepath + filepath.with_suffix.return_value = gz_filepath file_sender = FileResponse(filepath) file_sender._path = filepath @@ -41,7 +41,7 @@ def test_gzip_if_header_not_present_and_file_available(loop) -> None: filepath = mock.create_autospec(Path, spec_set=True) filepath.name = "logo.png" - filepath.with_name.return_value = gz_filepath + filepath.with_suffix.return_value = gz_filepath filepath.stat.return_value.st_size = 1024 filepath.stat.return_value.st_mtime_ns = 1603733507222449291 @@ -63,7 +63,7 @@ def test_gzip_if_header_not_present_and_file_not_available(loop) -> None: filepath = mock.create_autospec(Path, spec_set=True) filepath.name = "logo.png" - filepath.with_name.return_value = gz_filepath + filepath.with_suffix.return_value = gz_filepath filepath.stat.return_value.st_size = 1024 filepath.stat.return_value.st_mtime_ns = 1603733507222449291 @@ -87,7 +87,7 @@ def test_gzip_if_header_present_and_file_not_available(loop) -> None: filepath = mock.create_autospec(Path, spec_set=True) filepath.name = "logo.png" - filepath.with_name.return_value = gz_filepath + filepath.with_suffix.return_value = gz_filepath filepath.stat.return_value.st_size = 1024 filepath.stat.return_value.st_mtime_ns = 1603733507222449291 diff --git a/tests/test_web_sendfile_functional.py b/tests/test_web_sendfile_functional.py index 57ac0849efa..93645094ef7 100644 --- a/tests/test_web_sendfile_functional.py +++ b/tests/test_web_sendfile_functional.py @@ -10,6 +10,11 @@ import aiohttp from aiohttp import web +try: + import brotlicffi as brotli 
+except ImportError: + import brotli + try: import ssl except ImportError: @@ -27,9 +32,14 @@ def hello_txt(request, tmp_path_factory) -> pathlib.Path: indirect parameter can be passed with an encoding to get a compressed path. """ txt = tmp_path_factory.mktemp("hello-") / "hello.txt" - hello = {None: txt, "gzip": txt.with_suffix(f"{txt.suffix}.gz")} + hello = { + None: txt, + "gzip": txt.with_suffix(f"{txt.suffix}.gz"), + "br": txt.with_suffix(f"{txt.suffix}.br"), + } hello[None].write_bytes(HELLO_AIOHTTP) hello["gzip"].write_bytes(gzip.compress(HELLO_AIOHTTP)) + hello["br"].write_bytes(brotli.compress(HELLO_AIOHTTP)) encoding = getattr(request, "param", None) return hello[encoding] @@ -220,7 +230,7 @@ async def handler(request): await client.close() -@pytest.mark.parametrize("hello_txt", ["gzip"], indirect=True) +@pytest.mark.parametrize("hello_txt", ["gzip", "br"], indirect=True) async def test_static_file_custom_content_type( hello_txt: pathlib.Path, aiohttp_client: Any, sender: Any ) -> None: @@ -245,8 +255,16 @@ async def handler(request): await client.close() +@pytest.mark.parametrize( + ("accept_encoding", "expect_encoding"), + [("gzip, deflate", "gzip"), ("gzip, deflate, br", "br")], +) async def test_static_file_custom_content_type_compress( - hello_txt: pathlib.Path, aiohttp_client: Any, sender: Any + hello_txt: pathlib.Path, + aiohttp_client: Any, + sender: Any, + accept_encoding: str, + expect_encoding: str, ): """Test that custom type with encoding is returned for unencoded requests.""" @@ -259,9 +277,9 @@ async def handler(request): app.router.add_get("/", handler) client = await aiohttp_client(app) - resp = await client.get("/") + resp = await client.get("/", headers={"Accept-Encoding": accept_encoding}) assert resp.status == 200 - assert resp.headers.get("Content-Encoding") == "gzip" + assert resp.headers.get("Content-Encoding") == expect_encoding assert resp.headers["Content-Type"] == "application/pdf" assert await resp.read() == HELLO_AIOHTTP 
resp.close() @@ -269,11 +287,17 @@ async def handler(request): await client.close() +@pytest.mark.parametrize( + ("accept_encoding", "expect_encoding"), + [("gzip, deflate", "gzip"), ("gzip, deflate, br", "br")], +) @pytest.mark.parametrize("forced_compression", [None, web.ContentCoding.gzip]) async def test_static_file_with_encoding_and_enable_compression( hello_txt: pathlib.Path, aiohttp_client: Any, sender: Any, + accept_encoding: str, + expect_encoding: str, forced_compression: Optional[web.ContentCoding], ): """Test that enable_compression does not double compress when an encoded file is also present.""" @@ -287,9 +311,9 @@ async def handler(request): app.router.add_get("/", handler) client = await aiohttp_client(app) - resp = await client.get("/") + resp = await client.get("/", headers={"Accept-Encoding": accept_encoding}) assert resp.status == 200 - assert resp.headers.get("Content-Encoding") == "gzip" + assert resp.headers.get("Content-Encoding") == expect_encoding assert resp.headers["Content-Type"] == "text/plain" assert await resp.read() == HELLO_AIOHTTP resp.close() @@ -298,7 +322,7 @@ async def handler(request): @pytest.mark.parametrize( - ("hello_txt", "expect_encoding"), [["gzip"] * 2], indirect=["hello_txt"] + ("hello_txt", "expect_encoding"), [["gzip"] * 2, ["br"] * 2], indirect=["hello_txt"] ) async def test_static_file_with_content_encoding( hello_txt: pathlib.Path, aiohttp_client: Any, sender: Any, expect_encoding: str From 87e06976a25050117f38d6e9a64c4202d814f388 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sviatoslav=20Sydorenko=20=28=D0=A1=D0=B2=D1=8F=D1=82=D0=BE?= =?UTF-8?q?=D1=81=D0=BB=D0=B0=D0=B2=20=D0=A1=D0=B8=D0=B4=D0=BE=D1=80=D0=B5?= =?UTF-8?q?=D0=BD=D0=BA=D0=BE=29?= Date: Fri, 16 Feb 2024 05:42:37 +0100 Subject: [PATCH 0106/1511] =?UTF-8?q?[PR=20#8089/dc38630b=20backport][3.9]?= =?UTF-8?q?=20=F0=9F=92=85=20Propagate=20error=20causes=20via=20asyncio=20?= =?UTF-8?q?protocols=20(#8162)?= MIME-Version: 1.0 Content-Type: text/plain; 
charset=UTF-8 Content-Transfer-Encoding: 8bit **This is a backport of PR #8089 as merged into master (dc38630b168a169139974617d75e176530c91696).** This is supposed to unify setting exceptions on the future objects, allowing to also attach their causes whenever available. It'll make possible for the end-users to see more detailed tracebacks. It's also supposed to help with tracking down what's happening with #4581. PR #8089 Co-Authored-By: J. Nick Koston Co-Authored-By: Sam Bull (cherry picked from commit dc38630b168a169139974617d75e176530c91696) --- CHANGES/8089.bugfix.rst | 3 ++ aiohttp/_http_parser.pyx | 12 ++++--- aiohttp/base_protocol.py | 7 +++- aiohttp/client_proto.py | 66 ++++++++++++++++++++++++++---------- aiohttp/client_reqrep.py | 34 ++++++++++++------- aiohttp/helpers.py | 36 ++++++++++++++++++-- aiohttp/http_parser.py | 27 ++++++++++----- aiohttp/http_websocket.py | 4 +-- aiohttp/streams.py | 32 +++++++++++++---- aiohttp/web_protocol.py | 4 +-- aiohttp/web_request.py | 3 +- aiohttp/web_ws.py | 4 +-- tests/test_base_protocol.py | 4 +-- tests/test_client_request.py | 6 ++-- tests/test_http_parser.py | 1 + 15 files changed, 177 insertions(+), 66 deletions(-) create mode 100644 CHANGES/8089.bugfix.rst diff --git a/CHANGES/8089.bugfix.rst b/CHANGES/8089.bugfix.rst new file mode 100644 index 00000000000..7f47448478d --- /dev/null +++ b/CHANGES/8089.bugfix.rst @@ -0,0 +1,3 @@ +The asynchronous internals now set the underlying causes +when assigning exceptions to the future objects +-- by :user:`webknjaz`. 
diff --git a/aiohttp/_http_parser.pyx b/aiohttp/_http_parser.pyx index 3f28fbdab43..7ea9b32ca55 100644 --- a/aiohttp/_http_parser.pyx +++ b/aiohttp/_http_parser.pyx @@ -19,7 +19,7 @@ from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiD from yarl import URL as _URL from aiohttp import hdrs -from aiohttp.helpers import DEBUG +from aiohttp.helpers import DEBUG, set_exception from .http_exceptions import ( BadHttpMessage, @@ -763,11 +763,13 @@ cdef int cb_on_body(cparser.llhttp_t* parser, cdef bytes body = at[:length] try: pyparser._payload.feed_data(body, length) - except BaseException as exc: + except BaseException as underlying_exc: + reraised_exc = underlying_exc if pyparser._payload_exception is not None: - pyparser._payload.set_exception(pyparser._payload_exception(str(exc))) - else: - pyparser._payload.set_exception(exc) + reraised_exc = pyparser._payload_exception(str(underlying_exc)) + + set_exception(pyparser._payload, reraised_exc, underlying_exc) + pyparser._payload_error = 1 return -1 else: diff --git a/aiohttp/base_protocol.py b/aiohttp/base_protocol.py index 4c9f0a752e3..dc1f24f99cd 100644 --- a/aiohttp/base_protocol.py +++ b/aiohttp/base_protocol.py @@ -1,6 +1,7 @@ import asyncio from typing import Optional, cast +from .helpers import set_exception from .tcp_helpers import tcp_nodelay @@ -76,7 +77,11 @@ def connection_lost(self, exc: Optional[BaseException]) -> None: if exc is None: waiter.set_result(None) else: - waiter.set_exception(exc) + set_exception( + waiter, + ConnectionError("Connection lost"), + exc, + ) async def _drain_helper(self) -> None: if not self.connected: diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index ca99808080d..723f5aae5f4 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -9,8 +9,14 @@ ServerDisconnectedError, ServerTimeoutError, ) -from .helpers import BaseTimerContext, status_code_must_be_empty_body +from .helpers import ( + _EXC_SENTINEL, + BaseTimerContext, 
+ set_exception, + status_code_must_be_empty_body, +) from .http import HttpResponseParser, RawResponseMessage +from .http_exceptions import HttpProcessingError from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader @@ -73,28 +79,50 @@ def is_connected(self) -> bool: def connection_lost(self, exc: Optional[BaseException]) -> None: self._drop_timeout() + original_connection_error = exc + reraised_exc = original_connection_error + + connection_closed_cleanly = original_connection_error is None + if self._payload_parser is not None: - with suppress(Exception): + with suppress(Exception): # FIXME: log this somehow? self._payload_parser.feed_eof() uncompleted = None if self._parser is not None: try: uncompleted = self._parser.feed_eof() - except Exception as e: + except Exception as underlying_exc: if self._payload is not None: - exc = ClientPayloadError("Response payload is not completed") - exc.__cause__ = e - self._payload.set_exception(exc) + client_payload_exc_msg = ( + f"Response payload is not completed: {underlying_exc !r}" + ) + if not connection_closed_cleanly: + client_payload_exc_msg = ( + f"{client_payload_exc_msg !s}. 
" + f"{original_connection_error !r}" + ) + set_exception( + self._payload, + ClientPayloadError(client_payload_exc_msg), + underlying_exc, + ) if not self.is_eof(): - if isinstance(exc, OSError): - exc = ClientOSError(*exc.args) - if exc is None: - exc = ServerDisconnectedError(uncompleted) + if isinstance(original_connection_error, OSError): + reraised_exc = ClientOSError(*original_connection_error.args) + if connection_closed_cleanly: + reraised_exc = ServerDisconnectedError(uncompleted) # assigns self._should_close to True as side effect, # we do it anyway below - self.set_exception(exc) + underlying_non_eof_exc = ( + _EXC_SENTINEL + if connection_closed_cleanly + else original_connection_error + ) + assert underlying_non_eof_exc is not None + assert reraised_exc is not None + self.set_exception(reraised_exc, underlying_non_eof_exc) self._should_close = True self._parser = None @@ -102,7 +130,7 @@ def connection_lost(self, exc: Optional[BaseException]) -> None: self._payload_parser = None self._reading_paused = False - super().connection_lost(exc) + super().connection_lost(reraised_exc) def eof_received(self) -> None: # should call parser.feed_eof() most likely @@ -116,10 +144,14 @@ def resume_reading(self) -> None: super().resume_reading() self._reschedule_timeout() - def set_exception(self, exc: BaseException) -> None: + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, + ) -> None: self._should_close = True self._drop_timeout() - super().set_exception(exc) + super().set_exception(exc, exc_cause) def set_parser(self, parser: Any, payload: Any) -> None: # TODO: actual types are: @@ -196,7 +228,7 @@ def _on_read_timeout(self) -> None: exc = ServerTimeoutError("Timeout on reading data from socket") self.set_exception(exc) if self._payload is not None: - self._payload.set_exception(exc) + set_exception(self._payload, exc) def data_received(self, data: bytes) -> None: self._reschedule_timeout() @@ -222,14 +254,14 @@ def 
data_received(self, data: bytes) -> None: # parse http messages try: messages, upgraded, tail = self._parser.feed_data(data) - except BaseException as exc: + except BaseException as underlying_exc: if self.transport is not None: # connection.release() could be called BEFORE # data_received(), the transport is already # closed in this case self.transport.close() # should_close is True after the call - self.set_exception(exc) + self.set_exception(HttpProcessingError(), underlying_exc) return self._upgraded = upgraded diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index e0de951a33a..afe719da16e 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -50,6 +50,7 @@ netrc_from_env, noop, reify, + set_exception, set_result, ) from .http import ( @@ -630,20 +631,29 @@ async def write_bytes( for chunk in self.body: await writer.write(chunk) # type: ignore[arg-type] - except OSError as exc: - if exc.errno is None and isinstance(exc, asyncio.TimeoutError): - protocol.set_exception(exc) - else: - new_exc = ClientOSError( - exc.errno, "Can not write request body for %s" % self.url + except OSError as underlying_exc: + reraised_exc = underlying_exc + + exc_is_not_timeout = underlying_exc.errno is not None or not isinstance( + underlying_exc, asyncio.TimeoutError + ) + if exc_is_not_timeout: + reraised_exc = ClientOSError( + underlying_exc.errno, + f"Can not write request body for {self.url !s}", ) - new_exc.__context__ = exc - new_exc.__cause__ = exc - protocol.set_exception(new_exc) + + set_exception(protocol, reraised_exc, underlying_exc) except asyncio.CancelledError: await writer.write_eof() - except Exception as exc: - protocol.set_exception(exc) + except Exception as underlying_exc: + set_exception( + protocol, + ClientConnectionError( + f"Failed to send bytes into the underlying connection {conn !s}", + ), + underlying_exc, + ) else: await writer.write_eof() protocol.start_timeout() @@ -1086,7 +1096,7 @@ def _cleanup_writer(self) -> 
None: def _notify_content(self) -> None: content = self.content if content and content.exception() is None: - content.set_exception(ClientConnectionError("Connection closed")) + set_exception(content, ClientConnectionError("Connection closed")) self._released = True async def wait_for_close(self) -> None: diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index a5c762ed795..284033b7a04 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -810,9 +810,39 @@ def set_result(fut: "asyncio.Future[_T]", result: _T) -> None: fut.set_result(result) -def set_exception(fut: "asyncio.Future[_T]", exc: BaseException) -> None: - if not fut.done(): - fut.set_exception(exc) +_EXC_SENTINEL = BaseException() + + +class ErrorableProtocol(Protocol): + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = ..., + ) -> None: + ... # pragma: no cover + + +def set_exception( + fut: "asyncio.Future[_T] | ErrorableProtocol", + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, +) -> None: + """Set future exception. + + If the future is marked as complete, this function is a no-op. + + :param exc_cause: An exception that is a direct cause of ``exc``. + Only set if provided. 
+ """ + if asyncio.isfuture(fut) and fut.done(): + return + + exc_is_sentinel = exc_cause is _EXC_SENTINEL + exc_causes_itself = exc is exc_cause + if not exc_is_sentinel and not exc_causes_itself: + exc.__cause__ = exc_cause + + fut.set_exception(exc) @functools.total_ordering diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index 1877f558308..1301f025810 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -28,10 +28,12 @@ from .base_protocol import BaseProtocol from .compression_utils import HAS_BROTLI, BrotliDecompressor, ZLibDecompressor from .helpers import ( + _EXC_SENTINEL, DEBUG, NO_EXTENSIONS, BaseTimerContext, method_must_be_empty_body, + set_exception, status_code_must_be_empty_body, ) from .http_exceptions import ( @@ -446,13 +448,16 @@ def get_content_length() -> Optional[int]: assert self._payload_parser is not None try: eof, data = self._payload_parser.feed_data(data[start_pos:], SEP) - except BaseException as exc: + except BaseException as underlying_exc: + reraised_exc = underlying_exc if self.payload_exception is not None: - self._payload_parser.payload.set_exception( - self.payload_exception(str(exc)) - ) - else: - self._payload_parser.payload.set_exception(exc) + reraised_exc = self.payload_exception(str(underlying_exc)) + + set_exception( + self._payload_parser.payload, + reraised_exc, + underlying_exc, + ) eof = True data = b"" @@ -834,7 +839,7 @@ def feed_data( exc = TransferEncodingError( chunk[:pos].decode("ascii", "surrogateescape") ) - self.payload.set_exception(exc) + set_exception(self.payload, exc) raise exc size = int(bytes(size_b), 16) @@ -939,8 +944,12 @@ def __init__(self, out: StreamReader, encoding: Optional[str]) -> None: else: self.decompressor = ZLibDecompressor(encoding=encoding) - def set_exception(self, exc: BaseException) -> None: - self.out.set_exception(exc) + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, + ) -> None: + set_exception(self.out, 
exc, exc_cause) def feed_data(self, chunk: bytes, size: int) -> None: if not size: diff --git a/aiohttp/http_websocket.py b/aiohttp/http_websocket.py index b63453f99e5..39f2e4a5c15 100644 --- a/aiohttp/http_websocket.py +++ b/aiohttp/http_websocket.py @@ -25,7 +25,7 @@ from .base_protocol import BaseProtocol from .compression_utils import ZLibCompressor, ZLibDecompressor -from .helpers import NO_EXTENSIONS +from .helpers import NO_EXTENSIONS, set_exception from .streams import DataQueue __all__ = ( @@ -314,7 +314,7 @@ def feed_data(self, data: bytes) -> Tuple[bool, bytes]: return self._feed_data(data) except Exception as exc: self._exc = exc - self.queue.set_exception(exc) + set_exception(self.queue, exc) return True, b"" def _feed_data(self, data: bytes) -> Tuple[bool, bytes]: diff --git a/aiohttp/streams.py b/aiohttp/streams.py index 3e4c355b5cb..b9b9c3fd96f 100644 --- a/aiohttp/streams.py +++ b/aiohttp/streams.py @@ -14,7 +14,13 @@ ) from .base_protocol import BaseProtocol -from .helpers import BaseTimerContext, TimerNoop, set_exception, set_result +from .helpers import ( + _EXC_SENTINEL, + BaseTimerContext, + TimerNoop, + set_exception, + set_result, +) from .log import internal_logger __all__ = ( @@ -146,19 +152,23 @@ def get_read_buffer_limits(self) -> Tuple[int, int]: def exception(self) -> Optional[BaseException]: return self._exception - def set_exception(self, exc: BaseException) -> None: + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, + ) -> None: self._exception = exc self._eof_callbacks.clear() waiter = self._waiter if waiter is not None: self._waiter = None - set_exception(waiter, exc) + set_exception(waiter, exc, exc_cause) waiter = self._eof_waiter if waiter is not None: self._eof_waiter = None - set_exception(waiter, exc) + set_exception(waiter, exc, exc_cause) def on_eof(self, callback: Callable[[], None]) -> None: if self._eof: @@ -513,7 +523,11 @@ def __repr__(self) -> str: def exception(self) -> 
Optional[BaseException]: return None - def set_exception(self, exc: BaseException) -> None: + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, + ) -> None: pass def on_eof(self, callback: Callable[[], None]) -> None: @@ -588,14 +602,18 @@ def at_eof(self) -> bool: def exception(self) -> Optional[BaseException]: return self._exception - def set_exception(self, exc: BaseException) -> None: + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, + ) -> None: self._eof = True self._exception = exc waiter = self._waiter if waiter is not None: self._waiter = None - set_exception(waiter, exc) + set_exception(waiter, exc, exc_cause) def feed_data(self, data: _T, size: int = 0) -> None: self._size += size diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index ec5856a0a22..f083b13eb0f 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -26,7 +26,7 @@ from .abc import AbstractAccessLogger, AbstractStreamWriter from .base_protocol import BaseProtocol -from .helpers import ceil_timeout +from .helpers import ceil_timeout, set_exception from .http import ( HttpProcessingError, HttpRequestParser, @@ -565,7 +565,7 @@ async def start(self) -> None: self.log_debug("Uncompleted request.") self.close() - payload.set_exception(PayloadAccessError()) + set_exception(payload, PayloadAccessError()) except asyncio.CancelledError: self.log_debug("Ignored premature client disconnection ") diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index 61fc831b032..781713e5985 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -48,6 +48,7 @@ parse_http_date, reify, sentinel, + set_exception, ) from .http_parser import RawRequestMessage from .http_writer import HttpVersion @@ -814,7 +815,7 @@ async def _prepare_hook(self, response: StreamResponse) -> None: return def _cancel(self, exc: BaseException) -> None: - self._payload.set_exception(exc) + 
set_exception(self._payload, exc) class Request(BaseRequest): diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index 783377716f5..d20a26ca470 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -11,7 +11,7 @@ from . import hdrs from .abc import AbstractStreamWriter -from .helpers import call_later, set_result +from .helpers import call_later, set_exception, set_result from .http import ( WS_CLOSED_MESSAGE, WS_CLOSING_MESSAGE, @@ -526,4 +526,4 @@ async def __anext__(self) -> WSMessage: def _cancel(self, exc: BaseException) -> None: if self._reader is not None: - self._reader.set_exception(exc) + set_exception(self._reader, exc) diff --git a/tests/test_base_protocol.py b/tests/test_base_protocol.py index b26011095e9..72c8c7c6b63 100644 --- a/tests/test_base_protocol.py +++ b/tests/test_base_protocol.py @@ -186,9 +186,9 @@ async def test_lost_drain_waited_exception() -> None: assert pr._drain_waiter is not None exc = RuntimeError() pr.connection_lost(exc) - with pytest.raises(RuntimeError) as cm: + with pytest.raises(ConnectionError, match=r"^Connection lost$") as cm: await t - assert cm.value is exc + assert cm.value.__cause__ is exc assert pr._drain_waiter is None diff --git a/tests/test_client_request.py b/tests/test_client_request.py index c54e1828e34..6084f685405 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -14,6 +14,7 @@ import aiohttp from aiohttp import BaseConnector, hdrs, helpers, payload +from aiohttp.client_exceptions import ClientConnectionError from aiohttp.client_reqrep import ( ClientRequest, ClientResponse, @@ -1096,9 +1097,8 @@ async def throw_exc(): # assert connection.close.called assert conn.protocol.set_exception.called outer_exc = conn.protocol.set_exception.call_args[0][0] - assert isinstance(outer_exc, ValueError) - assert inner_exc is outer_exc - assert inner_exc is outer_exc + assert isinstance(outer_exc, ClientConnectionError) + assert outer_exc.__cause__ is inner_exc await req.close() diff --git 
a/tests/test_http_parser.py b/tests/test_http_parser.py index 3fb0ab77d98..a37a08632d7 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -280,6 +280,7 @@ def test_parse_headers_longline(parser: Any) -> None: header_name = b"Test" + invalid_unicode_byte + b"Header" + b"A" * 8192 text = b"GET /test HTTP/1.1\r\n" + header_name + b": test\r\n" + b"\r\n" + b"\r\n" with pytest.raises((http_exceptions.LineTooLong, http_exceptions.BadHttpMessage)): + # FIXME: `LineTooLong` doesn't seem to actually be happening parser.feed_data(text) From d4322e72f1eafc0a3c9513b966b1993fd73001dd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sviatoslav=20Sydorenko=20=28=D0=A1=D0=B2=D1=8F=D1=82=D0=BE?= =?UTF-8?q?=D1=81=D0=BB=D0=B0=D0=B2=20=D0=A1=D0=B8=D0=B4=D0=BE=D1=80=D0=B5?= =?UTF-8?q?=D0=BD=D0=BA=D0=BE=29?= Date: Fri, 16 Feb 2024 05:45:10 +0100 Subject: [PATCH 0107/1511] =?UTF-8?q?[PR=20#8089/dc38630b=20backport][3.10?= =?UTF-8?q?]=20=F0=9F=92=85=20Propagate=20error=20causes=20via=20asyncio?= =?UTF-8?q?=20protocols=20(#8161)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **This is a backport of PR #8089 as merged into master (dc38630b168a169139974617d75e176530c91696).** This is supposed to unify setting exceptions on the future objects, allowing to also attach their causes whenever available. It'll make possible for the end-users to see more detailed tracebacks. It's also supposed to help with tracking down what's happening with #4581. PR #8089 Co-Authored-By: J. 
Nick Koston Co-Authored-By: Sam Bull (cherry picked from commit dc38630b168a169139974617d75e176530c91696) --- CHANGES/8089.bugfix.rst | 3 ++ aiohttp/_http_parser.pyx | 12 ++++--- aiohttp/base_protocol.py | 7 +++- aiohttp/client_proto.py | 66 ++++++++++++++++++++++++++---------- aiohttp/client_reqrep.py | 34 ++++++++++++------- aiohttp/helpers.py | 36 ++++++++++++++++++-- aiohttp/http_parser.py | 27 ++++++++++----- aiohttp/http_websocket.py | 4 +-- aiohttp/streams.py | 32 +++++++++++++---- aiohttp/web_protocol.py | 4 +-- aiohttp/web_request.py | 3 +- aiohttp/web_ws.py | 4 +-- tests/test_base_protocol.py | 4 +-- tests/test_client_request.py | 6 ++-- tests/test_http_parser.py | 1 + 15 files changed, 177 insertions(+), 66 deletions(-) create mode 100644 CHANGES/8089.bugfix.rst diff --git a/CHANGES/8089.bugfix.rst b/CHANGES/8089.bugfix.rst new file mode 100644 index 00000000000..7f47448478d --- /dev/null +++ b/CHANGES/8089.bugfix.rst @@ -0,0 +1,3 @@ +The asynchronous internals now set the underlying causes +when assigning exceptions to the future objects +-- by :user:`webknjaz`. 
diff --git a/aiohttp/_http_parser.pyx b/aiohttp/_http_parser.pyx index 3f28fbdab43..7ea9b32ca55 100644 --- a/aiohttp/_http_parser.pyx +++ b/aiohttp/_http_parser.pyx @@ -19,7 +19,7 @@ from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiD from yarl import URL as _URL from aiohttp import hdrs -from aiohttp.helpers import DEBUG +from aiohttp.helpers import DEBUG, set_exception from .http_exceptions import ( BadHttpMessage, @@ -763,11 +763,13 @@ cdef int cb_on_body(cparser.llhttp_t* parser, cdef bytes body = at[:length] try: pyparser._payload.feed_data(body, length) - except BaseException as exc: + except BaseException as underlying_exc: + reraised_exc = underlying_exc if pyparser._payload_exception is not None: - pyparser._payload.set_exception(pyparser._payload_exception(str(exc))) - else: - pyparser._payload.set_exception(exc) + reraised_exc = pyparser._payload_exception(str(underlying_exc)) + + set_exception(pyparser._payload, reraised_exc, underlying_exc) + pyparser._payload_error = 1 return -1 else: diff --git a/aiohttp/base_protocol.py b/aiohttp/base_protocol.py index 4c9f0a752e3..dc1f24f99cd 100644 --- a/aiohttp/base_protocol.py +++ b/aiohttp/base_protocol.py @@ -1,6 +1,7 @@ import asyncio from typing import Optional, cast +from .helpers import set_exception from .tcp_helpers import tcp_nodelay @@ -76,7 +77,11 @@ def connection_lost(self, exc: Optional[BaseException]) -> None: if exc is None: waiter.set_result(None) else: - waiter.set_exception(exc) + set_exception( + waiter, + ConnectionError("Connection lost"), + exc, + ) async def _drain_helper(self) -> None: if not self.connected: diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index 1ab8acd27b0..28e9d3cd9e5 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -9,8 +9,14 @@ ServerDisconnectedError, SocketTimeoutError, ) -from .helpers import BaseTimerContext, status_code_must_be_empty_body +from .helpers import ( + _EXC_SENTINEL, + BaseTimerContext, 
+ set_exception, + status_code_must_be_empty_body, +) from .http import HttpResponseParser, RawResponseMessage +from .http_exceptions import HttpProcessingError from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader @@ -73,28 +79,50 @@ def is_connected(self) -> bool: def connection_lost(self, exc: Optional[BaseException]) -> None: self._drop_timeout() + original_connection_error = exc + reraised_exc = original_connection_error + + connection_closed_cleanly = original_connection_error is None + if self._payload_parser is not None: - with suppress(Exception): + with suppress(Exception): # FIXME: log this somehow? self._payload_parser.feed_eof() uncompleted = None if self._parser is not None: try: uncompleted = self._parser.feed_eof() - except Exception as e: + except Exception as underlying_exc: if self._payload is not None: - exc = ClientPayloadError("Response payload is not completed") - exc.__cause__ = e - self._payload.set_exception(exc) + client_payload_exc_msg = ( + f"Response payload is not completed: {underlying_exc !r}" + ) + if not connection_closed_cleanly: + client_payload_exc_msg = ( + f"{client_payload_exc_msg !s}. 
" + f"{original_connection_error !r}" + ) + set_exception( + self._payload, + ClientPayloadError(client_payload_exc_msg), + underlying_exc, + ) if not self.is_eof(): - if isinstance(exc, OSError): - exc = ClientOSError(*exc.args) - if exc is None: - exc = ServerDisconnectedError(uncompleted) + if isinstance(original_connection_error, OSError): + reraised_exc = ClientOSError(*original_connection_error.args) + if connection_closed_cleanly: + reraised_exc = ServerDisconnectedError(uncompleted) # assigns self._should_close to True as side effect, # we do it anyway below - self.set_exception(exc) + underlying_non_eof_exc = ( + _EXC_SENTINEL + if connection_closed_cleanly + else original_connection_error + ) + assert underlying_non_eof_exc is not None + assert reraised_exc is not None + self.set_exception(reraised_exc, underlying_non_eof_exc) self._should_close = True self._parser = None @@ -102,7 +130,7 @@ def connection_lost(self, exc: Optional[BaseException]) -> None: self._payload_parser = None self._reading_paused = False - super().connection_lost(exc) + super().connection_lost(reraised_exc) def eof_received(self) -> None: # should call parser.feed_eof() most likely @@ -116,10 +144,14 @@ def resume_reading(self) -> None: super().resume_reading() self._reschedule_timeout() - def set_exception(self, exc: BaseException) -> None: + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, + ) -> None: self._should_close = True self._drop_timeout() - super().set_exception(exc) + super().set_exception(exc, exc_cause) def set_parser(self, parser: Any, payload: Any) -> None: # TODO: actual types are: @@ -196,7 +228,7 @@ def _on_read_timeout(self) -> None: exc = SocketTimeoutError("Timeout on reading data from socket") self.set_exception(exc) if self._payload is not None: - self._payload.set_exception(exc) + set_exception(self._payload, exc) def data_received(self, data: bytes) -> None: self._reschedule_timeout() @@ -222,14 +254,14 @@ def 
data_received(self, data: bytes) -> None: # parse http messages try: messages, upgraded, tail = self._parser.feed_data(data) - except BaseException as exc: + except BaseException as underlying_exc: if self.transport is not None: # connection.release() could be called BEFORE # data_received(), the transport is already # closed in this case self.transport.close() # should_close is True after the call - self.set_exception(exc) + self.set_exception(HttpProcessingError(), underlying_exc) return self._upgraded = upgraded diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index e0de951a33a..afe719da16e 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -50,6 +50,7 @@ netrc_from_env, noop, reify, + set_exception, set_result, ) from .http import ( @@ -630,20 +631,29 @@ async def write_bytes( for chunk in self.body: await writer.write(chunk) # type: ignore[arg-type] - except OSError as exc: - if exc.errno is None and isinstance(exc, asyncio.TimeoutError): - protocol.set_exception(exc) - else: - new_exc = ClientOSError( - exc.errno, "Can not write request body for %s" % self.url + except OSError as underlying_exc: + reraised_exc = underlying_exc + + exc_is_not_timeout = underlying_exc.errno is not None or not isinstance( + underlying_exc, asyncio.TimeoutError + ) + if exc_is_not_timeout: + reraised_exc = ClientOSError( + underlying_exc.errno, + f"Can not write request body for {self.url !s}", ) - new_exc.__context__ = exc - new_exc.__cause__ = exc - protocol.set_exception(new_exc) + + set_exception(protocol, reraised_exc, underlying_exc) except asyncio.CancelledError: await writer.write_eof() - except Exception as exc: - protocol.set_exception(exc) + except Exception as underlying_exc: + set_exception( + protocol, + ClientConnectionError( + f"Failed to send bytes into the underlying connection {conn !s}", + ), + underlying_exc, + ) else: await writer.write_eof() protocol.start_timeout() @@ -1086,7 +1096,7 @@ def _cleanup_writer(self) -> 
None: def _notify_content(self) -> None: content = self.content if content and content.exception() is None: - content.set_exception(ClientConnectionError("Connection closed")) + set_exception(content, ClientConnectionError("Connection closed")) self._released = True async def wait_for_close(self) -> None: diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index a5c762ed795..284033b7a04 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -810,9 +810,39 @@ def set_result(fut: "asyncio.Future[_T]", result: _T) -> None: fut.set_result(result) -def set_exception(fut: "asyncio.Future[_T]", exc: BaseException) -> None: - if not fut.done(): - fut.set_exception(exc) +_EXC_SENTINEL = BaseException() + + +class ErrorableProtocol(Protocol): + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = ..., + ) -> None: + ... # pragma: no cover + + +def set_exception( + fut: "asyncio.Future[_T] | ErrorableProtocol", + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, +) -> None: + """Set future exception. + + If the future is marked as complete, this function is a no-op. + + :param exc_cause: An exception that is a direct cause of ``exc``. + Only set if provided. 
+ """ + if asyncio.isfuture(fut) and fut.done(): + return + + exc_is_sentinel = exc_cause is _EXC_SENTINEL + exc_causes_itself = exc is exc_cause + if not exc_is_sentinel and not exc_causes_itself: + exc.__cause__ = exc_cause + + fut.set_exception(exc) @functools.total_ordering diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index 1877f558308..1301f025810 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -28,10 +28,12 @@ from .base_protocol import BaseProtocol from .compression_utils import HAS_BROTLI, BrotliDecompressor, ZLibDecompressor from .helpers import ( + _EXC_SENTINEL, DEBUG, NO_EXTENSIONS, BaseTimerContext, method_must_be_empty_body, + set_exception, status_code_must_be_empty_body, ) from .http_exceptions import ( @@ -446,13 +448,16 @@ def get_content_length() -> Optional[int]: assert self._payload_parser is not None try: eof, data = self._payload_parser.feed_data(data[start_pos:], SEP) - except BaseException as exc: + except BaseException as underlying_exc: + reraised_exc = underlying_exc if self.payload_exception is not None: - self._payload_parser.payload.set_exception( - self.payload_exception(str(exc)) - ) - else: - self._payload_parser.payload.set_exception(exc) + reraised_exc = self.payload_exception(str(underlying_exc)) + + set_exception( + self._payload_parser.payload, + reraised_exc, + underlying_exc, + ) eof = True data = b"" @@ -834,7 +839,7 @@ def feed_data( exc = TransferEncodingError( chunk[:pos].decode("ascii", "surrogateescape") ) - self.payload.set_exception(exc) + set_exception(self.payload, exc) raise exc size = int(bytes(size_b), 16) @@ -939,8 +944,12 @@ def __init__(self, out: StreamReader, encoding: Optional[str]) -> None: else: self.decompressor = ZLibDecompressor(encoding=encoding) - def set_exception(self, exc: BaseException) -> None: - self.out.set_exception(exc) + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, + ) -> None: + set_exception(self.out, 
exc, exc_cause) def feed_data(self, chunk: bytes, size: int) -> None: if not size: diff --git a/aiohttp/http_websocket.py b/aiohttp/http_websocket.py index b63453f99e5..39f2e4a5c15 100644 --- a/aiohttp/http_websocket.py +++ b/aiohttp/http_websocket.py @@ -25,7 +25,7 @@ from .base_protocol import BaseProtocol from .compression_utils import ZLibCompressor, ZLibDecompressor -from .helpers import NO_EXTENSIONS +from .helpers import NO_EXTENSIONS, set_exception from .streams import DataQueue __all__ = ( @@ -314,7 +314,7 @@ def feed_data(self, data: bytes) -> Tuple[bool, bytes]: return self._feed_data(data) except Exception as exc: self._exc = exc - self.queue.set_exception(exc) + set_exception(self.queue, exc) return True, b"" def _feed_data(self, data: bytes) -> Tuple[bool, bytes]: diff --git a/aiohttp/streams.py b/aiohttp/streams.py index 3e4c355b5cb..b9b9c3fd96f 100644 --- a/aiohttp/streams.py +++ b/aiohttp/streams.py @@ -14,7 +14,13 @@ ) from .base_protocol import BaseProtocol -from .helpers import BaseTimerContext, TimerNoop, set_exception, set_result +from .helpers import ( + _EXC_SENTINEL, + BaseTimerContext, + TimerNoop, + set_exception, + set_result, +) from .log import internal_logger __all__ = ( @@ -146,19 +152,23 @@ def get_read_buffer_limits(self) -> Tuple[int, int]: def exception(self) -> Optional[BaseException]: return self._exception - def set_exception(self, exc: BaseException) -> None: + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, + ) -> None: self._exception = exc self._eof_callbacks.clear() waiter = self._waiter if waiter is not None: self._waiter = None - set_exception(waiter, exc) + set_exception(waiter, exc, exc_cause) waiter = self._eof_waiter if waiter is not None: self._eof_waiter = None - set_exception(waiter, exc) + set_exception(waiter, exc, exc_cause) def on_eof(self, callback: Callable[[], None]) -> None: if self._eof: @@ -513,7 +523,11 @@ def __repr__(self) -> str: def exception(self) -> 
Optional[BaseException]: return None - def set_exception(self, exc: BaseException) -> None: + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, + ) -> None: pass def on_eof(self, callback: Callable[[], None]) -> None: @@ -588,14 +602,18 @@ def at_eof(self) -> bool: def exception(self) -> Optional[BaseException]: return self._exception - def set_exception(self, exc: BaseException) -> None: + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, + ) -> None: self._eof = True self._exception = exc waiter = self._waiter if waiter is not None: self._waiter = None - set_exception(waiter, exc) + set_exception(waiter, exc, exc_cause) def feed_data(self, data: _T, size: int = 0) -> None: self._size += size diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index ec5856a0a22..f083b13eb0f 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -26,7 +26,7 @@ from .abc import AbstractAccessLogger, AbstractStreamWriter from .base_protocol import BaseProtocol -from .helpers import ceil_timeout +from .helpers import ceil_timeout, set_exception from .http import ( HttpProcessingError, HttpRequestParser, @@ -565,7 +565,7 @@ async def start(self) -> None: self.log_debug("Uncompleted request.") self.close() - payload.set_exception(PayloadAccessError()) + set_exception(payload, PayloadAccessError()) except asyncio.CancelledError: self.log_debug("Ignored premature client disconnection ") diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index 61fc831b032..781713e5985 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -48,6 +48,7 @@ parse_http_date, reify, sentinel, + set_exception, ) from .http_parser import RawRequestMessage from .http_writer import HttpVersion @@ -814,7 +815,7 @@ async def _prepare_hook(self, response: StreamResponse) -> None: return def _cancel(self, exc: BaseException) -> None: - self._payload.set_exception(exc) + 
set_exception(self._payload, exc) class Request(BaseRequest): diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index 783377716f5..d20a26ca470 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -11,7 +11,7 @@ from . import hdrs from .abc import AbstractStreamWriter -from .helpers import call_later, set_result +from .helpers import call_later, set_exception, set_result from .http import ( WS_CLOSED_MESSAGE, WS_CLOSING_MESSAGE, @@ -526,4 +526,4 @@ async def __anext__(self) -> WSMessage: def _cancel(self, exc: BaseException) -> None: if self._reader is not None: - self._reader.set_exception(exc) + set_exception(self._reader, exc) diff --git a/tests/test_base_protocol.py b/tests/test_base_protocol.py index b26011095e9..72c8c7c6b63 100644 --- a/tests/test_base_protocol.py +++ b/tests/test_base_protocol.py @@ -186,9 +186,9 @@ async def test_lost_drain_waited_exception() -> None: assert pr._drain_waiter is not None exc = RuntimeError() pr.connection_lost(exc) - with pytest.raises(RuntimeError) as cm: + with pytest.raises(ConnectionError, match=r"^Connection lost$") as cm: await t - assert cm.value is exc + assert cm.value.__cause__ is exc assert pr._drain_waiter is None diff --git a/tests/test_client_request.py b/tests/test_client_request.py index c54e1828e34..6084f685405 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -14,6 +14,7 @@ import aiohttp from aiohttp import BaseConnector, hdrs, helpers, payload +from aiohttp.client_exceptions import ClientConnectionError from aiohttp.client_reqrep import ( ClientRequest, ClientResponse, @@ -1096,9 +1097,8 @@ async def throw_exc(): # assert connection.close.called assert conn.protocol.set_exception.called outer_exc = conn.protocol.set_exception.call_args[0][0] - assert isinstance(outer_exc, ValueError) - assert inner_exc is outer_exc - assert inner_exc is outer_exc + assert isinstance(outer_exc, ClientConnectionError) + assert outer_exc.__cause__ is inner_exc await req.close() diff --git 
a/tests/test_http_parser.py b/tests/test_http_parser.py index d306267c8bb..da7f1182b3a 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -280,6 +280,7 @@ def test_parse_headers_longline(parser: Any) -> None: header_name = b"Test" + invalid_unicode_byte + b"Header" + b"A" * 8192 text = b"GET /test HTTP/1.1\r\n" + header_name + b": test\r\n" + b"\r\n" + b"\r\n" with pytest.raises((http_exceptions.LineTooLong, http_exceptions.BadHttpMessage)): + # FIXME: `LineTooLong` doesn't seem to actually be happening parser.feed_data(text) From 6b5cdef308a7b59b837b6936288531063256139d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Feb 2024 11:38:37 +0000 Subject: [PATCH 0108/1511] Bump pip-tools from 7.3.0 to 7.4.0 (#8169) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pip-tools](https://github.com/jazzband/pip-tools) from 7.3.0 to 7.4.0.
Release notes

Sourced from pip-tools's releases.

7.4.0

Features:

Bug Fixes:

  • Fix for src-files not being used when specified in a config file (#2015). Thanks @​csalerno-asml
  • Fix ignorance of inverted CLI options in config for pip-sync (#1989). Thanks @​chrysle
  • Filter out origin ireqs for extra requirements before writing output annotations (#2011). Thanks @​chrysle
  • Make BacktrackingResolver ignore extras when dropping existing constraints (#1984). Thanks @​chludwig-haufe
  • Display pyproject.toml's metadata parsing errors in verbose mode (#1979). Thanks @​szobov

Other Changes:

  • Add mention of pip-compile-multi in Other useful tools README section (#1986). Thanks @​peterdemin
Changelog

Sourced from pip-tools's changelog.

v7.4.0

Features:

Bug Fixes:

  • Fix for src-files not being used when specified in a config file (#2015). Thanks @​csalerno-asml
  • Fix ignorance of inverted CLI options in config for pip-sync (#1989). Thanks @​chrysle
  • Filter out origin ireqs for extra requirements before writing output annotations (#2011). Thanks @​chrysle
  • Make BacktrackingResolver ignore extras when dropping existing constraints (#1984). Thanks @​chludwig-haufe
  • Display pyproject.toml's metadata parsing errors in verbose mode (#1979). Thanks @​szobov

Other Changes:

  • Add mention of pip-compile-multi in Other useful tools README section (#1986). Thanks @​peterdemin
Commits
  • 1397bfa Merge pull request #2043 from jazzband/dependabot/pip/docs/jinja2-3.1.3
  • 355a04e Bump jinja2 from 3.1.2 to 3.1.3 in /docs
  • c8f2988 Merge pull request #2055 from atugushev/fix-failing-test-on-pip-24
  • 598845a Fix failing test on pip-24.0
  • a8688d7 Merge pull request #2029 from jazzband/pre-commit-ci-update-config
  • 39cbff8 [pre-commit.ci] auto fixes from pre-commit.com hooks
  • 644ac8a [pre-commit.ci] pre-commit autoupdate
  • d673c8e Merge pull request #2015 from csalerno-asml/fix-2006
  • e216ad5 rm submodule
  • 64d1de3 comments addressed
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pip-tools&package-manager=pip&previous-version=7.3.0&new-version=7.4.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 16 ++++++++++------ requirements/dev.txt | 12 ++++++++---- 2 files changed, 18 insertions(+), 10 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 1e3fb385de6..2d6e55bca9a 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -32,7 +32,7 @@ blockdiag==2.0.1 # via sphinxcontrib-blockdiag brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in -build==0.9.0 +build==1.0.3 # via pip-tools certifi==2023.7.22 # via requests @@ -94,7 +94,9 @@ idna==3.3 imagesize==1.3.0 # via sphinx importlib-metadata==7.0.0 - # via sphinx + # via + # build + # sphinx importlib-resources==6.1.1 # via towncrier incremental==22.10.0 @@ -126,13 +128,11 @@ packaging==21.2 # gunicorn # pytest # sphinx -pep517==0.12.0 - # via build pillow==9.5.0 # via # -c requirements/broken-projects.in # blockdiag -pip-tools==7.3.0 +pip-tools==7.4.0 # via -r requirements/dev.in platformdirs==2.4.0 # via virtualenv @@ -160,6 +160,10 @@ pyjwt==2.3.0 # pyjwt pyparsing==2.4.7 # via packaging +pyproject-hooks==1.0.0 + # via + # build + # pip-tools pytest==7.4.4 # via # -r requirements/lint.in @@ -227,8 +231,8 @@ tomli==2.0.1 # cherry-picker # coverage # mypy - # pep517 # pip-tools + # pyproject-hooks # pytest # slotscheck # towncrier diff --git a/requirements/dev.txt b/requirements/dev.txt index d9197e86828..51aed8b65b8 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -30,7 +30,7 @@ blockdiag==3.0.0 # via sphinxcontrib-blockdiag brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in -build==0.10.0 +build==1.0.3 # via pip-tools certifi==2023.7.22 # via requests @@ -90,7 +90,9 @@ idna==3.4 imagesize==1.4.1 # via sphinx importlib-metadata==7.0.0 - # via sphinx + # via + # build + # sphinx 
importlib-resources==6.1.1 # via towncrier incremental==22.10.0 @@ -125,7 +127,7 @@ pillow==9.5.0 # via # -c requirements/broken-projects.in # blockdiag -pip-tools==7.3.0 +pip-tools==7.4.0 # via -r requirements/dev.in platformdirs==3.10.0 # via virtualenv @@ -150,7 +152,9 @@ pyjwt==2.8.0 # gidgethub # pyjwt pyproject-hooks==1.0.0 - # via build + # via + # build + # pip-tools pytest==7.4.4 # via # -r requirements/lint.in From e74a4a02893c2b0a90ddcd8935ac9cdb946f2f1c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 20 Feb 2024 16:04:27 -0600 Subject: [PATCH 0109/1511] [PR #8163/006fbe03 backport][3.9] Avoid creating a task to do DNS resolution if there is no throttle (#8172) Co-authored-by: J. Nick Koston Fixes #123'). --> --- CHANGES/8163.bugfix.rst | 5 +++++ aiohttp/connector.py | 50 +++++++++++++++++++++++++++++------------ tests/test_connector.py | 6 +++++ 3 files changed, 47 insertions(+), 14 deletions(-) create mode 100644 CHANGES/8163.bugfix.rst diff --git a/CHANGES/8163.bugfix.rst b/CHANGES/8163.bugfix.rst new file mode 100644 index 00000000000..8bfb10260c6 --- /dev/null +++ b/CHANGES/8163.bugfix.rst @@ -0,0 +1,5 @@ +Improved the DNS resolution performance on cache hit +-- by :user:`bdraco`. + +This is achieved by avoiding an :mod:`asyncio` task creation +in this case. 
diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 3b9841dd094..f95ebe84c66 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -825,6 +825,7 @@ def clear_dns_cache( async def _resolve_host( self, host: str, port: int, traces: Optional[List["Trace"]] = None ) -> List[Dict[str, Any]]: + """Resolve host and return list of addresses.""" if is_ip_address(host): return [ { @@ -852,8 +853,7 @@ async def _resolve_host( return res key = (host, port) - - if (key in self._cached_hosts) and (not self._cached_hosts.expired(key)): + if key in self._cached_hosts and not self._cached_hosts.expired(key): # get result early, before any await (#4014) result = self._cached_hosts.next_addrs(key) @@ -862,6 +862,39 @@ async def _resolve_host( await trace.send_dns_cache_hit(host) return result + # + # If multiple connectors are resolving the same host, we wait + # for the first one to resolve and then use the result for all of them. + # We use a throttle event to ensure that we only resolve the host once + # and then use the result for all the waiters. + # + # In this case we need to create a task to ensure that we can shield + # the task from cancellation as cancelling this lookup should not cancel + # the underlying lookup or else the cancel event will get broadcast to + # all the waiters across all connections. 
+ # + resolved_host_task = asyncio.create_task( + self._resolve_host_with_throttle(key, host, port, traces) + ) + try: + return await asyncio.shield(resolved_host_task) + except asyncio.CancelledError: + + def drop_exception(fut: "asyncio.Future[List[Dict[str, Any]]]") -> None: + with suppress(Exception, asyncio.CancelledError): + fut.result() + + resolved_host_task.add_done_callback(drop_exception) + raise + + async def _resolve_host_with_throttle( + self, + key: Tuple[str, int], + host: str, + port: int, + traces: Optional[List["Trace"]], + ) -> List[Dict[str, Any]]: + """Resolve host with a dns events throttle.""" if key in self._throttle_dns_events: # get event early, before any await (#4014) event = self._throttle_dns_events[key] @@ -1163,22 +1196,11 @@ async def _create_direct_connection( host = host.rstrip(".") + "." port = req.port assert port is not None - host_resolved = asyncio.ensure_future( - self._resolve_host(host, port, traces=traces), loop=self._loop - ) try: # Cancelling this lookup should not cancel the underlying lookup # or else the cancel event will get broadcast to all the waiters # across all connections. 
- hosts = await asyncio.shield(host_resolved) - except asyncio.CancelledError: - - def drop_exception(fut: "asyncio.Future[List[Dict[str, Any]]]") -> None: - with suppress(Exception, asyncio.CancelledError): - fut.result() - - host_resolved.add_done_callback(drop_exception) - raise + hosts = await self._resolve_host(host, port, traces=traces) except OSError as exc: if exc.errno is None and isinstance(exc, asyncio.TimeoutError): raise diff --git a/tests/test_connector.py b/tests/test_connector.py index 142abab3c15..02e48bc108b 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -767,6 +767,7 @@ async def test_tcp_connector_dns_throttle_requests(loop, dns_response) -> None: loop.create_task(conn._resolve_host("localhost", 8080)) loop.create_task(conn._resolve_host("localhost", 8080)) await asyncio.sleep(0) + await asyncio.sleep(0) m_resolver().resolve.assert_called_once_with("localhost", 8080, family=0) @@ -778,6 +779,9 @@ async def test_tcp_connector_dns_throttle_requests_exception_spread(loop) -> Non r1 = loop.create_task(conn._resolve_host("localhost", 8080)) r2 = loop.create_task(conn._resolve_host("localhost", 8080)) await asyncio.sleep(0) + await asyncio.sleep(0) + await asyncio.sleep(0) + await asyncio.sleep(0) assert r1.exception() == e assert r2.exception() == e @@ -792,6 +796,7 @@ async def test_tcp_connector_dns_throttle_requests_cancelled_when_close( loop.create_task(conn._resolve_host("localhost", 8080)) f = loop.create_task(conn._resolve_host("localhost", 8080)) + await asyncio.sleep(0) await asyncio.sleep(0) await conn.close() @@ -956,6 +961,7 @@ async def test_tcp_connector_dns_tracing_throttle_requests(loop, dns_response) - loop.create_task(conn._resolve_host("localhost", 8080, traces=traces)) loop.create_task(conn._resolve_host("localhost", 8080, traces=traces)) await asyncio.sleep(0) + await asyncio.sleep(0) on_dns_cache_hit.assert_called_once_with( session, trace_config_ctx, aiohttp.TraceDnsCacheHitParams("localhost") ) From 
88d224f7c53dd1ff410a5dccada6a778039adba7 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 20 Feb 2024 16:04:39 -0600 Subject: [PATCH 0110/1511] [PR #8163/006fbe03 backport][3.10] Avoid creating a task to do DNS resolution if there is no throttle (#8173) Co-authored-by: J. Nick Koston --- CHANGES/8163.bugfix.rst | 5 +++++ aiohttp/connector.py | 50 +++++++++++++++++++++++++++++------------ tests/test_connector.py | 6 +++++ 3 files changed, 47 insertions(+), 14 deletions(-) create mode 100644 CHANGES/8163.bugfix.rst diff --git a/CHANGES/8163.bugfix.rst b/CHANGES/8163.bugfix.rst new file mode 100644 index 00000000000..8bfb10260c6 --- /dev/null +++ b/CHANGES/8163.bugfix.rst @@ -0,0 +1,5 @@ +Improved the DNS resolution performance on cache hit +-- by :user:`bdraco`. + +This is achieved by avoiding an :mod:`asyncio` task creation +in this case. diff --git a/aiohttp/connector.py b/aiohttp/connector.py index d0954355244..64c678d4b78 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -835,6 +835,7 @@ def clear_dns_cache( async def _resolve_host( self, host: str, port: int, traces: Optional[List["Trace"]] = None ) -> List[Dict[str, Any]]: + """Resolve host and return list of addresses.""" if is_ip_address(host): return [ { @@ -862,8 +863,7 @@ async def _resolve_host( return res key = (host, port) - - if (key in self._cached_hosts) and (not self._cached_hosts.expired(key)): + if key in self._cached_hosts and not self._cached_hosts.expired(key): # get result early, before any await (#4014) result = self._cached_hosts.next_addrs(key) @@ -872,6 +872,39 @@ async def _resolve_host( await trace.send_dns_cache_hit(host) return result + # + # If multiple connectors are resolving the same host, we wait + # for the first one to resolve and then use the result for all of them. + # We use a throttle event to ensure that we only resolve the host once + # and then use the result for all the waiters. 
+ # + # In this case we need to create a task to ensure that we can shield + # the task from cancellation as cancelling this lookup should not cancel + # the underlying lookup or else the cancel event will get broadcast to + # all the waiters across all connections. + # + resolved_host_task = asyncio.create_task( + self._resolve_host_with_throttle(key, host, port, traces) + ) + try: + return await asyncio.shield(resolved_host_task) + except asyncio.CancelledError: + + def drop_exception(fut: "asyncio.Future[List[Dict[str, Any]]]") -> None: + with suppress(Exception, asyncio.CancelledError): + fut.result() + + resolved_host_task.add_done_callback(drop_exception) + raise + + async def _resolve_host_with_throttle( + self, + key: Tuple[str, int], + host: str, + port: int, + traces: Optional[List["Trace"]], + ) -> List[Dict[str, Any]]: + """Resolve host with a dns events throttle.""" if key in self._throttle_dns_events: # get event early, before any await (#4014) event = self._throttle_dns_events[key] @@ -1224,22 +1257,11 @@ async def _create_direct_connection( host = host.rstrip(".") + "." port = req.port assert port is not None - host_resolved = asyncio.ensure_future( - self._resolve_host(host, port, traces=traces), loop=self._loop - ) try: # Cancelling this lookup should not cancel the underlying lookup # or else the cancel event will get broadcast to all the waiters # across all connections. 
- hosts = await asyncio.shield(host_resolved) - except asyncio.CancelledError: - - def drop_exception(fut: "asyncio.Future[List[Dict[str, Any]]]") -> None: - with suppress(Exception, asyncio.CancelledError): - fut.result() - - host_resolved.add_done_callback(drop_exception) - raise + hosts = await self._resolve_host(host, port, traces=traces) except OSError as exc: if exc.errno is None and isinstance(exc, asyncio.TimeoutError): raise diff --git a/tests/test_connector.py b/tests/test_connector.py index fe027df896c..58f6c6a116d 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -1015,6 +1015,7 @@ async def test_tcp_connector_dns_throttle_requests(loop, dns_response) -> None: loop.create_task(conn._resolve_host("localhost", 8080)) loop.create_task(conn._resolve_host("localhost", 8080)) await asyncio.sleep(0) + await asyncio.sleep(0) m_resolver().resolve.assert_called_once_with("localhost", 8080, family=0) @@ -1026,6 +1027,9 @@ async def test_tcp_connector_dns_throttle_requests_exception_spread(loop) -> Non r1 = loop.create_task(conn._resolve_host("localhost", 8080)) r2 = loop.create_task(conn._resolve_host("localhost", 8080)) await asyncio.sleep(0) + await asyncio.sleep(0) + await asyncio.sleep(0) + await asyncio.sleep(0) assert r1.exception() == e assert r2.exception() == e @@ -1040,6 +1044,7 @@ async def test_tcp_connector_dns_throttle_requests_cancelled_when_close( loop.create_task(conn._resolve_host("localhost", 8080)) f = loop.create_task(conn._resolve_host("localhost", 8080)) + await asyncio.sleep(0) await asyncio.sleep(0) await conn.close() @@ -1204,6 +1209,7 @@ async def test_tcp_connector_dns_tracing_throttle_requests(loop, dns_response) - loop.create_task(conn._resolve_host("localhost", 8080, traces=traces)) loop.create_task(conn._resolve_host("localhost", 8080, traces=traces)) await asyncio.sleep(0) + await asyncio.sleep(0) on_dns_cache_hit.assert_called_once_with( session, trace_config_ctx, aiohttp.TraceDnsCacheHitParams("localhost") ) 
From 46675be68c0f60a414ca97fd72d7951624c99cb2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Feb 2024 12:25:47 +0000 Subject: [PATCH 0111/1511] Bump coverage from 7.4.1 to 7.4.2 (#8177) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.4.1 to 7.4.2.
Changelog

Sourced from coverage's changelog.

Version 7.4.2 — 2024-02-20

  • Fix: setting COVERAGE_CORE=sysmon no longer errors on 3.11 and lower, thanks Hugo van Kemenade <pull 1747_>_. It now issues a warning that sys.monitoring is not available and falls back to the default core instead.

.. _pull 1747: nedbat/coveragepy#1747

.. _changes_7-4-1:

Commits
  • 5d69334 test: if a test fails randomly, let it retry with @​flaky
  • 65d686c docs: sample HTML for 7.4.2
  • 026dca7 docs: prep for 7.4.2
  • a7d1022 build: some Makefile targets only make sense from the release steps
  • d365814 chore: make upgrade
  • c56c3a2 fix: issue a warning if we can't use sysmon as requested.
  • 8b0e039 fix: only use "sysmon" core when available (Python 3.12+) (#1747)
  • 575a44c build: temporarily pin to 3.13.0a3 for Windows (#1751)
  • 628c1c5 fix: avoid a dict-changed-size error
  • 8412054 chore: make upgrade
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=coverage&package-manager=pip&previous-version=7.4.1&new-version=7.4.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 2d6e55bca9a..18a8baa1193 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -54,7 +54,7 @@ click==8.0.3 # towncrier # typer # wait-for-it -coverage==7.4.1 +coverage==7.4.2 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/dev.txt b/requirements/dev.txt index 51aed8b65b8..04f527639ad 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -52,7 +52,7 @@ click==8.1.6 # towncrier # typer # wait-for-it -coverage==7.4.1 +coverage==7.4.2 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/test.txt b/requirements/test.txt index 29021aecde1..8793b24283e 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -30,7 +30,7 @@ click==8.1.6 # via # typer # wait-for-it -coverage==7.4.1 +coverage==7.4.2 # via # -r requirements/test.in # pytest-cov From 73d15c481395701ab73b474076f9eff838c4cb8c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Feb 2024 11:08:46 +0000 Subject: [PATCH 0112/1511] Bump typing-extensions from 4.9.0 to 4.10.0 (#8189) Bumps [typing-extensions](https://github.com/python/typing_extensions) from 4.9.0 to 4.10.0.
Release notes

Sourced from typing-extensions's releases.

4.10.0rc1

Release 4.10.0rc1 (February 17, 2024)

  • Add support for PEP 728, supporting the closed keyword argument and the special __extra_items__ key for TypedDict. Patch by Zixuan James Li.
  • Add support for PEP 742, adding typing_extensions.TypeIs. Patch by Jelle Zijlstra.
  • Drop runtime error when a read-only TypedDict item overrides a mutable one. Type checkers should still flag this as an error. Patch by Jelle Zijlstra.
  • Speedup issubclass() checks against simple runtime-checkable protocols by around 6% (backporting python/cpython#112717, by Alex Waygood).
  • Fix a regression in the implementation of protocols where typing.Protocol classes that were not marked as @runtime_checkable would be unnecessarily introspected, potentially causing exceptions to be raised if the protocol had problematic members. Patch by Alex Waygood, backporting python/cpython#113401.
Changelog

Sourced from typing-extensions's changelog.

Release 4.10.0 (February 24, 2024)

This feature release adds support for PEP 728 (TypedDict with extra items) and PEP 742 (TypeIs).

There are no changes since 4.10.0rc1.

Release 4.10.0rc1 (February 17, 2024)

  • Add support for PEP 728, supporting the closed keyword argument and the special __extra_items__ key for TypedDict. Patch by Zixuan James Li.
  • Add support for PEP 742, adding typing_extensions.TypeIs. Patch by Jelle Zijlstra.
  • Drop runtime error when a read-only TypedDict item overrides a mutable one. Type checkers should still flag this as an error. Patch by Jelle Zijlstra.
  • Speedup issubclass() checks against simple runtime-checkable protocols by around 6% (backporting python/cpython#112717, by Alex Waygood).
  • Fix a regression in the implementation of protocols where typing.Protocol classes that were not marked as @runtime_checkable would be unnecessarily introspected, potentially causing exceptions to be raised if the protocol had problematic members. Patch by Alex Waygood, backporting python/cpython#113401.
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=typing-extensions&package-manager=pip&previous-version=4.9.0&new-version=4.10.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/cython.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- requirements/typing-extensions.txt | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index e10f80a9cca..6bc448debd4 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -36,7 +36,7 @@ pycares==4.3.0 # via aiodns pycparser==2.21 # via cffi -typing-extensions==4.9.0 +typing-extensions==4.10.0 # via -r requirements/typing-extensions.in uvloop==0.19.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 18a8baa1193..794783b353c 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -246,7 +246,7 @@ trustme==1.1.0 ; platform_machine != "i686" # via -r requirements/test.in typer==0.6.1 # via python-on-whales -typing-extensions==4.9.0 +typing-extensions==4.10.0 # via # -r requirements/typing-extensions.in # aioredis diff --git a/requirements/cython.txt b/requirements/cython.txt index 201da88c351..28456c88452 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -8,5 +8,5 @@ cython==3.0.8 # via -r requirements/cython.in multidict==6.0.5 # via -r requirements/multidict.in -typing-extensions==4.9.0 +typing-extensions==4.10.0 # via -r requirements/typing-extensions.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 04f527639ad..eee4c68f3ae 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -232,7 +232,7 @@ trustme==1.1.0 ; platform_machine != "i686" # via -r requirements/test.in typer==0.9.0 # via python-on-whales -typing-extensions==4.9.0 +typing-extensions==4.10.0 # via # -r requirements/typing-extensions.in # 
aioredis diff --git a/requirements/lint.txt b/requirements/lint.txt index 1976da1d1ba..85af8510de9 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -47,7 +47,7 @@ tomli==2.0.1 # mypy # pytest # slotscheck -typing-extensions==4.9.0 +typing-extensions==4.10.0 # via # -r requirements/typing-extensions.in # aioredis diff --git a/requirements/test.txt b/requirements/test.txt index 8793b24283e..26ff36ee802 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -111,7 +111,7 @@ trustme==1.1.0 ; platform_machine != "i686" # via -r requirements/test.in typer==0.9.0 # via python-on-whales -typing-extensions==4.9.0 +typing-extensions==4.10.0 # via # -r requirements/typing-extensions.in # annotated-types diff --git a/requirements/typing-extensions.txt b/requirements/typing-extensions.txt index 8ea8d0d4d08..a7d80f5a0fd 100644 --- a/requirements/typing-extensions.txt +++ b/requirements/typing-extensions.txt @@ -4,5 +4,5 @@ # # pip-compile --allow-unsafe --output-file=requirements/typing-extensions.txt --resolver=backtracking --strip-extras requirements/typing-extensions.in # -typing-extensions==4.9.0 +typing-extensions==4.10.0 # via -r requirements/typing-extensions.in From a75ed655d7200102bb0129445469a6f5b6fc7f45 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Feb 2024 11:32:28 +0000 Subject: [PATCH 0113/1511] Bump python-on-whales from 0.68.0 to 0.69.0 (#8168) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [python-on-whales](https://github.com/gabrieldemarmiesse/python-on-whales) from 0.68.0 to 0.69.0.
Release notes

Sourced from python-on-whales's releases.

v0.69.0

What's Changed

Since this is a big release, I'll only put here the commits which have an impact on end users and remove internal refactoring.

New Contributors

Full Changelog: https://github.com/gabrieldemarmiesse/python-on-whales/compare/v0.68.0...v0.69.0

Commits
  • 4ed016d Bump version to 0.69.0
  • 6e5a7a9 Add support for podman --env-host flag when creating containers (#551)
  • ffb4e47 :sparkles: add platforms parameter to buildx.create (#541)
  • ae158ac Fixed pydantic error on service model (#548)
  • d8420f9 Add tty and interactive flags to container create and start (#545)
  • f9d129b Various tidyup for container cli_wrapper.py (#538)
  • 7775d2a Add some rules to ensure an healthy contributor experience
  • f2eb701 Tidy up ReloadableObjectFromJson._fetch_inspect_result_json() to return a loa...
  • 26302ea Move most of the remaining tests to use docker_client or ctr_client fixture (...
  • 3c3d0cb Convert test_network.py, not passing with podman yet (#533)
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=python-on-whales&package-manager=pip&previous-version=0.68.0&new-version=0.69.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 794783b353c..280821c8f54 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -176,7 +176,7 @@ pytest-mock==3.12.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun -python-on-whales==0.68.0 +python-on-whales==0.69.0 # via -r requirements/test.in pytz==2023.3.post1 # via babel diff --git a/requirements/dev.txt b/requirements/dev.txt index eee4c68f3ae..7906a47e8de 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -167,7 +167,7 @@ pytest-mock==3.12.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun -python-on-whales==0.68.0 +python-on-whales==0.69.0 # via -r requirements/test.in pytz==2023.3.post1 # via babel diff --git a/requirements/test.txt b/requirements/test.txt index 26ff36ee802..00b974fe385 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -88,7 +88,7 @@ pytest-mock==3.12.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun -python-on-whales==0.68.0 +python-on-whales==0.69.0 # via -r requirements/test.in re-assert==1.1.0 # via -r requirements/test.in From f92a3e39d95c40a0d95f3571f72674b06c98bec3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 1 Mar 2024 10:31:41 +0000 Subject: [PATCH 0114/1511] Bump actions/cache from 4.0.0 to 4.0.1 (#8195) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/cache](https://github.com/actions/cache) from 4.0.0 to 4.0.1.
Release notes

Sourced from actions/cache's releases.

v4.0.1

What's Changed

New Contributors

Full Changelog: https://github.com/actions/cache/compare/v4...v4.0.1

Changelog

Sourced from actions/cache's changelog.

4.0.1

  • Updated isGhes check
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/cache&package-manager=github_actions&previous-version=4.0.0&new-version=4.0.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index a0492bccd4a..765b4cb79dd 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -47,7 +47,7 @@ jobs: with: python-version: 3.9 - name: Cache PyPI - uses: actions/cache@v4.0.0 + uses: actions/cache@v4.0.1 with: key: pip-lint-${{ hashFiles('requirements/*.txt') }} path: ~/.cache/pip @@ -99,7 +99,7 @@ jobs: with: submodules: true - name: Cache llhttp generated files - uses: actions/cache@v4.0.0 + uses: actions/cache@v4.0.1 id: cache with: key: llhttp-${{ hashFiles('vendor/llhttp/package*.json', 'vendor/llhttp/src/**/*') }} @@ -184,7 +184,7 @@ jobs: run: | echo "::set-output name=dir::$(pip cache dir)" # - name: Cache - name: Cache PyPI - uses: actions/cache@v4.0.0 + uses: actions/cache@v4.0.1 with: key: pip-ci-${{ runner.os }}-${{ matrix.pyver }}-${{ matrix.no-extensions }}-${{ hashFiles('requirements/*.txt') }} path: ${{ steps.pip-cache.outputs.dir }} From 01531454b673be006fbe32f924e16b89652b427d Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 1 Mar 2024 22:22:29 +0000 Subject: [PATCH 0115/1511] [PR #8197/0e91eb0b backport][3.9] fix base_url param documentation (#8198) **This is a backport of PR #8197 as merged into master (0e91eb0bd94298e501084f245df44cc1772c49e5).** Co-authored-by: Alexis B <43278953+alexis974@users.noreply.github.com> --- CHANGES/8197.doc | 1 + docs/client_reference.rst | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 CHANGES/8197.doc diff --git a/CHANGES/8197.doc b/CHANGES/8197.doc new file mode 100644 index 00000000000..ba4117768e8 --- /dev/null +++ b/CHANGES/8197.doc @@ -0,0 +1 @@ +Fixed false behavior of base_url param for ClientSession in client 
documentation -- by :user:`alexis974`. diff --git a/docs/client_reference.rst b/docs/client_reference.rst index d0348d70ca8..fdf66e1bef0 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -59,8 +59,8 @@ The client session supports the context manager protocol for self closing. :param base_url: Base part of the URL (optional) If set, it allows to skip the base part (https://docs.aiohttp.org) in - request calls. If base_url includes a path (as in - https://docs.aiohttp.org/en/stable) the path is ignored/discarded. + request calls. It must not include a path (as in + https://docs.aiohttp.org/en/stable). .. versionadded:: 3.8 From e229a3a149bc1f7bfb6bfcd2bb095642cd1a9d3d Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 1 Mar 2024 22:34:25 +0000 Subject: [PATCH 0116/1511] [PR #8197/0e91eb0b backport][3.10] fix base_url param documentation (#8199) **This is a backport of PR #8197 as merged into master (0e91eb0bd94298e501084f245df44cc1772c49e5).** Co-authored-by: Alexis B <43278953+alexis974@users.noreply.github.com> --- CHANGES/8197.doc | 1 + docs/client_reference.rst | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 CHANGES/8197.doc diff --git a/CHANGES/8197.doc b/CHANGES/8197.doc new file mode 100644 index 00000000000..ba4117768e8 --- /dev/null +++ b/CHANGES/8197.doc @@ -0,0 +1 @@ +Fixed false behavior of base_url param for ClientSession in client documentation -- by :user:`alexis974`. diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 838aee0c7d6..d25f381e03c 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -59,8 +59,8 @@ The client session supports the context manager protocol for self closing. :param base_url: Base part of the URL (optional) If set, it allows to skip the base part (https://docs.aiohttp.org) in - request calls. 
If base_url includes a path (as in - https://docs.aiohttp.org/en/stable) the path is ignored/discarded. + request calls. It must not include a path (as in + https://docs.aiohttp.org/en/stable). .. versionadded:: 3.8 From 0854d33b46351c380eb06cdff4eb37f4a00b79be Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 6 Mar 2024 11:27:21 +0000 Subject: [PATCH 0117/1511] Bump cython from 3.0.8 to 3.0.9 (#8208) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [cython](https://github.com/cython/cython) from 3.0.8 to 3.0.9.
Changelog

Sourced from cython's changelog.

3.0.9 (2024-03-05)

Features added

  • Assigning const values to non-const variables now issues a warning. (Github issue :issue:5639)

  • Using noexcept on a function returning Python objects now issues a warning. (Github issue :issue:5661)

  • Some C-API usage was updated for the upcoming CPython 3.13. Patches by Victor Stinner et al. (Github issues :issue:6003, :issue:6020)

  • The deprecated Py_UNICODE type is no longer used, unless required by user code. (Github issue :issue:5982)

  • std::string.replace() declarations were added to libcpp.string. Patch by Kieran Geary. (Github issue :issue:6037)

Bugs fixed

  • Cython generates incorrect (but harmless) self-casts when directly calling final methods of subtypes. Lacking a better solution, the errors that recent gcc versions produce have been silenced for the time being. Original patch by Michał Górny. (Github issue :issue:2747)

  • Unused variable warnings about clineno were fixed when C lines in tracebacks are disabled. (Github issue :issue:6035)

  • Subclass deallocation of extern classes could crash if the base class uses GC. Original patch by Jason Fried. (Github issue :issue:5971)

  • Type checks for Python memoryview could use an invalid C function. Patch by Xenia Lu. (Github issue :issue:5988)

  • Calling final fused functions could generate invalid C code. (Github issue :issue:5989)

  • Declaring extern enums multiple times could generate invalid C code. (Github issue :issue:5905)

  • pyximport used relative paths incorrectly. Patch by Stefano Rivera. (Github issue :issue:5957)

  • Running Cython with globbing characters ([]*?) in the module search path could fail. Patch by eewanco. (Github issue :issue:5942)

... (truncated)

Commits
  • 31d4058 Update release date.
  • 9d9189b Build: Update cibuildwheel version.
  • 3cb9358 Update changelog.
  • 5689c9f Disable gcc warnings/errors about wrong self casts in final function calls (G...
  • 51d1ad0 Add fixing of warning in 3.0.9 to CHANGES.rst (#6041)
  • ec2a67d Update changelog.
  • 2da931d Disable GCC warnings/errors about wrong self casts in final function calls.
  • ba47941 Prepare release of 3.0.9.
  • bc683a4 Add missing replace functions to C++ std::string declarations (GH-6037)
  • a0a5e8d Update changelog.
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=cython&package-manager=pip&previous-version=3.0.8&new-version=3.0.9)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/cython.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 280821c8f54..75645baa2d3 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -62,7 +62,7 @@ cryptography==41.0.2 # via # pyjwt # trustme -cython==3.0.8 +cython==3.0.9 # via -r requirements/cython.in distlib==0.3.3 # via virtualenv diff --git a/requirements/cython.txt b/requirements/cython.txt index 28456c88452..f7fecfb3b95 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/cython.txt --resolver=backtracking --strip-extras requirements/cython.in # -cython==3.0.8 +cython==3.0.9 # via -r requirements/cython.in multidict==6.0.5 # via -r requirements/multidict.in From c427e6d8c30ba462d6a30129802bd4acde233216 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 9 Mar 2024 12:53:20 +0000 Subject: [PATCH 0118/1511] [PR #8211/7725f5a2 backport][3.9] Fix type annotations on MultipartWriter.append (#8214) **This is a backport of PR #8211 as merged into master (7725f5a22f4ca64dfb01478d640763910b036192).** Co-authored-by: Daniel Golding --- CHANGES/7741.bugfix.rst | 3 +++ CONTRIBUTORS.txt | 1 + aiohttp/multipart.py | 8 ++++---- 3 files changed, 8 insertions(+), 4 deletions(-) create mode 100644 CHANGES/7741.bugfix.rst diff --git a/CHANGES/7741.bugfix.rst b/CHANGES/7741.bugfix.rst new file mode 100644 index 00000000000..9134e920c14 --- /dev/null +++ b/CHANGES/7741.bugfix.rst @@ -0,0 +1,3 @@ +Changed the type annotations to allow ``dict`` on :meth:`aiohttp.MultipartWriter.append`, +:meth:`aiohttp.MultipartWriter.append_json` and +:meth:`aiohttp.MultipartWriter.append_form` -- by :user:`cakemanny` 
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index be4a3ad48d4..ab889685fc8 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -80,6 +80,7 @@ Damien Nadé Dan King Dan Xu Daniel García +Daniel Golding Daniel Grossmann-Kavanagh Daniel Nelson Danny Song diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index 602a6b67457..4471dd4bb7e 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -25,7 +25,7 @@ ) from urllib.parse import parse_qsl, unquote, urlencode -from multidict import CIMultiDict, CIMultiDictProxy, MultiMapping +from multidict import CIMultiDict, CIMultiDictProxy from .compression_utils import ZLibCompressor, ZLibDecompressor from .hdrs import ( @@ -791,7 +791,7 @@ def _boundary_value(self) -> str: def boundary(self) -> str: return self._boundary.decode("ascii") - def append(self, obj: Any, headers: Optional[MultiMapping[str]] = None) -> Payload: + def append(self, obj: Any, headers: Optional[Mapping[str, str]] = None) -> Payload: if headers is None: headers = CIMultiDict() @@ -839,7 +839,7 @@ def append_payload(self, payload: Payload) -> Payload: return payload def append_json( - self, obj: Any, headers: Optional[MultiMapping[str]] = None + self, obj: Any, headers: Optional[Mapping[str, str]] = None ) -> Payload: """Helper to append JSON part.""" if headers is None: @@ -850,7 +850,7 @@ def append_json( def append_form( self, obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]], - headers: Optional[MultiMapping[str]] = None, + headers: Optional[Mapping[str, str]] = None, ) -> Payload: """Helper to append form urlencoded part.""" assert isinstance(obj, (Sequence, Mapping)) From 5fdb5c6ec13195b4bc63fbf120101c43970131e3 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 10 Mar 2024 12:25:42 +0000 Subject: [PATCH 0119/1511] [PR #8211/7725f5a2 backport][3.10] Fix type annotations on MultipartWriter.append (#8215) **This is a backport of PR #8211 as merged into master 
(7725f5a22f4ca64dfb01478d640763910b036192).** Co-authored-by: Daniel Golding --- CHANGES/7741.bugfix.rst | 3 +++ CONTRIBUTORS.txt | 1 + aiohttp/multipart.py | 8 ++++---- 3 files changed, 8 insertions(+), 4 deletions(-) create mode 100644 CHANGES/7741.bugfix.rst diff --git a/CHANGES/7741.bugfix.rst b/CHANGES/7741.bugfix.rst new file mode 100644 index 00000000000..9134e920c14 --- /dev/null +++ b/CHANGES/7741.bugfix.rst @@ -0,0 +1,3 @@ +Changed the type annotations to allow ``dict`` on :meth:`aiohttp.MultipartWriter.append`, +:meth:`aiohttp.MultipartWriter.append_json` and +:meth:`aiohttp.MultipartWriter.append_form` -- by :user:`cakemanny` diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index c7e18d955e5..6b53b5ad9c9 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -80,6 +80,7 @@ Damien Nadé Dan King Dan Xu Daniel García +Daniel Golding Daniel Grossmann-Kavanagh Daniel Nelson Danny Song diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index 602a6b67457..4471dd4bb7e 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -25,7 +25,7 @@ ) from urllib.parse import parse_qsl, unquote, urlencode -from multidict import CIMultiDict, CIMultiDictProxy, MultiMapping +from multidict import CIMultiDict, CIMultiDictProxy from .compression_utils import ZLibCompressor, ZLibDecompressor from .hdrs import ( @@ -791,7 +791,7 @@ def _boundary_value(self) -> str: def boundary(self) -> str: return self._boundary.decode("ascii") - def append(self, obj: Any, headers: Optional[MultiMapping[str]] = None) -> Payload: + def append(self, obj: Any, headers: Optional[Mapping[str, str]] = None) -> Payload: if headers is None: headers = CIMultiDict() @@ -839,7 +839,7 @@ def append_payload(self, payload: Payload) -> Payload: return payload def append_json( - self, obj: Any, headers: Optional[MultiMapping[str]] = None + self, obj: Any, headers: Optional[Mapping[str, str]] = None ) -> Payload: """Helper to append JSON part.""" if headers is None: @@ -850,7 +850,7 @@ def 
append_json( def append_form( self, obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]], - headers: Optional[MultiMapping[str]] = None, + headers: Optional[Mapping[str, str]] = None, ) -> Payload: """Helper to append form urlencoded part.""" assert isinstance(obj, (Sequence, Mapping)) From 7fefba199329758c303a59997beeb7298183f381 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Mar 2024 11:01:37 +0000 Subject: [PATCH 0120/1511] Bump python-on-whales from 0.69.0 to 0.70.0 (#8220) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [python-on-whales](https://github.com/gabrieldemarmiesse/python-on-whales) from 0.69.0 to 0.70.0.
Release notes

Sourced from python-on-whales's releases.

v0.70.0

This update contains (hopefully) all fixes for the breaking changes introduced in docker compose v2.24.7. If you still encounter issues, file an issue here and downgrade docker compose to v2.24.6 in the meantime.

What's Changed

New Contributors

Full Changelog: https://github.com/gabrieldemarmiesse/python-on-whales/compare/v0.69.0...v0.70.0

Commits
  • 6573bf3 Bump version to 0.70.0
  • 68efb49 :bug: Fix parsing of compose file configs with compose v2.24.7 (#563)
  • e45f0cd feat(service): Add docker service ls --filter option (#561)
  • bf1c60d :sparkles: docker.compose.down() can take str as service arg (#562)
  • 20aebfb Fix podman.image.exists (#558)
  • 4433a02 Add detach_keys argument for start and exec (#554)
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=python-on-whales&package-manager=pip&previous-version=0.69.0&new-version=0.70.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 75645baa2d3..4c861d3674c 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -176,7 +176,7 @@ pytest-mock==3.12.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun -python-on-whales==0.69.0 +python-on-whales==0.70.0 # via -r requirements/test.in pytz==2023.3.post1 # via babel diff --git a/requirements/dev.txt b/requirements/dev.txt index 7906a47e8de..78fb9dcd256 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -167,7 +167,7 @@ pytest-mock==3.12.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun -python-on-whales==0.69.0 +python-on-whales==0.70.0 # via -r requirements/test.in pytz==2023.3.post1 # via babel diff --git a/requirements/test.txt b/requirements/test.txt index 00b974fe385..520b4193417 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -88,7 +88,7 @@ pytest-mock==3.12.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun -python-on-whales==0.69.0 +python-on-whales==0.70.0 # via -r requirements/test.in re-assert==1.1.0 # via -r requirements/test.in From 4a3bfa6183fc613033261970bb88d298387ad57d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Mar 2024 11:12:23 +0000 Subject: [PATCH 0121/1511] Bump mypy from 1.8.0 to 1.9.0 (#8222) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [mypy](https://github.com/python/mypy) from 1.8.0 to 1.9.0.
Changelog

Sourced from mypy's changelog.

Mypy Release Notes

Mypy 1.9

We’ve just uploaded mypy 1.9 to the Python Package Index (PyPI). Mypy is a static type checker for Python. This release includes new features, performance improvements and bug fixes. You can install it as follows:

python3 -m pip install -U mypy

You can read the full documentation for this release on Read the Docs.

Breaking Changes

Because the version of typeshed we use in mypy 1.9 doesn't support 3.7, neither does mypy 1.9. (Jared Hance, PR 16883)

We are planning to enable local partial types (enabled via the --local-partial-types flag) later this year by default. This change was announced years ago, but now it's finally happening. This is a major backward-incompatible change, so we'll probably include it as part of the upcoming mypy 2.0 release. This makes daemon and non-daemon mypy runs have the same behavior by default.

Local partial types can also be enabled in the mypy config file:

local_partial_types = True

We are looking at providing a tool to make it easier to migrate projects to use --local-partial-types, but it's not yet clear whether this is practical. The migration usually involves adding some explicit type annotations to module-level and class-level variables.

Basic Support for Type Parameter Defaults (PEP 696)

This release contains new experimental support for type parameter defaults (PEP 696). Please try it out! This feature was contributed by Marc Mueller.

Since this feature will be officially introduced in the next Python feature release (3.13), you will need to import TypeVar, ParamSpec or TypeVarTuple from typing_extensions to use defaults for now.

This example adapted from the PEP defines a default for BotT:

from typing import Generic
from typing_extensions import TypeVar

class Bot: ...

BotT = TypeVar("BotT", bound=Bot, default=Bot)

... (truncated)

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=mypy&package-manager=pip&previous-version=1.8.0&new-version=1.9.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 4c861d3674c..93b8f57862a 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -114,7 +114,7 @@ multidict==6.0.5 # -r requirements/multidict.in # -r requirements/runtime-deps.in # yarl -mypy==1.8.0 ; implementation_name == "cpython" +mypy==1.9.0 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 78fb9dcd256..87b696f50b2 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -109,7 +109,7 @@ multidict==6.0.5 # via # -r requirements/runtime-deps.in # yarl -mypy==1.8.0 ; implementation_name == "cpython" +mypy==1.9.0 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 85af8510de9..37aeb9da0f9 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -22,7 +22,7 @@ identify==2.5.26 # via pre-commit iniconfig==2.0.0 # via pytest -mypy==1.8.0 ; implementation_name == "cpython" +mypy==1.9.0 ; implementation_name == "cpython" # via -r requirements/lint.in mypy-extensions==1.0.0 # via mypy diff --git a/requirements/test.txt b/requirements/test.txt index 520b4193417..57bd4431402 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -57,7 +57,7 @@ multidict==6.0.5 # via # -r requirements/runtime-deps.in # yarl -mypy==1.8.0 ; implementation_name == "cpython" +mypy==1.9.0 ; implementation_name == "cpython" # via -r requirements/test.in mypy-extensions==1.0.0 # via mypy From 2ae85757644c1962522fe7f86466b7d50dbbd32f Mon Sep 17 00:00:00 2001 From: 
"dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Mar 2024 10:59:43 +0000 Subject: [PATCH 0122/1511] Bump pypa/cibuildwheel from 2.16.5 to 2.17.0 (#8226) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.16.5 to 2.17.0.
Release notes

Sourced from pypa/cibuildwheel's releases.

v2.17.0

  • 🌟 Adds the ability to inherit configuration in TOML overrides. This makes certain configurations much simpler. If you're overriding an option like before-build or environment, and you just want to add an extra command or environment variable, you can just append (or prepend) to the previous config. See the docs for more information. (#1730)
  • 🌟 Adds official support for native arm64 macOS GitHub runners. To use them, just specify macos-14 as an os of your job in your workflow file. You can also keep macos-13 in your build matrix to build x86_64. Check out the new GitHub Actions example config.
  • ✨ You no longer need to specify --platform to run cibuildwheel locally! Instead it will detect your platform automatically. This was a safety feature, no longer necessary. (#1727)
  • 🛠 Removed setuptools and wheel pinned versions. This only affects old-style projects without a pyproject.toml, projects with pyproject.toml are already getting fresh versions of their build-system.requires installed into an isolated environment. (#1725)
  • 🛠 Improve how the GitHub Action passes arguments (#1757)
  • 🛠 Remove a system-wide install of pipx in the GitHub Action (#1745)
  • 🐛 No longer will cibuildwheel override the PIP_CONSTRAINT environment variable when using the build frontend. Instead it will be extended. (#1675)
  • 🐛 Fix a bug where building and testing both x86_64 and arm64 wheels on the same runner caused the wrong architectures in the test environment (#1750)
  • 🐛 Fix a bug that prevented testing a CPython 3.8 wheel targeting macOS 11+ on x86_64 (#1768)
  • 📚 Moved the docs onto the official PyPA domain - they're now available at https://cibuildwheel.pypa.io . (#1775)
  • 📚 Docs and examples improvements (#1762, #1734)
Changelog

Sourced from pypa/cibuildwheel's changelog.

v2.17.0

11 March 2024

  • 🌟 Adds the ability to inherit configuration in TOML overrides. This makes certain configurations much simpler. If you're overriding an option like before-build or environment, and you just want to add an extra command or environment variable, you can just append (or prepend) to the previous config. See the docs for more information. (#1730)
  • 🌟 Adds official support for native arm64 macOS GitHub runners. To use them, just specify macos-14 as an os of your job in your workflow file. You can also keep macos-13 in your build matrix to build x86_64. Check out the new GitHub Actions example config.
  • ✨ You no longer need to specify --platform to run cibuildwheel locally! Instead it will detect your platform automatically. This was a safety feature, no longer necessary. (#1727)
  • 🛠 Removed setuptools and wheel pinned versions. This only affects old-style projects without a pyproject.toml, projects with pyproject.toml are already getting fresh versions of their build-system.requires installed into an isolated environment. (#1725)
  • 🛠 Improve how the GitHub Action passes arguments (#1757)
  • 🛠 Remove a system-wide install of pipx in the GitHub Action (#1745)
  • 🐛 No longer will cibuildwheel override the PIP_CONSTRAINT environment variable when using the build frontend. Instead it will be extended. (#1675)
  • 🐛 Fix a bug where building and testing both x86_64 and arm64 wheels on the same runner caused the wrong architectures in the test environment (#1750)
  • 🐛 Fix a bug that prevented testing a CPython 3.8 wheel targeting macOS 11+ on x86_64 (#1768)
  • 📚 Moved the docs onto the official PyPA domain - they're now available at https://cibuildwheel.pypa.io . (#1775)
  • 📚 Docs and examples improvements (#1762, #1734)
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pypa/cibuildwheel&package-manager=github_actions&previous-version=2.16.5&new-version=2.17.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 765b4cb79dd..153422eb80a 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -370,7 +370,7 @@ jobs: run: | make cythonize - name: Build wheels - uses: pypa/cibuildwheel@v2.16.5 + uses: pypa/cibuildwheel@v2.17.0 env: CIBW_ARCHS_MACOS: x86_64 arm64 universal2 - uses: actions/upload-artifact@v3 From 30e5c04f3d252e2b46e58f918473d2c84db30c42 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 15 Mar 2024 11:04:49 +0000 Subject: [PATCH 0123/1511] Bump coverage from 7.4.2 to 7.4.4 (#8230) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.4.2 to 7.4.4.
Changelog

Sourced from coverage's changelog.

Version 7.4.4 — 2024-03-14

  • Fix: in some cases, even with [run] relative_files=True, a data file could be created with absolute path names. When combined with other relative data files, it was random whether the absolute file names would be made relative or not. If they weren't, then a file would be listed twice in reports, as detailed in issue 1752_. This is now fixed: absolute file names are always made relative when combining. Thanks to Bruno Rodrigues dos Santos for support.

  • Fix: the last case of a match/case statement had an incorrect message if the branch was missed. It said the pattern never matched, when actually the branch is missed if the last case always matched.

  • Fix: clicking a line number in the HTML report now positions more accurately.

  • Fix: the report:format setting was defined as a boolean, but should be a string. Thanks, Tanaydin Sirin <pull 1754_>_. It is also now documented on the :ref:configuration page <config_report_format>.

.. _issue 1752: nedbat/coveragepy#1752 .. _pull 1754: nedbat/coveragepy#1754

.. _changes_7-4-3:

Version 7.4.3 — 2024-02-23

  • Fix: in some cases, coverage could fail with a RuntimeError: "Set changed size during iteration." This is now fixed, closing issue 1733_.

.. _issue 1733: nedbat/coveragepy#1733

.. _changes_7-4-2:

Commits
  • bc5e2d7 docs: sample HTML for 7.4.4
  • 9b0008b docs: prep for 7.4.4
  • a536161 docs: thanks, Bruno Rodrigues dos Santos
  • e06e4f9 chore: make doc_upgrade
  • f30818e chore: make upgrade
  • 1b19799 fix: ensure absolute paths are relative when combined #1752
  • 1ef020d build: more cheats for convenient URLs
  • 3d57a07 docs: document the report:format setting
  • 8e30221 fix: correct the type of report:format in config.py (#1754)
  • 6289be8 refactor: use dataclasses, no namedtuple
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=coverage&package-manager=pip&previous-version=7.4.2&new-version=7.4.4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 93b8f57862a..38d52814b5a 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -54,7 +54,7 @@ click==8.0.3 # towncrier # typer # wait-for-it -coverage==7.4.2 +coverage==7.4.4 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/dev.txt b/requirements/dev.txt index 87b696f50b2..a31efef499e 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -52,7 +52,7 @@ click==8.1.6 # towncrier # typer # wait-for-it -coverage==7.4.2 +coverage==7.4.4 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/test.txt b/requirements/test.txt index 57bd4431402..c646c2e6c40 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -30,7 +30,7 @@ click==8.1.6 # via # typer # wait-for-it -coverage==7.4.2 +coverage==7.4.4 # via # -r requirements/test.in # pytest-cov From a459459bbf502eafd12d66903327376fa15dcf0f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 22 Mar 2024 10:24:21 +0000 Subject: [PATCH 0124/1511] Bump dependabot/fetch-metadata from 1 to 2 (#8237) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [dependabot/fetch-metadata](https://github.com/dependabot/fetch-metadata) from 1 to 2.
Release notes

Sourced from dependabot/fetch-metadata's releases.

v2.0.0 - Switch to node20

What's Changed

Full Changelog: https://github.com/dependabot/fetch-metadata/compare/v1.7.0...v2.0.0

v1.7.0

What's Changed

New Contributors

... (truncated)

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=dependabot/fetch-metadata&package-manager=github_actions&previous-version=1&new-version=2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/auto-merge.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/auto-merge.yml b/.github/workflows/auto-merge.yml index 0b3e9c7c846..56575750fe1 100644 --- a/.github/workflows/auto-merge.yml +++ b/.github/workflows/auto-merge.yml @@ -12,7 +12,7 @@ jobs: steps: - name: Dependabot metadata id: metadata - uses: dependabot/fetch-metadata@v1 + uses: dependabot/fetch-metadata@v2 with: github-token: "${{ secrets.GITHUB_TOKEN }}" - name: Enable auto-merge for Dependabot PRs From e2afa78815f8e430d8e944e4f826b1d7bf42c599 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 22 Mar 2024 11:06:00 +0000 Subject: [PATCH 0125/1511] Bump pytest-mock from 3.12.0 to 3.14.0 (#8238) Bumps [pytest-mock](https://github.com/pytest-dev/pytest-mock) from 3.12.0 to 3.14.0.
Release notes

Sourced from pytest-mock's releases.

v3.14.0

  • #415: MockType and AsyncMockType can be imported from pytest_mock for type annotation purposes.
  • #420: Fixed a regression which would cause mocker.patch.object to not being properly cleared between tests.

v3.13.0

  • #417: spy now has spy_return_list, which is a list containing all the values returned by the spied function.
  • pytest-mock now requires pytest>=6.2.5.
  • #410: pytest-mock's setup.py file is removed. If you relied on this file, e.g. to install pytest using setup.py install, please see Why you shouldn't invoke setup.py directly for alternatives.
Changelog

Sourced from pytest-mock's changelog.

3.14.0 (2024-03-21)

  • [#415](https://github.com/pytest-dev/pytest-mock/issues/415) <https://github.com/pytest-dev/pytest-mock/pull/415>_: MockType and AsyncMockType can be imported from pytest_mock for type annotation purposes.

  • [#420](https://github.com/pytest-dev/pytest-mock/issues/420) <https://github.com/pytest-dev/pytest-mock/issues/420>_: Fixed a regression which would cause mocker.patch.object to not being properly cleared between tests.

3.13.0 (2024-03-21)

  • [#417](https://github.com/pytest-dev/pytest-mock/issues/417) <https://github.com/pytest-dev/pytest-mock/pull/417>_: spy now has spy_return_list, which is a list containing all the values returned by the spied function.
  • pytest-mock now requires pytest>=6.2.5.
  • [#410](https://github.com/pytest-dev/pytest-mock/issues/410) <https://github.com/pytest-dev/pytest-mock/pull/410>: pytest-mock's setup.py file is removed. If you relied on this file, e.g. to install pytest using setup.py install, please see Why you shouldn't invoke setup.py directly <https://blog.ganssle.io/articles/2021/10/setup-py-deprecated.html#summary> for alternatives.
Commits
  • 8733134 Update CHANGELOG for 3.14.0
  • 5257e3c Refactor MockCache to have a narrow interface
  • 4faf92a Fix regression with mocker.patch not being undone correctly
  • 6bd8712 Drop pre-Python 3.8 support code
  • 366966b Export MockType/AsyncMockType for type annotations (#415)
  • 852116b Merge pull request #418 from pytest-dev/release-3.13.0
  • ef9461b Add instructions on how to start deploy from command-line
  • 5b9d285 Release 3.13.0
  • 6d5d6dc Implement spy_return_list (#417)
  • dc28a0e [pre-commit.ci] pre-commit autoupdate (#416)
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pytest-mock&package-manager=pip&previous-version=3.12.0&new-version=3.14.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 38d52814b5a..e8c769d99c4 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -172,7 +172,7 @@ pytest==7.4.4 # pytest-mock pytest-cov==4.1.0 # via -r requirements/test.in -pytest-mock==3.12.0 +pytest-mock==3.14.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun diff --git a/requirements/dev.txt b/requirements/dev.txt index a31efef499e..66ab25a3135 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -163,7 +163,7 @@ pytest==7.4.4 # pytest-mock pytest-cov==4.1.0 # via -r requirements/test.in -pytest-mock==3.12.0 +pytest-mock==3.14.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun diff --git a/requirements/test.txt b/requirements/test.txt index c646c2e6c40..ff88321f859 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -84,7 +84,7 @@ pytest==7.4.4 # pytest-mock pytest-cov==4.1.0 # via -r requirements/test.in -pytest-mock==3.12.0 +pytest-mock==3.14.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun From 560c5ba7a07c1eac66fd37248b4507df2e950221 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 22 Mar 2024 11:28:33 +0000 Subject: [PATCH 0126/1511] Bump python-on-whales from 0.70.0 to 0.70.1 (#8241) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [python-on-whales](https://github.com/gabrieldemarmiesse/python-on-whales) from 0.70.0 to 0.70.1.
Release notes

Sourced from python-on-whales's releases.

v0.70.1

What's Changed

New Contributors

Full Changelog: https://github.com/gabrieldemarmiesse/python-on-whales/compare/v0.70.0...v0.70.1

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=python-on-whales&package-manager=pip&previous-version=0.70.0&new-version=0.70.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index e8c769d99c4..b595363e681 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -176,7 +176,7 @@ pytest-mock==3.14.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun -python-on-whales==0.70.0 +python-on-whales==0.70.1 # via -r requirements/test.in pytz==2023.3.post1 # via babel diff --git a/requirements/dev.txt b/requirements/dev.txt index 66ab25a3135..6216ee4a21d 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -167,7 +167,7 @@ pytest-mock==3.14.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun -python-on-whales==0.70.0 +python-on-whales==0.70.1 # via -r requirements/test.in pytz==2023.3.post1 # via babel diff --git a/requirements/test.txt b/requirements/test.txt index ff88321f859..fc8bb645447 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -88,7 +88,7 @@ pytest-mock==3.14.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun -python-on-whales==0.70.0 +python-on-whales==0.70.1 # via -r requirements/test.in re-assert==1.1.0 # via -r requirements/test.in From 8e91d5c225e89a90d2f736acd893df1ee3db8034 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 22 Mar 2024 11:43:42 +0000 Subject: [PATCH 0127/1511] Bump slotscheck from 0.17.3 to 0.18.0 (#8244) Bumps [slotscheck](https://github.com/ariebovenberg/slotscheck) from 0.17.3 to 0.18.0.
Release notes

Sourced from slotscheck's releases.

0.18.0

Improved robustness of importing of namespace packages and built-in modules (#227)

Changelog

Sourced from slotscheck's changelog.

0.18.0 (2024-03-21)

  • Improved robustness of importing of namespace packages and built-in modules (#227)
Commits
  • 8f0d153 prepare next release
  • eb42767 Merge pull request #227 from eltoder/feature/namespace-packages
  • 30f57c0 Fix importing of namespace packages and built-in modules
  • 2c2561c Merge pull request #226 from ariebovenberg/dependabot/pip/black-24.3.0
  • 5da086b Bump black from 24.2.0 to 24.3.0
  • f4fb06f Merge pull request #225 from ariebovenberg/dependabot/pip/pytest-8.1.1
  • 1cca361 Bump pytest from 8.1.0 to 8.1.1
  • 5a667c4 Merge pull request #224 from ariebovenberg/dependabot/pip/mypy-1.9.0
  • bca5db0 Bump mypy from 1.8.0 to 1.9.0
  • 8fece90 Merge pull request #223 from ariebovenberg/dependabot/pip/pytest-8.1.0
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=slotscheck&package-manager=pip&previous-version=0.17.3&new-version=0.18.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index b595363e681..d18ad046631 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -197,7 +197,7 @@ six==1.16.0 # via # python-dateutil # virtualenv -slotscheck==0.17.3 +slotscheck==0.18.0 # via -r requirements/lint.in snowballstemmer==2.1.0 # via sphinx diff --git a/requirements/dev.txt b/requirements/dev.txt index 6216ee4a21d..a5cd9432e6d 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -186,7 +186,7 @@ setuptools-git==1.2 # via -r requirements/test.in six==1.16.0 # via python-dateutil -slotscheck==0.17.3 +slotscheck==0.18.0 # via -r requirements/lint.in snowballstemmer==2.2.0 # via sphinx diff --git a/requirements/lint.txt b/requirements/lint.txt index 37aeb9da0f9..618ecb20c42 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -40,7 +40,7 @@ pytest==7.4.4 # via -r requirements/lint.in pyyaml==6.0.1 # via pre-commit -slotscheck==0.17.3 +slotscheck==0.18.0 # via -r requirements/lint.in tomli==2.0.1 # via From dd3815165a16554e3edf9233e45c7e3a934605f7 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 28 Mar 2024 09:27:59 -1000 Subject: [PATCH 0128/1511] [PR #8252/8f237126 backport][3.10] Fix handling of unsupported upgrades with the pure python http parser (#8255) Co-authored-by: J. 
Nick Koston --- CHANGES/8252.bugfix.rst | 2 ++ aiohttp/http_parser.py | 14 ++++++++++---- 2 files changed, 12 insertions(+), 4 deletions(-) create mode 100644 CHANGES/8252.bugfix.rst diff --git a/CHANGES/8252.bugfix.rst b/CHANGES/8252.bugfix.rst new file mode 100644 index 00000000000..e932eb9c7ed --- /dev/null +++ b/CHANGES/8252.bugfix.rst @@ -0,0 +1,2 @@ +Fixed content not being read when an upgrade request was not supported with the pure Python implementation. +-- by :user:`bdraco`. diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index 1301f025810..8bd8519ff6b 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -228,6 +228,11 @@ def parse_headers( return (CIMultiDictProxy(headers), tuple(raw_headers)) +def _is_supported_upgrade(headers: CIMultiDictProxy[str]) -> bool: + """Check if the upgrade header is supported.""" + return headers.get(hdrs.UPGRADE, "").lower() in {"tcp", "websocket"} + + class HttpParser(abc.ABC, Generic[_MsgT]): lax: ClassVar[bool] = False @@ -354,7 +359,9 @@ def get_content_length() -> Optional[int]: if SEC_WEBSOCKET_KEY1 in msg.headers: raise InvalidHeader(SEC_WEBSOCKET_KEY1) - self._upgraded = msg.upgrade + self._upgraded = msg.upgrade and _is_supported_upgrade( + msg.headers + ) method = getattr(msg, "method", self.method) # code is only present on responses @@ -366,9 +373,8 @@ def get_content_length() -> Optional[int]: method and method_must_be_empty_body(method) ) if not empty_body and ( - (length is not None and length > 0) - or msg.chunked - and not msg.upgrade + ((length is not None and length > 0) or msg.chunked) + and not self._upgraded ): payload = StreamReader( self.protocol, From 6e8f63cc7fd773276d9226195784f4eb132d5d65 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 28 Mar 2024 09:35:57 -1000 Subject: [PATCH 0129/1511] [PR #8252/8f237126 backport][3.9] Fix handling of unsupported upgrades with the pure python http parser (#8254) 
Co-authored-by: J. Nick Koston --- CHANGES/8252.bugfix.rst | 2 ++ aiohttp/http_parser.py | 14 ++++++++++---- 2 files changed, 12 insertions(+), 4 deletions(-) create mode 100644 CHANGES/8252.bugfix.rst diff --git a/CHANGES/8252.bugfix.rst b/CHANGES/8252.bugfix.rst new file mode 100644 index 00000000000..e932eb9c7ed --- /dev/null +++ b/CHANGES/8252.bugfix.rst @@ -0,0 +1,2 @@ +Fixed content not being read when an upgrade request was not supported with the pure Python implementation. +-- by :user:`bdraco`. diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index 1301f025810..8bd8519ff6b 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -228,6 +228,11 @@ def parse_headers( return (CIMultiDictProxy(headers), tuple(raw_headers)) +def _is_supported_upgrade(headers: CIMultiDictProxy[str]) -> bool: + """Check if the upgrade header is supported.""" + return headers.get(hdrs.UPGRADE, "").lower() in {"tcp", "websocket"} + + class HttpParser(abc.ABC, Generic[_MsgT]): lax: ClassVar[bool] = False @@ -354,7 +359,9 @@ def get_content_length() -> Optional[int]: if SEC_WEBSOCKET_KEY1 in msg.headers: raise InvalidHeader(SEC_WEBSOCKET_KEY1) - self._upgraded = msg.upgrade + self._upgraded = msg.upgrade and _is_supported_upgrade( + msg.headers + ) method = getattr(msg, "method", self.method) # code is only present on responses @@ -366,9 +373,8 @@ def get_content_length() -> Optional[int]: method and method_must_be_empty_body(method) ) if not empty_body and ( - (length is not None and length > 0) - or msg.chunked - and not msg.upgrade + ((length is not None and length > 0) or msg.chunked) + and not self._upgraded ): payload = StreamReader( self.protocol, From 73dfe9850e7e6419ab764f7f4cac15632085d137 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 28 Mar 2024 21:20:13 +0000 Subject: [PATCH 0130/1511] Bump actions/setup-python from 4 to 5 (#7949) MIME-Version: 1.0 Content-Type: text/plain; 
charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/setup-python](https://github.com/actions/setup-python) from 4 to 5.
Release notes

Sourced from actions/setup-python's releases.

v5.0.0

What's Changed

In scope of this release, we update node version runtime from node16 to node20 (actions/setup-python#772). Besides, we update dependencies to the latest versions.

Full Changelog: https://github.com/actions/setup-python/compare/v4.8.0...v5.0.0

v4.8.0

What's Changed

In scope of this release we added support for GraalPy (actions/setup-python#694). You can use this snippet to set up GraalPy:

steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
  with:
    python-version: 'graalpy-22.3'
- run: python my_script.py

Besides, the release contains such changes as:

New Contributors

Full Changelog: https://github.com/actions/setup-python/compare/v4...v4.8.0

v4.7.1

What's Changed

Full Changelog: https://github.com/actions/setup-python/compare/v4...v4.7.1

v4.7.0

In scope of this release, the support for reading python version from pyproject.toml was added (actions/setup-python#669).

      - name: Setup Python
        uses: actions/setup-python@v4
</tr></table>

... (truncated)

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/setup-python&package-manager=github_actions&previous-version=4&new-version=5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) You can trigger a rebase of this PR by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
> **Note** > Automatic rebases have been disabled on this pull request as it has been open for over 30 days. Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 153422eb80a..b47f93bb2c8 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -43,7 +43,7 @@ jobs: make sync-direct-runtime-deps git diff --exit-code -- requirements/runtime-deps.in - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.9 - name: Cache PyPI @@ -175,7 +175,7 @@ jobs: submodules: true - name: Setup Python ${{ matrix.pyver }} id: python-install - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: allow-prereleases: true python-version: ${{ matrix.pyver }} @@ -290,7 +290,7 @@ jobs: with: submodules: true - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 - name: Update pip, wheel, setuptools, build, twine run: | python -m pip install -U pip wheel setuptools build twine @@ -351,7 +351,7 @@ jobs: fi shell: bash - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.x - name: Update pip, wheel, setuptools, build, twine From b18f7b0a0e2852708446bb8b5a1d80e8cc24450a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 28 Mar 2024 21:20:20 +0000 Subject: [PATCH 0131/1511] Bump pip-tools from 7.4.0 to 7.4.1 (#8210) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pip-tools](https://github.com/jazzband/pip-tools) from 7.4.0 to 7.4.1.
Release notes

Sourced from pip-tools's releases.

7.4.1

Bug Fixes:

Changelog

Sourced from pip-tools's changelog.

v7.4.1

05 Mar 2024

Bug Fixes:

Commits
  • 60ebdf5 Merge pull request #2067 from atugushev/release-7.4.1
  • c671ea0 Strip emoji from changelog line
  • f825385 Release 7.4.1
  • 1197151 Merge pull request #2038 from honnix/patch-1
  • 1f00154 Merge pull request #2061 from chrysle/pip-compile-docs-changedir
  • d99493c Skip constraint path check
  • 35b06db Change directory in pip-compile-docs tox session
  • a8beb7a Merge pull request #1981 from dragly/fix-all-extras
  • 7caff1a Merge branch 'main' into fix-all-extras
  • e0afb79 Merge pull request #2057 from jazzband/pre-commit-ci-update-config
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pip-tools&package-manager=pip&previous-version=7.4.0&new-version=7.4.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index d18ad046631..c2d84b3c07c 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -132,7 +132,7 @@ pillow==9.5.0 # via # -c requirements/broken-projects.in # blockdiag -pip-tools==7.4.0 +pip-tools==7.4.1 # via -r requirements/dev.in platformdirs==2.4.0 # via virtualenv diff --git a/requirements/dev.txt b/requirements/dev.txt index a5cd9432e6d..4658947d038 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -127,7 +127,7 @@ pillow==9.5.0 # via # -c requirements/broken-projects.in # blockdiag -pip-tools==7.4.0 +pip-tools==7.4.1 # via -r requirements/dev.in platformdirs==3.10.0 # via virtualenv From 0689e6d5fc6f31ee24f242292b0599076f295a5f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 28 Mar 2024 21:21:29 +0000 Subject: [PATCH 0132/1511] Bump pytest-cov from 4.1.0 to 5.0.0 (#8246) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pytest-cov](https://github.com/pytest-dev/pytest-cov) from 4.1.0 to 5.0.0.
Changelog

Sourced from pytest-cov's changelog.

5.0.0 (2024-03-24)

  • Removed support for xdist rsync (now deprecated). Contributed by Matthias Reichenbach in [#623](https://github.com/pytest-dev/pytest-cov/issues/623) <https://github.com/pytest-dev/pytest-cov/pull/623>_.
  • Switched docs theme to Furo.
  • Various legacy Python cleanup and CI improvements. Contributed by Christian Clauss and Hugo van Kemenade in [#630](https://github.com/pytest-dev/pytest-cov/issues/630) <https://github.com/pytest-dev/pytest-cov/pull/630>, [#631](https://github.com/pytest-dev/pytest-cov/issues/631) <https://github.com/pytest-dev/pytest-cov/pull/631>, [#632](https://github.com/pytest-dev/pytest-cov/issues/632) <https://github.com/pytest-dev/pytest-cov/pull/632>_ and [#633](https://github.com/pytest-dev/pytest-cov/issues/633) <https://github.com/pytest-dev/pytest-cov/pull/633>_.
  • Added a pyproject.toml example in the docs. Contributed by Dawn James in [#626](https://github.com/pytest-dev/pytest-cov/issues/626) <https://github.com/pytest-dev/pytest-cov/pull/626>_.
  • Modernized project's pre-commit hooks to use ruff. Initial POC contributed by Christian Clauss in [#584](https://github.com/pytest-dev/pytest-cov/issues/584) <https://github.com/pytest-dev/pytest-cov/pull/584>_.
Commits
  • 5295ce0 Bump version: 4.1.0 → 5.0.0
  • 1181b06 Update changelog.
  • 9757222 Fix a minor grammar error (#636)
  • 9f5cd81 Cleanup releasing instructions. Closes #616.
  • 93b5047 Add test for pyproject.toml loading without explicit --cov-config. Ref #508.
  • ff50860 docs: add config instructions for pyproject.toml.
  • 4a5a4b5 Keep GitHub Actions up to date with GitHub's Dependabot
  • 1d7f559 Fix or remove URLs that are causing docs tests to fail
  • 6a5af8e Update changelog.
  • d9fe8df Switch to furo. Closes #618.
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pytest-cov&package-manager=pip&previous-version=4.1.0&new-version=5.0.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index c2d84b3c07c..921d847d019 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -170,7 +170,7 @@ pytest==7.4.4 # -r requirements/test.in # pytest-cov # pytest-mock -pytest-cov==4.1.0 +pytest-cov==5.0.0 # via -r requirements/test.in pytest-mock==3.14.0 # via -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 4658947d038..b170e76688e 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -161,7 +161,7 @@ pytest==7.4.4 # -r requirements/test.in # pytest-cov # pytest-mock -pytest-cov==4.1.0 +pytest-cov==5.0.0 # via -r requirements/test.in pytest-mock==3.14.0 # via -r requirements/test.in diff --git a/requirements/test.txt b/requirements/test.txt index fc8bb645447..6f24fe2b1fa 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -82,7 +82,7 @@ pytest==7.4.4 # -r requirements/test.in # pytest-cov # pytest-mock -pytest-cov==4.1.0 +pytest-cov==5.0.0 # via -r requirements/test.in pytest-mock==3.14.0 # via -r requirements/test.in From 8c39b439573a39c46556fdd2872794f7a6ac2d41 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 28 Mar 2024 21:23:09 +0000 Subject: [PATCH 0133/1511] Bump slotscheck from 0.18.0 to 0.19.0 (#8249) Bumps [slotscheck](https://github.com/ariebovenberg/slotscheck) from 0.18.0 to 0.19.0.
Release notes

Sourced from slotscheck's releases.

0.19.0

Improved support for implicit/native namespace packages (#228, #230)

Changelog

Sourced from slotscheck's changelog.

0.19.0 (2024-03-25)

  • Improved support for implicit/native namespace packages (#228, #230)
Commits
  • e784c87 prepare next release
  • f5b1782 Merge pull request #231 from ariebovenberg/dependabot/pip/pytest-cov-5.0.0
  • 1d47c42 Bump pytest-cov from 4.1.0 to 5.0.0
  • 3884ca7 Merge pull request #230 from eltoder/feature/packages-multiple-paths
  • 462dc95 Support packages that span multiple directories
  • bd1b80c Merge pull request #228 from eltoder/feature/discover-namespace-packages
  • 9f60d08 Amend docs about subdirectory module discovery
  • 908f6d4 Address review comments
  • 8626cc2 improve error message on module not found on path
  • 9d6ae69 Update documentation
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=slotscheck&package-manager=pip&previous-version=0.18.0&new-version=0.19.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 921d847d019..fa5adf12e1a 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -197,7 +197,7 @@ six==1.16.0 # via # python-dateutil # virtualenv -slotscheck==0.18.0 +slotscheck==0.19.0 # via -r requirements/lint.in snowballstemmer==2.1.0 # via sphinx diff --git a/requirements/dev.txt b/requirements/dev.txt index b170e76688e..7df659854e5 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -186,7 +186,7 @@ setuptools-git==1.2 # via -r requirements/test.in six==1.16.0 # via python-dateutil -slotscheck==0.18.0 +slotscheck==0.19.0 # via -r requirements/lint.in snowballstemmer==2.2.0 # via sphinx diff --git a/requirements/lint.txt b/requirements/lint.txt index 618ecb20c42..59da6563db3 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -40,7 +40,7 @@ pytest==7.4.4 # via -r requirements/lint.in pyyaml==6.0.1 # via pre-commit -slotscheck==0.18.0 +slotscheck==0.19.0 # via -r requirements/lint.in tomli==2.0.1 # via From 58dec1d0477c2d807e3eb0f42c29340c8fcd9e09 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 28 Mar 2024 12:14:22 -1000 Subject: [PATCH 0134/1511] Ensure websocket transport is closed when client does not close it (#8200) (#8256) --- CHANGES/8200.bugfix.rst | 6 ++ aiohttp/web_ws.py | 21 ++++-- tests/test_web_websocket.py | 89 +++++++++++++++++++++++++- tests/test_web_websocket_functional.py | 33 +++++++++- 4 files changed, 141 insertions(+), 8 deletions(-) create mode 100644 CHANGES/8200.bugfix.rst diff --git a/CHANGES/8200.bugfix.rst b/CHANGES/8200.bugfix.rst new file mode 100644 index 00000000000..e4492a8a84c --- /dev/null +++ b/CHANGES/8200.bugfix.rst @@ -0,0 +1,6 @@ +Ensure websocket transport is closed when client does not close it +-- by :user:`bdraco`. + +The transport could remain open if the client did not close it. This +change ensures the transport is closed when the client does not close +it. diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index d20a26ca470..52604d8a1eb 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -395,6 +395,7 @@ async def close( return True if self._closing: + self._close_transport() return True reader = self._reader @@ -418,9 +419,18 @@ async def close( self._exception = asyncio.TimeoutError() return True + def _set_closing(self, code: WSCloseCode) -> None: + """Set the close code and mark the connection as closing.""" + self._closing = True + self._close_code = code + def _set_code_close_transport(self, code: WSCloseCode) -> None: """Set the close code and close the transport.""" self._close_code = code + self._close_transport() + + def _close_transport(self) -> None: + """Close the transport.""" if self._req is not None and self._req.transport is not None: self._req.transport.close() @@ -465,14 +475,12 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: return WSMessage(WSMsgType.ERROR, exc, None) except Exception as exc: self._exception = exc - self._closing = True - self._close_code = WSCloseCode.ABNORMAL_CLOSURE + 
self._set_closing(WSCloseCode.ABNORMAL_CLOSURE) await self.close() return WSMessage(WSMsgType.ERROR, exc, None) if msg.type == WSMsgType.CLOSE: - self._closing = True - self._close_code = msg.data + self._set_closing(msg.data) # Could be closed while awaiting reader. if not self._closed and self._autoclose: # The client is likely going to close the @@ -481,7 +489,7 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: # likely result writing to a broken pipe. await self.close(drain=False) elif msg.type == WSMsgType.CLOSING: - self._closing = True + self._set_closing(WSCloseCode.OK) elif msg.type == WSMsgType.PING and self._autoping: await self.pong(msg.data) continue @@ -525,5 +533,8 @@ async def __anext__(self) -> WSMessage: return msg def _cancel(self, exc: BaseException) -> None: + # web_protocol calls this from connection_lost + # or when the server is shutting down. + self._closing = True if self._reader is not None: set_exception(self._reader, exc) diff --git a/tests/test_web_websocket.py b/tests/test_web_websocket.py index d0aca0c019a..543fe91db07 100644 --- a/tests/test_web_websocket.py +++ b/tests/test_web_websocket.py @@ -371,7 +371,94 @@ async def test_receive_eofstream_in_reader(make_request, loop) -> None: assert ws.closed -async def test_receive_timeouterror(make_request, loop) -> None: +async def test_receive_exception_in_reader(make_request: Any, loop: Any) -> None: + req = make_request("GET", "/") + ws = WebSocketResponse() + await ws.prepare(req) + + ws._reader = mock.Mock() + exc = Exception() + res = loop.create_future() + res.set_exception(exc) + ws._reader.read = make_mocked_coro(res) + ws._payload_writer.drain = mock.Mock() + ws._payload_writer.drain.return_value = loop.create_future() + ws._payload_writer.drain.return_value.set_result(True) + + msg = await ws.receive() + assert msg.type == WSMsgType.ERROR + assert ws.closed + assert len(ws._req.transport.close.mock_calls) == 1 + + +async def 
test_receive_close_but_left_open(make_request: Any, loop: Any) -> None: + req = make_request("GET", "/") + ws = WebSocketResponse() + await ws.prepare(req) + close_message = WSMessage(WSMsgType.CLOSE, 1000, "close") + + ws._reader = mock.Mock() + ws._reader.read = mock.AsyncMock(return_value=close_message) + ws._payload_writer.drain = mock.Mock() + ws._payload_writer.drain.return_value = loop.create_future() + ws._payload_writer.drain.return_value.set_result(True) + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSE + assert ws.closed + assert len(ws._req.transport.close.mock_calls) == 1 + + +async def test_receive_closing(make_request: Any, loop: Any) -> None: + req = make_request("GET", "/") + ws = WebSocketResponse() + await ws.prepare(req) + closing_message = WSMessage(WSMsgType.CLOSING, 1000, "closing") + + ws._reader = mock.Mock() + read_mock = mock.AsyncMock(return_value=closing_message) + ws._reader.read = read_mock + ws._payload_writer.drain = mock.Mock() + ws._payload_writer.drain.return_value = loop.create_future() + ws._payload_writer.drain.return_value.set_result(True) + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSING + assert not ws.closed + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSING + assert not ws.closed + + ws._cancel(ConnectionResetError("Connection lost")) + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSING + + +async def test_close_after_closing(make_request: Any, loop: Any) -> None: + req = make_request("GET", "/") + ws = WebSocketResponse() + await ws.prepare(req) + closing_message = WSMessage(WSMsgType.CLOSING, 1000, "closing") + + ws._reader = mock.Mock() + ws._reader.read = mock.AsyncMock(return_value=closing_message) + ws._payload_writer.drain = mock.Mock() + ws._payload_writer.drain.return_value = loop.create_future() + ws._payload_writer.drain.return_value.set_result(True) + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSING + assert not ws.closed + 
assert len(ws._req.transport.close.mock_calls) == 0 + + await ws.close() + assert ws.closed + assert len(ws._req.transport.close.mock_calls) == 1 + + +async def test_receive_timeouterror(make_request: Any, loop: Any) -> None: req = make_request("GET", "/") ws = WebSocketResponse() await ws.prepare(req) diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index da855a4b7c1..7af4c6091eb 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -1,6 +1,7 @@ # HTTP websocket server functional tests import asyncio +from typing import Any, Optional import pytest @@ -258,7 +259,7 @@ async def handler(request): assert "reply" == (await ws.receive_str()) # The server closes here. Then the client sends bogus messages with an - # internval shorter than server-side close timeout, to make the server + # interval shorter than server-side close timeout, to make the server # hanging indefinitely. await asyncio.sleep(0.08) msg = await ws._reader.read() @@ -310,8 +311,36 @@ async def handler(request): assert msg.type == WSMsgType.CLOSED -async def test_auto_pong_with_closing_by_peer(loop, aiohttp_client) -> None: +async def test_close_op_code_from_client(loop: Any, aiohttp_client: Any) -> None: + srv_ws: Optional[web.WebSocketResponse] = None + async def handler(request): + nonlocal srv_ws + ws = srv_ws = web.WebSocketResponse(protocols=("foo", "bar")) + await ws.prepare(request) + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSE + await asyncio.sleep(0) + return ws + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + ws: web.WebSocketResponse = await client.ws_connect("/", protocols=("eggs", "bar")) + + await ws._writer._send_frame(b"", WSMsgType.CLOSE) + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSE + + await asyncio.sleep(0) + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSED + + +async def 
test_auto_pong_with_closing_by_peer(loop: Any, aiohttp_client: Any) -> None: closed = loop.create_future() async def handler(request): From 1746d2d24eb9d748d81d71e59d8b282b335d9dc8 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 28 Mar 2024 22:54:44 +0000 Subject: [PATCH 0135/1511] [PR #8259/f78ef6f8 backport][3.9] Flag python 3.11/12 support (#8261) **This is a backport of PR #8259 as merged into master (f78ef6f8e9e3a9503f3a17357cb4aad63c0df43d).** Co-authored-by: Sam Bull --- .github/workflows/ci-cd.yml | 24 ++---------------------- setup.cfg | 2 ++ 2 files changed, 4 insertions(+), 22 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index b47f93bb2c8..36f7b9a137f 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -125,7 +125,7 @@ jobs: needs: gen_llhttp strategy: matrix: - pyver: [3.8, 3.9, '3.10'] + pyver: [3.8, 3.9, '3.10', '3.11', '3.12'] no-extensions: ['', 'Y'] os: [ubuntu, macos, windows] experimental: [false] @@ -141,28 +141,8 @@ jobs: no-extensions: 'Y' os: ubuntu experimental: false - - os: macos - pyver: "3.11" - experimental: true - no-extensions: 'Y' - os: ubuntu - pyver: "3.11" - experimental: false - no-extensions: 'Y' - - os: windows - pyver: "3.11" - experimental: true - no-extensions: 'Y' - - os: ubuntu - pyver: "3.12" - experimental: true - no-extensions: 'Y' - - os: macos - pyver: "3.12" - experimental: true - no-extensions: 'Y' - - os: windows - pyver: "3.12" + pyver: "3.13" experimental: true no-extensions: 'Y' fail-fast: true diff --git a/setup.cfg b/setup.cfg index 83da3961014..c514bab9f94 100644 --- a/setup.cfg +++ b/setup.cfg @@ -36,6 +36,8 @@ classifiers = Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 + Programming Language :: Python :: 3.11 + Programming Language :: Python :: 3.12 Topic :: Internet :: WWW/HTTP From 
259293f8c392b4ca276360d5434627f7c8fd3e34 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 28 Mar 2024 13:16:31 -1000 Subject: [PATCH 0136/1511] Ensure websocket transport is closed when client does not close it (#8200) (#8257) --- CHANGES/8200.bugfix.rst | 6 ++ aiohttp/web_ws.py | 21 ++++-- tests/test_web_websocket.py | 89 +++++++++++++++++++++++++- tests/test_web_websocket_functional.py | 33 +++++++++- 4 files changed, 141 insertions(+), 8 deletions(-) create mode 100644 CHANGES/8200.bugfix.rst diff --git a/CHANGES/8200.bugfix.rst b/CHANGES/8200.bugfix.rst new file mode 100644 index 00000000000..e4492a8a84c --- /dev/null +++ b/CHANGES/8200.bugfix.rst @@ -0,0 +1,6 @@ +Ensure websocket transport is closed when client does not close it +-- by :user:`bdraco`. + +The transport could remain open if the client did not close it. This +change ensures the transport is closed when the client does not close +it. diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index d20a26ca470..52604d8a1eb 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -395,6 +395,7 @@ async def close( return True if self._closing: + self._close_transport() return True reader = self._reader @@ -418,9 +419,18 @@ async def close( self._exception = asyncio.TimeoutError() return True + def _set_closing(self, code: WSCloseCode) -> None: + """Set the close code and mark the connection as closing.""" + self._closing = True + self._close_code = code + def _set_code_close_transport(self, code: WSCloseCode) -> None: """Set the close code and close the transport.""" self._close_code = code + self._close_transport() + + def _close_transport(self) -> None: + """Close the transport.""" if self._req is not None and self._req.transport is not None: self._req.transport.close() @@ -465,14 +475,12 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: return WSMessage(WSMsgType.ERROR, exc, None) except Exception as exc: self._exception = exc - self._closing = True - self._close_code = 
WSCloseCode.ABNORMAL_CLOSURE + self._set_closing(WSCloseCode.ABNORMAL_CLOSURE) await self.close() return WSMessage(WSMsgType.ERROR, exc, None) if msg.type == WSMsgType.CLOSE: - self._closing = True - self._close_code = msg.data + self._set_closing(msg.data) # Could be closed while awaiting reader. if not self._closed and self._autoclose: # The client is likely going to close the @@ -481,7 +489,7 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: # likely result writing to a broken pipe. await self.close(drain=False) elif msg.type == WSMsgType.CLOSING: - self._closing = True + self._set_closing(WSCloseCode.OK) elif msg.type == WSMsgType.PING and self._autoping: await self.pong(msg.data) continue @@ -525,5 +533,8 @@ async def __anext__(self) -> WSMessage: return msg def _cancel(self, exc: BaseException) -> None: + # web_protocol calls this from connection_lost + # or when the server is shutting down. + self._closing = True if self._reader is not None: set_exception(self._reader, exc) diff --git a/tests/test_web_websocket.py b/tests/test_web_websocket.py index d0aca0c019a..543fe91db07 100644 --- a/tests/test_web_websocket.py +++ b/tests/test_web_websocket.py @@ -371,7 +371,94 @@ async def test_receive_eofstream_in_reader(make_request, loop) -> None: assert ws.closed -async def test_receive_timeouterror(make_request, loop) -> None: +async def test_receive_exception_in_reader(make_request: Any, loop: Any) -> None: + req = make_request("GET", "/") + ws = WebSocketResponse() + await ws.prepare(req) + + ws._reader = mock.Mock() + exc = Exception() + res = loop.create_future() + res.set_exception(exc) + ws._reader.read = make_mocked_coro(res) + ws._payload_writer.drain = mock.Mock() + ws._payload_writer.drain.return_value = loop.create_future() + ws._payload_writer.drain.return_value.set_result(True) + + msg = await ws.receive() + assert msg.type == WSMsgType.ERROR + assert ws.closed + assert len(ws._req.transport.close.mock_calls) == 1 + + +async def 
test_receive_close_but_left_open(make_request: Any, loop: Any) -> None: + req = make_request("GET", "/") + ws = WebSocketResponse() + await ws.prepare(req) + close_message = WSMessage(WSMsgType.CLOSE, 1000, "close") + + ws._reader = mock.Mock() + ws._reader.read = mock.AsyncMock(return_value=close_message) + ws._payload_writer.drain = mock.Mock() + ws._payload_writer.drain.return_value = loop.create_future() + ws._payload_writer.drain.return_value.set_result(True) + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSE + assert ws.closed + assert len(ws._req.transport.close.mock_calls) == 1 + + +async def test_receive_closing(make_request: Any, loop: Any) -> None: + req = make_request("GET", "/") + ws = WebSocketResponse() + await ws.prepare(req) + closing_message = WSMessage(WSMsgType.CLOSING, 1000, "closing") + + ws._reader = mock.Mock() + read_mock = mock.AsyncMock(return_value=closing_message) + ws._reader.read = read_mock + ws._payload_writer.drain = mock.Mock() + ws._payload_writer.drain.return_value = loop.create_future() + ws._payload_writer.drain.return_value.set_result(True) + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSING + assert not ws.closed + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSING + assert not ws.closed + + ws._cancel(ConnectionResetError("Connection lost")) + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSING + + +async def test_close_after_closing(make_request: Any, loop: Any) -> None: + req = make_request("GET", "/") + ws = WebSocketResponse() + await ws.prepare(req) + closing_message = WSMessage(WSMsgType.CLOSING, 1000, "closing") + + ws._reader = mock.Mock() + ws._reader.read = mock.AsyncMock(return_value=closing_message) + ws._payload_writer.drain = mock.Mock() + ws._payload_writer.drain.return_value = loop.create_future() + ws._payload_writer.drain.return_value.set_result(True) + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSING + assert not ws.closed + 
assert len(ws._req.transport.close.mock_calls) == 0 + + await ws.close() + assert ws.closed + assert len(ws._req.transport.close.mock_calls) == 1 + + +async def test_receive_timeouterror(make_request: Any, loop: Any) -> None: req = make_request("GET", "/") ws = WebSocketResponse() await ws.prepare(req) diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index da855a4b7c1..7af4c6091eb 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -1,6 +1,7 @@ # HTTP websocket server functional tests import asyncio +from typing import Any, Optional import pytest @@ -258,7 +259,7 @@ async def handler(request): assert "reply" == (await ws.receive_str()) # The server closes here. Then the client sends bogus messages with an - # internval shorter than server-side close timeout, to make the server + # interval shorter than server-side close timeout, to make the server # hanging indefinitely. await asyncio.sleep(0.08) msg = await ws._reader.read() @@ -310,8 +311,36 @@ async def handler(request): assert msg.type == WSMsgType.CLOSED -async def test_auto_pong_with_closing_by_peer(loop, aiohttp_client) -> None: +async def test_close_op_code_from_client(loop: Any, aiohttp_client: Any) -> None: + srv_ws: Optional[web.WebSocketResponse] = None + async def handler(request): + nonlocal srv_ws + ws = srv_ws = web.WebSocketResponse(protocols=("foo", "bar")) + await ws.prepare(request) + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSE + await asyncio.sleep(0) + return ws + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + ws: web.WebSocketResponse = await client.ws_connect("/", protocols=("eggs", "bar")) + + await ws._writer._send_frame(b"", WSMsgType.CLOSE) + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSE + + await asyncio.sleep(0) + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSED + + +async def 
test_auto_pong_with_closing_by_peer(loop: Any, aiohttp_client: Any) -> None: closed = loop.create_future() async def handler(request): From 039da6f66fb6681ab2931770352f1dec661db02b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 28 Mar 2024 23:58:06 +0000 Subject: [PATCH 0137/1511] Bump actions/cache from 4.0.1 to 4.0.2 (#8236) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/cache](https://github.com/actions/cache) from 4.0.1 to 4.0.2.
Release notes

Sourced from actions/cache's releases.

v4.0.2

What's Changed

Full Changelog: https://github.com/actions/cache/compare/v4.0.1...v4.0.2

Changelog

Sourced from actions/cache's changelog.

4.0.2

  • Fixed restore fail-on-cache-miss not working.
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/cache&package-manager=github_actions&previous-version=4.0.1&new-version=4.0.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: J. Nick Koston --- .github/workflows/ci-cd.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 36f7b9a137f..0b9c1dbcb96 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -47,7 +47,7 @@ jobs: with: python-version: 3.9 - name: Cache PyPI - uses: actions/cache@v4.0.1 + uses: actions/cache@v4.0.2 with: key: pip-lint-${{ hashFiles('requirements/*.txt') }} path: ~/.cache/pip @@ -99,7 +99,7 @@ jobs: with: submodules: true - name: Cache llhttp generated files - uses: actions/cache@v4.0.1 + uses: actions/cache@v4.0.2 id: cache with: key: llhttp-${{ hashFiles('vendor/llhttp/package*.json', 'vendor/llhttp/src/**/*') }} @@ -164,7 +164,7 @@ jobs: run: | echo "::set-output name=dir::$(pip cache dir)" # - name: Cache - name: Cache PyPI - uses: actions/cache@v4.0.1 + uses: actions/cache@v4.0.2 with: key: pip-ci-${{ runner.os }}-${{ matrix.pyver }}-${{ matrix.no-extensions }}-${{ hashFiles('requirements/*.txt') }} path: ${{ steps.pip-cache.outputs.dir }} From 5c248fcc8c6d39c01ebec60418c8a0ae500ce094 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 29 Mar 2024 23:39:49 +0000 Subject: [PATCH 0138/1511] [PR #8260/2434bfe8 backport][3.9] Adjust import timings test for python 3.12 (#8265) Co-authored-by: J. 
Nick Koston --- tests/test_imports.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/tests/test_imports.py b/tests/test_imports.py index b98a2763458..7d0869d46c4 100644 --- a/tests/test_imports.py +++ b/tests/test_imports.py @@ -28,6 +28,11 @@ def test_web___all__(pytester: pytest.Pytester) -> None: result.assert_outcomes(passed=0, errors=0) +_TARGET_TIMINGS_BY_PYTHON_VERSION = { + "3.12": 250, # 3.12 is expected to be a bit slower due to performance trade-offs +} + + @pytest.mark.skipif( not sys.platform.startswith("linux") or platform.python_implementation() == "PyPy", reason="Timing is more reliable on Linux", @@ -59,4 +64,7 @@ def test_import_time(pytester: pytest.Pytester) -> None: else: os.environ["PYTHONPATH"] = old_path - assert best_time_ms < 200 + expected_time = _TARGET_TIMINGS_BY_PYTHON_VERSION.get( + f"{sys.version_info.major}.{sys.version_info.minor}", 200 + ) + assert best_time_ms < expected_time From 286f50ca69b69d2f0377fbb68482ad9791beb42e Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 29 Mar 2024 23:46:41 +0000 Subject: [PATCH 0139/1511] [PR #8260/2434bfe8 backport][3.10] Adjust import timings test for python 3.12 (#8266) Co-authored-by: J. 
Nick Koston --- tests/test_imports.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/tests/test_imports.py b/tests/test_imports.py index b98a2763458..7d0869d46c4 100644 --- a/tests/test_imports.py +++ b/tests/test_imports.py @@ -28,6 +28,11 @@ def test_web___all__(pytester: pytest.Pytester) -> None: result.assert_outcomes(passed=0, errors=0) +_TARGET_TIMINGS_BY_PYTHON_VERSION = { + "3.12": 250, # 3.12 is expected to be a bit slower due to performance trade-offs +} + + @pytest.mark.skipif( not sys.platform.startswith("linux") or platform.python_implementation() == "PyPy", reason="Timing is more reliable on Linux", @@ -59,4 +64,7 @@ def test_import_time(pytester: pytest.Pytester) -> None: else: os.environ["PYTHONPATH"] = old_path - assert best_time_ms < 200 + expected_time = _TARGET_TIMINGS_BY_PYTHON_VERSION.get( + f"{sys.version_info.major}.{sys.version_info.minor}", 200 + ) + assert best_time_ms < expected_time From 0b98984216ca4dd25e5db428c49b1a9a859827cf Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 30 Mar 2024 00:29:48 +0000 Subject: [PATCH 0140/1511] [PR #8259/f78ef6f8 backport][3.10] Flag python 3.11/12 support (#8262) **This is a backport of PR #8259 as merged into master (f78ef6f8e9e3a9503f3a17357cb4aad63c0df43d).** Co-authored-by: Sam Bull --- .github/workflows/ci-cd.yml | 24 ++---------------------- setup.cfg | 2 ++ 2 files changed, 4 insertions(+), 22 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index d78c4b56304..cb0ab872df6 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -125,7 +125,7 @@ jobs: needs: gen_llhttp strategy: matrix: - pyver: [3.8, 3.9, '3.10'] + pyver: [3.8, 3.9, '3.10', '3.11', '3.12'] no-extensions: ['', 'Y'] os: [ubuntu, macos, windows] experimental: [false] @@ -141,28 +141,8 @@ jobs: no-extensions: 'Y' os: ubuntu experimental: false - - os: macos - pyver: "3.11" - experimental: 
true - no-extensions: 'Y' - os: ubuntu - pyver: "3.11" - experimental: false - no-extensions: 'Y' - - os: windows - pyver: "3.11" - experimental: true - no-extensions: 'Y' - - os: ubuntu - pyver: "3.12" - experimental: true - no-extensions: 'Y' - - os: macos - pyver: "3.12" - experimental: true - no-extensions: 'Y' - - os: windows - pyver: "3.12" + pyver: "3.13" experimental: true no-extensions: 'Y' fail-fast: true diff --git a/setup.cfg b/setup.cfg index c291057ec7a..f407fbf901d 100644 --- a/setup.cfg +++ b/setup.cfg @@ -36,6 +36,8 @@ classifiers = Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 + Programming Language :: Python :: 3.11 + Programming Language :: Python :: 3.12 Topic :: Internet :: WWW/HTTP From 111f3702118c39cafe54b9fe74bcd47dcd471a5a Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 30 Mar 2024 00:47:37 +0000 Subject: [PATCH 0141/1511] [PR #8251/c21b76d0 backport][3.10] Leave websocket transport open if receive times out or is cancelled (#8264) Co-authored-by: J. Nick Koston --- CHANGES/8251.bugfix.rst | 4 ++ aiohttp/web_ws.py | 3 +- tests/test_web_websocket.py | 3 +- tests/test_web_websocket_functional.py | 93 ++++++++++++++++++++++++++ 4 files changed, 100 insertions(+), 3 deletions(-) create mode 100644 CHANGES/8251.bugfix.rst diff --git a/CHANGES/8251.bugfix.rst b/CHANGES/8251.bugfix.rst new file mode 100644 index 00000000000..6fc6507cfe2 --- /dev/null +++ b/CHANGES/8251.bugfix.rst @@ -0,0 +1,4 @@ +Leave websocket transport open if receive times out or is cancelled +-- by :user:`bdraco`. + +This restores the behavior prior to the change in #7978. 
diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index 52604d8a1eb..9fe66527539 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -462,8 +462,7 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: waiter = self._waiting set_result(waiter, True) self._waiting = None - except (asyncio.CancelledError, asyncio.TimeoutError): - self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) + except asyncio.TimeoutError: raise except EofStream: self._close_code = WSCloseCode.OK diff --git a/tests/test_web_websocket.py b/tests/test_web_websocket.py index 543fe91db07..d9eeda3d1d2 100644 --- a/tests/test_web_websocket.py +++ b/tests/test_web_websocket.py @@ -472,7 +472,8 @@ async def test_receive_timeouterror(make_request: Any, loop: Any) -> None: with pytest.raises(asyncio.TimeoutError): await ws.receive() - assert len(ws._req.transport.close.mock_calls) == 1 + # Should not close the connection on timeout + assert len(ws._req.transport.close.mock_calls) == 0 async def test_multiple_receive_on_close_connection(make_request) -> None: diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index 7af4c6091eb..b471b131c1e 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -1,6 +1,8 @@ # HTTP websocket server functional tests import asyncio +import contextlib +import sys from typing import Any, Optional import pytest @@ -797,3 +799,94 @@ async def ws_handler(request): resp = await client.get("/api/null", timeout=1) assert (await resp.json()) == {"err": None} resp.close() + + +async def test_receive_being_cancelled_keeps_connection_open( + loop: Any, aiohttp_client: Any +) -> None: + closed = loop.create_future() + + async def handler(request): + ws = web.WebSocketResponse(autoping=False) + await ws.prepare(request) + + task = asyncio.create_task(ws.receive()) + await asyncio.sleep(0) + task.cancel() + with contextlib.suppress(asyncio.CancelledError): + await task + 
+ msg = await ws.receive() + assert msg.type == WSMsgType.PING + await asyncio.sleep(0) + await ws.pong("data") + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSE + assert msg.data == WSCloseCode.OK + assert msg.extra == "exit message" + closed.set_result(None) + return ws + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + ws = await client.ws_connect("/", autoping=False) + + await asyncio.sleep(0) + await ws.ping("data") + + msg = await ws.receive() + assert msg.type == WSMsgType.PONG + assert msg.data == b"data" + + await ws.close(code=WSCloseCode.OK, message="exit message") + + await closed + + +async def test_receive_timeout_keeps_connection_open( + loop: Any, aiohttp_client: Any +) -> None: + closed = loop.create_future() + timed_out = loop.create_future() + + async def handler(request): + ws = web.WebSocketResponse(autoping=False) + await ws.prepare(request) + + task = asyncio.create_task(ws.receive(sys.float_info.min)) + with contextlib.suppress(asyncio.TimeoutError): + await task + + timed_out.set_result(None) + + msg = await ws.receive() + assert msg.type == WSMsgType.PING + await asyncio.sleep(0) + await ws.pong("data") + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSE + assert msg.data == WSCloseCode.OK + assert msg.extra == "exit message" + closed.set_result(None) + return ws + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + ws = await client.ws_connect("/", autoping=False) + + await timed_out + await ws.ping("data") + + msg = await ws.receive() + assert msg.type == WSMsgType.PONG + assert msg.data == b"data" + + await ws.close(code=WSCloseCode.OK, message="exit message") + + await closed From eaddd9c7a36600d504e28f5d9d6e35cd683a06d8 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 30 Mar 2024 01:08:15 +0000 Subject: [PATCH 0142/1511] [PR #8251/c21b76d0 
backport][3.9] Leave websocket transport open if receive times out or is cancelled (#8263) Co-authored-by: J. Nick Koston --- CHANGES/8251.bugfix.rst | 4 ++ aiohttp/web_ws.py | 3 +- tests/test_web_websocket.py | 3 +- tests/test_web_websocket_functional.py | 93 ++++++++++++++++++++++++++ 4 files changed, 100 insertions(+), 3 deletions(-) create mode 100644 CHANGES/8251.bugfix.rst diff --git a/CHANGES/8251.bugfix.rst b/CHANGES/8251.bugfix.rst new file mode 100644 index 00000000000..6fc6507cfe2 --- /dev/null +++ b/CHANGES/8251.bugfix.rst @@ -0,0 +1,4 @@ +Leave websocket transport open if receive times out or is cancelled +-- by :user:`bdraco`. + +This restores the behavior prior to the change in #7978. diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index 52604d8a1eb..9fe66527539 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -462,8 +462,7 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: waiter = self._waiting set_result(waiter, True) self._waiting = None - except (asyncio.CancelledError, asyncio.TimeoutError): - self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) + except asyncio.TimeoutError: raise except EofStream: self._close_code = WSCloseCode.OK diff --git a/tests/test_web_websocket.py b/tests/test_web_websocket.py index 543fe91db07..d9eeda3d1d2 100644 --- a/tests/test_web_websocket.py +++ b/tests/test_web_websocket.py @@ -472,7 +472,8 @@ async def test_receive_timeouterror(make_request: Any, loop: Any) -> None: with pytest.raises(asyncio.TimeoutError): await ws.receive() - assert len(ws._req.transport.close.mock_calls) == 1 + # Should not close the connection on timeout + assert len(ws._req.transport.close.mock_calls) == 0 async def test_multiple_receive_on_close_connection(make_request) -> None: diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index 7af4c6091eb..b471b131c1e 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py 
@@ -1,6 +1,8 @@ # HTTP websocket server functional tests import asyncio +import contextlib +import sys from typing import Any, Optional import pytest @@ -797,3 +799,94 @@ async def ws_handler(request): resp = await client.get("/api/null", timeout=1) assert (await resp.json()) == {"err": None} resp.close() + + +async def test_receive_being_cancelled_keeps_connection_open( + loop: Any, aiohttp_client: Any +) -> None: + closed = loop.create_future() + + async def handler(request): + ws = web.WebSocketResponse(autoping=False) + await ws.prepare(request) + + task = asyncio.create_task(ws.receive()) + await asyncio.sleep(0) + task.cancel() + with contextlib.suppress(asyncio.CancelledError): + await task + + msg = await ws.receive() + assert msg.type == WSMsgType.PING + await asyncio.sleep(0) + await ws.pong("data") + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSE + assert msg.data == WSCloseCode.OK + assert msg.extra == "exit message" + closed.set_result(None) + return ws + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + ws = await client.ws_connect("/", autoping=False) + + await asyncio.sleep(0) + await ws.ping("data") + + msg = await ws.receive() + assert msg.type == WSMsgType.PONG + assert msg.data == b"data" + + await ws.close(code=WSCloseCode.OK, message="exit message") + + await closed + + +async def test_receive_timeout_keeps_connection_open( + loop: Any, aiohttp_client: Any +) -> None: + closed = loop.create_future() + timed_out = loop.create_future() + + async def handler(request): + ws = web.WebSocketResponse(autoping=False) + await ws.prepare(request) + + task = asyncio.create_task(ws.receive(sys.float_info.min)) + with contextlib.suppress(asyncio.TimeoutError): + await task + + timed_out.set_result(None) + + msg = await ws.receive() + assert msg.type == WSMsgType.PING + await asyncio.sleep(0) + await ws.pong("data") + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSE + assert 
msg.data == WSCloseCode.OK + assert msg.extra == "exit message" + closed.set_result(None) + return ws + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + ws = await client.ws_connect("/", autoping=False) + + await timed_out + await ws.ping("data") + + msg = await ws.receive() + assert msg.type == WSMsgType.PONG + assert msg.data == b"data" + + await ws.close(code=WSCloseCode.OK, message="exit message") + + await closed From 3a13bd56a1d027a662390c9ee5fb4b8c006751de Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 30 Mar 2024 01:51:48 +0000 Subject: [PATCH 0143/1511] [PR #8267/509fb269 backport][3.9] Add note about tasks that should be cancelled on shutdown (#8268) **This is a backport of PR #8267 as merged into master (509fb269ac7ed0fe9ad387eccf7d1112aa9c0e65).** Co-authored-by: Sam Bull --- CHANGES/8267.doc.rst | 1 + docs/web_advanced.rst | 8 ++++++++ 2 files changed, 9 insertions(+) create mode 100644 CHANGES/8267.doc.rst diff --git a/CHANGES/8267.doc.rst b/CHANGES/8267.doc.rst new file mode 100644 index 00000000000..69f11d37560 --- /dev/null +++ b/CHANGES/8267.doc.rst @@ -0,0 +1 @@ +Added a note about canceling tasks to avoid delaying server shutdown -- by :user:`Dreamsorcerer`. diff --git a/docs/web_advanced.rst b/docs/web_advanced.rst index 3549a5c7e36..d2ba3013e30 100644 --- a/docs/web_advanced.rst +++ b/docs/web_advanced.rst @@ -952,6 +952,14 @@ steps 4 and 7). :ref:`cleanup contexts`. 7. Cancel any remaining tasks and wait on them to complete. +.. note:: + + When creating new tasks in a handler which _should_ be cancelled on server shutdown, + then it is important to keep track of those tasks and explicitly cancel them in a + :attr:`Application.on_shutdown` callback. As we can see from the above steps, + without this the server will wait on those new tasks to complete before it continues + with server shutdown. 
+ Websocket shutdown ^^^^^^^^^^^^^^^^^^ From d10bd0a8742a3824f030236ef34f05ea84cf2496 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 30 Mar 2024 01:52:10 +0000 Subject: [PATCH 0144/1511] [PR #8267/509fb269 backport][3.10] Add note about tasks that should be cancelled on shutdown (#8269) **This is a backport of PR #8267 as merged into master (509fb269ac7ed0fe9ad387eccf7d1112aa9c0e65).** Co-authored-by: Sam Bull --- CHANGES/8267.doc.rst | 1 + docs/web_advanced.rst | 8 ++++++++ 2 files changed, 9 insertions(+) create mode 100644 CHANGES/8267.doc.rst diff --git a/CHANGES/8267.doc.rst b/CHANGES/8267.doc.rst new file mode 100644 index 00000000000..69f11d37560 --- /dev/null +++ b/CHANGES/8267.doc.rst @@ -0,0 +1 @@ +Added a note about canceling tasks to avoid delaying server shutdown -- by :user:`Dreamsorcerer`. diff --git a/docs/web_advanced.rst b/docs/web_advanced.rst index 3549a5c7e36..d2ba3013e30 100644 --- a/docs/web_advanced.rst +++ b/docs/web_advanced.rst @@ -952,6 +952,14 @@ steps 4 and 7). :ref:`cleanup contexts`. 7. Cancel any remaining tasks and wait on them to complete. +.. note:: + + When creating new tasks in a handler which _should_ be cancelled on server shutdown, + then it is important to keep track of those tasks and explicitly cancel them in a + :attr:`Application.on_shutdown` callback. As we can see from the above steps, + without this the server will wait on those new tasks to complete before it continues + with server shutdown. 
+ Websocket shutdown ^^^^^^^^^^^^^^^^^^ From 46e8f64b2155971ed9f35c25f22fbfe8d90bfb90 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 31 Mar 2024 02:41:31 +0100 Subject: [PATCH 0145/1511] [PR #8271/e0d9d3ab backport][3.9] Fix race condition with request handlers on shutdown (#8272) **This is a backport of PR #8271 as merged into master (e0d9d3ab9417cea6d25a37a75d12d3a9c1755a8b).** Co-authored-by: Sam Bull --- CHANGES/8271.bugfix.rst | 1 + aiohttp/web_runner.py | 3 +++ 2 files changed, 4 insertions(+) create mode 100644 CHANGES/8271.bugfix.rst diff --git a/CHANGES/8271.bugfix.rst b/CHANGES/8271.bugfix.rst new file mode 100644 index 00000000000..9d572ba2fe6 --- /dev/null +++ b/CHANGES/8271.bugfix.rst @@ -0,0 +1 @@ +Fixed a race condition with incoming connections during server shutdown -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/web_runner.py b/aiohttp/web_runner.py index 6999b5c5feb..19a4441658f 100644 --- a/aiohttp/web_runner.py +++ b/aiohttp/web_runner.py @@ -307,6 +307,9 @@ async def cleanup(self) -> None: await site.stop() if self._server: # If setup succeeded + # Yield to event loop to ensure incoming requests prior to stopping the sites + # have all started to be handled before we proceed to close idle connections. 
+ await asyncio.sleep(0) self._server.pre_shutdown() await self.shutdown() From f74026ab443f3fc48a296e98801f7bce3b37c308 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 31 Mar 2024 02:41:56 +0100 Subject: [PATCH 0146/1511] [PR #8271/e0d9d3ab backport][3.10] Fix race condition with request handlers on shutdown (#8273) **This is a backport of PR #8271 as merged into master (e0d9d3ab9417cea6d25a37a75d12d3a9c1755a8b).** Co-authored-by: Sam Bull --- CHANGES/8271.bugfix.rst | 1 + aiohttp/web_runner.py | 3 +++ 2 files changed, 4 insertions(+) create mode 100644 CHANGES/8271.bugfix.rst diff --git a/CHANGES/8271.bugfix.rst b/CHANGES/8271.bugfix.rst new file mode 100644 index 00000000000..9d572ba2fe6 --- /dev/null +++ b/CHANGES/8271.bugfix.rst @@ -0,0 +1 @@ +Fixed a race condition with incoming connections during server shutdown -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/web_runner.py b/aiohttp/web_runner.py index 6999b5c5feb..19a4441658f 100644 --- a/aiohttp/web_runner.py +++ b/aiohttp/web_runner.py @@ -307,6 +307,9 @@ async def cleanup(self) -> None: await site.stop() if self._server: # If setup succeeded + # Yield to event loop to ensure incoming requests prior to stopping the sites + # have all started to be handled before we proceed to close idle connections. + await asyncio.sleep(0) self._server.pre_shutdown() await self.shutdown() From f91cb32a570f65543c05fb2c17e616b520899646 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Apr 2024 10:37:10 +0000 Subject: [PATCH 0147/1511] Bump cython from 3.0.9 to 3.0.10 (#8276) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [cython](https://github.com/cython/cython) from 3.0.9 to 3.0.10.
Changelog

Sourced from cython's changelog.

3.0.10 (2024-03-30)

Bugs fixed

  • Cython generated incorrect self-casts when directly calling final methods of subtypes. Patch by Lisandro Dalcin. (Github issue :issue:2747)

  • Internal C names generated from C function signatures could become too long for MSVC. (Github issue :issue:6052)

  • The noexcept warnings could be misleading in some cases. Patch by Gonzalo Tornaría. (Github issue :issue:6087)

  • The @cython.ufunc implementation could generate incomplete C code. (Github issue :issue:6064)

  • The libcpp.complex declarations could result in incorrect C++ code. Patch by Raffi Enficiaud. (Github issue :issue:6037)

  • Several tests were adapted to work with both NumPy 1.x and 2.0. Patch by Matti Picus. (Github issues :issue:6076, :issue:6100)

  • C compiler warnings when the freelist implementation is disabled (e.g. on PyPy) were fixed. It can now be disabled explicitly with the C macro guard CYTHON_USE_FREELISTS=0. (Github issue :issue:6099)

  • Some C macro guards for feature flags were missing from the NOGIL Python configuration.

  • Some recently added builtins were unconditionally looked up at module import time (if used by user code) that weren't available on all Python versions and could thus fail the import.

  • A performance hint regarding exported pxd declarations was improved. (Github issue :issue:6001)

Commits
  • 7ae8531 Build: Upgrade action version.
  • 71c5775 Fix Py3-ism in Py2/3 code.
  • 6cab0d6 Prepare release of 3.0.10.
  • 4e842a9 Remove a useless 'f' string prefix.
  • 8aba690 Improve performance hints for nogil + pxd (#6088)
  • ae120d5 Disable pstats tests also in 3.13 as long as they wait for a new C-API in CPy...
  • 27364ef Update changelog.
  • e178fc2 Reduce cap length of cnames generated from long function argument lists (GH-...
  • 03e8393 Update changelog.
  • 5fc42b1 Fix self cast when calling final methods (GH-6085)
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=cython&package-manager=pip&previous-version=3.0.9&new-version=3.0.10)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/cython.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index fa5adf12e1a..4c46b1e5745 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -62,7 +62,7 @@ cryptography==41.0.2 # via # pyjwt # trustme -cython==3.0.9 +cython==3.0.10 # via -r requirements/cython.in distlib==0.3.3 # via virtualenv diff --git a/requirements/cython.txt b/requirements/cython.txt index f7fecfb3b95..63bcc1ac3b5 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/cython.txt --resolver=backtracking --strip-extras requirements/cython.in # -cython==3.0.9 +cython==3.0.10 # via -r requirements/cython.in multidict==6.0.5 # via -r requirements/multidict.in From 836f4a743b3230567aebe69dfb1e8112e60acfc1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Apr 2024 10:47:12 +0000 Subject: [PATCH 0148/1511] Bump aiodns from 3.1.1 to 3.2.0 (#8277) Bumps [aiodns](https://github.com/saghul/aiodns) from 3.1.1 to 3.2.0.
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=aiodns&package-manager=pip&previous-version=3.1.1&new-version=3.2.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 6bc448debd4..977f97dd5da 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/base.txt --strip-extras requirements/base.in # -aiodns==3.1.1 ; sys_platform == "linux" or sys_platform == "darwin" +aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in aiohappyeyeballs==2.3.2 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 4c46b1e5745..f13a0f83ef3 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/constraints.txt --resolver=backtracking --strip-extras requirements/constraints.in # -aiodns==3.1.1 ; sys_platform == "linux" or sys_platform == "darwin" +aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in aiohappyeyeballs==2.3.2 # via -r requirements/runtime-deps.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 7df659854e5..6ea71482ea4 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/dev.txt --resolver=backtracking --strip-extras requirements/dev.in # -aiodns==3.1.1 ; sys_platform == "linux" or sys_platform == "darwin" +aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in aiohappyeyeballs==2.3.2 # via -r requirements/runtime-deps.in diff --git a/requirements/runtime-deps.txt 
b/requirements/runtime-deps.txt index 4a968058d61..d1efee2aecf 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/runtime-deps.txt --strip-extras requirements/runtime-deps.in # -aiodns==3.1.1 ; sys_platform == "linux" or sys_platform == "darwin" +aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in aiohappyeyeballs==2.3.2 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 6f24fe2b1fa..19edd509007 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/test.txt --resolver=backtracking --strip-extras requirements/test.in # -aiodns==3.1.1 ; sys_platform == "linux" or sys_platform == "darwin" +aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in aiohappyeyeballs==2.3.2 # via -r requirements/runtime-deps.in From 3f79241bcbc02c9850348fc04c064fcbc980e8f0 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 4 Apr 2024 18:59:05 +0100 Subject: [PATCH 0149/1511] [PR #8286/28f1fd88 backport][3.9] docs: remove repetitive word in comment (#8287) **This is a backport of PR #8286 as merged into master (28f1fd88cbe44d81487d5e9b23a3302d032811bd).** ## What do these changes do? ## Are there changes in behavior for the user? ## Is it a substantial burden for the maintainers to support this? ## Related issue number ## Checklist - [x] I think the code is well written - [ ] Unit tests for the changes exist - [ ] Documentation reflects the changes - [ ] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. 
- [ ] Add a new news fragment into the `CHANGES/` folder * name it `..rst` (e.g. `588.bugfix.rst`) * if you don't have an issue number, change it to the pull request number after creating the PR * `.bugfix`: A bug fix for something the maintainers deemed an improper undesired behavior that got corrected to match pre-agreed expectations. * `.feature`: A new behavior, public APIs. That sort of stuff. * `.deprecation`: A declaration of future API removals and breaking changes in behavior. * `.breaking`: When something public is removed in a breaking way. Could be deprecated in an earlier release. * `.doc`: Notable updates to the documentation structure or build process. * `.packaging`: Notes for downstreams about unobvious side effects and tooling. Changes in the test invocation considerations and runtime assumptions. * `.contrib`: Stuff that affects the contributor experience. e.g. Running tests, building the docs, setting up the development environment. * `.misc`: Changes that are hard to assign to any of the above categories. * Make sure to use full sentences with correct case and punctuation, for example: ```rst Fixed issue with non-ascii contents in doctest text files -- by :user:`contributor-gh-handle`. ``` Use the past tense or the present tense a non-imperative mood, referring to what's changed compared to the last released version of this project. Co-authored-by: crazehang <165746307+crazehang@users.noreply.github.com> --- docs/testing.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/testing.rst b/docs/testing.rst index 15f163107f2..027ba63a039 100644 --- a/docs/testing.rst +++ b/docs/testing.rst @@ -775,7 +775,7 @@ Utilities mocked.assert_called_with(1, 2) - :param return_value: A value that the the mock object will return when + :param return_value: A value that the mock object will return when called. :returns: A mock object that behaves as a coroutine which returns *return_value* when called. 
From 6643115c6978f052abc293085b633e47d35061a0 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 4 Apr 2024 19:25:02 +0100 Subject: [PATCH 0150/1511] [PR #8286/28f1fd88 backport][3.10] docs: remove repetitive word in comment (#8288) **This is a backport of PR #8286 as merged into master (28f1fd88cbe44d81487d5e9b23a3302d032811bd).** Co-authored-by: crazehang <165746307+crazehang@users.noreply.github.com> --- docs/testing.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/testing.rst b/docs/testing.rst index 15f163107f2..027ba63a039 100644 --- a/docs/testing.rst +++ b/docs/testing.rst @@ -775,7 +775,7 @@ Utilities mocked.assert_called_with(1, 2) - :param return_value: A value that the the mock object will return when + :param return_value: A value that the mock object will return when called. :returns: A mock object that behaves as a coroutine which returns *return_value* when called. From bb231059b14277c34a8a0331e51406d5abe4f424 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 5 Apr 2024 03:58:05 +0000 Subject: [PATCH 0151/1511] [PR #8283/54e13b0a backport][3.9] Fix blocking I/O in the event loop while processing files in a post request (#8293) Co-authored-by: J. Nick Koston --- CHANGES/8283.bugfix.rst | 2 ++ aiohttp/test_utils.py | 11 +++++++++-- aiohttp/web_request.py | 10 ++++++---- 3 files changed, 17 insertions(+), 6 deletions(-) create mode 100644 CHANGES/8283.bugfix.rst diff --git a/CHANGES/8283.bugfix.rst b/CHANGES/8283.bugfix.rst new file mode 100644 index 00000000000..d456d59ba8e --- /dev/null +++ b/CHANGES/8283.bugfix.rst @@ -0,0 +1,2 @@ +Fixed blocking I/O in the event loop while processing files in a POST request +-- by :user:`bdraco`. 
diff --git a/aiohttp/test_utils.py b/aiohttp/test_utils.py index b5821a7fb84..a36e8599689 100644 --- a/aiohttp/test_utils.py +++ b/aiohttp/test_utils.py @@ -594,8 +594,15 @@ def make_mocked_request( """ task = mock.Mock() if loop is ...: - loop = mock.Mock() - loop.create_future.return_value = () + # no loop passed, try to get the current one if + # its is running as we need a real loop to create + # executor jobs to be able to do testing + # with a real executor + try: + loop = asyncio.get_running_loop() + except RuntimeError: + loop = mock.Mock() + loop.create_future.return_value = () if version < HttpVersion(1, 1): closing = True diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index 781713e5985..4bc670a798c 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -726,19 +726,21 @@ async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]": # https://tools.ietf.org/html/rfc7578#section-4.4 if field.filename: # store file in temp file - tmp = tempfile.TemporaryFile() + tmp = await self._loop.run_in_executor( + None, tempfile.TemporaryFile + ) chunk = await field.read_chunk(size=2**16) while chunk: chunk = field.decode(chunk) - tmp.write(chunk) + await self._loop.run_in_executor(None, tmp.write, chunk) size += len(chunk) if 0 < max_size < size: - tmp.close() + await self._loop.run_in_executor(None, tmp.close) raise HTTPRequestEntityTooLarge( max_size=max_size, actual_size=size ) chunk = await field.read_chunk(size=2**16) - tmp.seek(0) + await self._loop.run_in_executor(None, tmp.seek, 0) if field_ct is None: field_ct = "application/octet-stream" From ef06656568de70520be7c4a555f7ea896d43194b Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 5 Apr 2024 04:04:38 +0000 Subject: [PATCH 0152/1511] [PR #8283/54e13b0a backport][3.10] Fix blocking I/O in the event loop while processing files in a post request (#8294) Co-authored-by: J. 
Nick Koston --- CHANGES/8283.bugfix.rst | 2 ++ aiohttp/test_utils.py | 11 +++++++++-- aiohttp/web_request.py | 10 ++++++---- 3 files changed, 17 insertions(+), 6 deletions(-) create mode 100644 CHANGES/8283.bugfix.rst diff --git a/CHANGES/8283.bugfix.rst b/CHANGES/8283.bugfix.rst new file mode 100644 index 00000000000..d456d59ba8e --- /dev/null +++ b/CHANGES/8283.bugfix.rst @@ -0,0 +1,2 @@ +Fixed blocking I/O in the event loop while processing files in a POST request +-- by :user:`bdraco`. diff --git a/aiohttp/test_utils.py b/aiohttp/test_utils.py index b5821a7fb84..a36e8599689 100644 --- a/aiohttp/test_utils.py +++ b/aiohttp/test_utils.py @@ -594,8 +594,15 @@ def make_mocked_request( """ task = mock.Mock() if loop is ...: - loop = mock.Mock() - loop.create_future.return_value = () + # no loop passed, try to get the current one if + # its is running as we need a real loop to create + # executor jobs to be able to do testing + # with a real executor + try: + loop = asyncio.get_running_loop() + except RuntimeError: + loop = mock.Mock() + loop.create_future.return_value = () if version < HttpVersion(1, 1): closing = True diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index 781713e5985..4bc670a798c 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -726,19 +726,21 @@ async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]": # https://tools.ietf.org/html/rfc7578#section-4.4 if field.filename: # store file in temp file - tmp = tempfile.TemporaryFile() + tmp = await self._loop.run_in_executor( + None, tempfile.TemporaryFile + ) chunk = await field.read_chunk(size=2**16) while chunk: chunk = field.decode(chunk) - tmp.write(chunk) + await self._loop.run_in_executor(None, tmp.write, chunk) size += len(chunk) if 0 < max_size < size: - tmp.close() + await self._loop.run_in_executor(None, tmp.close) raise HTTPRequestEntityTooLarge( max_size=max_size, actual_size=size ) chunk = await field.read_chunk(size=2**16) - tmp.seek(0) + await 
self._loop.run_in_executor(None, tmp.seek, 0) if field_ct is None: field_ct = "application/octet-stream" From 38dd9b8557f35bdfc1376e5833fb8e235c9d49ba Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 4 Apr 2024 18:11:40 -1000 Subject: [PATCH 0153/1511] Fix AsyncResolver to match ThreadedResolver behavior (#8270) (#8295) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Sviatoslav Sydorenko (Святослав Сидоренко) --- CHANGES/8270.bugfix.rst | 9 ++ aiohttp/abc.py | 28 ++++- aiohttp/connector.py | 16 +-- aiohttp/resolver.py | 94 +++++++++----- docs/abc.rst | 54 ++++++++ docs/conf.py | 3 +- examples/fake_server.py | 10 +- requirements/runtime-deps.in | 2 +- setup.cfg | 2 +- tests/test_resolver.py | 236 +++++++++++++++++++++++++---------- 10 files changed, 343 insertions(+), 111 deletions(-) create mode 100644 CHANGES/8270.bugfix.rst diff --git a/CHANGES/8270.bugfix.rst b/CHANGES/8270.bugfix.rst new file mode 100644 index 00000000000..bda77223959 --- /dev/null +++ b/CHANGES/8270.bugfix.rst @@ -0,0 +1,9 @@ +Fix ``AsyncResolver`` to match ``ThreadedResolver`` behavior +-- by :user:`bdraco`. + +On system with IPv6 support, the :py:class:`~aiohttp.resolver.AsyncResolver` would not fallback +to providing A records when AAAA records were not available. +Additionally, unlike the :py:class:`~aiohttp.resolver.ThreadedResolver`, the :py:class:`~aiohttp.resolver.AsyncResolver` +did not handle link-local addresses correctly. + +This change makes the behavior consistent with the :py:class:`~aiohttp.resolver.ThreadedResolver`. 
diff --git a/aiohttp/abc.py b/aiohttp/abc.py index ee838998997..b6c0514a615 100644 --- a/aiohttp/abc.py +++ b/aiohttp/abc.py @@ -1,5 +1,6 @@ import asyncio import logging +import socket from abc import ABC, abstractmethod from collections.abc import Sized from http.cookies import BaseCookie, Morsel @@ -14,6 +15,7 @@ List, Optional, Tuple, + TypedDict, ) from multidict import CIMultiDict @@ -119,11 +121,35 @@ def __await__(self) -> Generator[Any, None, StreamResponse]: """Execute the view handler.""" +class ResolveResult(TypedDict): + """Resolve result. + + This is the result returned from an AbstractResolver's + resolve method. + + :param hostname: The hostname that was provided. + :param host: The IP address that was resolved. + :param port: The port that was resolved. + :param family: The address family that was resolved. + :param proto: The protocol that was resolved. + :param flags: The flags that were resolved. + """ + + hostname: str + host: str + port: int + family: int + proto: int + flags: int + + class AbstractResolver(ABC): """Abstract DNS resolver.""" @abstractmethod - async def resolve(self, host: str, port: int, family: int) -> List[Dict[str, Any]]: + async def resolve( + self, host: str, port: int = 0, family: int = socket.AF_INET + ) -> List[ResolveResult]: """Return IP address for given hostname""" @abstractmethod diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 64c678d4b78..90f7c3bb8c0 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -34,7 +34,7 @@ import attr from . 
import hdrs, helpers -from .abc import AbstractResolver +from .abc import AbstractResolver, ResolveResult from .client_exceptions import ( ClientConnectionError, ClientConnectorCertificateError, @@ -693,14 +693,14 @@ async def _create_connection( class _DNSCacheTable: def __init__(self, ttl: Optional[float] = None) -> None: - self._addrs_rr: Dict[Tuple[str, int], Tuple[Iterator[Dict[str, Any]], int]] = {} + self._addrs_rr: Dict[Tuple[str, int], Tuple[Iterator[ResolveResult], int]] = {} self._timestamps: Dict[Tuple[str, int], float] = {} self._ttl = ttl def __contains__(self, host: object) -> bool: return host in self._addrs_rr - def add(self, key: Tuple[str, int], addrs: List[Dict[str, Any]]) -> None: + def add(self, key: Tuple[str, int], addrs: List[ResolveResult]) -> None: self._addrs_rr[key] = (cycle(addrs), len(addrs)) if self._ttl is not None: @@ -716,7 +716,7 @@ def clear(self) -> None: self._addrs_rr.clear() self._timestamps.clear() - def next_addrs(self, key: Tuple[str, int]) -> List[Dict[str, Any]]: + def next_addrs(self, key: Tuple[str, int]) -> List[ResolveResult]: loop, length = self._addrs_rr[key] addrs = list(islice(loop, length)) # Consume one more element to shift internal state of `cycle` @@ -834,7 +834,7 @@ def clear_dns_cache( async def _resolve_host( self, host: str, port: int, traces: Optional[List["Trace"]] = None - ) -> List[Dict[str, Any]]: + ) -> List[ResolveResult]: """Resolve host and return list of addresses.""" if is_ip_address(host): return [ @@ -890,7 +890,7 @@ async def _resolve_host( return await asyncio.shield(resolved_host_task) except asyncio.CancelledError: - def drop_exception(fut: "asyncio.Future[List[Dict[str, Any]]]") -> None: + def drop_exception(fut: "asyncio.Future[List[ResolveResult]]") -> None: with suppress(Exception, asyncio.CancelledError): fut.result() @@ -903,7 +903,7 @@ async def _resolve_host_with_throttle( host: str, port: int, traces: Optional[List["Trace"]], - ) -> List[Dict[str, Any]]: + ) -> 
List[ResolveResult]: """Resolve host with a dns events throttle.""" if key in self._throttle_dns_events: # get event early, before any await (#4014) @@ -1217,7 +1217,7 @@ async def _start_tls_connection( return tls_transport, tls_proto def _convert_hosts_to_addr_infos( - self, hosts: List[Dict[str, Any]] + self, hosts: List[ResolveResult] ) -> List[aiohappyeyeballs.AddrInfoType]: """Converts the list of hosts to a list of addr_infos. diff --git a/aiohttp/resolver.py b/aiohttp/resolver.py index 6c17b1e7e89..ad502c5e5c8 100644 --- a/aiohttp/resolver.py +++ b/aiohttp/resolver.py @@ -1,8 +1,9 @@ import asyncio import socket -from typing import Any, Dict, List, Optional, Type, Union +import sys +from typing import Any, Dict, List, Optional, Tuple, Type, Union -from .abc import AbstractResolver +from .abc import AbstractResolver, ResolveResult from .helpers import get_running_loop __all__ = ("ThreadedResolver", "AsyncResolver", "DefaultResolver") @@ -10,12 +11,16 @@ try: import aiodns - # aiodns_default = hasattr(aiodns.DNSResolver, 'gethostbyname') + # aiodns_default = hasattr(aiodns.DNSResolver, 'getaddrinfo') except ImportError: # pragma: no cover aiodns = None + aiodns_default = False +_NUMERIC_SOCKET_FLAGS = socket.AI_NUMERICHOST | socket.AI_NUMERICSERV +_SUPPORTS_SCOPE_ID = sys.version_info >= (3, 9, 0) + class ThreadedResolver(AbstractResolver): """Threaded resolver. 
@@ -28,45 +33,45 @@ def __init__(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: self._loop = get_running_loop(loop) async def resolve( - self, hostname: str, port: int = 0, family: int = socket.AF_INET - ) -> List[Dict[str, Any]]: + self, host: str, port: int = 0, family: int = socket.AF_INET + ) -> List[ResolveResult]: infos = await self._loop.getaddrinfo( - hostname, + host, port, type=socket.SOCK_STREAM, family=family, flags=socket.AI_ADDRCONFIG, ) - hosts = [] + hosts: List[ResolveResult] = [] for family, _, proto, _, address in infos: if family == socket.AF_INET6: if len(address) < 3: # IPv6 is not supported by Python build, # or IPv6 is not enabled in the host continue - if address[3]: + if address[3] and _SUPPORTS_SCOPE_ID: # This is essential for link-local IPv6 addresses. # LL IPv6 is a VERY rare case. Strictly speaking, we should use # getnameinfo() unconditionally, but performance makes sense. - host, _port = socket.getnameinfo( - address, socket.NI_NUMERICHOST | socket.NI_NUMERICSERV + resolved_host, _port = await self._loop.getnameinfo( + address, _NUMERIC_SOCKET_FLAGS ) port = int(_port) else: - host, port = address[:2] + resolved_host, port = address[:2] else: # IPv4 assert family == socket.AF_INET - host, port = address # type: ignore[misc] + resolved_host, port = address # type: ignore[misc] hosts.append( - { - "hostname": hostname, - "host": host, - "port": port, - "family": family, - "proto": proto, - "flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV, - } + ResolveResult( + hostname=host, + host=resolved_host, + port=port, + family=family, + proto=proto, + flags=_NUMERIC_SOCKET_FLAGS, + ) ) return hosts @@ -96,23 +101,48 @@ def __init__( async def resolve( self, host: str, port: int = 0, family: int = socket.AF_INET - ) -> List[Dict[str, Any]]: + ) -> List[ResolveResult]: try: - resp = await self._resolver.gethostbyname(host, family) + resp = await self._resolver.getaddrinfo( + host, + port=port, + type=socket.SOCK_STREAM, 
+ family=family, + flags=socket.AI_ADDRCONFIG, + ) except aiodns.error.DNSError as exc: msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed" raise OSError(msg) from exc - hosts = [] - for address in resp.addresses: + hosts: List[ResolveResult] = [] + for node in resp.nodes: + address: Union[Tuple[bytes, int], Tuple[bytes, int, int, int]] = node.addr + family = node.family + if family == socket.AF_INET6: + if len(address) > 3 and address[3] and _SUPPORTS_SCOPE_ID: + # This is essential for link-local IPv6 addresses. + # LL IPv6 is a VERY rare case. Strictly speaking, we should use + # getnameinfo() unconditionally, but performance makes sense. + result = await self._resolver.getnameinfo( + (address[0].decode("ascii"), *address[1:]), + _NUMERIC_SOCKET_FLAGS, + ) + resolved_host = result.node + else: + resolved_host = address[0].decode("ascii") + port = address[1] + else: # IPv4 + assert family == socket.AF_INET + resolved_host = address[0].decode("ascii") + port = address[1] hosts.append( - { - "hostname": host, - "host": address, - "port": port, - "family": family, - "proto": 0, - "flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV, - } + ResolveResult( + hostname=host, + host=resolved_host, + port=port, + family=family, + proto=0, + flags=_NUMERIC_SOCKET_FLAGS, + ) ) if not hosts: diff --git a/docs/abc.rst b/docs/abc.rst index d2695673fcf..4eea6715991 100644 --- a/docs/abc.rst +++ b/docs/abc.rst @@ -181,3 +181,57 @@ Abstract Access Logger :param response: :class:`aiohttp.web.Response` object. :param float time: Time taken to serve the request. + + +Abstract Resolver +------------------------------- + +.. class:: AbstractResolver + + An abstract class, base for all resolver implementations. + + Method ``resolve`` should be overridden. + + .. method:: resolve(host, port, family) + + Resolve host name to IP address. + + :param str host: host name to resolve. + + :param int port: port number. + + :param int family: socket family. 
+ + :return: list of :class:`aiohttp.abc.ResolveResult` instances. + + .. method:: close() + + Release resolver. + +.. class:: ResolveResult + + Result of host name resolution. + + .. attribute:: hostname + + The host name that was provided. + + .. attribute:: host + + The IP address that was resolved. + + .. attribute:: port + + The port that was resolved. + + .. attribute:: family + + The address family that was resolved. + + .. attribute:: proto + + The protocol that was resolved. + + .. attribute:: flags + + The flags that were resolved. diff --git a/docs/conf.py b/docs/conf.py index f21366fb488..c834296ceeb 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -394,7 +394,8 @@ ("py:class", "aiohttp.protocol.HttpVersion"), # undocumented ("py:class", "aiohttp.ClientRequest"), # undocumented ("py:class", "aiohttp.payload.Payload"), # undocumented - ("py:class", "aiohttp.abc.AbstractResolver"), # undocumented + ("py:class", "aiohttp.resolver.AsyncResolver"), # undocumented + ("py:class", "aiohttp.resolver.ThreadedResolver"), # undocumented ("py:func", "aiohttp.ws_connect"), # undocumented ("py:meth", "start"), # undocumented ("py:exc", "aiohttp.ClientHttpProxyError"), # undocumented diff --git a/examples/fake_server.py b/examples/fake_server.py index 3157bab658c..4f796d42386 100755 --- a/examples/fake_server.py +++ b/examples/fake_server.py @@ -3,10 +3,11 @@ import pathlib import socket import ssl +from typing import List, Union import aiohttp from aiohttp import web -from aiohttp.abc import AbstractResolver +from aiohttp.abc import AbstractResolver, ResolveResult from aiohttp.resolver import DefaultResolver from aiohttp.test_utils import unused_port @@ -19,7 +20,12 @@ def __init__(self, fakes, *, loop): self._fakes = fakes self._resolver = DefaultResolver(loop=loop) - async def resolve(self, host, port=0, family=socket.AF_INET): + async def resolve( + self, + host: str, + port: int = 0, + family: Union[socket.AddressFamily, int] = socket.AF_INET, + ) -> 
List[ResolveResult]: fake_port = self._fakes.get(host) if fake_port is not None: return [ diff --git a/requirements/runtime-deps.in b/requirements/runtime-deps.in index 70bd75bd99d..2299584a463 100644 --- a/requirements/runtime-deps.in +++ b/requirements/runtime-deps.in @@ -1,6 +1,6 @@ # Extracted from `setup.cfg` via `make sync-direct-runtime-deps` -aiodns; sys_platform=="linux" or sys_platform=="darwin" +aiodns >= 3.2.0; sys_platform=="linux" or sys_platform=="darwin" aiohappyeyeballs >= 2.3.0 aiosignal >= 1.1.2 async-timeout >= 4.0, < 5.0 ; python_version < "3.11" diff --git a/setup.cfg b/setup.cfg index f407fbf901d..02a5d54d114 100644 --- a/setup.cfg +++ b/setup.cfg @@ -65,7 +65,7 @@ install_requires = [options.extras_require] speedups = # required c-ares (aiodns' backend) will not build on windows - aiodns; sys_platform=="linux" or sys_platform=="darwin" + aiodns >= 3.2.0; sys_platform=="linux" or sys_platform=="darwin" Brotli; platform_python_implementation == 'CPython' brotlicffi; platform_python_implementation != 'CPython' diff --git a/tests/test_resolver.py b/tests/test_resolver.py index 1b389f3601b..2650ccadd6e 100644 --- a/tests/test_resolver.py +++ b/tests/test_resolver.py @@ -1,25 +1,57 @@ import asyncio import ipaddress import socket -from typing import Any, List +from ipaddress import ip_address +from typing import Any, Awaitable, Callable, Collection, List, NamedTuple, Tuple, Union from unittest.mock import Mock, patch import pytest -from aiohttp.resolver import AsyncResolver, DefaultResolver, ThreadedResolver +from aiohttp.resolver import ( + _NUMERIC_SOCKET_FLAGS, + _SUPPORTS_SCOPE_ID, + AsyncResolver, + DefaultResolver, + ThreadedResolver, +) try: import aiodns - gethostbyname = hasattr(aiodns.DNSResolver, "gethostbyname") + getaddrinfo: Any = hasattr(aiodns.DNSResolver, "getaddrinfo") except ImportError: aiodns = None - gethostbyname = False + getaddrinfo = False -class FakeResult: - def __init__(self, addresses): - self.addresses = addresses 
+class FakeAIODNSAddrInfoNode(NamedTuple): + + family: int + addr: Union[Tuple[bytes, int], Tuple[bytes, int, int, int]] + + +class FakeAIODNSAddrInfoIPv4Result: + def __init__(self, hosts: Collection[str]) -> None: + self.nodes = [ + FakeAIODNSAddrInfoNode(socket.AF_INET, (h.encode(), 0)) for h in hosts + ] + + +class FakeAIODNSAddrInfoIPv6Result: + def __init__(self, hosts: Collection[str]) -> None: + self.nodes = [ + FakeAIODNSAddrInfoNode( + socket.AF_INET6, + (h.encode(), 0, 0, 3 if ip_address(h).is_link_local else 0), + ) + for h in hosts + ] + + +class FakeAIODNSNameInfoIPv6Result: + def __init__(self, host: str) -> None: + self.node = host + self.service = None class FakeQueryResult: @@ -27,16 +59,30 @@ def __init__(self, host): self.host = host -async def fake_result(addresses): - return FakeResult(addresses=tuple(addresses)) +async def fake_aiodns_getaddrinfo_ipv4_result( + hosts: Collection[str], +) -> FakeAIODNSAddrInfoIPv4Result: + return FakeAIODNSAddrInfoIPv4Result(hosts=hosts) + + +async def fake_aiodns_getaddrinfo_ipv6_result( + hosts: Collection[str], +) -> FakeAIODNSAddrInfoIPv6Result: + return FakeAIODNSAddrInfoIPv6Result(hosts=hosts) + + +async def fake_aiodns_getnameinfo_ipv6_result( + host: str, +) -> FakeAIODNSNameInfoIPv6Result: + return FakeAIODNSNameInfoIPv6Result(host) async def fake_query_result(result): return [FakeQueryResult(host=h) for h in result] -def fake_addrinfo(hosts): - async def fake(*args, **kwargs): +def fake_addrinfo(hosts: Collection[str]) -> Callable[..., Awaitable[Any]]: + async def fake(*args: Any, **kwargs: Any) -> List[Any]: if not hosts: raise socket.gaierror @@ -45,33 +91,83 @@ async def fake(*args, **kwargs): return fake -@pytest.mark.skipif(not gethostbyname, reason="aiodns 1.1 required") -async def test_async_resolver_positive_lookup(loop) -> None: +def fake_ipv6_addrinfo(hosts: Collection[str]) -> Callable[..., Awaitable[Any]]: + async def fake(*args: Any, **kwargs: Any) -> List[Any]: + if not hosts: + raise 
socket.gaierror + + return [ + ( + socket.AF_INET6, + None, + socket.SOCK_STREAM, + None, + (h, 0, 0, 3 if ip_address(h).is_link_local else 0), + ) + for h in hosts + ] + + return fake + + +def fake_ipv6_nameinfo(host: str) -> Callable[..., Awaitable[Any]]: + async def fake(*args: Any, **kwargs: Any) -> Tuple[str, int]: + return host, 0 + + return fake + + +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +async def test_async_resolver_positive_ipv4_lookup(loop: Any) -> None: with patch("aiodns.DNSResolver") as mock: - mock().gethostbyname.return_value = fake_result(["127.0.0.1"]) - resolver = AsyncResolver(loop=loop) + mock().getaddrinfo.return_value = fake_aiodns_getaddrinfo_ipv4_result( + ["127.0.0.1"] + ) + resolver = AsyncResolver() real = await resolver.resolve("www.python.org") ipaddress.ip_address(real[0]["host"]) - mock().gethostbyname.assert_called_with("www.python.org", socket.AF_INET) - - -@pytest.mark.skipif(aiodns is None, reason="aiodns required") -async def test_async_resolver_query_positive_lookup(loop) -> None: + mock().getaddrinfo.assert_called_with( + "www.python.org", + family=socket.AF_INET, + flags=socket.AI_ADDRCONFIG, + port=0, + type=socket.SOCK_STREAM, + ) + + +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +@pytest.mark.skipif( + not _SUPPORTS_SCOPE_ID, reason="python version does not support scope id" +) +async def test_async_resolver_positive_link_local_ipv6_lookup(loop: Any) -> None: with patch("aiodns.DNSResolver") as mock: - del mock().gethostbyname - mock().query.return_value = fake_query_result(["127.0.0.1"]) - resolver = AsyncResolver(loop=loop) + mock().getaddrinfo.return_value = fake_aiodns_getaddrinfo_ipv6_result( + ["fe80::1"] + ) + mock().getnameinfo.return_value = fake_aiodns_getnameinfo_ipv6_result( + "fe80::1%eth0" + ) + resolver = AsyncResolver() real = await resolver.resolve("www.python.org") ipaddress.ip_address(real[0]["host"]) - 
mock().query.assert_called_with("www.python.org", "A") - - -@pytest.mark.skipif(not gethostbyname, reason="aiodns 1.1 required") -async def test_async_resolver_multiple_replies(loop) -> None: + mock().getaddrinfo.assert_called_with( + "www.python.org", + family=socket.AF_INET, + flags=socket.AI_ADDRCONFIG, + port=0, + type=socket.SOCK_STREAM, + ) + mock().getnameinfo.assert_called_with( + ("fe80::1", 0, 0, 3), _NUMERIC_SOCKET_FLAGS + ) + + +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +async def test_async_resolver_multiple_replies(loop: Any) -> None: with patch("aiodns.DNSResolver") as mock: ips = ["127.0.0.1", "127.0.0.2", "127.0.0.3", "127.0.0.4"] - mock().gethostbyname.return_value = fake_result(ips) - resolver = AsyncResolver(loop=loop) + mock().getaddrinfo.return_value = fake_aiodns_getaddrinfo_ipv4_result(ips) + resolver = AsyncResolver() real = await resolver.resolve("www.google.com") ips = [ipaddress.ip_address(x["host"]) for x in real] assert len(ips) > 3, "Expecting multiple addresses" @@ -88,40 +184,20 @@ async def test_async_resolver_query_multiple_replies(loop) -> None: ips = [ipaddress.ip_address(x["host"]) for x in real] -@pytest.mark.skipif(not gethostbyname, reason="aiodns 1.1 required") -async def test_async_resolver_negative_lookup(loop) -> None: - with patch("aiodns.DNSResolver") as mock: - mock().gethostbyname.side_effect = aiodns.error.DNSError() - resolver = AsyncResolver(loop=loop) - with pytest.raises(OSError): - await resolver.resolve("doesnotexist.bla") - - -@pytest.mark.skipif(aiodns is None, reason="aiodns required") -async def test_async_resolver_query_negative_lookup(loop) -> None: +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +async def test_async_resolver_negative_lookup(loop: Any) -> None: with patch("aiodns.DNSResolver") as mock: - del mock().gethostbyname - mock().query.side_effect = aiodns.error.DNSError() - resolver = AsyncResolver(loop=loop) - with pytest.raises(OSError): - 
await resolver.resolve("doesnotexist.bla") - - -@pytest.mark.skipif(aiodns is None, reason="aiodns required") -async def test_async_resolver_no_hosts_in_query(loop) -> None: - with patch("aiodns.DNSResolver") as mock: - del mock().gethostbyname - mock().query.return_value = fake_query_result([]) - resolver = AsyncResolver(loop=loop) + mock().getaddrinfo.side_effect = aiodns.error.DNSError() + resolver = AsyncResolver() with pytest.raises(OSError): await resolver.resolve("doesnotexist.bla") -@pytest.mark.skipif(not gethostbyname, reason="aiodns 1.1 required") -async def test_async_resolver_no_hosts_in_gethostbyname(loop) -> None: +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +async def test_async_resolver_no_hosts_in_getaddrinfo(loop: Any) -> None: with patch("aiodns.DNSResolver") as mock: - mock().gethostbyname.return_value = fake_result([]) - resolver = AsyncResolver(loop=loop) + mock().getaddrinfo.return_value = fake_aiodns_getaddrinfo_ipv4_result([]) + resolver = AsyncResolver() with pytest.raises(OSError): await resolver.resolve("doesnotexist.bla") @@ -135,6 +211,20 @@ async def test_threaded_resolver_positive_lookup() -> None: ipaddress.ip_address(real[0]["host"]) +@pytest.mark.skipif( + not _SUPPORTS_SCOPE_ID, reason="python version does not support scope id" +) +async def test_threaded_resolver_positive_ipv6_link_local_lookup() -> None: + loop = Mock() + loop.getaddrinfo = fake_ipv6_addrinfo(["fe80::1"]) + loop.getnameinfo = fake_ipv6_nameinfo("fe80::1%eth0") + resolver = ThreadedResolver() + resolver._loop = loop + real = await resolver.resolve("www.python.org") + assert real[0]["hostname"] == "www.python.org" + ipaddress.ip_address(real[0]["host"]) + + async def test_threaded_resolver_multiple_replies() -> None: loop = Mock() ips = ["127.0.0.1", "127.0.0.2", "127.0.0.3", "127.0.0.4"] @@ -154,6 +244,16 @@ async def test_threaded_negative_lookup() -> None: await resolver.resolve("doesnotexist.bla") +async def 
test_threaded_negative_ipv6_lookup() -> None: + loop = Mock() + ips: List[Any] = [] + loop.getaddrinfo = fake_ipv6_addrinfo(ips) + resolver = ThreadedResolver() + resolver._loop = loop + with pytest.raises(socket.gaierror): + await resolver.resolve("doesnotexist.bla") + + async def test_threaded_negative_lookup_with_unknown_result() -> None: loop = Mock() @@ -202,14 +302,20 @@ async def test_default_loop_for_async_resolver(loop) -> None: assert resolver._loop is loop -@pytest.mark.skipif(not gethostbyname, reason="aiodns 1.1 required") -async def test_async_resolver_ipv6_positive_lookup(loop) -> None: +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +async def test_async_resolver_ipv6_positive_lookup(loop: Any) -> None: with patch("aiodns.DNSResolver") as mock: - mock().gethostbyname.return_value = fake_result(["::1"]) - resolver = AsyncResolver(loop=loop) - real = await resolver.resolve("www.python.org", family=socket.AF_INET6) + mock().getaddrinfo.return_value = fake_aiodns_getaddrinfo_ipv6_result(["::1"]) + resolver = AsyncResolver() + real = await resolver.resolve("www.python.org") ipaddress.ip_address(real[0]["host"]) - mock().gethostbyname.assert_called_with("www.python.org", socket.AF_INET6) + mock().getaddrinfo.assert_called_with( + "www.python.org", + family=socket.AF_INET, + flags=socket.AI_ADDRCONFIG, + port=0, + type=socket.SOCK_STREAM, + ) @pytest.mark.skipif(aiodns is None, reason="aiodns required") @@ -230,7 +336,7 @@ async def test_async_resolver_aiodns_not_present(loop, monkeypatch) -> None: def test_default_resolver() -> None: - # if gethostbyname: + # if getaddrinfo: # assert DefaultResolver is AsyncResolver # else: # assert DefaultResolver is ThreadedResolver From d15f07cfbbdecf3de8ca1db10ca5d182ace7b09c Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Fri, 5 Apr 2024 19:50:29 +0100 Subject: [PATCH 0154/1511] Upgrade to llhttp 9.2.1 (#8292) (#8297) Fixes #8291. 
(cherry picked from commit 4d72dca6869072fb073621f8b752225e216a92d9) --- CHANGES/8292.feature.rst | 1 + aiohttp/http_parser.py | 8 +++-- tests/test_http_parser.py | 65 +++++++++++++++++++++++++++++---------- vendor/llhttp | 2 +- 4 files changed, 57 insertions(+), 19 deletions(-) create mode 100644 CHANGES/8292.feature.rst diff --git a/CHANGES/8292.feature.rst b/CHANGES/8292.feature.rst new file mode 100644 index 00000000000..6ca82503143 --- /dev/null +++ b/CHANGES/8292.feature.rst @@ -0,0 +1 @@ +Upgraded to LLHTTP 9.2.1, and started rejecting obsolete line folding in Python parser to match -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index 8bd8519ff6b..cce0b788d46 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -128,10 +128,12 @@ def __init__( max_line_size: int = 8190, max_headers: int = 32768, max_field_size: int = 8190, + lax: bool = False, ) -> None: self.max_line_size = max_line_size self.max_headers = max_headers self.max_field_size = max_field_size + self._lax = lax def parse_headers( self, lines: List[bytes] @@ -178,7 +180,7 @@ def parse_headers( line = lines[lines_idx] # consume continuation lines - continuation = line and line[0] in (32, 9) # (' ', '\t') + continuation = self._lax and line and line[0] in (32, 9) # (' ', '\t') # Deprecated: https://www.rfc-editor.org/rfc/rfc9112.html#name-obsolete-line-folding if continuation: @@ -273,7 +275,9 @@ def __init__( self._payload_parser: Optional[HttpPayloadParser] = None self._auto_decompress = auto_decompress self._limit = limit - self._headers_parser = HeadersParser(max_line_size, max_headers, max_field_size) + self._headers_parser = HeadersParser( + max_line_size, max_headers, max_field_size, self.lax + ) @abc.abstractmethod def parse_message(self, lines: List[bytes]) -> _MsgT: diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index da7f1182b3a..04b254c0ae8 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py 
@@ -108,8 +108,7 @@ def test_c_parser_loaded(): def test_parse_headers(parser: Any) -> None: text = b"""GET /test HTTP/1.1\r -test: line\r - continue\r +test: a line\r test2: data\r \r """ @@ -117,13 +116,24 @@ def test_parse_headers(parser: Any) -> None: assert len(messages) == 1 msg = messages[0][0] - assert list(msg.headers.items()) == [("test", "line continue"), ("test2", "data")] - assert msg.raw_headers == ((b"test", b"line continue"), (b"test2", b"data")) + assert list(msg.headers.items()) == [("test", "a line"), ("test2", "data")] + assert msg.raw_headers == ((b"test", b"a line"), (b"test2", b"data")) assert not msg.should_close assert msg.compression is None assert not msg.upgrade +def test_reject_obsolete_line_folding(parser: Any) -> None: + text = b"""GET /test HTTP/1.1\r +test: line\r + Content-Length: 48\r +test2: data\r +\r +""" + with pytest.raises(http_exceptions.BadHttpMessage): + parser.feed_data(text) + + @pytest.mark.skipif(NO_EXTENSIONS, reason="Only tests C parser.") def test_invalid_character(loop: Any, protocol: Any, request: Any) -> None: parser = HttpRequestParserC( @@ -353,8 +363,8 @@ def test_parse_delayed(parser) -> None: def test_headers_multi_feed(parser) -> None: text1 = b"GET /test HTTP/1.1\r\n" - text2 = b"test: line\r" - text3 = b"\n continue\r\n\r\n" + text2 = b"test: line" + text3 = b" continue\r\n\r\n" messages, upgrade, tail = parser.feed_data(text1) assert len(messages) == 0 @@ -713,31 +723,30 @@ def test_max_header_value_size_under_limit(parser) -> None: @pytest.mark.parametrize("size", [40965, 8191]) -def test_max_header_value_size_continuation(parser, size) -> None: +def test_max_header_value_size_continuation(response, size) -> None: name = b"T" * (size - 5) - text = b"GET /test HTTP/1.1\r\n" b"data: test\r\n " + name + b"\r\n\r\n" + text = b"HTTP/1.1 200 Ok\r\ndata: test\r\n " + name + b"\r\n\r\n" match = f"400, message:\n Got more than 8190 bytes \\({size}\\) when reading" with pytest.raises(http_exceptions.LineTooLong, 
match=match): - parser.feed_data(text) + response.feed_data(text) -def test_max_header_value_size_continuation_under_limit(parser) -> None: +def test_max_header_value_size_continuation_under_limit(response) -> None: value = b"A" * 8185 - text = b"GET /test HTTP/1.1\r\n" b"data: test\r\n " + value + b"\r\n\r\n" + text = b"HTTP/1.1 200 Ok\r\ndata: test\r\n " + value + b"\r\n\r\n" - messages, upgrade, tail = parser.feed_data(text) + messages, upgrade, tail = response.feed_data(text) msg = messages[0][0] - assert msg.method == "GET" - assert msg.path == "/test" + assert msg.code == 200 + assert msg.reason == "Ok" assert msg.version == (1, 1) assert msg.headers == CIMultiDict({"data": "test " + value.decode()}) assert msg.raw_headers == ((b"data", b"test " + value),) - assert not msg.should_close + # assert not msg.should_close # TODO: https://github.com/nodejs/llhttp/issues/354 assert msg.compression is None assert not msg.upgrade assert not msg.chunked - assert msg.url == URL("/test") def test_http_request_parser(parser) -> None: @@ -991,6 +1000,30 @@ def test_http_response_parser_utf8_without_reason(response: Any) -> None: assert not tail +def test_http_response_parser_obs_line_folding(response: Any) -> None: + text = b"HTTP/1.1 200 Ok\r\ntest: line\r\n continue\r\n\r\n" + + messages, upgraded, tail = response.feed_data(text) + assert len(messages) == 1 + msg = messages[0][0] + + assert msg.version == (1, 1) + assert msg.code == 200 + assert msg.reason == "Ok" + assert msg.headers == CIMultiDict([("TEST", "line continue")]) + assert msg.raw_headers == ((b"test", b"line continue"),) + assert not upgraded + assert not tail + + +@pytest.mark.dev_mode +def test_http_response_parser_strict_obs_line_folding(response: Any) -> None: + text = b"HTTP/1.1 200 Ok\r\ntest: line\r\n continue\r\n\r\n" + + with pytest.raises(http_exceptions.BadHttpMessage): + response.feed_data(text) + + @pytest.mark.parametrize("size", [40962, 8191]) def 
test_http_response_parser_bad_status_line_too_long(response, size) -> None: reason = b"t" * (size - 2) diff --git a/vendor/llhttp b/vendor/llhttp index 533845688d1..b0b279fb5a6 160000 --- a/vendor/llhttp +++ b/vendor/llhttp @@ -1 +1 @@ -Subproject commit 533845688d173561b9cba33269130401add38567 +Subproject commit b0b279fb5a617ab3bc2fc11c5f8bd937aac687c1 From 270ae9cf6a9e6159b5e29a950deb6ff7600aebc5 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 5 Apr 2024 21:31:25 +0100 Subject: [PATCH 0155/1511] [PR #8297/d15f07cf backport][3.9] Upgrade to llhttp 9.2.1 (#8292) (#8298) **This is a backport of PR #8297 as merged into 3.10 (d15f07cfbbdecf3de8ca1db10ca5d182ace7b09c).** Fixes #8291. (cherry picked from commit 4d72dca6869072fb073621f8b752225e216a92d9) Co-authored-by: Sam Bull --- CHANGES/8292.feature.rst | 1 + aiohttp/http_parser.py | 8 +++-- tests/test_http_parser.py | 65 +++++++++++++++++++++++++++++---------- vendor/llhttp | 2 +- 4 files changed, 57 insertions(+), 19 deletions(-) create mode 100644 CHANGES/8292.feature.rst diff --git a/CHANGES/8292.feature.rst b/CHANGES/8292.feature.rst new file mode 100644 index 00000000000..6ca82503143 --- /dev/null +++ b/CHANGES/8292.feature.rst @@ -0,0 +1 @@ +Upgraded to LLHTTP 9.2.1, and started rejecting obsolete line folding in Python parser to match -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index 8bd8519ff6b..cce0b788d46 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -128,10 +128,12 @@ def __init__( max_line_size: int = 8190, max_headers: int = 32768, max_field_size: int = 8190, + lax: bool = False, ) -> None: self.max_line_size = max_line_size self.max_headers = max_headers self.max_field_size = max_field_size + self._lax = lax def parse_headers( self, lines: List[bytes] @@ -178,7 +180,7 @@ def parse_headers( line = lines[lines_idx] # consume continuation lines - continuation = line and line[0] in (32, 9) # (' ', '\t') + continuation = self._lax and line and line[0] in (32, 9) # (' ', '\t') # Deprecated: https://www.rfc-editor.org/rfc/rfc9112.html#name-obsolete-line-folding if continuation: @@ -273,7 +275,9 @@ def __init__( self._payload_parser: Optional[HttpPayloadParser] = None self._auto_decompress = auto_decompress self._limit = limit - self._headers_parser = HeadersParser(max_line_size, max_headers, max_field_size) + self._headers_parser = HeadersParser( + max_line_size, max_headers, max_field_size, self.lax + ) @abc.abstractmethod def parse_message(self, lines: List[bytes]) -> _MsgT: diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index a37a08632d7..d76bb64bab5 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -108,8 +108,7 @@ def test_c_parser_loaded(): def test_parse_headers(parser: Any) -> None: text = b"""GET /test HTTP/1.1\r -test: line\r - continue\r +test: a line\r test2: data\r \r """ @@ -117,13 +116,24 @@ def test_parse_headers(parser: Any) -> None: assert len(messages) == 1 msg = messages[0][0] - assert list(msg.headers.items()) == [("test", "line continue"), ("test2", "data")] - assert msg.raw_headers == ((b"test", b"line continue"), (b"test2", b"data")) + assert list(msg.headers.items()) == [("test", "a line"), ("test2", "data")] + assert msg.raw_headers == ((b"test", b"a line"), (b"test2", b"data")) assert 
not msg.should_close assert msg.compression is None assert not msg.upgrade +def test_reject_obsolete_line_folding(parser: Any) -> None: + text = b"""GET /test HTTP/1.1\r +test: line\r + Content-Length: 48\r +test2: data\r +\r +""" + with pytest.raises(http_exceptions.BadHttpMessage): + parser.feed_data(text) + + @pytest.mark.skipif(NO_EXTENSIONS, reason="Only tests C parser.") def test_invalid_character(loop: Any, protocol: Any, request: Any) -> None: parser = HttpRequestParserC( @@ -342,8 +352,8 @@ def test_parse_delayed(parser) -> None: def test_headers_multi_feed(parser) -> None: text1 = b"GET /test HTTP/1.1\r\n" - text2 = b"test: line\r" - text3 = b"\n continue\r\n\r\n" + text2 = b"test: line" + text3 = b" continue\r\n\r\n" messages, upgrade, tail = parser.feed_data(text1) assert len(messages) == 0 @@ -705,31 +715,30 @@ def test_max_header_value_size_under_limit(parser) -> None: @pytest.mark.parametrize("size", [40965, 8191]) -def test_max_header_value_size_continuation(parser, size) -> None: +def test_max_header_value_size_continuation(response, size) -> None: name = b"T" * (size - 5) - text = b"GET /test HTTP/1.1\r\n" b"data: test\r\n " + name + b"\r\n\r\n" + text = b"HTTP/1.1 200 Ok\r\ndata: test\r\n " + name + b"\r\n\r\n" match = f"400, message:\n Got more than 8190 bytes \\({size}\\) when reading" with pytest.raises(http_exceptions.LineTooLong, match=match): - parser.feed_data(text) + response.feed_data(text) -def test_max_header_value_size_continuation_under_limit(parser) -> None: +def test_max_header_value_size_continuation_under_limit(response) -> None: value = b"A" * 8185 - text = b"GET /test HTTP/1.1\r\n" b"data: test\r\n " + value + b"\r\n\r\n" + text = b"HTTP/1.1 200 Ok\r\ndata: test\r\n " + value + b"\r\n\r\n" - messages, upgrade, tail = parser.feed_data(text) + messages, upgrade, tail = response.feed_data(text) msg = messages[0][0] - assert msg.method == "GET" - assert msg.path == "/test" + assert msg.code == 200 + assert msg.reason == "Ok" assert 
msg.version == (1, 1) assert msg.headers == CIMultiDict({"data": "test " + value.decode()}) assert msg.raw_headers == ((b"data", b"test " + value),) - assert not msg.should_close + # assert not msg.should_close # TODO: https://github.com/nodejs/llhttp/issues/354 assert msg.compression is None assert not msg.upgrade assert not msg.chunked - assert msg.url == URL("/test") def test_http_request_parser(parser) -> None: @@ -970,6 +979,30 @@ def test_http_response_parser_utf8_without_reason(response: Any) -> None: assert not tail +def test_http_response_parser_obs_line_folding(response: Any) -> None: + text = b"HTTP/1.1 200 Ok\r\ntest: line\r\n continue\r\n\r\n" + + messages, upgraded, tail = response.feed_data(text) + assert len(messages) == 1 + msg = messages[0][0] + + assert msg.version == (1, 1) + assert msg.code == 200 + assert msg.reason == "Ok" + assert msg.headers == CIMultiDict([("TEST", "line continue")]) + assert msg.raw_headers == ((b"test", b"line continue"),) + assert not upgraded + assert not tail + + +@pytest.mark.dev_mode +def test_http_response_parser_strict_obs_line_folding(response: Any) -> None: + text = b"HTTP/1.1 200 Ok\r\ntest: line\r\n continue\r\n\r\n" + + with pytest.raises(http_exceptions.BadHttpMessage): + response.feed_data(text) + + @pytest.mark.parametrize("size", [40962, 8191]) def test_http_response_parser_bad_status_line_too_long(response, size) -> None: reason = b"t" * (size - 2) diff --git a/vendor/llhttp b/vendor/llhttp index 533845688d1..b0b279fb5a6 160000 --- a/vendor/llhttp +++ b/vendor/llhttp @@ -1 +1 @@ -Subproject commit 533845688d173561b9cba33269130401add38567 +Subproject commit b0b279fb5a617ab3bc2fc11c5f8bd937aac687c1 From 767c413093fff1406d01f914a78c39b0cc7ae622 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 7 Apr 2024 12:50:53 +0100 Subject: [PATCH 0156/1511] Fix handling of multipart/form-data (#8280) (#8301) https://datatracker.ietf.org/doc/html/rfc7578 (cherry picked from commit 
7d0be3fee540a3d4161ac7dc76422f1f5ea60104) --- CHANGES/8280.bugfix.rst | 1 + CHANGES/8280.deprecation.rst | 2 + aiohttp/formdata.py | 12 +++- aiohttp/multipart.py | 121 +++++++++++++++++++++----------- tests/test_client_functional.py | 44 +----------- tests/test_multipart.py | 68 ++++++++++++++---- tests/test_web_functional.py | 27 ++----- 7 files changed, 155 insertions(+), 120 deletions(-) create mode 100644 CHANGES/8280.bugfix.rst create mode 100644 CHANGES/8280.deprecation.rst diff --git a/CHANGES/8280.bugfix.rst b/CHANGES/8280.bugfix.rst new file mode 100644 index 00000000000..3aebe36fe9e --- /dev/null +++ b/CHANGES/8280.bugfix.rst @@ -0,0 +1 @@ +Fixed ``multipart/form-data`` compliance with :rfc:`7578` -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8280.deprecation.rst b/CHANGES/8280.deprecation.rst new file mode 100644 index 00000000000..302dbb2fe2a --- /dev/null +++ b/CHANGES/8280.deprecation.rst @@ -0,0 +1,2 @@ +Deprecated ``content_transfer_encoding`` parameter in :py:meth:`FormData.add_field() +` -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/formdata.py b/aiohttp/formdata.py index e7cd24ca9f7..2b75b3de72c 100644 --- a/aiohttp/formdata.py +++ b/aiohttp/formdata.py @@ -1,4 +1,5 @@ import io +import warnings from typing import Any, Iterable, List, Optional from urllib.parse import urlencode @@ -53,7 +54,12 @@ def add_field( if isinstance(value, io.IOBase): self._is_multipart = True elif isinstance(value, (bytes, bytearray, memoryview)): + msg = ( + "In v4, passing bytes will no longer create a file field. " + "Please explicitly use the filename parameter or pass a BytesIO object." + ) if filename is None and content_transfer_encoding is None: + warnings.warn(msg, DeprecationWarning) filename = name type_options: MultiDict[str] = MultiDict({"name": name}) @@ -81,7 +87,11 @@ def add_field( "content_transfer_encoding must be an instance" " of str. 
Got: %s" % content_transfer_encoding ) - headers[hdrs.CONTENT_TRANSFER_ENCODING] = content_transfer_encoding + msg = ( + "content_transfer_encoding is deprecated. " + "To maintain compatibility with v4 please pass a BytesPayload." + ) + warnings.warn(msg, DeprecationWarning) self._is_multipart = True self._fields.append((type_options, headers, value)) diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index 4471dd4bb7e..a43ec545713 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -256,13 +256,22 @@ class BodyPartReader: chunk_size = 8192 def __init__( - self, boundary: bytes, headers: "CIMultiDictProxy[str]", content: StreamReader + self, + boundary: bytes, + headers: "CIMultiDictProxy[str]", + content: StreamReader, + *, + subtype: str = "mixed", + default_charset: Optional[str] = None, ) -> None: self.headers = headers self._boundary = boundary self._content = content + self._default_charset = default_charset self._at_eof = False - length = self.headers.get(CONTENT_LENGTH, None) + self._is_form_data = subtype == "form-data" + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8 + length = None if self._is_form_data else self.headers.get(CONTENT_LENGTH, None) self._length = int(length) if length is not None else None self._read_bytes = 0 self._unread: Deque[bytes] = deque() @@ -329,6 +338,8 @@ async def _read_chunk_from_length(self, size: int) -> bytes: assert self._length is not None, "Content-Length required for chunked read" chunk_size = min(size, self._length - self._read_bytes) chunk = await self._content.read(chunk_size) + if self._content.at_eof(): + self._at_eof = True return chunk async def _read_chunk_from_stream(self, size: int) -> bytes: @@ -449,7 +460,8 @@ def decode(self, data: bytes) -> bytes: """ if CONTENT_TRANSFER_ENCODING in self.headers: data = self._decode_content_transfer(data) - if CONTENT_ENCODING in self.headers: + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8 + if not self._is_form_data and 
CONTENT_ENCODING in self.headers: return self._decode_content(data) return data @@ -483,7 +495,7 @@ def get_charset(self, default: str) -> str: """Returns charset parameter from Content-Type header or default.""" ctype = self.headers.get(CONTENT_TYPE, "") mimetype = parse_mimetype(ctype) - return mimetype.parameters.get("charset", default) + return mimetype.parameters.get("charset", self._default_charset or default) @reify def name(self) -> Optional[str]: @@ -538,9 +550,17 @@ class MultipartReader: part_reader_cls = BodyPartReader def __init__(self, headers: Mapping[str, str], content: StreamReader) -> None: + self._mimetype = parse_mimetype(headers[CONTENT_TYPE]) + assert self._mimetype.type == "multipart", "multipart/* content type expected" + if "boundary" not in self._mimetype.parameters: + raise ValueError( + "boundary missed for Content-Type: %s" % headers[CONTENT_TYPE] + ) + self.headers = headers self._boundary = ("--" + self._get_boundary()).encode() self._content = content + self._default_charset: Optional[str] = None self._last_part: Optional[Union["MultipartReader", BodyPartReader]] = None self._at_eof = False self._at_bof = True @@ -592,7 +612,24 @@ async def next( await self._read_boundary() if self._at_eof: # we just read the last boundary, nothing to do there return None - self._last_part = await self.fetch_next_part() + + part = await self.fetch_next_part() + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.6 + if ( + self._last_part is None + and self._mimetype.subtype == "form-data" + and isinstance(part, BodyPartReader) + ): + _, params = parse_content_disposition(part.headers.get(CONTENT_DISPOSITION)) + if params.get("name") == "_charset_": + # Longest encoding in https://encoding.spec.whatwg.org/encodings.json + # is 19 characters, so 32 should be more than enough for any valid encoding. 
+ charset = await part.read_chunk(32) + if len(charset) > 31: + raise RuntimeError("Invalid default charset") + self._default_charset = charset.strip().decode() + part = await self.fetch_next_part() + self._last_part = part return self._last_part async def release(self) -> None: @@ -628,19 +665,16 @@ def _get_part_reader( return type(self)(headers, self._content) return self.multipart_reader_cls(headers, self._content) else: - return self.part_reader_cls(self._boundary, headers, self._content) - - def _get_boundary(self) -> str: - mimetype = parse_mimetype(self.headers[CONTENT_TYPE]) - - assert mimetype.type == "multipart", "multipart/* content type expected" - - if "boundary" not in mimetype.parameters: - raise ValueError( - "boundary missed for Content-Type: %s" % self.headers[CONTENT_TYPE] + return self.part_reader_cls( + self._boundary, + headers, + self._content, + subtype=self._mimetype.subtype, + default_charset=self._default_charset, ) - boundary = mimetype.parameters["boundary"] + def _get_boundary(self) -> str: + boundary = self._mimetype.parameters["boundary"] if len(boundary) > 70: raise ValueError("boundary %r is too long (70 chars max)" % boundary) @@ -731,6 +765,7 @@ def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> No super().__init__(None, content_type=ctype) self._parts: List[_Part] = [] + self._is_form_data = subtype == "form-data" def __enter__(self) -> "MultipartWriter": return self @@ -808,32 +843,36 @@ def append(self, obj: Any, headers: Optional[Mapping[str, str]] = None) -> Paylo def append_payload(self, payload: Payload) -> Payload: """Adds a new body part to multipart writer.""" - # compression - encoding: Optional[str] = payload.headers.get( - CONTENT_ENCODING, - "", - ).lower() - if encoding and encoding not in ("deflate", "gzip", "identity"): - raise RuntimeError(f"unknown content encoding: {encoding}") - if encoding == "identity": - encoding = None - - # te encoding - te_encoding: Optional[str] = 
payload.headers.get( - CONTENT_TRANSFER_ENCODING, - "", - ).lower() - if te_encoding not in ("", "base64", "quoted-printable", "binary"): - raise RuntimeError( - "unknown content transfer encoding: {}" "".format(te_encoding) + encoding: Optional[str] = None + te_encoding: Optional[str] = None + if self._is_form_data: + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.7 + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8 + assert CONTENT_DISPOSITION in payload.headers + assert "name=" in payload.headers[CONTENT_DISPOSITION] + assert ( + not {CONTENT_ENCODING, CONTENT_LENGTH, CONTENT_TRANSFER_ENCODING} + & payload.headers.keys() ) - if te_encoding == "binary": - te_encoding = None - - # size - size = payload.size - if size is not None and not (encoding or te_encoding): - payload.headers[CONTENT_LENGTH] = str(size) + else: + # compression + encoding = payload.headers.get(CONTENT_ENCODING, "").lower() + if encoding and encoding not in ("deflate", "gzip", "identity"): + raise RuntimeError(f"unknown content encoding: {encoding}") + if encoding == "identity": + encoding = None + + # te encoding + te_encoding = payload.headers.get(CONTENT_TRANSFER_ENCODING, "").lower() + if te_encoding not in ("", "base64", "quoted-printable", "binary"): + raise RuntimeError(f"unknown content transfer encoding: {te_encoding}") + if te_encoding == "binary": + te_encoding = None + + # size + size = payload.size + if size is not None and not (encoding or te_encoding): + payload.headers[CONTENT_LENGTH] = str(size) self._parts.append((payload, encoding, te_encoding)) # type: ignore[arg-type] return payload diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 4d804a31ddc..4a24196a28e 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -1387,48 +1387,6 @@ async def handler(request): resp.close() -async def test_POST_DATA_with_context_transfer_encoding(aiohttp_client) -> None: - async def handler(request): - data 
= await request.post() - assert data["name"] == "text" - return web.Response(text=data["name"]) - - app = web.Application() - app.router.add_post("/", handler) - client = await aiohttp_client(app) - - form = aiohttp.FormData() - form.add_field("name", "text", content_transfer_encoding="base64") - - resp = await client.post("/", data=form) - assert 200 == resp.status - content = await resp.text() - assert content == "text" - resp.close() - - -async def test_POST_DATA_with_content_type_context_transfer_encoding(aiohttp_client): - async def handler(request): - data = await request.post() - assert data["name"] == "text" - return web.Response(body=data["name"]) - - app = web.Application() - app.router.add_post("/", handler) - client = await aiohttp_client(app) - - form = aiohttp.FormData() - form.add_field( - "name", "text", content_type="text/plain", content_transfer_encoding="base64" - ) - - resp = await client.post("/", data=form) - assert 200 == resp.status - content = await resp.text() - assert content == "text" - resp.close() - - async def test_POST_MultiDict(aiohttp_client) -> None: async def handler(request): data = await request.post() @@ -1480,7 +1438,7 @@ async def handler(request): with fname.open("rb") as f: async with client.post( - "/", data={"some": f, "test": b"data"}, chunked=True + "/", data={"some": f, "test": io.BytesIO(b"data")}, chunked=True ) as resp: assert 200 == resp.status diff --git a/tests/test_multipart.py b/tests/test_multipart.py index f9d130e7949..dbfaf74b9b7 100644 --- a/tests/test_multipart.py +++ b/tests/test_multipart.py @@ -944,6 +944,58 @@ async def test_reading_skips_prelude(self) -> None: assert first.at_eof() assert not second.at_eof() + async def test_read_form_default_encoding(self) -> None: + with Stream( + b"--:\r\n" + b'Content-Disposition: form-data; name="_charset_"\r\n\r\n' + b"ascii" + b"\r\n" + b"--:\r\n" + b'Content-Disposition: form-data; name="field1"\r\n\r\n' + b"foo" + b"\r\n" + b"--:\r\n" + b"Content-Type: 
text/plain;charset=UTF-8\r\n" + b'Content-Disposition: form-data; name="field2"\r\n\r\n' + b"foo" + b"\r\n" + b"--:\r\n" + b'Content-Disposition: form-data; name="field3"\r\n\r\n' + b"foo" + b"\r\n" + ) as stream: + reader = aiohttp.MultipartReader( + {CONTENT_TYPE: 'multipart/form-data;boundary=":"'}, + stream, + ) + field1 = await reader.next() + assert field1.name == "field1" + assert field1.get_charset("default") == "ascii" + field2 = await reader.next() + assert field2.name == "field2" + assert field2.get_charset("default") == "UTF-8" + field3 = await reader.next() + assert field3.name == "field3" + assert field3.get_charset("default") == "ascii" + + async def test_read_form_invalid_default_encoding(self) -> None: + with Stream( + b"--:\r\n" + b'Content-Disposition: form-data; name="_charset_"\r\n\r\n' + b"this-value-is-too-long-to-be-a-charset" + b"\r\n" + b"--:\r\n" + b'Content-Disposition: form-data; name="field1"\r\n\r\n' + b"foo" + b"\r\n" + ) as stream: + reader = aiohttp.MultipartReader( + {CONTENT_TYPE: 'multipart/form-data;boundary=":"'}, + stream, + ) + with pytest.raises(RuntimeError, match="Invalid default charset"): + await reader.next() + async def test_writer(writer) -> None: assert writer.size == 7 @@ -1280,7 +1332,6 @@ async def test_preserve_content_disposition_header(self, buf, stream): CONTENT_TYPE: "text/python", }, ) - content_length = part.size await writer.write(stream) assert part.headers[CONTENT_TYPE] == "text/python" @@ -1291,9 +1342,7 @@ async def test_preserve_content_disposition_header(self, buf, stream): assert headers == ( b"--:\r\n" b"Content-Type: text/python\r\n" - b'Content-Disposition: attachments; filename="bug.py"\r\n' - b"Content-Length: %s" - b"" % (str(content_length).encode(),) + b'Content-Disposition: attachments; filename="bug.py"' ) async def test_set_content_disposition_override(self, buf, stream): @@ -1307,7 +1356,6 @@ async def test_set_content_disposition_override(self, buf, stream): CONTENT_TYPE: 
"text/python", }, ) - content_length = part.size await writer.write(stream) assert part.headers[CONTENT_TYPE] == "text/python" @@ -1318,9 +1366,7 @@ async def test_set_content_disposition_override(self, buf, stream): assert headers == ( b"--:\r\n" b"Content-Type: text/python\r\n" - b'Content-Disposition: attachments; filename="bug.py"\r\n' - b"Content-Length: %s" - b"" % (str(content_length).encode(),) + b'Content-Disposition: attachments; filename="bug.py"' ) async def test_reset_content_disposition_header(self, buf, stream): @@ -1332,8 +1378,6 @@ async def test_reset_content_disposition_header(self, buf, stream): headers={CONTENT_TYPE: "text/plain"}, ) - content_length = part.size - assert CONTENT_DISPOSITION in part.headers part.set_content_disposition("attachments", filename="bug.py") @@ -1346,9 +1390,7 @@ async def test_reset_content_disposition_header(self, buf, stream): b"--:\r\n" b"Content-Type: text/plain\r\n" b"Content-Disposition:" - b' attachments; filename="bug.py"\r\n' - b"Content-Length: %s" - b"" % (str(content_length).encode(),) + b' attachments; filename="bug.py"' ) diff --git a/tests/test_web_functional.py b/tests/test_web_functional.py index 04fc2e35fd1..ee61537068b 100644 --- a/tests/test_web_functional.py +++ b/tests/test_web_functional.py @@ -48,7 +48,8 @@ def fname(here): def new_dummy_form(): form = FormData() - form.add_field("name", b"123", content_transfer_encoding="base64") + with pytest.warns(DeprecationWarning, match="BytesPayload"): + form.add_field("name", b"123", content_transfer_encoding="base64") return form @@ -447,25 +448,6 @@ async def handler(request): await resp.release() -async def test_POST_DATA_with_content_transfer_encoding(aiohttp_client) -> None: - async def handler(request): - data = await request.post() - assert b"123" == data["name"] - return web.Response() - - app = web.Application() - app.router.add_post("/", handler) - client = await aiohttp_client(app) - - form = FormData() - form.add_field("name", b"123", 
content_transfer_encoding="base64") - - resp = await client.post("/", data=form) - assert 200 == resp.status - - await resp.release() - - async def test_post_form_with_duplicate_keys(aiohttp_client) -> None: async def handler(request): data = await request.post() @@ -523,7 +505,8 @@ async def handler(request): return web.Response() form = FormData() - form.add_field("name", b"123", content_transfer_encoding="base64") + with pytest.warns(DeprecationWarning, match="BytesPayload"): + form.add_field("name", b"123", content_transfer_encoding="base64") app = web.Application() app.router.add_post("/", handler) @@ -727,7 +710,7 @@ async def handler(request): app.router.add_post("/", handler) client = await aiohttp_client(app) - resp = await client.post("/", data={"file": data}) + resp = await client.post("/", data={"file": io.BytesIO(data)}) assert 200 == resp.status await resp.release() From cebe526b9c34dc3a3da9140409db63014bc4cf19 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 7 Apr 2024 13:19:31 +0100 Subject: [PATCH 0157/1511] Fix handling of multipart/form-data (#8280) (#8302) https://datatracker.ietf.org/doc/html/rfc7578 (cherry picked from commit 7d0be3fee540a3d4161ac7dc76422f1f5ea60104) --- CHANGES/8280.bugfix.rst | 1 + CHANGES/8280.deprecation.rst | 2 + aiohttp/formdata.py | 12 +++- aiohttp/multipart.py | 121 +++++++++++++++++++++----------- tests/test_client_functional.py | 44 +----------- tests/test_multipart.py | 68 ++++++++++++++---- tests/test_web_functional.py | 27 ++----- 7 files changed, 155 insertions(+), 120 deletions(-) create mode 100644 CHANGES/8280.bugfix.rst create mode 100644 CHANGES/8280.deprecation.rst diff --git a/CHANGES/8280.bugfix.rst b/CHANGES/8280.bugfix.rst new file mode 100644 index 00000000000..3aebe36fe9e --- /dev/null +++ b/CHANGES/8280.bugfix.rst @@ -0,0 +1 @@ +Fixed ``multipart/form-data`` compliance with :rfc:`7578` -- by :user:`Dreamsorcerer`. 
diff --git a/CHANGES/8280.deprecation.rst b/CHANGES/8280.deprecation.rst new file mode 100644 index 00000000000..302dbb2fe2a --- /dev/null +++ b/CHANGES/8280.deprecation.rst @@ -0,0 +1,2 @@ +Deprecated ``content_transfer_encoding`` parameter in :py:meth:`FormData.add_field() +` -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/formdata.py b/aiohttp/formdata.py index e7cd24ca9f7..2b75b3de72c 100644 --- a/aiohttp/formdata.py +++ b/aiohttp/formdata.py @@ -1,4 +1,5 @@ import io +import warnings from typing import Any, Iterable, List, Optional from urllib.parse import urlencode @@ -53,7 +54,12 @@ def add_field( if isinstance(value, io.IOBase): self._is_multipart = True elif isinstance(value, (bytes, bytearray, memoryview)): + msg = ( + "In v4, passing bytes will no longer create a file field. " + "Please explicitly use the filename parameter or pass a BytesIO object." + ) if filename is None and content_transfer_encoding is None: + warnings.warn(msg, DeprecationWarning) filename = name type_options: MultiDict[str] = MultiDict({"name": name}) @@ -81,7 +87,11 @@ def add_field( "content_transfer_encoding must be an instance" " of str. Got: %s" % content_transfer_encoding ) - headers[hdrs.CONTENT_TRANSFER_ENCODING] = content_transfer_encoding + msg = ( + "content_transfer_encoding is deprecated. " + "To maintain compatibility with v4 please pass a BytesPayload." 
+ ) + warnings.warn(msg, DeprecationWarning) self._is_multipart = True self._fields.append((type_options, headers, value)) diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index 4471dd4bb7e..a43ec545713 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -256,13 +256,22 @@ class BodyPartReader: chunk_size = 8192 def __init__( - self, boundary: bytes, headers: "CIMultiDictProxy[str]", content: StreamReader + self, + boundary: bytes, + headers: "CIMultiDictProxy[str]", + content: StreamReader, + *, + subtype: str = "mixed", + default_charset: Optional[str] = None, ) -> None: self.headers = headers self._boundary = boundary self._content = content + self._default_charset = default_charset self._at_eof = False - length = self.headers.get(CONTENT_LENGTH, None) + self._is_form_data = subtype == "form-data" + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8 + length = None if self._is_form_data else self.headers.get(CONTENT_LENGTH, None) self._length = int(length) if length is not None else None self._read_bytes = 0 self._unread: Deque[bytes] = deque() @@ -329,6 +338,8 @@ async def _read_chunk_from_length(self, size: int) -> bytes: assert self._length is not None, "Content-Length required for chunked read" chunk_size = min(size, self._length - self._read_bytes) chunk = await self._content.read(chunk_size) + if self._content.at_eof(): + self._at_eof = True return chunk async def _read_chunk_from_stream(self, size: int) -> bytes: @@ -449,7 +460,8 @@ def decode(self, data: bytes) -> bytes: """ if CONTENT_TRANSFER_ENCODING in self.headers: data = self._decode_content_transfer(data) - if CONTENT_ENCODING in self.headers: + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8 + if not self._is_form_data and CONTENT_ENCODING in self.headers: return self._decode_content(data) return data @@ -483,7 +495,7 @@ def get_charset(self, default: str) -> str: """Returns charset parameter from Content-Type header or default.""" ctype = 
self.headers.get(CONTENT_TYPE, "") mimetype = parse_mimetype(ctype) - return mimetype.parameters.get("charset", default) + return mimetype.parameters.get("charset", self._default_charset or default) @reify def name(self) -> Optional[str]: @@ -538,9 +550,17 @@ class MultipartReader: part_reader_cls = BodyPartReader def __init__(self, headers: Mapping[str, str], content: StreamReader) -> None: + self._mimetype = parse_mimetype(headers[CONTENT_TYPE]) + assert self._mimetype.type == "multipart", "multipart/* content type expected" + if "boundary" not in self._mimetype.parameters: + raise ValueError( + "boundary missed for Content-Type: %s" % headers[CONTENT_TYPE] + ) + self.headers = headers self._boundary = ("--" + self._get_boundary()).encode() self._content = content + self._default_charset: Optional[str] = None self._last_part: Optional[Union["MultipartReader", BodyPartReader]] = None self._at_eof = False self._at_bof = True @@ -592,7 +612,24 @@ async def next( await self._read_boundary() if self._at_eof: # we just read the last boundary, nothing to do there return None - self._last_part = await self.fetch_next_part() + + part = await self.fetch_next_part() + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.6 + if ( + self._last_part is None + and self._mimetype.subtype == "form-data" + and isinstance(part, BodyPartReader) + ): + _, params = parse_content_disposition(part.headers.get(CONTENT_DISPOSITION)) + if params.get("name") == "_charset_": + # Longest encoding in https://encoding.spec.whatwg.org/encodings.json + # is 19 characters, so 32 should be more than enough for any valid encoding. 
+ charset = await part.read_chunk(32) + if len(charset) > 31: + raise RuntimeError("Invalid default charset") + self._default_charset = charset.strip().decode() + part = await self.fetch_next_part() + self._last_part = part return self._last_part async def release(self) -> None: @@ -628,19 +665,16 @@ def _get_part_reader( return type(self)(headers, self._content) return self.multipart_reader_cls(headers, self._content) else: - return self.part_reader_cls(self._boundary, headers, self._content) - - def _get_boundary(self) -> str: - mimetype = parse_mimetype(self.headers[CONTENT_TYPE]) - - assert mimetype.type == "multipart", "multipart/* content type expected" - - if "boundary" not in mimetype.parameters: - raise ValueError( - "boundary missed for Content-Type: %s" % self.headers[CONTENT_TYPE] + return self.part_reader_cls( + self._boundary, + headers, + self._content, + subtype=self._mimetype.subtype, + default_charset=self._default_charset, ) - boundary = mimetype.parameters["boundary"] + def _get_boundary(self) -> str: + boundary = self._mimetype.parameters["boundary"] if len(boundary) > 70: raise ValueError("boundary %r is too long (70 chars max)" % boundary) @@ -731,6 +765,7 @@ def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> No super().__init__(None, content_type=ctype) self._parts: List[_Part] = [] + self._is_form_data = subtype == "form-data" def __enter__(self) -> "MultipartWriter": return self @@ -808,32 +843,36 @@ def append(self, obj: Any, headers: Optional[Mapping[str, str]] = None) -> Paylo def append_payload(self, payload: Payload) -> Payload: """Adds a new body part to multipart writer.""" - # compression - encoding: Optional[str] = payload.headers.get( - CONTENT_ENCODING, - "", - ).lower() - if encoding and encoding not in ("deflate", "gzip", "identity"): - raise RuntimeError(f"unknown content encoding: {encoding}") - if encoding == "identity": - encoding = None - - # te encoding - te_encoding: Optional[str] = 
payload.headers.get( - CONTENT_TRANSFER_ENCODING, - "", - ).lower() - if te_encoding not in ("", "base64", "quoted-printable", "binary"): - raise RuntimeError( - "unknown content transfer encoding: {}" "".format(te_encoding) + encoding: Optional[str] = None + te_encoding: Optional[str] = None + if self._is_form_data: + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.7 + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8 + assert CONTENT_DISPOSITION in payload.headers + assert "name=" in payload.headers[CONTENT_DISPOSITION] + assert ( + not {CONTENT_ENCODING, CONTENT_LENGTH, CONTENT_TRANSFER_ENCODING} + & payload.headers.keys() ) - if te_encoding == "binary": - te_encoding = None - - # size - size = payload.size - if size is not None and not (encoding or te_encoding): - payload.headers[CONTENT_LENGTH] = str(size) + else: + # compression + encoding = payload.headers.get(CONTENT_ENCODING, "").lower() + if encoding and encoding not in ("deflate", "gzip", "identity"): + raise RuntimeError(f"unknown content encoding: {encoding}") + if encoding == "identity": + encoding = None + + # te encoding + te_encoding = payload.headers.get(CONTENT_TRANSFER_ENCODING, "").lower() + if te_encoding not in ("", "base64", "quoted-printable", "binary"): + raise RuntimeError(f"unknown content transfer encoding: {te_encoding}") + if te_encoding == "binary": + te_encoding = None + + # size + size = payload.size + if size is not None and not (encoding or te_encoding): + payload.headers[CONTENT_LENGTH] = str(size) self._parts.append((payload, encoding, te_encoding)) # type: ignore[arg-type] return payload diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 8a9a4e184be..dbb2dff5ac4 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -1317,48 +1317,6 @@ async def handler(request): resp.close() -async def test_POST_DATA_with_context_transfer_encoding(aiohttp_client) -> None: - async def handler(request): - data 
= await request.post() - assert data["name"] == "text" - return web.Response(text=data["name"]) - - app = web.Application() - app.router.add_post("/", handler) - client = await aiohttp_client(app) - - form = aiohttp.FormData() - form.add_field("name", "text", content_transfer_encoding="base64") - - resp = await client.post("/", data=form) - assert 200 == resp.status - content = await resp.text() - assert content == "text" - resp.close() - - -async def test_POST_DATA_with_content_type_context_transfer_encoding(aiohttp_client): - async def handler(request): - data = await request.post() - assert data["name"] == "text" - return web.Response(body=data["name"]) - - app = web.Application() - app.router.add_post("/", handler) - client = await aiohttp_client(app) - - form = aiohttp.FormData() - form.add_field( - "name", "text", content_type="text/plain", content_transfer_encoding="base64" - ) - - resp = await client.post("/", data=form) - assert 200 == resp.status - content = await resp.text() - assert content == "text" - resp.close() - - async def test_POST_MultiDict(aiohttp_client) -> None: async def handler(request): data = await request.post() @@ -1410,7 +1368,7 @@ async def handler(request): with fname.open("rb") as f: async with client.post( - "/", data={"some": f, "test": b"data"}, chunked=True + "/", data={"some": f, "test": io.BytesIO(b"data")}, chunked=True ) as resp: assert 200 == resp.status diff --git a/tests/test_multipart.py b/tests/test_multipart.py index f9d130e7949..dbfaf74b9b7 100644 --- a/tests/test_multipart.py +++ b/tests/test_multipart.py @@ -944,6 +944,58 @@ async def test_reading_skips_prelude(self) -> None: assert first.at_eof() assert not second.at_eof() + async def test_read_form_default_encoding(self) -> None: + with Stream( + b"--:\r\n" + b'Content-Disposition: form-data; name="_charset_"\r\n\r\n' + b"ascii" + b"\r\n" + b"--:\r\n" + b'Content-Disposition: form-data; name="field1"\r\n\r\n' + b"foo" + b"\r\n" + b"--:\r\n" + b"Content-Type: 
text/plain;charset=UTF-8\r\n" + b'Content-Disposition: form-data; name="field2"\r\n\r\n' + b"foo" + b"\r\n" + b"--:\r\n" + b'Content-Disposition: form-data; name="field3"\r\n\r\n' + b"foo" + b"\r\n" + ) as stream: + reader = aiohttp.MultipartReader( + {CONTENT_TYPE: 'multipart/form-data;boundary=":"'}, + stream, + ) + field1 = await reader.next() + assert field1.name == "field1" + assert field1.get_charset("default") == "ascii" + field2 = await reader.next() + assert field2.name == "field2" + assert field2.get_charset("default") == "UTF-8" + field3 = await reader.next() + assert field3.name == "field3" + assert field3.get_charset("default") == "ascii" + + async def test_read_form_invalid_default_encoding(self) -> None: + with Stream( + b"--:\r\n" + b'Content-Disposition: form-data; name="_charset_"\r\n\r\n' + b"this-value-is-too-long-to-be-a-charset" + b"\r\n" + b"--:\r\n" + b'Content-Disposition: form-data; name="field1"\r\n\r\n' + b"foo" + b"\r\n" + ) as stream: + reader = aiohttp.MultipartReader( + {CONTENT_TYPE: 'multipart/form-data;boundary=":"'}, + stream, + ) + with pytest.raises(RuntimeError, match="Invalid default charset"): + await reader.next() + async def test_writer(writer) -> None: assert writer.size == 7 @@ -1280,7 +1332,6 @@ async def test_preserve_content_disposition_header(self, buf, stream): CONTENT_TYPE: "text/python", }, ) - content_length = part.size await writer.write(stream) assert part.headers[CONTENT_TYPE] == "text/python" @@ -1291,9 +1342,7 @@ async def test_preserve_content_disposition_header(self, buf, stream): assert headers == ( b"--:\r\n" b"Content-Type: text/python\r\n" - b'Content-Disposition: attachments; filename="bug.py"\r\n' - b"Content-Length: %s" - b"" % (str(content_length).encode(),) + b'Content-Disposition: attachments; filename="bug.py"' ) async def test_set_content_disposition_override(self, buf, stream): @@ -1307,7 +1356,6 @@ async def test_set_content_disposition_override(self, buf, stream): CONTENT_TYPE: 
"text/python", }, ) - content_length = part.size await writer.write(stream) assert part.headers[CONTENT_TYPE] == "text/python" @@ -1318,9 +1366,7 @@ async def test_set_content_disposition_override(self, buf, stream): assert headers == ( b"--:\r\n" b"Content-Type: text/python\r\n" - b'Content-Disposition: attachments; filename="bug.py"\r\n' - b"Content-Length: %s" - b"" % (str(content_length).encode(),) + b'Content-Disposition: attachments; filename="bug.py"' ) async def test_reset_content_disposition_header(self, buf, stream): @@ -1332,8 +1378,6 @@ async def test_reset_content_disposition_header(self, buf, stream): headers={CONTENT_TYPE: "text/plain"}, ) - content_length = part.size - assert CONTENT_DISPOSITION in part.headers part.set_content_disposition("attachments", filename="bug.py") @@ -1346,9 +1390,7 @@ async def test_reset_content_disposition_header(self, buf, stream): b"--:\r\n" b"Content-Type: text/plain\r\n" b"Content-Disposition:" - b' attachments; filename="bug.py"\r\n' - b"Content-Length: %s" - b"" % (str(content_length).encode(),) + b' attachments; filename="bug.py"' ) diff --git a/tests/test_web_functional.py b/tests/test_web_functional.py index 04fc2e35fd1..ee61537068b 100644 --- a/tests/test_web_functional.py +++ b/tests/test_web_functional.py @@ -48,7 +48,8 @@ def fname(here): def new_dummy_form(): form = FormData() - form.add_field("name", b"123", content_transfer_encoding="base64") + with pytest.warns(DeprecationWarning, match="BytesPayload"): + form.add_field("name", b"123", content_transfer_encoding="base64") return form @@ -447,25 +448,6 @@ async def handler(request): await resp.release() -async def test_POST_DATA_with_content_transfer_encoding(aiohttp_client) -> None: - async def handler(request): - data = await request.post() - assert b"123" == data["name"] - return web.Response() - - app = web.Application() - app.router.add_post("/", handler) - client = await aiohttp_client(app) - - form = FormData() - form.add_field("name", b"123", 
content_transfer_encoding="base64") - - resp = await client.post("/", data=form) - assert 200 == resp.status - - await resp.release() - - async def test_post_form_with_duplicate_keys(aiohttp_client) -> None: async def handler(request): data = await request.post() @@ -523,7 +505,8 @@ async def handler(request): return web.Response() form = FormData() - form.add_field("name", b"123", content_transfer_encoding="base64") + with pytest.warns(DeprecationWarning, match="BytesPayload"): + form.add_field("name", b"123", content_transfer_encoding="base64") app = web.Application() app.router.add_post("/", handler) @@ -727,7 +710,7 @@ async def handler(request): app.router.add_post("/", handler) client = await aiohttp_client(app) - resp = await client.post("/", data={"file": data}) + resp = await client.post("/", data={"file": io.BytesIO(data)}) assert 200 == resp.status await resp.release() From 410394b2e20a2045f0d4c9dcc08adbb59fff440a Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 7 Apr 2024 22:27:45 +0100 Subject: [PATCH 0158/1511] [PR #8304/88c80c14 backport][3.10] Check for backports in CI (#8306) **This is a backport of PR #8304 as merged into master (88c80c146d16d06d78562b803fec7c7b2f849e87).** Co-authored-by: Sam Bull --- .github/workflows/labels.yml | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 .github/workflows/labels.yml diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml new file mode 100644 index 00000000000..a4e961e88af --- /dev/null +++ b/.github/workflows/labels.yml @@ -0,0 +1,23 @@ +name: Labels +on: + pull_request: + branches: + - 'master' + types: [labeled, opened, synchronize, reopened, unlabeled] + +jobs: + backport: + runs-on: ubuntu-latest + name: Backport label added + steps: + - uses: actions/github-script@v7 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const pr = await github.rest.pulls.get({ + owner: 
context.repo.owner, + repo: context.repo.repo, + pull_number: context.payload.pull_request.number + }); + if (!pr.data.labels.find(l => l.name.startsWith("backport"))) + process.exit(1); From 292d961f4ee2829a1b13fad92444a4fd693fbc87 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 7 Apr 2024 22:27:59 +0100 Subject: [PATCH 0159/1511] [PR #8304/88c80c14 backport][3.9] Check for backports in CI (#8305) **This is a backport of PR #8304 as merged into master (88c80c146d16d06d78562b803fec7c7b2f849e87).** Co-authored-by: Sam Bull --- .github/workflows/labels.yml | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 .github/workflows/labels.yml diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml new file mode 100644 index 00000000000..a4e961e88af --- /dev/null +++ b/.github/workflows/labels.yml @@ -0,0 +1,23 @@ +name: Labels +on: + pull_request: + branches: + - 'master' + types: [labeled, opened, synchronize, reopened, unlabeled] + +jobs: + backport: + runs-on: ubuntu-latest + name: Backport label added + steps: + - uses: actions/github-script@v7 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const pr = await github.rest.pulls.get({ + owner: context.repo.owner, + repo: context.repo.repo, + pull_number: context.payload.pull_request.number + }); + if (!pr.data.labels.find(l => l.name.startsWith("backport"))) + process.exit(1); From 7853b08e5d71b3b616f6f98b59ca5a5537ead2f9 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 8 Apr 2024 00:00:10 +0100 Subject: [PATCH 0160/1511] [PR #8299/28d026eb backport][3.10] Create marker for internal tests (#8308) **This is a backport of PR #8299 as merged into master (28d026eb9d0c93389431f8f142a5f7c4448d13f9).** Co-authored-by: Sam Bull --- CHANGES/8299.packaging.rst | 2 ++ setup.cfg | 1 + tests/test_imports.py | 1 + 3 files changed, 4 insertions(+) create 
mode 100644 CHANGES/8299.packaging.rst diff --git a/CHANGES/8299.packaging.rst b/CHANGES/8299.packaging.rst new file mode 100644 index 00000000000..05abc8237e2 --- /dev/null +++ b/CHANGES/8299.packaging.rst @@ -0,0 +1,2 @@ +Added an ``internal`` pytest marker for tests which should be skipped +by packagers (use ``-m 'not internal'`` to disable them) -- by :user:`Dreamsorcerer`. diff --git a/setup.cfg b/setup.cfg index 02a5d54d114..cfd1be5610f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -172,3 +172,4 @@ junit_family=xunit2 xfail_strict = true markers = dev_mode: mark test to run in dev mode. + internal: tests which may cause issues for packagers, but should be run in aiohttp's CI. diff --git a/tests/test_imports.py b/tests/test_imports.py index 7d0869d46c4..7f35f5b8cc2 100644 --- a/tests/test_imports.py +++ b/tests/test_imports.py @@ -33,6 +33,7 @@ def test_web___all__(pytester: pytest.Pytester) -> None: } +@pytest.mark.internal @pytest.mark.skipif( not sys.platform.startswith("linux") or platform.python_implementation() == "PyPy", reason="Timing is more reliable on Linux", From ec2be0500e2674eea019c0966a7a905e9b3d6608 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 8 Apr 2024 00:00:25 +0100 Subject: [PATCH 0161/1511] [PR #8299/28d026eb backport][3.9] Create marker for internal tests (#8307) **This is a backport of PR #8299 as merged into master (28d026eb9d0c93389431f8f142a5f7c4448d13f9).** Co-authored-by: Sam Bull --- CHANGES/8299.packaging.rst | 2 ++ setup.cfg | 1 + tests/test_imports.py | 1 + 3 files changed, 4 insertions(+) create mode 100644 CHANGES/8299.packaging.rst diff --git a/CHANGES/8299.packaging.rst b/CHANGES/8299.packaging.rst new file mode 100644 index 00000000000..05abc8237e2 --- /dev/null +++ b/CHANGES/8299.packaging.rst @@ -0,0 +1,2 @@ +Added an ``internal`` pytest marker for tests which should be skipped +by packagers (use ``-m 'not internal'`` to disable them) -- by 
:user:`Dreamsorcerer`. diff --git a/setup.cfg b/setup.cfg index c514bab9f94..15d22a2f5f7 100644 --- a/setup.cfg +++ b/setup.cfg @@ -171,3 +171,4 @@ junit_family=xunit2 xfail_strict = true markers = dev_mode: mark test to run in dev mode. + internal: tests which may cause issues for packagers, but should be run in aiohttp's CI. diff --git a/tests/test_imports.py b/tests/test_imports.py index 7d0869d46c4..7f35f5b8cc2 100644 --- a/tests/test_imports.py +++ b/tests/test_imports.py @@ -33,6 +33,7 @@ def test_web___all__(pytester: pytest.Pytester) -> None: } +@pytest.mark.internal @pytest.mark.skipif( not sys.platform.startswith("linux") or platform.python_implementation() == "PyPy", reason="Timing is more reliable on Linux", From 6392df7b4f6b446441ffcdd7629cba18207e9591 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Apr 2024 10:46:05 +0000 Subject: [PATCH 0162/1511] Bump typing-extensions from 4.10.0 to 4.11.0 (#8312) Bumps [typing-extensions](https://github.com/python/typing_extensions) from 4.10.0 to 4.11.0.
Release notes

Sourced from typing-extensions's releases.

4.11.0

Release 4.11.0 (April 5, 2024)

This feature release provides improvements to various recently added features, most importantly type parameter defaults (PEP 696).

There are no changes since 4.11.0rc1.

Changes since 4.10.0:

  • Fix tests on Python 3.13.0a5. Patch by Jelle Zijlstra.
  • Fix the runtime behavior of type parameters with defaults (PEP 696). Patch by Nadir Chowdhury.
  • Fix minor discrepancy between error messages produced by typing and typing_extensions on Python 3.10. Patch by Jelle Zijlstra.
  • When include_extra=False, get_type_hints() now strips ReadOnly from the annotation.

4.11.0rc1

  • Fix tests on Python 3.13.0a5. Patch by Jelle Zijlstra.
  • Fix the runtime behavior of type parameters with defaults (PEP 696). Patch by Nadir Chowdhury.
  • Fix minor discrepancy between error messages produced by typing and typing_extensions on Python 3.10. Patch by Jelle Zijlstra.
  • When include_extra=False, get_type_hints() now strips ReadOnly from the annotation.
Changelog

Sourced from typing-extensions's changelog.

Release 4.11.0 (April 5, 2024)

This feature release provides improvements to various recently added features, most importantly type parameter defaults (PEP 696).

There are no changes since 4.11.0rc1.

Release 4.11.0rc1 (March 24, 2024)

  • Fix tests on Python 3.13.0a5. Patch by Jelle Zijlstra.
  • Fix the runtime behavior of type parameters with defaults (PEP 696). Patch by Nadir Chowdhury.
  • Fix minor discrepancy between error messages produced by typing and typing_extensions on Python 3.10. Patch by Jelle Zijlstra.
  • When include_extra=False, get_type_hints() now strips ReadOnly from the annotation.
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=typing-extensions&package-manager=pip&previous-version=4.10.0&new-version=4.11.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/cython.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- requirements/typing-extensions.txt | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 977f97dd5da..e7ddfd3085d 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -36,7 +36,7 @@ pycares==4.3.0 # via aiodns pycparser==2.21 # via cffi -typing-extensions==4.10.0 +typing-extensions==4.11.0 # via -r requirements/typing-extensions.in uvloop==0.19.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index f13a0f83ef3..f7e0b7c4798 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -246,7 +246,7 @@ trustme==1.1.0 ; platform_machine != "i686" # via -r requirements/test.in typer==0.6.1 # via python-on-whales -typing-extensions==4.10.0 +typing-extensions==4.11.0 # via # -r requirements/typing-extensions.in # aioredis diff --git a/requirements/cython.txt b/requirements/cython.txt index 63bcc1ac3b5..72b9a67af98 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -8,5 +8,5 @@ cython==3.0.10 # via -r requirements/cython.in multidict==6.0.5 # via -r requirements/multidict.in -typing-extensions==4.10.0 +typing-extensions==4.11.0 # via -r requirements/typing-extensions.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 6ea71482ea4..4e11ab6c31b 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -232,7 +232,7 @@ trustme==1.1.0 ; platform_machine != "i686" # via -r requirements/test.in typer==0.9.0 # via python-on-whales -typing-extensions==4.10.0 +typing-extensions==4.11.0 # via # -r requirements/typing-extensions.in 
# aioredis diff --git a/requirements/lint.txt b/requirements/lint.txt index 59da6563db3..fcfac455b3e 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -47,7 +47,7 @@ tomli==2.0.1 # mypy # pytest # slotscheck -typing-extensions==4.10.0 +typing-extensions==4.11.0 # via # -r requirements/typing-extensions.in # aioredis diff --git a/requirements/test.txt b/requirements/test.txt index 19edd509007..a55d127d077 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -111,7 +111,7 @@ trustme==1.1.0 ; platform_machine != "i686" # via -r requirements/test.in typer==0.9.0 # via python-on-whales -typing-extensions==4.10.0 +typing-extensions==4.11.0 # via # -r requirements/typing-extensions.in # annotated-types diff --git a/requirements/typing-extensions.txt b/requirements/typing-extensions.txt index a7d80f5a0fd..289f5da53cb 100644 --- a/requirements/typing-extensions.txt +++ b/requirements/typing-extensions.txt @@ -4,5 +4,5 @@ # # pip-compile --allow-unsafe --output-file=requirements/typing-extensions.txt --resolver=backtracking --strip-extras requirements/typing-extensions.in # -typing-extensions==4.10.0 +typing-extensions==4.11.0 # via -r requirements/typing-extensions.in From b2a1f984ef3afaa087406ccaba330389f568e6d2 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 8 Apr 2024 19:12:54 +0100 Subject: [PATCH 0163/1511] [PR #8309/c29945a1 backport][3.10] Improve reliability of run_app test (#8316) **This is a backport of PR #8309 as merged into master (c29945a19ef93ac05f7499bfc410e234270ddbb3).** Co-authored-by: Sam Bull --- tests/test_run_app.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/tests/test_run_app.py b/tests/test_run_app.py index 1166421a4eb..5696928b219 100644 --- a/tests/test_run_app.py +++ b/tests/test_run_app.py @@ -916,10 +916,16 @@ async def stop(self, request: web.Request) -> web.Response: def run_app(self, port: int, timeout: int, task, 
extra_test=None) -> asyncio.Task: async def test() -> None: - await asyncio.sleep(1) + await asyncio.sleep(0.5) async with ClientSession() as sess: - async with sess.get(f"http://localhost:{port}/"): - pass + for _ in range(5): # pragma: no cover + try: + async with sess.get(f"http://localhost:{port}/"): + pass + except ClientConnectorError: + await asyncio.sleep(0.5) + else: + break async with sess.get(f"http://localhost:{port}/stop"): pass From ed43040613988fc4666109aca82a5180ff165df5 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 8 Apr 2024 19:13:07 +0100 Subject: [PATCH 0164/1511] [PR #8309/c29945a1 backport][3.9] Improve reliability of run_app test (#8315) **This is a backport of PR #8309 as merged into master (c29945a19ef93ac05f7499bfc410e234270ddbb3).** Co-authored-by: Sam Bull --- tests/test_run_app.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/tests/test_run_app.py b/tests/test_run_app.py index 1166421a4eb..5696928b219 100644 --- a/tests/test_run_app.py +++ b/tests/test_run_app.py @@ -916,10 +916,16 @@ async def stop(self, request: web.Request) -> web.Response: def run_app(self, port: int, timeout: int, task, extra_test=None) -> asyncio.Task: async def test() -> None: - await asyncio.sleep(1) + await asyncio.sleep(0.5) async with ClientSession() as sess: - async with sess.get(f"http://localhost:{port}/"): - pass + for _ in range(5): # pragma: no cover + try: + async with sess.get(f"http://localhost:{port}/"): + pass + except ClientConnectorError: + await asyncio.sleep(0.5) + else: + break async with sess.get(f"http://localhost:{port}/stop"): pass From 7e16dd198ea5042bd127a0d0417c40348ac1d158 Mon Sep 17 00:00:00 2001 From: Alexey Nikitin <30608416+NewGlad@users.noreply.github.com> Date: Tue, 9 Apr 2024 21:50:18 +0800 Subject: [PATCH 0165/1511] Patchback/backports/3.10/5fd29467fb63efdfae1ace280cec36b1f8139567/pr 8290 (#8310) --- CHANGES/8253.bugfix | 1 + 
CONTRIBUTORS.txt | 1 + aiohttp/client.py | 51 ++++++++++++++++++++---------------- tests/test_client_session.py | 10 +++++++ 4 files changed, 40 insertions(+), 23 deletions(-) create mode 100644 CHANGES/8253.bugfix diff --git a/CHANGES/8253.bugfix b/CHANGES/8253.bugfix new file mode 100644 index 00000000000..91b06d9b35d --- /dev/null +++ b/CHANGES/8253.bugfix @@ -0,0 +1 @@ +Fixed "Unclosed client session" when initialization of ClientSession fails -- by :user:`NewGlad`. diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 6b53b5ad9c9..4442664118f 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -27,6 +27,7 @@ Alexander Shorin Alexander Travov Alexandru Mihai Alexey Firsov +Alexey Nikitin Alexey Popravka Alexey Stepanov Amin Etesamian diff --git a/aiohttp/client.py b/aiohttp/client.py index 8d8d13f25f7..6288fb8f89c 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -252,6 +252,10 @@ def __init__( max_field_size: int = 8190, fallback_charset_resolver: _CharsetResolver = lambda r, b: "utf-8", ) -> None: + # We initialise _connector to None immediately, as it's referenced in __del__() + # and could cause issues if an exception occurs during initialisation. 
+ self._connector: Optional[BaseConnector] = None + if loop is None: if connector is not None: loop = connector._loop @@ -266,29 +270,6 @@ def __init__( self._base_url.origin() == self._base_url ), "Only absolute URLs without path part are supported" - if connector is None: - connector = TCPConnector(loop=loop) - - if connector._loop is not loop: - raise RuntimeError("Session and connector has to use same event loop") - - self._loop = loop - - if loop.get_debug(): - self._source_traceback = traceback.extract_stack(sys._getframe(1)) - - if cookie_jar is None: - cookie_jar = CookieJar(loop=loop) - self._cookie_jar = cookie_jar - - if cookies is not None: - self._cookie_jar.update_cookies(cookies) - - self._connector = connector - self._connector_owner = connector_owner - self._default_auth = auth - self._version = version - self._json_serialize = json_serialize if timeout is sentinel or timeout is None: self._timeout = DEFAULT_TIMEOUT if read_timeout is not sentinel: @@ -324,6 +305,30 @@ def __init__( "conflict, please setup " "timeout.connect" ) + + if connector is None: + connector = TCPConnector(loop=loop) + + if connector._loop is not loop: + raise RuntimeError("Session and connector has to use same event loop") + + self._loop = loop + + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + if cookie_jar is None: + cookie_jar = CookieJar(loop=loop) + self._cookie_jar = cookie_jar + + if cookies is not None: + self._cookie_jar.update_cookies(cookies) + + self._connector = connector + self._connector_owner = connector_owner + self._default_auth = auth + self._version = version + self._json_serialize = json_serialize self._raise_for_status = raise_for_status self._auto_decompress = auto_decompress self._trust_env = trust_env diff --git a/tests/test_client_session.py b/tests/test_client_session.py index a0654ed8ccd..416b6bbce5d 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -885,3 +885,13 @@ 
async def test_build_url_returns_expected_url( ) -> None: session = await create_session(base_url) assert session._build_url(url) == expected_url + + +async def test_instantiation_with_invalid_timeout_value(loop): + loop.set_debug(False) + logs = [] + loop.set_exception_handler(lambda loop, ctx: logs.append(ctx)) + with pytest.raises(ValueError, match="timeout parameter cannot be .*"): + ClientSession(timeout=1) + # should not have "Unclosed client session" warning + assert not logs From aa8948fb1bba0b517eefd72a509de2f572d641d4 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Thu, 11 Apr 2024 15:53:10 +0100 Subject: [PATCH 0166/1511] Escape filenames and paths in HTML when generating index pages (#8317) (#8318) Co-authored-by: J. Nick Koston (cherry picked from commit ffbc43233209df302863712b511a11bdb6001b0f) --- CHANGES/8317.bugfix.rst | 1 + aiohttp/web_urldispatcher.py | 12 ++-- tests/test_web_urldispatcher.py | 124 ++++++++++++++++++++++++++++---- 3 files changed, 118 insertions(+), 19 deletions(-) create mode 100644 CHANGES/8317.bugfix.rst diff --git a/CHANGES/8317.bugfix.rst b/CHANGES/8317.bugfix.rst new file mode 100644 index 00000000000..b24ef2aeb81 --- /dev/null +++ b/CHANGES/8317.bugfix.rst @@ -0,0 +1 @@ +Escaped filenames in static view -- by :user:`bdraco`. 
diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index cb433e6c857..d8f148f7618 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -1,7 +1,9 @@ import abc import asyncio import base64 +import functools import hashlib +import html import inspect import keyword import os @@ -90,6 +92,8 @@ _ExpectHandler = Callable[[Request], Awaitable[Optional[StreamResponse]]] _Resolve = Tuple[Optional["UrlMappingMatchInfo"], Set[str]] +html_escape = functools.partial(html.escape, quote=True) + class _InfoDict(TypedDict, total=False): path: str @@ -708,7 +712,7 @@ def _directory_as_html(self, filepath: Path) -> str: assert filepath.is_dir() relative_path_to_dir = filepath.relative_to(self._directory).as_posix() - index_of = f"Index of /{relative_path_to_dir}" + index_of = f"Index of /{html_escape(relative_path_to_dir)}" h1 = f"

{index_of}

" index_list = [] @@ -716,7 +720,7 @@ def _directory_as_html(self, filepath: Path) -> str: for _file in sorted(dir_index): # show file url as relative to static path rel_path = _file.relative_to(self._directory).as_posix() - file_url = self._prefix + "/" + rel_path + quoted_file_url = _quote_path(f"{self._prefix}/{rel_path}") # if file is a directory, add '/' to the end of the name if _file.is_dir(): @@ -725,9 +729,7 @@ def _directory_as_html(self, filepath: Path) -> str: file_name = _file.name index_list.append( - '
  • {name}
  • '.format( - url=file_url, name=file_name - ) + f'
  • {html_escape(file_name)}
  • ' ) ul = "
      \n{}\n
    ".format("\n".join(index_list)) body = f"\n{h1}\n{ul}\n" diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 7e8fe53165d..04f2029ebaf 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -1,6 +1,7 @@ import asyncio import functools import pathlib +import sys from typing import Optional from unittest import mock from unittest.mock import MagicMock @@ -14,31 +15,38 @@ @pytest.mark.parametrize( - "show_index,status,prefix,data", + "show_index,status,prefix,request_path,data", [ - pytest.param(False, 403, "/", None, id="index_forbidden"), + pytest.param(False, 403, "/", "/", None, id="index_forbidden"), pytest.param( True, 200, "/", - b"\n\nIndex of /.\n" - b"\n\n

    Index of /.

    \n\n\n", - id="index_root", + "/", + b"\n\nIndex of /.\n\n\n

    Index of" + b' /.

    \n\n\n", ), pytest.param( True, 200, "/static", - b"\n\nIndex of /.\n" - b"\n\n

    Index of /.

    \n\n\n", + "/static", + b"\n\nIndex of /.\n\n\n

    Index of" + b' /.

    \n\n\n', id="index_static", ), + pytest.param( + True, + 200, + "/static", + "/static/my_dir", + b"\n\nIndex of /my_dir\n\n\n

    " + b'Index of /my_dir

    \n\n\n", + id="index_subdir", + ), ], ) async def test_access_root_of_static_handler( @@ -47,6 +55,7 @@ async def test_access_root_of_static_handler( show_index: bool, status: int, prefix: str, + request_path: str, data: Optional[bytes], ) -> None: # Tests the operation of static file server. @@ -72,7 +81,94 @@ async def test_access_root_of_static_handler( client = await aiohttp_client(app) # Request the root of the static directory. - async with await client.get(prefix) as r: + async with await client.get(request_path) as r: + assert r.status == status + + if data: + assert r.headers["Content-Type"] == "text/html; charset=utf-8" + read_ = await r.read() + assert read_ == data + + +@pytest.mark.internal # Dependent on filesystem +@pytest.mark.skipif( + not sys.platform.startswith("linux"), + reason="Invalid filenames on some filesystems (like Windows)", +) +@pytest.mark.parametrize( + "show_index,status,prefix,request_path,data", + [ + pytest.param(False, 403, "/", "/", None, id="index_forbidden"), + pytest.param( + True, + 200, + "/", + "/", + b"\n\nIndex of /.\n\n\n

    Index of" + b' /.

    \n\n\n", + ), + pytest.param( + True, + 200, + "/static", + "/static", + b"\n\nIndex of /.\n\n\n

    Index of" + b' /.

    \n\n\n", + id="index_static", + ), + pytest.param( + True, + 200, + "/static", + "/static/.dir", + b"\n\nIndex of /<img src=0 onerror=alert(1)>.dir</t" + b"itle>\n</head>\n<body>\n<h1>Index of /<img src=0 onerror=alert(1)>.di" + b'r</h1>\n<ul>\n<li><a href="/static/%3Cimg%20src=0%20onerror=alert(1)%3E.di' + b'r/my_file_in_dir">my_file_in_dir</a></li>\n</ul>\n</body>\n</html>', + id="index_subdir", + ), + ], +) +async def test_access_root_of_static_handler_xss( + tmp_path: pathlib.Path, + aiohttp_client: AiohttpClient, + show_index: bool, + status: int, + prefix: str, + request_path: str, + data: Optional[bytes], +) -> None: + # Tests the operation of static file server. + # Try to access the root of static file server, and make + # sure that correct HTTP statuses are returned depending if we directory + # index should be shown or not. + # Ensure that html in file names is escaped. + # Ensure that links are url quoted. + my_file = tmp_path / "<img src=0 onerror=alert(1)>.txt" + my_dir = tmp_path / "<img src=0 onerror=alert(1)>.dir" + my_dir.mkdir() + my_file_in_dir = my_dir / "my_file_in_dir" + + with my_file.open("w") as fw: + fw.write("hello") + + with my_file_in_dir.open("w") as fw: + fw.write("world") + + app = web.Application() + + # Register global static route: + app.router.add_static(prefix, str(tmp_path), show_index=show_index) + client = await aiohttp_client(app) + + # Request the root of the static directory. + async with await client.get(request_path) as r: assert r.status == status if data: From 28335525d1eac015a7e7584137678cbb6ff19397 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Thu, 11 Apr 2024 15:54:45 +0100 Subject: [PATCH 0167/1511] Escape filenames and paths in HTML when generating index pages (#8317) (#8319) Co-authored-by: J. 
Nick Koston <nick@koston.org> (cherry picked from commit ffbc43233209df302863712b511a11bdb6001b0f) --- CHANGES/8317.bugfix.rst | 1 + aiohttp/web_urldispatcher.py | 12 ++-- tests/test_web_urldispatcher.py | 124 ++++++++++++++++++++++++++++---- 3 files changed, 118 insertions(+), 19 deletions(-) create mode 100644 CHANGES/8317.bugfix.rst diff --git a/CHANGES/8317.bugfix.rst b/CHANGES/8317.bugfix.rst new file mode 100644 index 00000000000..b24ef2aeb81 --- /dev/null +++ b/CHANGES/8317.bugfix.rst @@ -0,0 +1 @@ +Escaped filenames in static view -- by :user:`bdraco`. diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 99696533444..954291f6449 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -1,7 +1,9 @@ import abc import asyncio import base64 +import functools import hashlib +import html import inspect import keyword import os @@ -90,6 +92,8 @@ _ExpectHandler = Callable[[Request], Awaitable[Optional[StreamResponse]]] _Resolve = Tuple[Optional["UrlMappingMatchInfo"], Set[str]] +html_escape = functools.partial(html.escape, quote=True) + class _InfoDict(TypedDict, total=False): path: str @@ -708,7 +712,7 @@ def _directory_as_html(self, filepath: Path) -> str: assert filepath.is_dir() relative_path_to_dir = filepath.relative_to(self._directory).as_posix() - index_of = f"Index of /{relative_path_to_dir}" + index_of = f"Index of /{html_escape(relative_path_to_dir)}" h1 = f"<h1>{index_of}</h1>" index_list = [] @@ -716,7 +720,7 @@ def _directory_as_html(self, filepath: Path) -> str: for _file in sorted(dir_index): # show file url as relative to static path rel_path = _file.relative_to(self._directory).as_posix() - file_url = self._prefix + "/" + rel_path + quoted_file_url = _quote_path(f"{self._prefix}/{rel_path}") # if file is a directory, add '/' to the end of the name if _file.is_dir(): @@ -725,9 +729,7 @@ def _directory_as_html(self, filepath: Path) -> str: file_name = _file.name index_list.append( - '<li><a 
href="{url}">{name}</a></li>'.format( - url=file_url, name=file_name - ) + f'<li><a href="{quoted_file_url}">{html_escape(file_name)}</a></li>' ) ul = "<ul>\n{}\n</ul>".format("\n".join(index_list)) body = f"<body>\n{h1}\n{ul}\n</body>" diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 76e533e473a..0441890c10b 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -1,6 +1,7 @@ import asyncio import functools import pathlib +import sys from typing import Optional from unittest import mock from unittest.mock import MagicMock @@ -14,31 +15,38 @@ @pytest.mark.parametrize( - "show_index,status,prefix,data", + "show_index,status,prefix,request_path,data", [ - pytest.param(False, 403, "/", None, id="index_forbidden"), + pytest.param(False, 403, "/", "/", None, id="index_forbidden"), pytest.param( True, 200, "/", - b"<html>\n<head>\n<title>Index of /.\n" - b"\n\n

    Index of /.

    \n\n\n", - id="index_root", + "/", + b"\n\nIndex of /.\n\n\n

    Index of" + b' /.

    \n\n\n", ), pytest.param( True, 200, "/static", - b"\n\nIndex of /.\n" - b"\n\n

    Index of /.

    \n\n\n", + "/static", + b"\n\nIndex of /.\n\n\n

    Index of" + b' /.

    \n\n\n', id="index_static", ), + pytest.param( + True, + 200, + "/static", + "/static/my_dir", + b"\n\nIndex of /my_dir\n\n\n

    " + b'Index of /my_dir

    \n\n\n", + id="index_subdir", + ), ], ) async def test_access_root_of_static_handler( @@ -47,6 +55,7 @@ async def test_access_root_of_static_handler( show_index: bool, status: int, prefix: str, + request_path: str, data: Optional[bytes], ) -> None: # Tests the operation of static file server. @@ -72,7 +81,94 @@ async def test_access_root_of_static_handler( client = await aiohttp_client(app) # Request the root of the static directory. - async with await client.get(prefix) as r: + async with await client.get(request_path) as r: + assert r.status == status + + if data: + assert r.headers["Content-Type"] == "text/html; charset=utf-8" + read_ = await r.read() + assert read_ == data + + +@pytest.mark.internal # Dependent on filesystem +@pytest.mark.skipif( + not sys.platform.startswith("linux"), + reason="Invalid filenames on some filesystems (like Windows)", +) +@pytest.mark.parametrize( + "show_index,status,prefix,request_path,data", + [ + pytest.param(False, 403, "/", "/", None, id="index_forbidden"), + pytest.param( + True, + 200, + "/", + "/", + b"\n\nIndex of /.\n\n\n

    Index of" + b' /.

    \n\n\n", + ), + pytest.param( + True, + 200, + "/static", + "/static", + b"\n\nIndex of /.\n\n\n

    Index of" + b' /.

    \n\n\n", + id="index_static", + ), + pytest.param( + True, + 200, + "/static", + "/static/.dir", + b"\n\nIndex of /<img src=0 onerror=alert(1)>.dir</t" + b"itle>\n</head>\n<body>\n<h1>Index of /<img src=0 onerror=alert(1)>.di" + b'r</h1>\n<ul>\n<li><a href="/static/%3Cimg%20src=0%20onerror=alert(1)%3E.di' + b'r/my_file_in_dir">my_file_in_dir</a></li>\n</ul>\n</body>\n</html>', + id="index_subdir", + ), + ], +) +async def test_access_root_of_static_handler_xss( + tmp_path: pathlib.Path, + aiohttp_client: AiohttpClient, + show_index: bool, + status: int, + prefix: str, + request_path: str, + data: Optional[bytes], +) -> None: + # Tests the operation of static file server. + # Try to access the root of static file server, and make + # sure that correct HTTP statuses are returned depending if we directory + # index should be shown or not. + # Ensure that html in file names is escaped. + # Ensure that links are url quoted. + my_file = tmp_path / "<img src=0 onerror=alert(1)>.txt" + my_dir = tmp_path / "<img src=0 onerror=alert(1)>.dir" + my_dir.mkdir() + my_file_in_dir = my_dir / "my_file_in_dir" + + with my_file.open("w") as fw: + fw.write("hello") + + with my_file_in_dir.open("w") as fw: + fw.write("world") + + app = web.Application() + + # Register global static route: + app.router.add_static(prefix, str(tmp_path), show_index=show_index) + client = await aiohttp_client(app) + + # Request the root of the static directory. 
+ async with await client.get(request_path) as r: assert r.status == status if data: From a7e240a9f625a0b9559bdf5f0049c71565352400 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 11 Apr 2024 17:47:50 +0100 Subject: [PATCH 0168/1511] [PR #8320/9ba9a4e5 backport][3.9] Fix Python parser to mark responses without length as closing (#8321) **This is a backport of PR #8320 as merged into master (9ba9a4e531599b9cb2f8cc80effbde40c7eab0bd).** Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8320.bugfix.rst | 1 + aiohttp/http_parser.py | 11 ++++++++++- tests/test_http_parser.py | 2 +- 3 files changed, 12 insertions(+), 2 deletions(-) create mode 100644 CHANGES/8320.bugfix.rst diff --git a/CHANGES/8320.bugfix.rst b/CHANGES/8320.bugfix.rst new file mode 100644 index 00000000000..027074f743b --- /dev/null +++ b/CHANGES/8320.bugfix.rst @@ -0,0 +1 @@ +Fixed the pure python parser to mark a connection as closing when a response has no length -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index cce0b788d46..013511917e8 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -718,7 +718,16 @@ def parse_message(self, lines: List[bytes]) -> RawResponseMessage: ) = self.parse_headers(lines) if close is None: - close = version_o <= HttpVersion10 + if version_o <= HttpVersion10: + close = True + # https://www.rfc-editor.org/rfc/rfc9112.html#name-message-body-length + elif 100 <= status_i < 200 or status_i in {204, 304}: + close = False + elif hdrs.CONTENT_LENGTH in headers or hdrs.TRANSFER_ENCODING in headers: + close = False + else: + # https://www.rfc-editor.org/rfc/rfc9112.html#section-6.3-2.8 + close = True return RawResponseMessage( version_o, diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index d76bb64bab5..ee7dc4aabc5 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -735,7 +735,7 @@ def test_max_header_value_size_continuation_under_limit(response) -> None: assert msg.version == (1, 1) assert msg.headers == CIMultiDict({"data": "test " + value.decode()}) assert msg.raw_headers == ((b"data", b"test " + value),) - # assert not msg.should_close # TODO: https://github.com/nodejs/llhttp/issues/354 + assert msg.should_close assert msg.compression is None assert not msg.upgrade assert not msg.chunked From 68f1e414133066c4f620201a09f71b20cda8bb29 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 11 Apr 2024 18:13:17 +0100 Subject: [PATCH 0169/1511] [PR #8320/9ba9a4e5 backport][3.10] Fix Python parser to mark responses without length as closing (#8322) **This is a backport of PR #8320 as merged into master (9ba9a4e531599b9cb2f8cc80effbde40c7eab0bd).** Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8320.bugfix.rst | 1 + aiohttp/http_parser.py | 11 ++++++++++- tests/test_http_parser.py | 2 +- 3 files changed, 12 insertions(+), 2 deletions(-) create mode 100644 
CHANGES/8320.bugfix.rst diff --git a/CHANGES/8320.bugfix.rst b/CHANGES/8320.bugfix.rst new file mode 100644 index 00000000000..027074f743b --- /dev/null +++ b/CHANGES/8320.bugfix.rst @@ -0,0 +1 @@ +Fixed the pure python parser to mark a connection as closing when a response has no length -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index cce0b788d46..013511917e8 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -718,7 +718,16 @@ def parse_message(self, lines: List[bytes]) -> RawResponseMessage: ) = self.parse_headers(lines) if close is None: - close = version_o <= HttpVersion10 + if version_o <= HttpVersion10: + close = True + # https://www.rfc-editor.org/rfc/rfc9112.html#name-message-body-length + elif 100 <= status_i < 200 or status_i in {204, 304}: + close = False + elif hdrs.CONTENT_LENGTH in headers or hdrs.TRANSFER_ENCODING in headers: + close = False + else: + # https://www.rfc-editor.org/rfc/rfc9112.html#section-6.3-2.8 + close = True return RawResponseMessage( version_o, diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index 04b254c0ae8..32dd0e68b57 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -743,7 +743,7 @@ def test_max_header_value_size_continuation_under_limit(response) -> None: assert msg.version == (1, 1) assert msg.headers == CIMultiDict({"data": "test " + value.decode()}) assert msg.raw_headers == ((b"data", b"test " + value),) - # assert not msg.should_close # TODO: https://github.com/nodejs/llhttp/issues/354 + assert msg.should_close assert msg.compression is None assert not msg.upgrade assert not msg.chunked From b3397c7ac44fc80206d28f1dd0d1f3b10c4ec572 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Thu, 11 Apr 2024 18:54:11 +0100 Subject: [PATCH 0170/1511] Release v3.9.4 (#8201) Preparing for tomorrow, let me know if there's any reason to delay. 
@bdraco @webknjaz --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- CHANGES.rst | 213 +++++++++++++++++++++++++++++++++++ CHANGES/7741.bugfix.rst | 3 - CHANGES/8089.bugfix.rst | 3 - CHANGES/8099.contrib.rst | 4 - CHANGES/8104.bugfix.rst | 1 - CHANGES/8116.contrib.rst | 1 - CHANGES/8136.contrib.rst | 7 -- CHANGES/8139.contrib.rst | 1 - CHANGES/8146.feature.rst | 1 - CHANGES/8163.bugfix.rst | 5 - CHANGES/8197.doc | 1 - CHANGES/8200.bugfix.rst | 6 - CHANGES/8251.bugfix.rst | 4 - CHANGES/8252.bugfix.rst | 2 - CHANGES/8267.doc.rst | 1 - CHANGES/8271.bugfix.rst | 1 - CHANGES/8280.bugfix.rst | 1 - CHANGES/8280.deprecation.rst | 2 - CHANGES/8283.bugfix.rst | 2 - CHANGES/8292.feature.rst | 1 - CHANGES/8299.packaging.rst | 2 - CHANGES/8317.bugfix.rst | 1 - CHANGES/8320.bugfix.rst | 1 - aiohttp/__init__.py | 2 +- 24 files changed, 214 insertions(+), 52 deletions(-) delete mode 100644 CHANGES/7741.bugfix.rst delete mode 100644 CHANGES/8089.bugfix.rst delete mode 100644 CHANGES/8099.contrib.rst delete mode 100644 CHANGES/8104.bugfix.rst delete mode 100644 CHANGES/8116.contrib.rst delete mode 100644 CHANGES/8136.contrib.rst delete mode 100644 CHANGES/8139.contrib.rst delete mode 100644 CHANGES/8146.feature.rst delete mode 100644 CHANGES/8163.bugfix.rst delete mode 100644 CHANGES/8197.doc delete mode 100644 CHANGES/8200.bugfix.rst delete mode 100644 CHANGES/8251.bugfix.rst delete mode 100644 CHANGES/8252.bugfix.rst delete mode 100644 CHANGES/8267.doc.rst delete mode 100644 CHANGES/8271.bugfix.rst delete mode 100644 CHANGES/8280.bugfix.rst delete mode 100644 CHANGES/8280.deprecation.rst delete mode 100644 CHANGES/8283.bugfix.rst delete mode 100644 CHANGES/8292.feature.rst delete mode 100644 CHANGES/8299.packaging.rst delete mode 100644 CHANGES/8317.bugfix.rst delete mode 100644 CHANGES/8320.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index 64dff9b516d..72f63329af2 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 
+10,219 @@ .. towncrier release notes start + +3.9.4 (2024-04-11) +================== + +Bug fixes +--------- + +- The asynchronous internals now set the underlying causes + when assigning exceptions to the future objects + -- by :user:`webknjaz`. + + + *Related issues and pull requests on GitHub:* + :issue:`8089`. + + + +- Treated values of ``Accept-Encoding`` header as case-insensitive when checking + for gzip files -- by :user:`steverep`. + + + *Related issues and pull requests on GitHub:* + :issue:`8104`. + + + +- Improved the DNS resolution performance on cache hit -- by :user:`bdraco`. + + This is achieved by avoiding an :mod:`asyncio` task creation in this case. + + + *Related issues and pull requests on GitHub:* + :issue:`8163`. + + +- Changed the type annotations to allow ``dict`` on :meth:`aiohttp.MultipartWriter.append`, + :meth:`aiohttp.MultipartWriter.append_json` and + :meth:`aiohttp.MultipartWriter.append_form` -- by :user:`cakemanny` + + + *Related issues and pull requests on GitHub:* + :issue:`7741`. + + + +- Ensure websocket transport is closed when client does not close it + -- by :user:`bdraco`. + + The transport could remain open if the client did not close it. This + change ensures the transport is closed when the client does not close + it. + + + *Related issues and pull requests on GitHub:* + :issue:`8200`. + + + +- Leave websocket transport open if receive times out or is cancelled + -- by :user:`bdraco`. + + This restores the behavior prior to the change in #7978. + + + *Related issues and pull requests on GitHub:* + :issue:`8251`. + + + +- Fixed content not being read when an upgrade request was not supported with the pure Python implementation. + -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8252`. + + + +- Fixed a race condition with incoming connections during server shutdown -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8271`. 
+ + + +- Fixed ``multipart/form-data`` compliance with :rfc:`7578` -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8280`. + + + +- Fixed blocking I/O in the event loop while processing files in a POST request + -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8283`. + + + +- Escaped filenames in static view -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8317`. + + + +- Fixed the pure python parser to mark a connection as closing when a + response has no length -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8320`. + + + + +Features +-------- + +- Upgraded *llhttp* to 9.2.1, and started rejecting obsolete line folding + in Python parser to match -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8146`, :issue:`8292`. + + + + +Deprecations (removal in next major release) +-------------------------------------------- + +- Deprecated ``content_transfer_encoding`` parameter in :py:meth:`FormData.add_field() + <aiohttp.FormData.add_field>` -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8280`. + + + + +Improved documentation +---------------------- + +- Added a note about canceling tasks to avoid delaying server shutdown -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8267`. + + + + +Contributor-facing changes +-------------------------- + +- The pull request template is now asking the contributors to + answer a question about the long-term maintenance challenges + they envision as a result of merging their patches + -- by :user:`webknjaz`. + + + *Related issues and pull requests on GitHub:* + :issue:`8099`. + + + +- Updated CI and documentation to use NPM clean install and upgrade + node to version 18 -- by :user:`steverep`. 
+ + + *Related issues and pull requests on GitHub:* + :issue:`8116`. + + + +- A pytest fixture ``hello_txt`` was introduced to aid + static file serving tests in + :file:`test_web_sendfile_functional.py`. It dynamically + provisions ``hello.txt`` file variants shared across the + tests in the module. + + -- by :user:`steverep` + + + *Related issues and pull requests on GitHub:* + :issue:`8136`. + + + + +Packaging updates and notes for downstreams +------------------------------------------- + +- Added an ``internal`` pytest marker for tests which should be skipped + by packagers (use ``-m 'not internal'`` to disable them) -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8299`. + + + + +---- + + 3.9.3 (2024-01-29) ================== diff --git a/CHANGES/7741.bugfix.rst b/CHANGES/7741.bugfix.rst deleted file mode 100644 index 9134e920c14..00000000000 --- a/CHANGES/7741.bugfix.rst +++ /dev/null @@ -1,3 +0,0 @@ -Changed the type annotations to allow ``dict`` on :meth:`aiohttp.MultipartWriter.append`, -:meth:`aiohttp.MultipartWriter.append_json` and -:meth:`aiohttp.MultipartWriter.append_form` -- by :user:`cakemanny` diff --git a/CHANGES/8089.bugfix.rst b/CHANGES/8089.bugfix.rst deleted file mode 100644 index 7f47448478d..00000000000 --- a/CHANGES/8089.bugfix.rst +++ /dev/null @@ -1,3 +0,0 @@ -The asynchronous internals now set the underlying causes -when assigning exceptions to the future objects --- by :user:`webknjaz`. diff --git a/CHANGES/8099.contrib.rst b/CHANGES/8099.contrib.rst deleted file mode 100644 index 827ecfa5827..00000000000 --- a/CHANGES/8099.contrib.rst +++ /dev/null @@ -1,4 +0,0 @@ -The pull request template is now asking the contributors to -answer a question about the long-term maintenance challenges -they envision as a result of merging their patches --- by :user:`webknjaz`. 
diff --git a/CHANGES/8104.bugfix.rst b/CHANGES/8104.bugfix.rst deleted file mode 100644 index 1ebe6f06d9d..00000000000 --- a/CHANGES/8104.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Treated values of ``Accept-Encoding`` header as case-insensitive when checking for gzip files -- by :user:`steverep`. diff --git a/CHANGES/8116.contrib.rst b/CHANGES/8116.contrib.rst deleted file mode 100644 index c1c7f2ca3bf..00000000000 --- a/CHANGES/8116.contrib.rst +++ /dev/null @@ -1 +0,0 @@ -Updated CI and documentation to use NPM clean install and upgrade node to version 18 -- by :user:`steverep`. diff --git a/CHANGES/8136.contrib.rst b/CHANGES/8136.contrib.rst deleted file mode 100644 index 69718a4e0ab..00000000000 --- a/CHANGES/8136.contrib.rst +++ /dev/null @@ -1,7 +0,0 @@ -A pytest fixture ``hello_txt`` was introduced to aid -static file serving tests in -:file:`test_web_sendfile_functional.py`. It dynamically -provisions ``hello.txt`` file variants shared across the -tests in the module. - --- by :user:`steverep` diff --git a/CHANGES/8139.contrib.rst b/CHANGES/8139.contrib.rst deleted file mode 100644 index fd743e70f4a..00000000000 --- a/CHANGES/8139.contrib.rst +++ /dev/null @@ -1 +0,0 @@ -Two definitions for "test_invalid_route_name" existed, only one was being run. Refactored them into a single parameterized test. Enabled lint rule to prevent regression. -- by :user:`alexmac`. diff --git a/CHANGES/8146.feature.rst b/CHANGES/8146.feature.rst deleted file mode 100644 index 9b0cc54206e..00000000000 --- a/CHANGES/8146.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Upgraded *llhttp* to 9.2 -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8163.bugfix.rst b/CHANGES/8163.bugfix.rst deleted file mode 100644 index 8bfb10260c6..00000000000 --- a/CHANGES/8163.bugfix.rst +++ /dev/null @@ -1,5 +0,0 @@ -Improved the DNS resolution performance on cache hit --- by :user:`bdraco`. - -This is achieved by avoiding an :mod:`asyncio` task creation -in this case. 
diff --git a/CHANGES/8197.doc b/CHANGES/8197.doc deleted file mode 100644 index ba4117768e8..00000000000 --- a/CHANGES/8197.doc +++ /dev/null @@ -1 +0,0 @@ -Fixed false behavior of base_url param for ClientSession in client documentation -- by :user:`alexis974`. diff --git a/CHANGES/8200.bugfix.rst b/CHANGES/8200.bugfix.rst deleted file mode 100644 index e4492a8a84c..00000000000 --- a/CHANGES/8200.bugfix.rst +++ /dev/null @@ -1,6 +0,0 @@ -Ensure websocket transport is closed when client does not close it --- by :user:`bdraco`. - -The transport could remain open if the client did not close it. This -change ensures the transport is closed when the client does not close -it. diff --git a/CHANGES/8251.bugfix.rst b/CHANGES/8251.bugfix.rst deleted file mode 100644 index 6fc6507cfe2..00000000000 --- a/CHANGES/8251.bugfix.rst +++ /dev/null @@ -1,4 +0,0 @@ -Leave websocket transport open if receive times out or is cancelled --- by :user:`bdraco`. - -This restores the behavior prior to the change in #7978. diff --git a/CHANGES/8252.bugfix.rst b/CHANGES/8252.bugfix.rst deleted file mode 100644 index e932eb9c7ed..00000000000 --- a/CHANGES/8252.bugfix.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fixed content not being read when an upgrade request was not supported with the pure Python implementation. --- by :user:`bdraco`. diff --git a/CHANGES/8267.doc.rst b/CHANGES/8267.doc.rst deleted file mode 100644 index 69f11d37560..00000000000 --- a/CHANGES/8267.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Added a note about canceling tasks to avoid delaying server shutdown -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8271.bugfix.rst b/CHANGES/8271.bugfix.rst deleted file mode 100644 index 9d572ba2fe6..00000000000 --- a/CHANGES/8271.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed a race condition with incoming connections during server shutdown -- by :user:`Dreamsorcerer`. 
diff --git a/CHANGES/8280.bugfix.rst b/CHANGES/8280.bugfix.rst deleted file mode 100644 index 3aebe36fe9e..00000000000 --- a/CHANGES/8280.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed ``multipart/form-data`` compliance with :rfc:`7578` -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8280.deprecation.rst b/CHANGES/8280.deprecation.rst deleted file mode 100644 index 302dbb2fe2a..00000000000 --- a/CHANGES/8280.deprecation.rst +++ /dev/null @@ -1,2 +0,0 @@ -Deprecated ``content_transfer_encoding`` parameter in :py:meth:`FormData.add_field() -<aiohttp.FormData.add_field>` -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8283.bugfix.rst b/CHANGES/8283.bugfix.rst deleted file mode 100644 index d456d59ba8e..00000000000 --- a/CHANGES/8283.bugfix.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fixed blocking I/O in the event loop while processing files in a POST request --- by :user:`bdraco`. diff --git a/CHANGES/8292.feature.rst b/CHANGES/8292.feature.rst deleted file mode 100644 index 6ca82503143..00000000000 --- a/CHANGES/8292.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Upgraded to LLHTTP 9.2.1, and started rejecting obsolete line folding in Python parser to match -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8299.packaging.rst b/CHANGES/8299.packaging.rst deleted file mode 100644 index 05abc8237e2..00000000000 --- a/CHANGES/8299.packaging.rst +++ /dev/null @@ -1,2 +0,0 @@ -Added an ``internal`` pytest marker for tests which should be skipped -by packagers (use ``-m 'not internal'`` to disable them) -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8317.bugfix.rst b/CHANGES/8317.bugfix.rst deleted file mode 100644 index b24ef2aeb81..00000000000 --- a/CHANGES/8317.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Escaped filenames in static view -- by :user:`bdraco`. 
diff --git a/CHANGES/8320.bugfix.rst b/CHANGES/8320.bugfix.rst deleted file mode 100644 index 027074f743b..00000000000 --- a/CHANGES/8320.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed the pure python parser to mark a connection as closing when a response has no length -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 6f6ab8e6b99..46db236d00a 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.9.3.dev0" +__version__ = "3.9.4" from typing import TYPE_CHECKING, Tuple From 01df7ec9302bf3ec8cff4addfe15f1f4bbd3199b Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Thu, 11 Apr 2024 23:19:15 +0100 Subject: [PATCH 0171/1511] Bump version --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 46db236d00a..c4af7b6f4dd 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.9.4" +__version__ = "3.9.4.dev0" from typing import TYPE_CHECKING, Tuple From 187f3a5f4f6ec396e3f25d4c680c75663c6fcfe5 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 15 Apr 2024 13:52:08 +0100 Subject: [PATCH 0172/1511] [PR #8324/4a8fd08b backport][3.10] Add missing changelogs (#8331) **This is a backport of PR #8324 as merged into master (4a8fd08b617d63b6b3a73b3d381e6e33f2c78296).** Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES.rst | 265 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 265 insertions(+) diff --git a/CHANGES.rst b/CHANGES.rst index 7074aaa6966..523b4a84787 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1926,6 +1926,271 @@ Misc +---- + + +3.4.4 (2018-09-05) +================== + +- Fix installation from sources when compiling toolkit is not available (`#3241 <https://github.com/aio-libs/aiohttp/pull/3241>`_) + + + + +---- + + +3.4.3 (2018-09-04) +================== + +- Add 
``app.pre_frozen`` state to properly handle startup signals in sub-applications. (`#3237 <https://github.com/aio-libs/aiohttp/pull/3237>`_) + + + + +---- + + +3.4.2 (2018-09-01) +================== + +- Fix ``iter_chunks`` type annotation (`#3230 <https://github.com/aio-libs/aiohttp/pull/3230>`_) + + + + +---- + + +3.4.1 (2018-08-28) +================== + +- Fix empty header parsing regression. (`#3218 <https://github.com/aio-libs/aiohttp/pull/3218>`_) +- Fix BaseRequest.raw_headers doc. (`#3215 <https://github.com/aio-libs/aiohttp/pull/3215>`_) +- Fix documentation building on ReadTheDocs (`#3221 <https://github.com/aio-libs/aiohttp/pull/3221>`_) + + + + +---- + + +3.4.0 (2018-08-25) +================== + +Features +-------- + +- Add type hints (`#3049 <https://github.com/aio-libs/aiohttp/pull/3049>`_) +- Add ``raise_for_status`` request parameter (`#3073 <https://github.com/aio-libs/aiohttp/pull/3073>`_) +- Add type hints to HTTP client (`#3092 <https://github.com/aio-libs/aiohttp/pull/3092>`_) +- Minor server optimizations (`#3095 <https://github.com/aio-libs/aiohttp/pull/3095>`_) +- Preserve the cause when `HTTPException` is raised from another exception. (`#3096 <https://github.com/aio-libs/aiohttp/pull/3096>`_) +- Add `close_boundary` option in `MultipartWriter.write` method. Support streaming (`#3104 <https://github.com/aio-libs/aiohttp/pull/3104>`_) +- Added a ``remove_slash`` option to the ``normalize_path_middleware`` factory. (`#3173 <https://github.com/aio-libs/aiohttp/pull/3173>`_) +- The class `AbstractRouteDef` is importable from `aiohttp.web`. (`#3183 <https://github.com/aio-libs/aiohttp/pull/3183>`_) + + +Bugfixes +-------- + +- Prevent double closing when client connection is released before the + last ``data_received()`` callback. (`#3031 <https://github.com/aio-libs/aiohttp/pull/3031>`_) +- Make redirect with `normalize_path_middleware` work when using url encoded paths. 
(`#3051 <https://github.com/aio-libs/aiohttp/pull/3051>`_) +- Postpone web task creation to connection establishment. (`#3052 <https://github.com/aio-libs/aiohttp/pull/3052>`_) +- Fix ``sock_read`` timeout. (`#3053 <https://github.com/aio-libs/aiohttp/pull/3053>`_) +- When using a server-request body as the `data=` argument of a client request, iterate over the content with `readany` instead of `readline` to avoid `Line too long` errors. (`#3054 <https://github.com/aio-libs/aiohttp/pull/3054>`_) +- fix `UrlDispatcher` has no attribute `add_options`, add `web.options` (`#3062 <https://github.com/aio-libs/aiohttp/pull/3062>`_) +- correct filename in content-disposition with multipart body (`#3064 <https://github.com/aio-libs/aiohttp/pull/3064>`_) +- Many HTTP proxies has buggy keepalive support. + Let's not reuse connection but close it after processing every response. (`#3070 <https://github.com/aio-libs/aiohttp/pull/3070>`_) +- raise 413 "Payload Too Large" rather than raising ValueError in request.post() + Add helpful debug message to 413 responses (`#3087 <https://github.com/aio-libs/aiohttp/pull/3087>`_) +- Fix `StreamResponse` equality, now that they are `MutableMapping` objects. (`#3100 <https://github.com/aio-libs/aiohttp/pull/3100>`_) +- Fix server request objects comparison (`#3116 <https://github.com/aio-libs/aiohttp/pull/3116>`_) +- Do not hang on `206 Partial Content` response with `Content-Encoding: gzip` (`#3123 <https://github.com/aio-libs/aiohttp/pull/3123>`_) +- Fix timeout precondition checkers (`#3145 <https://github.com/aio-libs/aiohttp/pull/3145>`_) + + +Improved Documentation +---------------------- + +- Add a new FAQ entry that clarifies that you should not reuse response + objects in middleware functions. (`#3020 <https://github.com/aio-libs/aiohttp/pull/3020>`_) +- Add FAQ section "Why is creating a ClientSession outside of an event loop dangerous?" 
(`#3072 <https://github.com/aio-libs/aiohttp/pull/3072>`_) +- Fix link to Rambler (`#3115 <https://github.com/aio-libs/aiohttp/pull/3115>`_) +- Fix TCPSite documentation on the Server Reference page. (`#3146 <https://github.com/aio-libs/aiohttp/pull/3146>`_) +- Fix documentation build configuration file for Windows. (`#3147 <https://github.com/aio-libs/aiohttp/pull/3147>`_) +- Remove no longer existing lingering_timeout parameter of Application.make_handler from documentation. (`#3151 <https://github.com/aio-libs/aiohttp/pull/3151>`_) +- Mention that ``app.make_handler`` is deprecated, recommend to use runners + API instead. (`#3157 <https://github.com/aio-libs/aiohttp/pull/3157>`_) + + +Deprecations and Removals +------------------------- + +- Drop ``loop.current_task()`` from ``helpers.current_task()`` (`#2826 <https://github.com/aio-libs/aiohttp/pull/2826>`_) +- Drop ``reader`` parameter from ``request.multipart()``. (`#3090 <https://github.com/aio-libs/aiohttp/pull/3090>`_) + + + + +---- + + +3.3.2 (2018-06-12) +================== + +- Many HTTP proxies has buggy keepalive support. Let's not reuse connection but + close it after processing every response. (`#3070 <https://github.com/aio-libs/aiohttp/pull/3070>`_) + +- Provide vendor source files in tarball (`#3076 <https://github.com/aio-libs/aiohttp/pull/3076>`_) + + + + +---- + + +3.3.1 (2018-06-05) +================== + +- Fix ``sock_read`` timeout. (`#3053 <https://github.com/aio-libs/aiohttp/pull/3053>`_) +- When using a server-request body as the ``data=`` argument of a client request, + iterate over the content with ``readany`` instead of ``readline`` to avoid ``Line + too long`` errors. (`#3054 <https://github.com/aio-libs/aiohttp/pull/3054>`_) + + + + +---- + + +3.3.0 (2018-06-01) +================== + +Features +-------- + +- Raise ``ConnectionResetError`` instead of ``CancelledError`` on trying to + write to a closed stream. 
(`#2499 <https://github.com/aio-libs/aiohttp/pull/2499>`_) +- Implement ``ClientTimeout`` class and support socket read timeout. (`#2768 <https://github.com/aio-libs/aiohttp/pull/2768>`_) +- Enable logging when ``aiohttp.web`` is used as a program (`#2956 <https://github.com/aio-libs/aiohttp/pull/2956>`_) +- Add canonical property to resources (`#2968 <https://github.com/aio-libs/aiohttp/pull/2968>`_) +- Forbid reading response BODY after release (`#2983 <https://github.com/aio-libs/aiohttp/pull/2983>`_) +- Implement base protocol class to avoid a dependency from internal + ``asyncio.streams.FlowControlMixin`` (`#2986 <https://github.com/aio-libs/aiohttp/pull/2986>`_) +- Cythonize ``@helpers.reify``, 5% boost on macro benchmark (`#2995 <https://github.com/aio-libs/aiohttp/pull/2995>`_) +- Optimize HTTP parser (`#3015 <https://github.com/aio-libs/aiohttp/pull/3015>`_) +- Implement ``runner.addresses`` property. (`#3036 <https://github.com/aio-libs/aiohttp/pull/3036>`_) +- Use ``bytearray`` instead of a list of ``bytes`` in websocket reader. It + improves websocket message reading a little. (`#3039 <https://github.com/aio-libs/aiohttp/pull/3039>`_) +- Remove heartbeat on closing connection on keepalive timeout. The used hack + violates HTTP protocol. (`#3041 <https://github.com/aio-libs/aiohttp/pull/3041>`_) +- Limit websocket message size on reading to 4 MB by default. (`#3045 <https://github.com/aio-libs/aiohttp/pull/3045>`_) + + +Bugfixes +-------- + +- Don't reuse a connection with the same URL but different proxy/TLS settings + (`#2981 <https://github.com/aio-libs/aiohttp/pull/2981>`_) +- When parsing the Forwarded header, the optional port number is now preserved. + (`#3009 <https://github.com/aio-libs/aiohttp/pull/3009>`_) + + +Improved Documentation +---------------------- + +- Make Change Log more visible in docs (`#3029 <https://github.com/aio-libs/aiohttp/pull/3029>`_) +- Make style and grammar improvements on the FAQ page. 
(`#3030 <https://github.com/aio-libs/aiohttp/pull/3030>`_) +- Document that signal handlers should be async functions since aiohttp 3.0 + (`#3032 <https://github.com/aio-libs/aiohttp/pull/3032>`_) + + +Deprecations and Removals +------------------------- + +- Deprecate custom application's router. (`#3021 <https://github.com/aio-libs/aiohttp/pull/3021>`_) + + +Misc +---- + +- #3008, #3011 + + + + +---- + + +3.2.1 (2018-05-10) +================== + +- Don't reuse a connection with the same URL but different proxy/TLS settings + (`#2981 <https://github.com/aio-libs/aiohttp/pull/2981>`_) + + + + +---- + + +3.2.0 (2018-05-06) +================== + +Features +-------- + +- Raise ``TooManyRedirects`` exception when client gets redirected too many + times instead of returning last response. (`#2631 <https://github.com/aio-libs/aiohttp/pull/2631>`_) +- Extract route definitions into separate ``web_routedef.py`` file (`#2876 <https://github.com/aio-libs/aiohttp/pull/2876>`_) +- Raise an exception on request body reading after sending response. (`#2895 <https://github.com/aio-libs/aiohttp/pull/2895>`_) +- ClientResponse and RequestInfo now have real_url property, which is request + url without fragment part being stripped (`#2925 <https://github.com/aio-libs/aiohttp/pull/2925>`_) +- Speed up connector limiting (`#2937 <https://github.com/aio-libs/aiohttp/pull/2937>`_) +- Added and links property for ClientResponse object (`#2948 <https://github.com/aio-libs/aiohttp/pull/2948>`_) +- Add ``request.config_dict`` for exposing nested applications data. (`#2949 <https://github.com/aio-libs/aiohttp/pull/2949>`_) +- Speed up HTTP headers serialization, server micro-benchmark runs 5% faster + now. 
(`#2957 <https://github.com/aio-libs/aiohttp/pull/2957>`_) +- Apply assertions in debug mode only (`#2966 <https://github.com/aio-libs/aiohttp/pull/2966>`_) + + +Bugfixes +-------- + +- expose property `app` for TestClient (`#2891 <https://github.com/aio-libs/aiohttp/pull/2891>`_) +- Call on_chunk_sent when write_eof takes as a param the last chunk (`#2909 <https://github.com/aio-libs/aiohttp/pull/2909>`_) +- A closing bracket was added to `__repr__` of resources (`#2935 <https://github.com/aio-libs/aiohttp/pull/2935>`_) +- Fix compression of FileResponse (`#2942 <https://github.com/aio-libs/aiohttp/pull/2942>`_) +- Fixes some bugs in the limit connection feature (`#2964 <https://github.com/aio-libs/aiohttp/pull/2964>`_) + + +Improved Documentation +---------------------- + +- Drop ``async_timeout`` usage from documentation for client API in favor of + ``timeout`` parameter. (`#2865 <https://github.com/aio-libs/aiohttp/pull/2865>`_) +- Improve Gunicorn logging documentation (`#2921 <https://github.com/aio-libs/aiohttp/pull/2921>`_) +- Replace multipart writer `.serialize()` method with `.write()` in + documentation. 
(`#2965 <https://github.com/aio-libs/aiohttp/pull/2965>`_) + + +Deprecations and Removals +------------------------- + +- Deprecate Application.make_handler() (`#2938 <https://github.com/aio-libs/aiohttp/pull/2938>`_) + + +Misc +---- + +- #2958 + + + + ---- From 82fbe6476a65e81bbde6f8036baaa34ca0c82c2c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 15 Apr 2024 13:52:22 +0100 Subject: [PATCH 0173/1511] [PR #8324/4a8fd08b backport][3.9] Add missing changelogs (#8330) **This is a backport of PR #8324 as merged into master (4a8fd08b617d63b6b3a73b3d381e6e33f2c78296).** Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES.rst | 265 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 265 insertions(+) diff --git a/CHANGES.rst b/CHANGES.rst index 7074aaa6966..523b4a84787 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1926,6 +1926,271 @@ Misc +---- + + +3.4.4 (2018-09-05) +================== + +- Fix installation from sources when compiling toolkit is not available (`#3241 <https://github.com/aio-libs/aiohttp/pull/3241>`_) + + + + +---- + + +3.4.3 (2018-09-04) +================== + +- Add ``app.pre_frozen`` state to properly handle startup signals in sub-applications. (`#3237 <https://github.com/aio-libs/aiohttp/pull/3237>`_) + + + + +---- + + +3.4.2 (2018-09-01) +================== + +- Fix ``iter_chunks`` type annotation (`#3230 <https://github.com/aio-libs/aiohttp/pull/3230>`_) + + + + +---- + + +3.4.1 (2018-08-28) +================== + +- Fix empty header parsing regression. (`#3218 <https://github.com/aio-libs/aiohttp/pull/3218>`_) +- Fix BaseRequest.raw_headers doc. 
(`#3215 <https://github.com/aio-libs/aiohttp/pull/3215>`_) +- Fix documentation building on ReadTheDocs (`#3221 <https://github.com/aio-libs/aiohttp/pull/3221>`_) + + + + +---- + + +3.4.0 (2018-08-25) +================== + +Features +-------- + +- Add type hints (`#3049 <https://github.com/aio-libs/aiohttp/pull/3049>`_) +- Add ``raise_for_status`` request parameter (`#3073 <https://github.com/aio-libs/aiohttp/pull/3073>`_) +- Add type hints to HTTP client (`#3092 <https://github.com/aio-libs/aiohttp/pull/3092>`_) +- Minor server optimizations (`#3095 <https://github.com/aio-libs/aiohttp/pull/3095>`_) +- Preserve the cause when `HTTPException` is raised from another exception. (`#3096 <https://github.com/aio-libs/aiohttp/pull/3096>`_) +- Add `close_boundary` option in `MultipartWriter.write` method. Support streaming (`#3104 <https://github.com/aio-libs/aiohttp/pull/3104>`_) +- Added a ``remove_slash`` option to the ``normalize_path_middleware`` factory. (`#3173 <https://github.com/aio-libs/aiohttp/pull/3173>`_) +- The class `AbstractRouteDef` is importable from `aiohttp.web`. (`#3183 <https://github.com/aio-libs/aiohttp/pull/3183>`_) + + +Bugfixes +-------- + +- Prevent double closing when client connection is released before the + last ``data_received()`` callback. (`#3031 <https://github.com/aio-libs/aiohttp/pull/3031>`_) +- Make redirect with `normalize_path_middleware` work when using url encoded paths. (`#3051 <https://github.com/aio-libs/aiohttp/pull/3051>`_) +- Postpone web task creation to connection establishment. (`#3052 <https://github.com/aio-libs/aiohttp/pull/3052>`_) +- Fix ``sock_read`` timeout. (`#3053 <https://github.com/aio-libs/aiohttp/pull/3053>`_) +- When using a server-request body as the `data=` argument of a client request, iterate over the content with `readany` instead of `readline` to avoid `Line too long` errors. 
(`#3054 <https://github.com/aio-libs/aiohttp/pull/3054>`_) +- fix `UrlDispatcher` has no attribute `add_options`, add `web.options` (`#3062 <https://github.com/aio-libs/aiohttp/pull/3062>`_) +- correct filename in content-disposition with multipart body (`#3064 <https://github.com/aio-libs/aiohttp/pull/3064>`_) +- Many HTTP proxies has buggy keepalive support. + Let's not reuse connection but close it after processing every response. (`#3070 <https://github.com/aio-libs/aiohttp/pull/3070>`_) +- raise 413 "Payload Too Large" rather than raising ValueError in request.post() + Add helpful debug message to 413 responses (`#3087 <https://github.com/aio-libs/aiohttp/pull/3087>`_) +- Fix `StreamResponse` equality, now that they are `MutableMapping` objects. (`#3100 <https://github.com/aio-libs/aiohttp/pull/3100>`_) +- Fix server request objects comparison (`#3116 <https://github.com/aio-libs/aiohttp/pull/3116>`_) +- Do not hang on `206 Partial Content` response with `Content-Encoding: gzip` (`#3123 <https://github.com/aio-libs/aiohttp/pull/3123>`_) +- Fix timeout precondition checkers (`#3145 <https://github.com/aio-libs/aiohttp/pull/3145>`_) + + +Improved Documentation +---------------------- + +- Add a new FAQ entry that clarifies that you should not reuse response + objects in middleware functions. (`#3020 <https://github.com/aio-libs/aiohttp/pull/3020>`_) +- Add FAQ section "Why is creating a ClientSession outside of an event loop dangerous?" (`#3072 <https://github.com/aio-libs/aiohttp/pull/3072>`_) +- Fix link to Rambler (`#3115 <https://github.com/aio-libs/aiohttp/pull/3115>`_) +- Fix TCPSite documentation on the Server Reference page. (`#3146 <https://github.com/aio-libs/aiohttp/pull/3146>`_) +- Fix documentation build configuration file for Windows. (`#3147 <https://github.com/aio-libs/aiohttp/pull/3147>`_) +- Remove no longer existing lingering_timeout parameter of Application.make_handler from documentation. 
(`#3151 <https://github.com/aio-libs/aiohttp/pull/3151>`_) +- Mention that ``app.make_handler`` is deprecated, recommend to use runners + API instead. (`#3157 <https://github.com/aio-libs/aiohttp/pull/3157>`_) + + +Deprecations and Removals +------------------------- + +- Drop ``loop.current_task()`` from ``helpers.current_task()`` (`#2826 <https://github.com/aio-libs/aiohttp/pull/2826>`_) +- Drop ``reader`` parameter from ``request.multipart()``. (`#3090 <https://github.com/aio-libs/aiohttp/pull/3090>`_) + + + + +---- + + +3.3.2 (2018-06-12) +================== + +- Many HTTP proxies has buggy keepalive support. Let's not reuse connection but + close it after processing every response. (`#3070 <https://github.com/aio-libs/aiohttp/pull/3070>`_) + +- Provide vendor source files in tarball (`#3076 <https://github.com/aio-libs/aiohttp/pull/3076>`_) + + + + +---- + + +3.3.1 (2018-06-05) +================== + +- Fix ``sock_read`` timeout. (`#3053 <https://github.com/aio-libs/aiohttp/pull/3053>`_) +- When using a server-request body as the ``data=`` argument of a client request, + iterate over the content with ``readany`` instead of ``readline`` to avoid ``Line + too long`` errors. (`#3054 <https://github.com/aio-libs/aiohttp/pull/3054>`_) + + + + +---- + + +3.3.0 (2018-06-01) +================== + +Features +-------- + +- Raise ``ConnectionResetError`` instead of ``CancelledError`` on trying to + write to a closed stream. (`#2499 <https://github.com/aio-libs/aiohttp/pull/2499>`_) +- Implement ``ClientTimeout`` class and support socket read timeout. 
(`#2768 <https://github.com/aio-libs/aiohttp/pull/2768>`_) +- Enable logging when ``aiohttp.web`` is used as a program (`#2956 <https://github.com/aio-libs/aiohttp/pull/2956>`_) +- Add canonical property to resources (`#2968 <https://github.com/aio-libs/aiohttp/pull/2968>`_) +- Forbid reading response BODY after release (`#2983 <https://github.com/aio-libs/aiohttp/pull/2983>`_) +- Implement base protocol class to avoid a dependency from internal + ``asyncio.streams.FlowControlMixin`` (`#2986 <https://github.com/aio-libs/aiohttp/pull/2986>`_) +- Cythonize ``@helpers.reify``, 5% boost on macro benchmark (`#2995 <https://github.com/aio-libs/aiohttp/pull/2995>`_) +- Optimize HTTP parser (`#3015 <https://github.com/aio-libs/aiohttp/pull/3015>`_) +- Implement ``runner.addresses`` property. (`#3036 <https://github.com/aio-libs/aiohttp/pull/3036>`_) +- Use ``bytearray`` instead of a list of ``bytes`` in websocket reader. It + improves websocket message reading a little. (`#3039 <https://github.com/aio-libs/aiohttp/pull/3039>`_) +- Remove heartbeat on closing connection on keepalive timeout. The used hack + violates HTTP protocol. (`#3041 <https://github.com/aio-libs/aiohttp/pull/3041>`_) +- Limit websocket message size on reading to 4 MB by default. (`#3045 <https://github.com/aio-libs/aiohttp/pull/3045>`_) + + +Bugfixes +-------- + +- Don't reuse a connection with the same URL but different proxy/TLS settings + (`#2981 <https://github.com/aio-libs/aiohttp/pull/2981>`_) +- When parsing the Forwarded header, the optional port number is now preserved. + (`#3009 <https://github.com/aio-libs/aiohttp/pull/3009>`_) + + +Improved Documentation +---------------------- + +- Make Change Log more visible in docs (`#3029 <https://github.com/aio-libs/aiohttp/pull/3029>`_) +- Make style and grammar improvements on the FAQ page. 
(`#3030 <https://github.com/aio-libs/aiohttp/pull/3030>`_) +- Document that signal handlers should be async functions since aiohttp 3.0 + (`#3032 <https://github.com/aio-libs/aiohttp/pull/3032>`_) + + +Deprecations and Removals +------------------------- + +- Deprecate custom application's router. (`#3021 <https://github.com/aio-libs/aiohttp/pull/3021>`_) + + +Misc +---- + +- #3008, #3011 + + + + +---- + + +3.2.1 (2018-05-10) +================== + +- Don't reuse a connection with the same URL but different proxy/TLS settings + (`#2981 <https://github.com/aio-libs/aiohttp/pull/2981>`_) + + + + +---- + + +3.2.0 (2018-05-06) +================== + +Features +-------- + +- Raise ``TooManyRedirects`` exception when client gets redirected too many + times instead of returning last response. (`#2631 <https://github.com/aio-libs/aiohttp/pull/2631>`_) +- Extract route definitions into separate ``web_routedef.py`` file (`#2876 <https://github.com/aio-libs/aiohttp/pull/2876>`_) +- Raise an exception on request body reading after sending response. (`#2895 <https://github.com/aio-libs/aiohttp/pull/2895>`_) +- ClientResponse and RequestInfo now have real_url property, which is request + url without fragment part being stripped (`#2925 <https://github.com/aio-libs/aiohttp/pull/2925>`_) +- Speed up connector limiting (`#2937 <https://github.com/aio-libs/aiohttp/pull/2937>`_) +- Added and links property for ClientResponse object (`#2948 <https://github.com/aio-libs/aiohttp/pull/2948>`_) +- Add ``request.config_dict`` for exposing nested applications data. (`#2949 <https://github.com/aio-libs/aiohttp/pull/2949>`_) +- Speed up HTTP headers serialization, server micro-benchmark runs 5% faster + now. 
(`#2957 <https://github.com/aio-libs/aiohttp/pull/2957>`_) +- Apply assertions in debug mode only (`#2966 <https://github.com/aio-libs/aiohttp/pull/2966>`_) + + +Bugfixes +-------- + +- expose property `app` for TestClient (`#2891 <https://github.com/aio-libs/aiohttp/pull/2891>`_) +- Call on_chunk_sent when write_eof takes as a param the last chunk (`#2909 <https://github.com/aio-libs/aiohttp/pull/2909>`_) +- A closing bracket was added to `__repr__` of resources (`#2935 <https://github.com/aio-libs/aiohttp/pull/2935>`_) +- Fix compression of FileResponse (`#2942 <https://github.com/aio-libs/aiohttp/pull/2942>`_) +- Fixes some bugs in the limit connection feature (`#2964 <https://github.com/aio-libs/aiohttp/pull/2964>`_) + + +Improved Documentation +---------------------- + +- Drop ``async_timeout`` usage from documentation for client API in favor of + ``timeout`` parameter. (`#2865 <https://github.com/aio-libs/aiohttp/pull/2865>`_) +- Improve Gunicorn logging documentation (`#2921 <https://github.com/aio-libs/aiohttp/pull/2921>`_) +- Replace multipart writer `.serialize()` method with `.write()` in + documentation. 
(`#2965 <https://github.com/aio-libs/aiohttp/pull/2965>`_) + + +Deprecations and Removals +------------------------- + +- Deprecate Application.make_handler() (`#2938 <https://github.com/aio-libs/aiohttp/pull/2938>`_) + + +Misc +---- + +- #2958 + + + + ---- From 4c65ee880c46388484979ece26be95a76ce84329 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 15 Apr 2024 17:37:54 +0100 Subject: [PATCH 0174/1511] [PR #8332/482e6cdf backport][3.10] Add set_content_disposition test (#8334) **This is a backport of PR #8332 as merged into master (482e6cdf6516607360666a48c5828d3dbe959fbd).** Co-authored-by: Oleg A <t0rr@mail.ru> --- CHANGES/8332.bugfix.rst | 1 + aiohttp/multipart.py | 7 +++++-- tests/test_multipart.py | 7 +++++++ 3 files changed, 13 insertions(+), 2 deletions(-) create mode 100644 CHANGES/8332.bugfix.rst diff --git a/CHANGES/8332.bugfix.rst b/CHANGES/8332.bugfix.rst new file mode 100644 index 00000000000..70cad26b426 --- /dev/null +++ b/CHANGES/8332.bugfix.rst @@ -0,0 +1 @@ +Fixed regression with adding Content-Disposition to form-data part after appending to writer -- by :user:`Dreamsorcerer`/:user:`Olegt0rr`. 
diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index a43ec545713..fcdf16183cd 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -848,8 +848,6 @@ def append_payload(self, payload: Payload) -> Payload: if self._is_form_data: # https://datatracker.ietf.org/doc/html/rfc7578#section-4.7 # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8 - assert CONTENT_DISPOSITION in payload.headers - assert "name=" in payload.headers[CONTENT_DISPOSITION] assert ( not {CONTENT_ENCODING, CONTENT_LENGTH, CONTENT_TRANSFER_ENCODING} & payload.headers.keys() @@ -930,6 +928,11 @@ def size(self) -> Optional[int]: async def write(self, writer: Any, close_boundary: bool = True) -> None: """Write body.""" for part, encoding, te_encoding in self._parts: + if self._is_form_data: + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.2 + assert CONTENT_DISPOSITION in part.headers + assert "name=" in part.headers[CONTENT_DISPOSITION] + await writer.write(b"--" + self._boundary + b"\r\n") await writer.write(part._binary_headers) diff --git a/tests/test_multipart.py b/tests/test_multipart.py index dbfaf74b9b7..37ac54797fb 100644 --- a/tests/test_multipart.py +++ b/tests/test_multipart.py @@ -1282,6 +1282,13 @@ def test_append_multipart(self, writer) -> None: part = writer._parts[0][0] assert part.headers[CONTENT_TYPE] == "test/passed" + async def test_set_content_disposition_after_append(self): + writer = aiohttp.MultipartWriter("form-data") + payload = writer.append("some-data") + payload.set_content_disposition("form-data", name="method") + assert CONTENT_DISPOSITION in payload.headers + assert "name=" in payload.headers[CONTENT_DISPOSITION] + def test_with(self) -> None: with aiohttp.MultipartWriter(boundary=":") as writer: writer.append("foo") From 7eecdff163ccf029fbb1ddc9de4169d4aaeb6597 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 15 Apr 2024 20:47:19 +0100 Subject: [PATCH 0175/1511] [PR 
#8332/482e6cdf backport][3.9] Add set_content_disposition test (#8333) **This is a backport of PR #8332 as merged into master (482e6cdf6516607360666a48c5828d3dbe959fbd).** Co-authored-by: Oleg A <t0rr@mail.ru> --- CHANGES/8332.bugfix.rst | 1 + aiohttp/multipart.py | 7 +++++-- tests/test_multipart.py | 7 +++++++ 3 files changed, 13 insertions(+), 2 deletions(-) create mode 100644 CHANGES/8332.bugfix.rst diff --git a/CHANGES/8332.bugfix.rst b/CHANGES/8332.bugfix.rst new file mode 100644 index 00000000000..70cad26b426 --- /dev/null +++ b/CHANGES/8332.bugfix.rst @@ -0,0 +1 @@ +Fixed regression with adding Content-Disposition to form-data part after appending to writer -- by :user:`Dreamsorcerer`/:user:`Olegt0rr`. diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index a43ec545713..fcdf16183cd 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -848,8 +848,6 @@ def append_payload(self, payload: Payload) -> Payload: if self._is_form_data: # https://datatracker.ietf.org/doc/html/rfc7578#section-4.7 # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8 - assert CONTENT_DISPOSITION in payload.headers - assert "name=" in payload.headers[CONTENT_DISPOSITION] assert ( not {CONTENT_ENCODING, CONTENT_LENGTH, CONTENT_TRANSFER_ENCODING} & payload.headers.keys() @@ -930,6 +928,11 @@ def size(self) -> Optional[int]: async def write(self, writer: Any, close_boundary: bool = True) -> None: """Write body.""" for part, encoding, te_encoding in self._parts: + if self._is_form_data: + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.2 + assert CONTENT_DISPOSITION in part.headers + assert "name=" in part.headers[CONTENT_DISPOSITION] + await writer.write(b"--" + self._boundary + b"\r\n") await writer.write(part._binary_headers) diff --git a/tests/test_multipart.py b/tests/test_multipart.py index dbfaf74b9b7..37ac54797fb 100644 --- a/tests/test_multipart.py +++ b/tests/test_multipart.py @@ -1282,6 +1282,13 @@ def test_append_multipart(self, writer) -> None: 
part = writer._parts[0][0] assert part.headers[CONTENT_TYPE] == "test/passed" + async def test_set_content_disposition_after_append(self): + writer = aiohttp.MultipartWriter("form-data") + payload = writer.append("some-data") + payload.set_content_disposition("form-data", name="method") + assert CONTENT_DISPOSITION in payload.headers + assert "name=" in payload.headers[CONTENT_DISPOSITION] + def test_with(self) -> None: with aiohttp.MultipartWriter(boundary=":") as writer: writer.append("foo") From cdb5129afc79bb9e073ed6ad2fcb352efa2962c2 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 15 Apr 2024 21:53:52 +0100 Subject: [PATCH 0176/1511] [PR #8335/5a6949da backport][3.10] Add Content-Disposition automatically (#8337) **This is a backport of PR #8335 as merged into master (5a6949da642d1db6cf414fd0d1f70e54c7b7be14).** Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8335.bugfix.rst | 1 + aiohttp/multipart.py | 4 ++++ tests/test_multipart.py | 22 +++++++++++++++++----- 3 files changed, 22 insertions(+), 5 deletions(-) create mode 100644 CHANGES/8335.bugfix.rst diff --git a/CHANGES/8335.bugfix.rst b/CHANGES/8335.bugfix.rst new file mode 100644 index 00000000000..cd93b864a50 --- /dev/null +++ b/CHANGES/8335.bugfix.rst @@ -0,0 +1 @@ +Added default Content-Disposition in multipart/form-data responses -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index fcdf16183cd..71fc2654a1c 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -852,6 +852,10 @@ def append_payload(self, payload: Payload) -> Payload: not {CONTENT_ENCODING, CONTENT_LENGTH, CONTENT_TRANSFER_ENCODING} & payload.headers.keys() ) + # Set default Content-Disposition in case user doesn't create one + if CONTENT_DISPOSITION not in payload.headers: + name = f"section-{len(self._parts)}" + payload.set_content_disposition("form-data", name=name) else: # compression encoding = payload.headers.get(CONTENT_ENCODING, "").lower() diff --git a/tests/test_multipart.py b/tests/test_multipart.py index 37ac54797fb..436b70957fa 100644 --- a/tests/test_multipart.py +++ b/tests/test_multipart.py @@ -1282,12 +1282,24 @@ def test_append_multipart(self, writer) -> None: part = writer._parts[0][0] assert part.headers[CONTENT_TYPE] == "test/passed" - async def test_set_content_disposition_after_append(self): + def test_set_content_disposition_after_append(self): writer = aiohttp.MultipartWriter("form-data") - payload = writer.append("some-data") - payload.set_content_disposition("form-data", name="method") - assert CONTENT_DISPOSITION in payload.headers - assert "name=" in payload.headers[CONTENT_DISPOSITION] + part = writer.append("some-data") + part.set_content_disposition("form-data", name="method") + assert 'name="method"' in part.headers[CONTENT_DISPOSITION] + + def test_automatic_content_disposition(self): + writer = aiohttp.MultipartWriter("form-data") + writer.append_json(()) + part = payload.StringPayload("foo") + part.set_content_disposition("form-data", name="second") + writer.append_payload(part) + writer.append("foo") + + disps = tuple(p[0].headers[CONTENT_DISPOSITION] for p in writer._parts) + assert 'name="section-0"' in disps[0] + assert 'name="second"' in disps[1] + assert 'name="section-2"' in disps[2] def test_with(self) -> None: with aiohttp.MultipartWriter(boundary=":") as writer: 
From f21c6f2ca512a026ce7f0f6c6311f62d6a638866 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 15 Apr 2024 21:54:12 +0100 Subject: [PATCH 0177/1511] [PR #8335/5a6949da backport][3.9] Add Content-Disposition automatically (#8336) **This is a backport of PR #8335 as merged into master (5a6949da642d1db6cf414fd0d1f70e54c7b7be14).** Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8335.bugfix.rst | 1 + aiohttp/multipart.py | 4 ++++ tests/test_multipart.py | 22 +++++++++++++++++----- 3 files changed, 22 insertions(+), 5 deletions(-) create mode 100644 CHANGES/8335.bugfix.rst diff --git a/CHANGES/8335.bugfix.rst b/CHANGES/8335.bugfix.rst new file mode 100644 index 00000000000..cd93b864a50 --- /dev/null +++ b/CHANGES/8335.bugfix.rst @@ -0,0 +1 @@ +Added default Content-Disposition in multipart/form-data responses -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index fcdf16183cd..71fc2654a1c 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -852,6 +852,10 @@ def append_payload(self, payload: Payload) -> Payload: not {CONTENT_ENCODING, CONTENT_LENGTH, CONTENT_TRANSFER_ENCODING} & payload.headers.keys() ) + # Set default Content-Disposition in case user doesn't create one + if CONTENT_DISPOSITION not in payload.headers: + name = f"section-{len(self._parts)}" + payload.set_content_disposition("form-data", name=name) else: # compression encoding = payload.headers.get(CONTENT_ENCODING, "").lower() diff --git a/tests/test_multipart.py b/tests/test_multipart.py index 37ac54797fb..436b70957fa 100644 --- a/tests/test_multipart.py +++ b/tests/test_multipart.py @@ -1282,12 +1282,24 @@ def test_append_multipart(self, writer) -> None: part = writer._parts[0][0] assert part.headers[CONTENT_TYPE] == "test/passed" - async def test_set_content_disposition_after_append(self): + def test_set_content_disposition_after_append(self): writer = aiohttp.MultipartWriter("form-data") 
- payload = writer.append("some-data") - payload.set_content_disposition("form-data", name="method") - assert CONTENT_DISPOSITION in payload.headers - assert "name=" in payload.headers[CONTENT_DISPOSITION] + part = writer.append("some-data") + part.set_content_disposition("form-data", name="method") + assert 'name="method"' in part.headers[CONTENT_DISPOSITION] + + def test_automatic_content_disposition(self): + writer = aiohttp.MultipartWriter("form-data") + writer.append_json(()) + part = payload.StringPayload("foo") + part.set_content_disposition("form-data", name="second") + writer.append_payload(part) + writer.append("foo") + + disps = tuple(p[0].headers[CONTENT_DISPOSITION] for p in writer._parts) + assert 'name="section-0"' in disps[0] + assert 'name="second"' in disps[1] + assert 'name="section-2"' in disps[2] def test_with(self) -> None: with aiohttp.MultipartWriter(boundary=":") as writer: From 0415a4ca18e15e734999925fd7f20607f05e2a4b Mon Sep 17 00:00:00 2001 From: Alexey Nikitin <30608416+NewGlad@users.noreply.github.com> Date: Tue, 16 Apr 2024 23:36:07 +0800 Subject: [PATCH 0178/1511] Patchback/backports/3.9/5fd29467fb63efdfae1ace280cec36b1f8139567/pr 8290 (#8311) --- CHANGES/8253.bugfix | 1 + CONTRIBUTORS.txt | 1 + aiohttp/client.py | 73 +++++++++++++++++++----------------- tests/test_client_session.py | 10 +++++ 4 files changed, 50 insertions(+), 35 deletions(-) create mode 100644 CHANGES/8253.bugfix diff --git a/CHANGES/8253.bugfix b/CHANGES/8253.bugfix new file mode 100644 index 00000000000..91b06d9b35d --- /dev/null +++ b/CHANGES/8253.bugfix @@ -0,0 +1 @@ +Fixed "Unclosed client session" when initialization of ClientSession fails -- by :user:`NewGlad`. 
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index ab889685fc8..277171a239e 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -27,6 +27,7 @@ Alexander Shorin Alexander Travov Alexandru Mihai Alexey Firsov +Alexey Nikitin Alexey Popravka Alexey Stepanov Amin Etesamian diff --git a/aiohttp/client.py b/aiohttp/client.py index 7e9b32fad6f..32d2c3b7119 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -234,6 +234,44 @@ def __init__( max_field_size: int = 8190, fallback_charset_resolver: _CharsetResolver = lambda r, b: "utf-8", ) -> None: + # We initialise _connector to None immediately, as it's referenced in __del__() + # and could cause issues if an exception occurs during initialisation. + self._connector: Optional[BaseConnector] = None + if timeout is sentinel or timeout is None: + self._timeout = DEFAULT_TIMEOUT + if read_timeout is not sentinel: + warnings.warn( + "read_timeout is deprecated, " "use timeout argument instead", + DeprecationWarning, + stacklevel=2, + ) + self._timeout = attr.evolve(self._timeout, total=read_timeout) + if conn_timeout is not None: + self._timeout = attr.evolve(self._timeout, connect=conn_timeout) + warnings.warn( + "conn_timeout is deprecated, " "use timeout argument instead", + DeprecationWarning, + stacklevel=2, + ) + else: + if not isinstance(timeout, ClientTimeout): + raise ValueError( + f"timeout parameter cannot be of {type(timeout)} type, " + "please use 'timeout=ClientTimeout(...)'", + ) + self._timeout = timeout + if read_timeout is not sentinel: + raise ValueError( + "read_timeout and timeout parameters " + "conflict, please setup " + "timeout.read" + ) + if conn_timeout is not None: + raise ValueError( + "conn_timeout and timeout parameters " + "conflict, please setup " + "timeout.connect" + ) if loop is None: if connector is not None: loop = connector._loop @@ -271,41 +309,6 @@ def __init__( self._default_auth = auth self._version = version self._json_serialize = json_serialize - if timeout is sentinel or 
timeout is None: - self._timeout = DEFAULT_TIMEOUT - if read_timeout is not sentinel: - warnings.warn( - "read_timeout is deprecated, " "use timeout argument instead", - DeprecationWarning, - stacklevel=2, - ) - self._timeout = attr.evolve(self._timeout, total=read_timeout) - if conn_timeout is not None: - self._timeout = attr.evolve(self._timeout, connect=conn_timeout) - warnings.warn( - "conn_timeout is deprecated, " "use timeout argument instead", - DeprecationWarning, - stacklevel=2, - ) - else: - if not isinstance(timeout, ClientTimeout): - raise ValueError( - f"timeout parameter cannot be of {type(timeout)} type, " - "please use 'timeout=ClientTimeout(...)'", - ) - self._timeout = timeout - if read_timeout is not sentinel: - raise ValueError( - "read_timeout and timeout parameters " - "conflict, please setup " - "timeout.read" - ) - if conn_timeout is not None: - raise ValueError( - "conn_timeout and timeout parameters " - "conflict, please setup " - "timeout.connect" - ) self._raise_for_status = raise_for_status self._auto_decompress = auto_decompress self._trust_env = trust_env diff --git a/tests/test_client_session.py b/tests/test_client_session.py index a0654ed8ccd..416b6bbce5d 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -885,3 +885,13 @@ async def test_build_url_returns_expected_url( ) -> None: session = await create_session(base_url) assert session._build_url(url) == expected_url + + +async def test_instantiation_with_invalid_timeout_value(loop): + loop.set_debug(False) + logs = [] + loop.set_exception_handler(lambda loop, ctx: logs.append(ctx)) + with pytest.raises(ValueError, match="timeout parameter cannot be .*"): + ClientSession(timeout=1) + # should not have "Unclosed client session" warning + assert not logs From b844d4293a5bd4921bd7267550294122e83617a8 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Tue, 16 Apr 2024 17:29:00 +0100 Subject: [PATCH 0179/1511] Release v3.9.5 (#8340) MIME-Version: 
1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Sviatoslav Sydorenko (Святослав Сидоренко) <sviat@redhat.com> --- CHANGES.rst | 36 ++++++++++++++++++++++++++++++++++++ CHANGES/8253.bugfix | 1 - CHANGES/8332.bugfix.rst | 1 - CHANGES/8335.bugfix.rst | 1 - aiohttp/__init__.py | 2 +- docs/contributing-admins.rst | 2 +- 6 files changed, 38 insertions(+), 5 deletions(-) delete mode 100644 CHANGES/8253.bugfix delete mode 100644 CHANGES/8332.bugfix.rst delete mode 100644 CHANGES/8335.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index 523b4a84787..5b02623067a 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,42 @@ .. towncrier release notes start +3.9.5 (2024-04-16) +================== + +Bug fixes +--------- + +- Fixed "Unclosed client session" when initialization of + :py:class:`~aiohttp.ClientSession` fails -- by :user:`NewGlad`. + + + *Related issues and pull requests on GitHub:* + :issue:`8253`. + + + +- Fixed regression (from :pr:`8280`) with adding ``Content-Disposition`` to the ``form-data`` + part after appending to writer -- by :user:`Dreamsorcerer`/:user:`Olegt0rr`. + + + *Related issues and pull requests on GitHub:* + :issue:`8332`. + + + +- Added default ``Content-Disposition`` in ``multipart/form-data`` responses to avoid broken + form-data responses -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8335`. + + + + +---- + 3.9.4 (2024-04-11) ================== diff --git a/CHANGES/8253.bugfix b/CHANGES/8253.bugfix deleted file mode 100644 index 91b06d9b35d..00000000000 --- a/CHANGES/8253.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fixed "Unclosed client session" when initialization of ClientSession fails -- by :user:`NewGlad`. 
diff --git a/CHANGES/8332.bugfix.rst b/CHANGES/8332.bugfix.rst deleted file mode 100644 index 70cad26b426..00000000000 --- a/CHANGES/8332.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed regression with adding Content-Disposition to form-data part after appending to writer -- by :user:`Dreamsorcerer`/:user:`Olegt0rr`. diff --git a/CHANGES/8335.bugfix.rst b/CHANGES/8335.bugfix.rst deleted file mode 100644 index cd93b864a50..00000000000 --- a/CHANGES/8335.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Added default Content-Disposition in multipart/form-data responses -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index c4af7b6f4dd..e82e790b46a 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.9.4.dev0" +__version__ = "3.9.5" from typing import TYPE_CHECKING, Tuple diff --git a/docs/contributing-admins.rst b/docs/contributing-admins.rst index 488953c6cc5..9444f8ac5c4 100644 --- a/docs/contributing-admins.rst +++ b/docs/contributing-admins.rst @@ -47,7 +47,7 @@ first merge into the newer release branch (e.g. 3.8 into 3.9) and then to master the target branch (as tests have passed on the merge commit now). #. This should automatically consider the PR merged and delete the temporary branch. -Back on the original release branch, append ``.dev0`` to the version number in ``__init__.py``. +Back on the original release branch, bump the version number and append ``.dev0`` in ``__init__.py``. If doing a minor release: From 010f2ad37d64120cbbf6e49f80e497b72ec5231d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 17 Apr 2024 11:06:00 +0000 Subject: [PATCH 0180/1511] Bump gunicorn from 21.2.0 to 22.0.0 (#8344) Bumps [gunicorn](https://github.com/benoitc/gunicorn) from 21.2.0 to 22.0.0. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/benoitc/gunicorn/releases">gunicorn's releases</a>.</em></p> <blockquote> <h2>Gunicorn 22.0 has been released</h2> <p><strong>Gunicorn 22.0.0 has been released.</strong> This version fix the numerous security vulnerabilities. You're invited to upgrade asap your own installation.</p> <p>Changes:</p> <pre><code>22.0.0 - 2024-04-17 =================== <ul> <li>use <code>utime</code> to notify workers liveness</li> <li>migrate setup to pyproject.toml</li> <li>fix numerous security vulnerabilities in HTTP parser (closing some request smuggling vectors)</li> <li>parsing additional requests is no longer attempted past unsupported request framing</li> <li>on HTTP versions < 1.1 support for chunked transfer is refused (only used in exploits)</li> <li>requests conflicting configured or passed SCRIPT_NAME now produce a verbose error</li> <li>Trailer fields are no longer inspected for headers indicating secure scheme</li> <li>support Python 3.12</li> </ul> <p>** Breaking changes **</p> <ul> <li>minimum version is Python 3.7</li> <li>the limitations on valid characters in the HTTP method have been bounded to Internet Standards</li> <li>requests specifying unsupported transfer coding (order) are refused by default (rare)</li> <li>HTTP methods are no longer casefolded by default (IANA method registry contains none affected)</li> <li>HTTP methods containing the number sign (#) are no longer accepted by default (rare)</li> <li>HTTP versions < 1.0 or >= 2.0 are no longer accepted by default (rare, only HTTP/1.1 is supported)</li> <li>HTTP versions consisting of multiple digits or containing a prefix/suffix are no longer accepted</li> <li>HTTP header field names Gunicorn cannot safely map to variables are silently dropped, as in other software</li> <li>HTTP headers with empty field name are refused by default (no legitimate use cases, used in exploits)</li> <li>requests with both 
Transfer-Encoding and Content-Length are refused by default (such a message might indicate an attempt to perform request smuggling)</li> <li>empty transfer codings are no longer permitted (reportedly seen with really old & broken proxies)</li> </ul> <p>** SECURITY **</p> <ul> <li>fix CVE-2024-1135 </code></pre></li> </ul> <ol> <li>Documentation is available there: <a href="https://docs.gunicorn.org/en/stable/news.html">https://docs.gunicorn.org/en/stable/news.html</a></li> <li>Packages: <a href="https://pypi.org/project/gunicorn/">https://pypi.org/project/gunicorn/</a></li> </ol> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/benoitc/gunicorn/commit/f63d59e4d73a8ee28748d2c700fb81c8780bc419"><code>f63d59e</code></a> bump to 22.0</li> <li><a href="https://github.com/benoitc/gunicorn/commit/4ac81e0a1037ba5b570323be7430e09caa233e38"><code>4ac81e0</code></a> Merge pull request <a href="https://redirect.github.com/benoitc/gunicorn/issues/3175">#3175</a> from e-kwsm/typo</li> <li><a href="https://github.com/benoitc/gunicorn/commit/401cecfaed85d79236c7a9a1f7d8946b01c466fc"><code>401cecf</code></a> Merge pull request <a href="https://redirect.github.com/benoitc/gunicorn/issues/3179">#3179</a> from dhdaines/exclude-eventlet-0360</li> <li><a href="https://github.com/benoitc/gunicorn/commit/0243ec39ef4fc1b479ff4e1659e165f0b980b571"><code>0243ec3</code></a> fix(deps): exclude eventlet 0.36.0</li> <li><a href="https://github.com/benoitc/gunicorn/commit/628a0bcb61ef3a211d67dfd68ad1ba161cccb3b8"><code>628a0bc</code></a> chore: fix typos</li> <li><a href="https://github.com/benoitc/gunicorn/commit/88fc4a43152039c28096c8ba3eeadb3fbaa4aff9"><code>88fc4a4</code></a> Merge pull request <a href="https://redirect.github.com/benoitc/gunicorn/issues/3131">#3131</a> from pajod/patch-py12-rebased</li> <li><a href="https://github.com/benoitc/gunicorn/commit/deae2fc4c5f93bfce59be5363055d4cd4ab1b0b6"><code>deae2fc</code></a> CI: back off the 
agressive timeout</li> <li><a href="https://github.com/benoitc/gunicorn/commit/f4703824c323fe6867dce0e2f11013b8de319353"><code>f470382</code></a> docs: promise 3.12 compat</li> <li><a href="https://github.com/benoitc/gunicorn/commit/5e30bfa6b1a3e1f2bde7feb514d1734d28f39231"><code>5e30bfa</code></a> add changelog to project.urls (updated for PEP621)</li> <li><a href="https://github.com/benoitc/gunicorn/commit/481c3f9522edc58806a3efc5b49be4f202cc7700"><code>481c3f9</code></a> remove setup.cfg - overridden by pyproject.toml</li> <li>Additional commits viewable in <a href="https://github.com/benoitc/gunicorn/compare/21.2.0...22.0.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=gunicorn&package-manager=pip&previous-version=21.2.0&new-version=22.0.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index e7ddfd3085d..2fc77c5455f 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -22,7 +22,7 @@ frozenlist==1.4.1 # via # -r requirements/runtime-deps.in # aiosignal -gunicorn==21.2.0 +gunicorn==22.0.0 # via -r requirements/base.in idna==3.4 # via yarl diff --git a/requirements/constraints.txt b/requirements/constraints.txt index f7e0b7c4798..23b1ce98dfe 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -82,7 +82,7 @@ funcparserlib==1.0.1 # via blockdiag gidgethub==5.0.1 # via cherry-picker -gunicorn==21.2.0 +gunicorn==22.0.0 # via -r requirements/base.in identify==2.3.5 # via pre-commit diff --git a/requirements/dev.txt b/requirements/dev.txt index 4e11ab6c31b..702b2649304 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -78,7 +78,7 @@ funcparserlib==1.0.1 # via blockdiag gidgethub==5.3.0 # via cherry-picker -gunicorn==21.2.0 +gunicorn==22.0.0 # 
via -r requirements/base.in identify==2.5.26 # via pre-commit diff --git a/requirements/test.txt b/requirements/test.txt index a55d127d077..f879b83440b 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -44,7 +44,7 @@ frozenlist==1.4.1 # via # -r requirements/runtime-deps.in # aiosignal -gunicorn==21.2.0 +gunicorn==22.0.0 # via -r requirements/base.in idna==3.4 # via From 3d280bf027753475ffbdb21320b2d119b7f70a8b Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 18 Apr 2024 23:34:45 +0100 Subject: [PATCH 0181/1511] [PR #8346/b43ac8a7 backport][3.10] :memo: Add nacl_middleware to third_party.rst. (#8347) **This is a backport of PR #8346 as merged into master (b43ac8a7fadd564d1ff518f40bcddfc1d1b299e9).** Co-authored-by: Webmaster At Cosmic DNA <92752640+DanielAtCosmicDNA@users.noreply.github.com> --- CHANGES/8346.misc.rst | 1 + docs/third_party.rst | 3 +++ 2 files changed, 4 insertions(+) create mode 100644 CHANGES/8346.misc.rst diff --git a/CHANGES/8346.misc.rst b/CHANGES/8346.misc.rst new file mode 100644 index 00000000000..e3e1a309be1 --- /dev/null +++ b/CHANGES/8346.misc.rst @@ -0,0 +1 @@ +Add `nacl_middleware <https://github.com/CosmicDNA/nacl_middleware>`_ to the list of middlewares in the third party section of the documentation. diff --git a/docs/third_party.rst b/docs/third_party.rst index 5c354f1e6c6..29522e5364f 100644 --- a/docs/third_party.rst +++ b/docs/third_party.rst @@ -295,3 +295,6 @@ ask to raise the status. - `rsocket <https://github.com/rsocket/rsocket-py>`_ Python implementation of `RSocket protocol <https://rsocket.io>`_. + +- `nacl_middleware <https://github.com/CosmicDNA/nacl_middleware>`_ + An aiohttp middleware library for asymmetric encryption of data transmitted via http and/or websocket connections. 
From 87b688902280f9f126894e824329c3efb7b75f4e Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 19 Apr 2024 00:12:03 +0000 Subject: [PATCH 0182/1511] [PR #8339/c26f3562 backport][3.10] Upgrade the deprecated GitHub Actions `set-output` workflow command syntax (#8349) **This is a backport of PR #8339 as merged into master (c26f356234c3b49e08f9cab3019b88f5d42f1aa8).** Updating the way of saving output for cache path, cause [GitHub Actions deprecating save-state and set-output commands](https://github.blog/changelog/2022-10-11-github-actions-deprecating-save-state-and-set-output-commands/) --- .github/workflows/ci-cd.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 0b9c1dbcb96..965fcba061b 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -162,7 +162,8 @@ jobs: - name: Get pip cache dir id: pip-cache run: | - echo "::set-output name=dir::$(pip cache dir)" # - name: Cache + echo "dir=$(pip cache dir)" >> "${GITHUB_OUTPUT}" + shell: bash - name: Cache PyPI uses: actions/cache@v4.0.2 with: From b7cbabd78a67cef06b9d4df9924cfc47d98d86e8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 19 Apr 2024 17:13:40 +0000 Subject: [PATCH 0183/1511] Bump pytest from 7.4.4 to 8.1.1 (#8223) Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.4.4 to 8.1.1. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pytest-dev/pytest/releases">pytest's releases</a>.</em></p> <blockquote> <h2>8.1.1</h2> <h1>pytest 8.1.1 (2024-03-08)</h1> <p>::: {.note} ::: {.title} Note :::</p> <p>This release is not a usual bug fix release -- it contains features and improvements, being a follow up to <code>8.1.0</code>, which has been yanked from PyPI. 
:::</p> <h2>Features</h2> <ul> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/11475">#11475</a>: Added the new <code>consider_namespace_packages</code>{.interpreted-text role="confval"} configuration option, defaulting to <code>False</code>.</p> <p>If set to <code>True</code>, pytest will attempt to identify modules that are part of <a href="https://packaging.python.org/en/latest/guides/packaging-namespace-packages">namespace packages</a> when importing modules.</p> </li> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/11653">#11653</a>: Added the new <code>verbosity_test_cases</code>{.interpreted-text role="confval"} configuration option for fine-grained control of test execution verbosity. See <code>Fine-grained verbosity <pytest.fine_grained_verbosity></code>{.interpreted-text role="ref"} for more details.</p> </li> </ul> <h2>Improvements</h2> <ul> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/10865">#10865</a>: <code>pytest.warns</code>{.interpreted-text role="func"} now validates that <code>warnings.warn</code>{.interpreted-text role="func"} was called with a [str]{.title-ref} or a [Warning]{.title-ref}. Currently in Python it is possible to use other types, however this causes an exception when <code>warnings.filterwarnings</code>{.interpreted-text role="func"} is used to filter those warnings (see [CPython <a href="https://redirect.github.com/pytest-dev/pytest/issues/103577">#103577</a>](<a href="https://redirect.github.com/python/cpython/issues/103577">python/cpython#103577</a>) for a discussion). 
While this can be considered a bug in CPython, we decided to put guards in pytest as the error message produced without this check in place is confusing.</p> </li> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/11311">#11311</a>: When using <code>--override-ini</code> for paths in invocations without a configuration file defined, the current working directory is used as the relative directory.</p> <p>Previoulsy this would raise an <code>AssertionError</code>{.interpreted-text role="class"}.</p> </li> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/11475">#11475</a>: <code>--import-mode=importlib <import-mode-importlib></code>{.interpreted-text role="ref"} now tries to import modules using the standard import mechanism (but still without changing :py<code>sys.path</code>{.interpreted-text role="data"}), falling back to importing modules directly only if that fails.</p> <p>This means that installed packages will be imported under their canonical name if possible first, for example <code>app.core.models</code>, instead of having the module name always be derived from their path (for example <code>.env310.lib.site_packages.app.core.models</code>).</p> </li> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/11801">#11801</a>: Added the <code>iter_parents() <_pytest.nodes.Node.iter_parents></code>{.interpreted-text role="func"} helper method on nodes. 
It is similar to <code>listchain <_pytest.nodes.Node.listchain></code>{.interpreted-text role="func"}, but goes from bottom to top, and returns an iterator, not a list.</p> </li> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/11850">#11850</a>: Added support for <code>sys.last_exc</code>{.interpreted-text role="data"} for post-mortem debugging on Python>=3.12.</p> </li> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/11962">#11962</a>: In case no other suitable candidates for configuration file are found, a <code>pyproject.toml</code> (even without a <code>[tool.pytest.ini_options]</code> table) will be considered as the configuration file and define the <code>rootdir</code>.</p> </li> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/11978">#11978</a>: Add <code>--log-file-mode</code> option to the logging plugin, enabling appending to log-files. This option accepts either <code>"w"</code> or <code>"a"</code> and defaults to <code>"w"</code>.</p> <p>Previously, the mode was hard-coded to be <code>"w"</code> which truncates the file before logging.</p> </li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pytest-dev/pytest/commit/81653ee385f4c62ee7e64502a7b7530096553115"><code>81653ee</code></a> Adjust changelog manually for 8.1.1</li> <li><a href="https://github.com/pytest-dev/pytest/commit/e60b4b9ed80f761e3a51868a01338911a567b093"><code>e60b4b9</code></a> Prepare release version 8.1.1</li> <li><a href="https://github.com/pytest-dev/pytest/commit/15fbe57c44fed6737f5c6dad99cf4437b6755a6c"><code>15fbe57</code></a> [8.1.x] Revert legacy path removals (<a href="https://redirect.github.com/pytest-dev/pytest/issues/12093">#12093</a>)</li> <li><a href="https://github.com/pytest-dev/pytest/commit/86c3aab005a98de7e12ee5e37782837f5db70ac3"><code>86c3aab</code></a> [8.1.x] Do not import duplicated modules with --importmode=importlib (<a href="https://redirect.github.com/pytest-dev/pytest/issues/12077">#12077</a>)</li> <li><a href="https://github.com/pytest-dev/pytest/commit/5b82b0cd20c3adcc21f34ae30c595c7355a87e23"><code>5b82b0c</code></a> [8.1.x] Yank version 8.1.0 (<a href="https://redirect.github.com/pytest-dev/pytest/issues/12076">#12076</a>)</li> <li><a href="https://github.com/pytest-dev/pytest/commit/0a536810dc5f51dac99bdb90dde06704b5aa034e"><code>0a53681</code></a> Merge pull request <a href="https://redirect.github.com/pytest-dev/pytest/issues/12054">#12054</a> from pytest-dev/release-8.1.0</li> <li><a href="https://github.com/pytest-dev/pytest/commit/b9a167f9bbbd6eda4f0360c5bf5b7f5af50f2bc4"><code>b9a167f</code></a> Prepare release version 8.1.0</li> <li><a href="https://github.com/pytest-dev/pytest/commit/00043f7f1047b29fdaeb18e169fe9d6146988cb8"><code>00043f7</code></a> Merge pull request <a href="https://redirect.github.com/pytest-dev/pytest/issues/12038">#12038</a> from bluetech/fixtures-rm-arg2index</li> <li><a href="https://github.com/pytest-dev/pytest/commit/f4e10251a4a003495b5228cea421d4de5fa0ce89"><code>f4e1025</code></a> Merge pull request <a 
href="https://redirect.github.com/pytest-dev/pytest/issues/12048">#12048</a> from bluetech/fixture-teardown-excgroup</li> <li><a href="https://github.com/pytest-dev/pytest/commit/43492f5707b38dab9b62dfb829bb41a13579629f"><code>43492f5</code></a> Merge pull request <a href="https://redirect.github.com/pytest-dev/pytest/issues/12051">#12051</a> from jakkdl/test_debugging_pythonbreakpoint</li> <li>Additional commits viewable in <a href="https://github.com/pytest-dev/pytest/compare/7.4.4...8.1.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pytest&package-manager=pip&previous-version=7.4.4&new-version=8.1.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) You can trigger a rebase of this PR by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> > **Note** > Automatic rebases have been disabled on this pull request as it has been open for over 30 days. --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: J. Nick Koston <nick@koston.org> Co-authored-by: Sam Bull <git@sambull.org> --- requirements/constraints.txt | 4 ++-- requirements/dev.txt | 4 ++-- requirements/lint.txt | 4 ++-- requirements/test.txt | 4 ++-- tests/test_pytest_plugin.py | 12 ++++++------ 5 files changed, 14 insertions(+), 14 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 23b1ce98dfe..7102cc9f3ac 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -136,7 +136,7 @@ pip-tools==7.4.1 # via -r requirements/dev.in platformdirs==2.4.0 # via virtualenv -pluggy==1.0.0 +pluggy==1.4.0 # via pytest pre-commit==3.5.0 # via -r requirements/lint.in @@ -164,7 +164,7 @@ pyproject-hooks==1.0.0 # via # build # pip-tools -pytest==7.4.4 +pytest==8.1.1 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 702b2649304..fd91ccc895d 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -131,7 +131,7 @@ 
pip-tools==7.4.1 # via -r requirements/dev.in platformdirs==3.10.0 # via virtualenv -pluggy==1.2.0 +pluggy==1.4.0 # via pytest pre-commit==3.5.0 # via -r requirements/lint.in @@ -155,7 +155,7 @@ pyproject-hooks==1.0.0 # via # build # pip-tools -pytest==7.4.4 +pytest==8.1.1 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index fcfac455b3e..c61afb3c20b 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -32,11 +32,11 @@ packaging==23.1 # via pytest platformdirs==3.10.0 # via virtualenv -pluggy==1.2.0 +pluggy==1.4.0 # via pytest pre-commit==3.5.0 # via -r requirements/lint.in -pytest==7.4.4 +pytest==8.1.1 # via -r requirements/lint.in pyyaml==6.0.1 # via pre-commit diff --git a/requirements/test.txt b/requirements/test.txt index f879b83440b..dde37c806bd 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -65,7 +65,7 @@ packaging==23.1 # via # gunicorn # pytest -pluggy==1.2.0 +pluggy==1.4.0 # via pytest proxy-py==2.4.4rc4 # via -r requirements/test.in @@ -77,7 +77,7 @@ pydantic==2.2.0 # via python-on-whales pydantic-core==2.6.0 # via pydantic -pytest==7.4.4 +pytest==8.1.1 # via # -r requirements/test.in # pytest-cov diff --git a/tests/test_pytest_plugin.py b/tests/test_pytest_plugin.py index b25a553b868..ad222545294 100644 --- a/tests/test_pytest_plugin.py +++ b/tests/test_pytest_plugin.py @@ -19,6 +19,8 @@ def test_aiohttp_plugin(testdir) -> None: from aiohttp import web +value = web.AppKey('value', str) + async def hello(request): return web.Response(body=b'Hello, world') @@ -75,10 +77,10 @@ async def test_noop() -> None: async def previous(request): if request.method == 'POST': with pytest.deprecated_call(): # FIXME: this isn't actually called - request.app['value'] = (await request.post())['value'] + request.app[value] = (await request.post())['value'] return web.Response(body=b'thanks for the data') else: - v = request.app.get('value', 'unknown') + v = 
request.app.get(value, 'unknown') return web.Response(body='value: {}'.format(v).encode()) @@ -98,7 +100,7 @@ async def test_set_value(cli) -> None: assert resp.status == 200 text = await resp.text() assert text == 'thanks for the data' - assert cli.server.app['value'] == 'foo' + assert cli.server.app[value] == 'foo' async def test_get_value(cli) -> None: @@ -107,7 +109,7 @@ async def test_get_value(cli) -> None: text = await resp.text() assert text == 'value: unknown' with pytest.warns(DeprecationWarning): - cli.server.app['value'] = 'bar' + cli.server.app[value] = 'bar' resp = await cli.get('/') assert resp.status == 200 text = await resp.text() @@ -119,7 +121,6 @@ def test_noncoro() -> None: async def test_failed_to_create_client(aiohttp_client) -> None: - def make_app(loop): raise RuntimeError() @@ -142,7 +143,6 @@ async def test_custom_port_test_server(aiohttp_server, aiohttp_unused_port): port = aiohttp_unused_port() server = await aiohttp_server(app, port=port) assert server.port == port - """ ) testdir.makeconftest(CONFTEST) From 89b7c74ddd77e6ab11c1ba4edaac99e672e4dd24 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 19 Apr 2024 19:24:33 +0000 Subject: [PATCH 0184/1511] Bump proxy-py from 2.4.4rc4 to 2.4.4rc5 (#8329) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [proxy-py](https://github.com/abhinavsingh/proxy.py) from 2.4.4rc4 to 2.4.4rc5. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/abhinavsingh/proxy.py/releases">proxy-py's releases</a>.</em></p> <blockquote> <h2>v2.4.4rc5</h2> <p><strong>Full Changelog</strong>: <a href="https://github.com/abhinavsingh/proxy.py/compare/v2.4.4rc5...v2.4.4rc4">https://github.com/abhinavsingh/proxy.py/compare/v2.4.4rc5...v2.4.4rc4</a></p> <h2>What's Changed</h2> <ul> <li>Remove <code>codecov</code> from dependency files by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1328">abhinavsingh/proxy.py#1328</a></li> <li>Support --hostnames by <a href="https://github.com/alexey-pelykh"><code>@​alexey-pelykh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1325">abhinavsingh/proxy.py#1325</a></li> <li>Update project test dependencies to ensure green workflow by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1371">abhinavsingh/proxy.py#1371</a></li> <li>Migrate away from setuptools_scm_git_archive by <a href="https://github.com/tjni"><code>@​tjni</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1345">abhinavsingh/proxy.py#1345</a></li> <li>fix: Bypass proxy authentication with HTTP/1.0 requests <a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1267">#1267</a> by <a href="https://github.com/dongfangtianyu"><code>@​dongfangtianyu</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1342">abhinavsingh/proxy.py#1342</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/tjni"><code>@​tjni</code></a> made their first contribution in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1345">abhinavsingh/proxy.py#1345</a></li> <li><a href="https://github.com/dongfangtianyu"><code>@​dongfangtianyu</code></a> made their 
first contribution in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1342">abhinavsingh/proxy.py#1342</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/abhinavsingh/proxy.py/compare/v2.4.4rc4...v2.4.4rc5">https://github.com/abhinavsingh/proxy.py/compare/v2.4.4rc4...v2.4.4rc5</a></p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/81510a0cec870c40a9d73adb2d453e6211ac39e9"><code>81510a0</code></a> FIX proxy authentication bypass with HTTP/1.0 requests <a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1267">#1267</a> (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1342">#1342</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/a8a7d7e19c8d7e67e57c55d0de04e2e9238c17ba"><code>a8a7d7e</code></a> Migrate away from setuptools_scm_git_archive (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1345">#1345</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/ddc96d4c09d1bcc3378f83ab510f4c6af084c185"><code>ddc96d4</code></a> Update project test dependencies to ensure green workflow (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1371">#1371</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/30574fd0414005dfa8792a6e797023e862bdcf43"><code>30574fd</code></a> Support --hostnames (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1325">#1325</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/ac4d5a705bf7c6ea46f1f8b8ae0c24756c5cf704"><code>ac4d5a7</code></a> Remove <code>codecov</code> from dependency files (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1328">#1328</a>)</li> <li>See full diff in <a href="https://github.com/abhinavsingh/proxy.py/compare/v2.4.4rc4...v2.4.4rc5">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=proxy-py&package-manager=pip&previous-version=2.4.4rc4&new-version=2.4.4rc5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 7102cc9f3ac..b7a757dfe3e 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -140,7 +140,7 @@ pluggy==1.4.0 # via pytest pre-commit==3.5.0 # via -r requirements/lint.in -proxy-py==2.4.4rc4 +proxy-py==2.4.4rc5 # via -r requirements/test.in pycares==4.3.0 # via aiodns diff --git a/requirements/dev.txt b/requirements/dev.txt index fd91ccc895d..0a7f409ad8b 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -135,7 +135,7 @@ pluggy==1.4.0 # via pytest pre-commit==3.5.0 # via -r requirements/lint.in -proxy-py==2.4.4rc4 +proxy-py==2.4.4rc5 # via -r requirements/test.in pycares==4.3.0 # via aiodns diff --git a/requirements/test.txt b/requirements/test.txt index dde37c806bd..a7d7c235fbf 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -67,7 +67,7 @@ packaging==23.1 # pytest pluggy==1.4.0 # via pytest -proxy-py==2.4.4rc4 +proxy-py==2.4.4rc5 # via -r requirements/test.in 
pycares==4.3.0 # via aiodns From 94a02324821ed51224282a959b67c91551ba3f02 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 19 Apr 2024 20:29:07 +0100 Subject: [PATCH 0185/1511] [PR #8353/8e4167d1 backport][3.10] Skip labels for dependabot (#8354) **This is a backport of PR #8353 as merged into master (8e4167d1ee54852205558f856a9bc006751c6452).** Co-authored-by: Sam Bull <git@sambull.org> --- .github/workflows/labels.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml index a4e961e88af..268ef1c7cdb 100644 --- a/.github/workflows/labels.yml +++ b/.github/workflows/labels.yml @@ -9,6 +9,7 @@ jobs: backport: runs-on: ubuntu-latest name: Backport label added + if: ${{ github.actor != 'dependabot[bot]' }} steps: - uses: actions/github-script@v7 with: From fcdedb8ecab44de0f2815cefe8b67c15ce9c9319 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Fri, 19 Apr 2024 20:42:49 +0100 Subject: [PATCH 0186/1511] Remove typing-extensions (#8352) (#8355) (cherry picked from commit e8d4b7b65583ec58cfe09699b5d67b5622ef761b) --- requirements/base.in | 1 - requirements/cython.in | 1 - requirements/lint.in | 2 -- requirements/typing-extensions.in | 1 - requirements/typing-extensions.txt | 8 -------- 5 files changed, 13 deletions(-) delete mode 100644 requirements/typing-extensions.in delete mode 100644 requirements/typing-extensions.txt diff --git a/requirements/base.in b/requirements/base.in index df67f78afde..70493b6c83a 100644 --- a/requirements/base.in +++ b/requirements/base.in @@ -1,4 +1,3 @@ --r typing-extensions.in -r runtime-deps.in gunicorn diff --git a/requirements/cython.in b/requirements/cython.in index ee07533e17c..6f0238f170d 100644 --- a/requirements/cython.in +++ b/requirements/cython.in @@ -1,4 +1,3 @@ -r multidict.in --r typing-extensions.in # required for parsing aiohttp/hdrs.py by tools/gen.py Cython diff --git 
a/requirements/lint.in b/requirements/lint.in index 34616155912..f1f16a99aa9 100644 --- a/requirements/lint.in +++ b/requirements/lint.in @@ -1,5 +1,3 @@ --r typing-extensions.in - aioredis mypy; implementation_name == "cpython" pre-commit diff --git a/requirements/typing-extensions.in b/requirements/typing-extensions.in deleted file mode 100644 index 5fd4f05f341..00000000000 --- a/requirements/typing-extensions.in +++ /dev/null @@ -1 +0,0 @@ -typing_extensions diff --git a/requirements/typing-extensions.txt b/requirements/typing-extensions.txt deleted file mode 100644 index 289f5da53cb..00000000000 --- a/requirements/typing-extensions.txt +++ /dev/null @@ -1,8 +0,0 @@ -# -# This file is autogenerated by pip-compile with python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --output-file=requirements/typing-extensions.txt --resolver=backtracking --strip-extras requirements/typing-extensions.in -# -typing-extensions==4.11.0 - # via -r requirements/typing-extensions.in From d62f96f2a062e7222c4a5670681013e855efa704 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 19 Apr 2024 21:35:56 +0100 Subject: [PATCH 0187/1511] [PR #8351/131703fb backport][3.10] Fix dependabot backport target (#8357) **This is a backport of PR #8351 as merged into master (131703fbe4b994f4402b47ff424ef8f532cbb1d5).** Co-authored-by: Sam Bull <git@sambull.org> --- .github/dependabot.yml | 6 +++--- docs/contributing-admins.rst | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 3b392a34b3b..d1898c69e6e 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -18,17 +18,17 @@ updates: interval: "daily" open-pull-requests-limit: 10 - # Maintain dependencies for GitHub Actions aiohttp 3.9 + # Maintain dependencies for GitHub Actions aiohttp backport - package-ecosystem: "github-actions" directory: "/" labels: - dependencies - target-branch: 
"3.9" + target-branch: "3.10" schedule: interval: "daily" open-pull-requests-limit: 10 - # Maintain dependencies for Python aiohttp 3.10 + # Maintain dependencies for Python aiohttp backport - package-ecosystem: "pip" directory: "/" labels: diff --git a/docs/contributing-admins.rst b/docs/contributing-admins.rst index 9444f8ac5c4..acfaebc0e97 100644 --- a/docs/contributing-admins.rst +++ b/docs/contributing-admins.rst @@ -52,6 +52,6 @@ Back on the original release branch, bump the version number and append ``.dev0` If doing a minor release: #. Create a new release branch for future features to go to: e.g. ``git checkout -b 3.10 3.9 && git push`` -#. Update ``target-branch`` for Dependabot to reference the new branch name in ``.github/dependabot.yml``. +#. Update both ``target-branch`` backports for Dependabot to reference the new branch name in ``.github/dependabot.yml``. #. Delete the older backport label (e.g. backport-3.8): https://github.com/aio-libs/aiohttp/labels #. Add a new backport label (e.g. backport-3.10). From b2afad1ad28cdca5e0c93752c5d9edb801f49bf1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 19 Apr 2024 21:36:22 +0100 Subject: [PATCH 0188/1511] Bump codecov/codecov-action from 3 to 4 (#8358) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [codecov/codecov-action](https://github.com/codecov/codecov-action) from 3 to 4. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/codecov/codecov-action/releases">codecov/codecov-action's releases</a>.</em></p> <blockquote> <h2>v4.0.0</h2> <p>v4 of the Codecov Action uses the <a href="https://docs.codecov.com/docs/the-codecov-cli">CLI</a> as the underlying upload. 
The CLI has helped to power new features including local upload, the global upload token, and new upcoming features.</p> <h2>Breaking Changes</h2> <ul> <li>The Codecov Action runs as a <code>node20</code> action due to <code>node16</code> deprecation. See <a href="https://github.blog/changelog/2023-09-22-github-actions-transitioning-from-node-16-to-node-20/">this post from GitHub</a> on how to migrate.</li> <li>Tokenless uploading is unsupported. However, PRs made from forks to the upstream public repos will support tokenless (e.g. contributors to OS projects do not need the upstream repo's Codecov token). This <a href="https://docs.codecov.com/docs/adding-the-codecov-token#github-actions">doc</a> shows instructions on how to add the Codecov token.</li> <li>OS platforms have been added, though some may not be automatically detected. To see a list of platforms, see our <a href="https://cli.codecov.io">CLI download page</a></li> <li>Various arguments to the Action have been changed. Please be aware that the arguments match with the CLI's needs</li> </ul> <p><code>v3</code> versions and below will not have access to CLI features (e.g. 
global upload token, ATS).</p> <h2>What's Changed</h2> <ul> <li>build(deps): bump openpgp from 5.8.0 to 5.9.0 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/985">codecov/codecov-action#985</a></li> <li>build(deps): bump actions/checkout from 3.0.0 to 3.5.3 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1000">codecov/codecov-action#1000</a></li> <li>build(deps): bump ossf/scorecard-action from 2.1.3 to 2.2.0 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1006">codecov/codecov-action#1006</a></li> <li>build(deps): bump tough-cookie from 4.0.0 to 4.1.3 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1013">codecov/codecov-action#1013</a></li> <li>build(deps-dev): bump word-wrap from 1.2.3 to 1.2.4 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1024">codecov/codecov-action#1024</a></li> <li>build(deps): bump node-fetch from 3.3.1 to 3.3.2 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1031">codecov/codecov-action#1031</a></li> <li>build(deps-dev): bump <code>@​types/node</code> from 20.1.4 to 20.4.5 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1032">codecov/codecov-action#1032</a></li> <li>build(deps): bump github/codeql-action from 1.0.26 to 2.21.2 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a 
href="https://redirect.github.com/codecov/codecov-action/pull/1033">codecov/codecov-action#1033</a></li> <li>build commit,report and upload args based on codecovcli by <a href="https://github.com/dana-yaish"><code>@​dana-yaish</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/943">codecov/codecov-action#943</a></li> <li>build(deps-dev): bump <code>@​types/node</code> from 20.4.5 to 20.5.3 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1055">codecov/codecov-action#1055</a></li> <li>build(deps): bump github/codeql-action from 2.21.2 to 2.21.4 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1051">codecov/codecov-action#1051</a></li> <li>build(deps-dev): bump <code>@​types/node</code> from 20.5.3 to 20.5.4 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1058">codecov/codecov-action#1058</a></li> <li>chore(deps): update outdated deps by <a href="https://github.com/thomasrockhu-codecov"><code>@​thomasrockhu-codecov</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1059">codecov/codecov-action#1059</a></li> <li>build(deps-dev): bump <code>@​types/node</code> from 20.5.4 to 20.5.6 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1060">codecov/codecov-action#1060</a></li> <li>build(deps-dev): bump <code>@​typescript-eslint/parser</code> from 6.4.1 to 6.5.0 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1065">codecov/codecov-action#1065</a></li> <li>build(deps-dev): bump <code>@​typescript-eslint/eslint-plugin</code> from 6.4.1 to 6.5.0 by <a 
href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1064">codecov/codecov-action#1064</a></li> <li>build(deps): bump actions/checkout from 3.5.3 to 3.6.0 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1063">codecov/codecov-action#1063</a></li> <li>build(deps-dev): bump eslint from 8.47.0 to 8.48.0 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1061">codecov/codecov-action#1061</a></li> <li>build(deps-dev): bump <code>@​types/node</code> from 20.5.6 to 20.5.7 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1062">codecov/codecov-action#1062</a></li> <li>build(deps): bump openpgp from 5.9.0 to 5.10.1 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1066">codecov/codecov-action#1066</a></li> <li>build(deps-dev): bump <code>@​types/node</code> from 20.5.7 to 20.5.9 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1070">codecov/codecov-action#1070</a></li> <li>build(deps): bump github/codeql-action from 2.21.4 to 2.21.5 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1069">codecov/codecov-action#1069</a></li> <li>build(deps-dev): bump <code>@​typescript-eslint/eslint-plugin</code> from 6.5.0 to 6.6.0 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1072">codecov/codecov-action#1072</a></li> <li>Update README.md by <a 
href="https://github.com/thomasrockhu-codecov"><code>@​thomasrockhu-codecov</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1073">codecov/codecov-action#1073</a></li> <li>build(deps-dev): bump <code>@​typescript-eslint/parser</code> from 6.5.0 to 6.6.0 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1071">codecov/codecov-action#1071</a></li> <li>build(deps-dev): bump <code>@​vercel/ncc</code> from 0.36.1 to 0.38.0 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1074">codecov/codecov-action#1074</a></li> <li>build(deps): bump <code>@​actions/core</code> from 1.10.0 to 1.10.1 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1081">codecov/codecov-action#1081</a></li> <li>build(deps-dev): bump <code>@​typescript-eslint/eslint-plugin</code> from 6.6.0 to 6.7.0 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1080">codecov/codecov-action#1080</a></li> <li>build(deps): bump actions/checkout from 3.6.0 to 4.0.0 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1078">codecov/codecov-action#1078</a></li> <li>build(deps): bump actions/upload-artifact from 3.1.2 to 3.1.3 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1077">codecov/codecov-action#1077</a></li> <li>build(deps-dev): bump <code>@​types/node</code> from 20.5.9 to 20.6.0 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a 
href="https://redirect.github.com/codecov/codecov-action/pull/1075">codecov/codecov-action#1075</a></li> <li>build(deps-dev): bump <code>@​typescript-eslint/parser</code> from 6.6.0 to 6.7.0 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1079">codecov/codecov-action#1079</a></li> <li>build(deps-dev): bump eslint from 8.48.0 to 8.49.0 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1076">codecov/codecov-action#1076</a></li> <li>use cli instead of node uploader by <a href="https://github.com/dana-yaish"><code>@​dana-yaish</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1068">codecov/codecov-action#1068</a></li> <li>chore(release): 4.0.0-beta.1 by <a href="https://github.com/thomasrockhu-codecov"><code>@​thomasrockhu-codecov</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1084">codecov/codecov-action#1084</a></li> <li>not adding -n if empty to do-upload command by <a href="https://github.com/dana-yaish"><code>@​dana-yaish</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1085">codecov/codecov-action#1085</a></li> <li>4.0.0-beta.2 by <a href="https://github.com/thomasrockhu-codecov"><code>@​thomasrockhu-codecov</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1086">codecov/codecov-action#1086</a></li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/codecov/codecov-action/blob/main/CHANGELOG.md">codecov/codecov-action's changelog</a>.</em></p> <blockquote> <h2>4.0.0-beta.2</h2> <h3>Fixes</h3> <ul> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/1085">#1085</a> not adding -n if empty to do-upload command</li> </ul> <h2>4.0.0-beta.1</h2> <p><code>v4</code> represents a move from the <a href="https://github.com/codecov/uploader">universal uploader</a> to the <a href="https://github.com/codecov/codecov-cli">Codecov CLI</a>. Although this will unlock new features for our users, the CLI is not yet at feature parity with the universal uploader.</p> <h3>Breaking Changes</h3> <ul> <li>No current support for <code>aarch64</code> and <code>alpine</code> architectures.</li> <li>Tokenless uploading is unsuported</li> <li>Various arguments to the Action have been removed</li> </ul> <h2>3.1.4</h2> <h3>Fixes</h3> <ul> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/967">#967</a> Fix typo in README.md</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/971">#971</a> fix: add back in working dir</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/969">#969</a> fix: CLI option names for uploader</li> </ul> <h3>Dependencies</h3> <ul> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/970">#970</a> build(deps-dev): bump <code>@​types/node</code> from 18.15.12 to 18.16.3</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/979">#979</a> build(deps-dev): bump <code>@​types/node</code> from 20.1.0 to 20.1.2</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/981">#981</a> build(deps-dev): bump <code>@​types/node</code> from 20.1.2 to 20.1.4</li> </ul> <h2>3.1.3</h2> <h3>Fixes</h3> <ul> <li><a 
href="https://redirect.github.com/codecov/codecov-action/issues/960">#960</a> fix: allow for aarch64 build</li> </ul> <h3>Dependencies</h3> <ul> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/957">#957</a> build(deps-dev): bump jest-junit from 15.0.0 to 16.0.0</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/958">#958</a> build(deps): bump openpgp from 5.7.0 to 5.8.0</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/959">#959</a> build(deps-dev): bump <code>@​types/node</code> from 18.15.10 to 18.15.12</li> </ul> <h2>3.1.2</h2> <h3>Fixes</h3> <ul> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/718">#718</a> Update README.md</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/851">#851</a> Remove unsupported path_to_write_report argument</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/898">#898</a> codeql-analysis.yml</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/901">#901</a> Update README to contain correct information - inputs and negate feature</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/955">#955</a> fix: add in all the extra arguments for uploader</li> </ul> <h3>Dependencies</h3> <ul> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/819">#819</a> build(deps): bump openpgp from 5.4.0 to 5.5.0</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/835">#835</a> build(deps): bump node-fetch from 3.2.4 to 3.2.10</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/840">#840</a> build(deps): bump ossf/scorecard-action from 1.1.1 to 2.0.4</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/841">#841</a> build(deps): bump <code>@​actions/core</code> from 1.9.1 to 1.10.0</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/843">#843</a> 
build(deps): bump <code>@​actions/github</code> from 5.0.3 to 5.1.1</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/869">#869</a> build(deps): bump node-fetch from 3.2.10 to 3.3.0</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/872">#872</a> build(deps-dev): bump jest-junit from 13.2.0 to 15.0.0</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/879">#879</a> build(deps): bump decode-uri-component from 0.2.0 to 0.2.2</li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/codecov/codecov-action/commit/84508663e988701840491b86de86b666e8a86bed"><code>8450866</code></a> chore(release): v4.3.0 (<a href="https://redirect.github.com/codecov/codecov-action/issues/1366">#1366</a>)</li> <li><a href="https://github.com/codecov/codecov-action/commit/e8419092e9de1e2493563107cf52bfa83d9ae62f"><code>e841909</code></a> build(deps): bump undici from 5.28.3 to 5.28.4 (<a href="https://redirect.github.com/codecov/codecov-action/issues/1361">#1361</a>)</li> <li><a href="https://github.com/codecov/codecov-action/commit/363a65ad481c04303701b094b4d2c113a65f5d3e"><code>363a65a</code></a> feat: add network params (<a href="https://redirect.github.com/codecov/codecov-action/issues/1365">#1365</a>)</li> <li><a href="https://github.com/codecov/codecov-action/commit/640b86a455d2fd2f6e7acbb9eb863bc9f4a51954"><code>640b86a</code></a> build(deps-dev): bump <code>@​typescript-eslint/parser</code> from 7.5.0 to 7.6.0 (<a href="https://redirect.github.com/codecov/codecov-action/issues/1363">#1363</a>)</li> <li><a href="https://github.com/codecov/codecov-action/commit/375c033fbe86348a044b3884012d849d84b90769"><code>375c033</code></a> build(deps-dev): bump <code>@​typescript-eslint/eslint-plugin</code> from 7.5.0 to 7.6.0 (#...</li> <li><a 
href="https://github.com/codecov/codecov-action/commit/d701256d7a115528200188787cd91ce87889775f"><code>d701256</code></a> build(deps): bump github/codeql-action from 3.24.9 to 3.24.10 (<a href="https://redirect.github.com/codecov/codecov-action/issues/1360">#1360</a>)</li> <li><a href="https://github.com/codecov/codecov-action/commit/0bb547ab4974725a7d01e2ac18c843a6924c39f3"><code>0bb547a</code></a> build(deps-dev): bump typescript from 5.4.3 to 5.4.4 (<a href="https://redirect.github.com/codecov/codecov-action/issues/1355">#1355</a>)</li> <li><a href="https://github.com/codecov/codecov-action/commit/55e8381a3e12db78da75715d4d60379adb17c4e6"><code>55e8381</code></a> fix: automatically detect if using GitHub enterprise (<a href="https://redirect.github.com/codecov/codecov-action/issues/1356">#1356</a>)</li> <li><a href="https://github.com/codecov/codecov-action/commit/7afa10ed9b269c561c2336fd862446844e0cbf71"><code>7afa10e</code></a> fix: use_oidc shoudl be required false (<a href="https://redirect.github.com/codecov/codecov-action/issues/1353">#1353</a>)</li> <li><a href="https://github.com/codecov/codecov-action/commit/d820d60619df930d5a859db2d0d77ba93f9d1702"><code>d820d60</code></a> feat: allow for authentication via OIDC token (<a href="https://redirect.github.com/codecov/codecov-action/issues/1330">#1330</a>)</li> <li>Additional commits viewable in <a href="https://github.com/codecov/codecov-action/compare/v3...v4">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=codecov/codecov-action&package-manager=github_actions&previous-version=3&new-version=4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 965fcba061b..7f0464dd1b6 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -222,7 +222,7 @@ jobs: run: | python -m coverage xml - name: Upload coverage - uses: codecov/codecov-action@v3 + uses: codecov/codecov-action@v4 with: file: 
./coverage.xml flags: >- From ae0cc1b22e91662d99aa457d64027dba461f5f01 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 19 Apr 2024 23:00:37 +0000 Subject: [PATCH 0189/1511] Bump softprops/action-gh-release from 1 to 2 (#8360) Bumps [softprops/action-gh-release](https://github.com/softprops/action-gh-release) from 1 to 2. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/softprops/action-gh-release/releases">softprops/action-gh-release's releases</a>.</em></p> <blockquote> <h2>v2.0.0</h2> <ul> <li>update actions.yml declaration to node20 to address warnings</li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/softprops/action-gh-release/blob/master/CHANGELOG.md">softprops/action-gh-release's changelog</a>.</em></p> <blockquote> <h2>0.1.12</h2> <ul> <li>fix bug leading to empty strings subsituted for inputs users don't provide breaking api calls <a href="https://redirect.github.com/softprops/action-gh-release/pull/144">#144</a></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/softprops/action-gh-release/commit/9d7c94cfd0a1f3ed45544c887983e9fa900f0564"><code>9d7c94c</code></a> build</li> <li><a href="https://github.com/softprops/action-gh-release/commit/6ffed59e5547e6594ba7b3e49883af40801fa1df"><code>6ffed59</code></a> followup to <a href="https://redirect.github.com/softprops/action-gh-release/issues/417">#417</a> (<a href="https://redirect.github.com/softprops/action-gh-release/issues/425">#425</a>)</li> <li><a href="https://github.com/softprops/action-gh-release/commit/1ce812a7bd7fa6c191ed3334eeace31579a98f25"><code>1ce812a</code></a> package script for updating git tag</li> <li><a href="https://github.com/softprops/action-gh-release/commit/3198ee18f814cdf787321b4a32a26ddbf37acc52"><code>3198ee1</code></a> prep release</li> <li><a 
href="https://github.com/softprops/action-gh-release/commit/7ee8e0638151a48bb421b567d7d53a07cb158121"><code>7ee8e06</code></a> declare an update docs for make_latest input (<a href="https://redirect.github.com/softprops/action-gh-release/issues/419">#419</a>)</li> <li><a href="https://github.com/softprops/action-gh-release/commit/d99959edae48b5ffffd7b00da66dcdb0a33a52ee"><code>d99959e</code></a> prep release</li> <li><a href="https://github.com/softprops/action-gh-release/commit/0e39c679e8bac647fe5ad41070fe2295b0393d58"><code>0e39c67</code></a> make pattern error opt in (<a href="https://redirect.github.com/softprops/action-gh-release/issues/417">#417</a>)</li> <li><a href="https://github.com/softprops/action-gh-release/commit/20e085ccc73308c2c8e43ab8da4f8d7ecbb94d4e"><code>20e085c</code></a> kick off 2.0.1 release</li> <li><a href="https://github.com/softprops/action-gh-release/commit/9f5c4d39bc56b5ed07f92f260dd05d33000058a6"><code>9f5c4d3</code></a> update changelog</li> <li><a href="https://github.com/softprops/action-gh-release/commit/0bea76b22707a1871a52b97e07c80657071adb76"><code>0bea76b</code></a> Add support for make_latest property (<a href="https://redirect.github.com/softprops/action-gh-release/issues/304">#304</a>)</li> <li>Additional commits viewable in <a href="https://github.com/softprops/action-gh-release/compare/v1...v2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=softprops/action-gh-release&package-manager=github_actions&previous-version=1&new-version=2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 7f0464dd1b6..e7b6ee24a1a 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -416,7 +416,7 @@ jobs: # Confusingly, this action also supports updating releases, not # just creating them. 
This is what we want here, since we've manually # created the release above. - uses: softprops/action-gh-release@v1 + uses: softprops/action-gh-release@v2 with: # dist/ contains the built packages, which smoketest-artifacts/ # contains the signatures and certificates. From 7d87cbdacc3b6377d3264c3ca1d8cb86ca607455 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 20 Apr 2024 00:01:20 +0100 Subject: [PATCH 0190/1511] Bump actions/download-artifact from 3 to 4 (#8359) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/download-artifact](https://github.com/actions/download-artifact) from 3 to 4. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/actions/download-artifact/releases">actions/download-artifact's releases</a>.</em></p> <blockquote> <h2>v4.0.0</h2> <h2>What's Changed</h2> <p>The release of upload-artifact@v4 and download-artifact@v4 are major changes to the backend architecture of Artifacts. They have numerous performance and behavioral improvements.</p> <p>ℹ️ However, this is a major update that includes breaking changes. Artifacts created with versions v3 and below are not compatible with the v4 actions. Uploads and downloads <em>must</em> use the same major actions versions. 
There are also key differences from previous versions that may require updates to your workflows.</p> <p>For more information, please see:</p> <ol> <li>The <a href="https://github.blog/changelog/2023-12-14-github-actions-artifacts-v4-is-now-generally-available/">changelog</a> post.</li> <li>The <a href="https://github.com/actions/download-artifact/blob/main/README.md">README</a>.</li> <li>The <a href="https://github.com/actions/upload-artifact/blob/main/docs/MIGRATION.md">migration documentation</a>.</li> <li>As well as the underlying npm package, <a href="https://github.com/actions/toolkit/tree/main/packages/artifact"><code>@​actions/artifact</code></a> documentation.</li> </ol> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/bflad"><code>@​bflad</code></a> made their first contribution in <a href="https://redirect.github.com/actions/download-artifact/pull/194">actions/download-artifact#194</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/actions/download-artifact/compare/v3...v4.0.0">https://github.com/actions/download-artifact/compare/v3...v4.0.0</a></p> <h2>v3.0.2</h2> <ul> <li>Bump <code>@actions/artifact</code> to v1.1.1 - <a href="https://redirect.github.com/actions/download-artifact/pull/195">actions/download-artifact#195</a></li> <li>Fixed a bug in Node16 where if an HTTP download finished too quickly (<1ms, e.g. 
when it's mocked) we attempt to delete a temp file that has not been created yet <a href="hhttps://redirect.github.com/actions/toolkit/pull/1278">actions/toolkit#1278</a></li> </ul> <h2>v3.0.1</h2> <ul> <li><a href="https://redirect.github.com/actions/download-artifact/pull/178">Bump <code>@​actions/core</code> to 1.10.0</a></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/actions/download-artifact/commit/8caf195ad4b1dee92908e23f56eeb0696f1dd42d"><code>8caf195</code></a> package lock update</li> <li><a href="https://github.com/actions/download-artifact/commit/d7a2ec411d177e8ca679ac5969b70be59c322700"><code>d7a2ec4</code></a> updating package version</li> <li><a href="https://github.com/actions/download-artifact/commit/e56a1d48ef0f0a8ad8d864416ee03b4f1ab51164"><code>e56a1d4</code></a> updating core dependency</li> <li><a href="https://github.com/actions/download-artifact/commit/1fcda58b3a5574619fcf6509778850ca2e4b8736"><code>1fcda58</code></a> updating core license</li> <li><a href="https://github.com/actions/download-artifact/commit/325a10d8b7c55aa3c81692cd880369f752a98121"><code>325a10d</code></a> updating actions dependency to v2.1.5</li> <li><a href="https://github.com/actions/download-artifact/commit/f8aaee4a210a18d68ebae4a0694be12377e38762"><code>f8aaee4</code></a> Merge pull request <a href="https://redirect.github.com/actions/download-artifact/issues/322">#322</a> from actions/robherley/deprecation-notice</li> <li><a href="https://github.com/actions/download-artifact/commit/d98334b11d13d83933069c7e52a609290f880d37"><code>d98334b</code></a> Update readme with v3/v2/v1 deprecation notice</li> <li><a href="https://github.com/actions/download-artifact/commit/c850b930e6ba138125429b7e5c93fc707a7f8427"><code>c850b93</code></a> Merge pull request <a href="https://redirect.github.com/actions/download-artifact/issues/307">#307</a> from bethanyj28/main</li> <li><a 
href="https://github.com/actions/download-artifact/commit/6fd111f15ace90c9fbd0d73880db8338a776aaed"><code>6fd111f</code></a> update <code>@​actions/artifact</code></li> <li><a href="https://github.com/actions/download-artifact/commit/87c55149d96e628cc2ef7e6fc2aab372015aec85"><code>87c5514</code></a> Merge pull request <a href="https://redirect.github.com/actions/download-artifact/issues/303">#303</a> from bethanyj28/main</li> <li>Additional commits viewable in <a href="https://github.com/actions/download-artifact/compare/v3...v4">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/download-artifact&package-manager=github_actions&previous-version=3&new-version=4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Sam Bull <git@sambull.org> --- .github/workflows/ci-cd.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index e7b6ee24a1a..4c66aff0ec0 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -114,7 +114,7 @@ jobs: run: | make generate-llhttp - name: Upload llhttp generated files - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: llhttp path: vendor/llhttp/build @@ -179,7 +179,7 @@ jobs: python -m pip install -r requirements/test.in -c requirements/test.txt - name: Restore llhttp generated files if: ${{ matrix.no-extensions == '' }} - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: llhttp path: vendor/llhttp/build/ @@ -280,7 +280,7 @@ jobs: python -m pip install -r requirements/cython.in -c requirements/cython.txt - name: Restore llhttp generated files - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: llhttp path: vendor/llhttp/build/ @@ -291,7 +291,7 @@ jobs: run: | python -m build --sdist - 
name: Upload artifacts - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: dist path: dist @@ -343,7 +343,7 @@ jobs: python -m pip install -r requirements/cython.in -c requirements/cython.txt - name: Restore llhttp generated files - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: llhttp path: vendor/llhttp/build/ @@ -354,7 +354,7 @@ jobs: uses: pypa/cibuildwheel@v2.17.0 env: CIBW_ARCHS_MACOS: x86_64 arm64 universal2 - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: dist path: ./wheelhouse/*.whl @@ -381,7 +381,7 @@ jobs: run: | echo "${{ secrets.GITHUB_TOKEN }}" | gh auth login --with-token - name: Download distributions - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: dist path: dist From 40b1b71e42a4edeffbdec6af9aa80c3e6303e68f Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 20 Apr 2024 00:58:12 +0100 Subject: [PATCH 0191/1511] [PR #8362/9b6f2c0b backport][3.10] Use bot type for check (#8363) **This is a backport of PR #8362 as merged into master (9b6f2c0bf1e060b7c15eaf147d83cda786f6af17).** None Co-authored-by: Sam Bull <git@sambull.org> --- .github/workflows/labels.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml index 268ef1c7cdb..8d9c0f6f4a2 100644 --- a/.github/workflows/labels.yml +++ b/.github/workflows/labels.yml @@ -9,7 +9,7 @@ jobs: backport: runs-on: ubuntu-latest name: Backport label added - if: ${{ github.actor != 'dependabot[bot]' }} + if: ${{ github.event.pull_request.user.type != 'Bot' }} steps: - uses: actions/github-script@v7 with: From 9bd65fa936f6f7945f27246270251c056342263f Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 20 Apr 2024 21:30:27 +0100 Subject: [PATCH 0192/1511] [PR #8364/0e4a5c3c 
backport][3.10] Improve accuracy of subapp resource types (#8366) **This is a backport of PR #8364 as merged into master (0e4a5c3c851fecb516e5a3d444f8598dcf340ef4).** Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8364.misc.rst | 1 + aiohttp/web_app.py | 9 +++++---- 2 files changed, 6 insertions(+), 4 deletions(-) create mode 100644 CHANGES/8364.misc.rst diff --git a/CHANGES/8364.misc.rst b/CHANGES/8364.misc.rst new file mode 100644 index 00000000000..493916f0421 --- /dev/null +++ b/CHANGES/8364.misc.rst @@ -0,0 +1 @@ +Minor improvements to static typing -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/web_app.py b/aiohttp/web_app.py index 91bf5fdac61..b143cdc1db9 100644 --- a/aiohttp/web_app.py +++ b/aiohttp/web_app.py @@ -76,6 +76,7 @@ _T = TypeVar("_T") _U = TypeVar("_U") +_Resource = TypeVar("_Resource", bound=AbstractResource) class Application(MutableMapping[Union[str, AppKey[Any]], Any]): @@ -334,7 +335,7 @@ async def handler(app: "Application") -> None: reg_handler("on_shutdown") reg_handler("on_cleanup") - def add_subapp(self, prefix: str, subapp: "Application") -> AbstractResource: + def add_subapp(self, prefix: str, subapp: "Application") -> PrefixedSubAppResource: if not isinstance(prefix, str): raise TypeError("Prefix must be str") prefix = prefix.rstrip("/") @@ -344,8 +345,8 @@ def add_subapp(self, prefix: str, subapp: "Application") -> AbstractResource: return self._add_subapp(factory, subapp) def _add_subapp( - self, resource_factory: Callable[[], AbstractResource], subapp: "Application" - ) -> AbstractResource: + self, resource_factory: Callable[[], _Resource], subapp: "Application" + ) -> _Resource: if self.frozen: raise RuntimeError("Cannot add sub application to frozen application") if subapp.frozen: @@ -359,7 +360,7 @@ def _add_subapp( subapp._set_loop(self._loop) return resource - def add_domain(self, domain: str, subapp: "Application") -> AbstractResource: + def add_domain(self, domain: str, subapp: "Application") -> 
MatchedSubAppResource: if not isinstance(domain, str): raise TypeError("Domain must be str") elif "*" in domain: From 12d4046e59e37e5c276538ab0694b9d69ed82904 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Sun, 21 Apr 2024 13:33:13 +0100 Subject: [PATCH 0193/1511] Remove unused readall from Python parser (#8096) (#8368) (cherry picked from commit 175954c010eb2446fe43c3167cdca671a4079e53) --- aiohttp/http_parser.py | 20 +------------------- tests/test_http_parser.py | 19 +++++-------------- 2 files changed, 6 insertions(+), 33 deletions(-) diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index 013511917e8..751a7e1bb73 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -47,7 +47,6 @@ TransferEncodingError, ) from .http_writer import HttpVersion, HttpVersion10 -from .log import internal_logger from .streams import EMPTY_PAYLOAD, StreamReader from .typedefs import RawHeaders @@ -249,7 +248,6 @@ def __init__( timer: Optional[BaseTimerContext] = None, code: Optional[int] = None, method: Optional[str] = None, - readall: bool = False, payload_exception: Optional[Type[BaseException]] = None, response_with_body: bool = True, read_until_eof: bool = False, @@ -263,7 +261,6 @@ def __init__( self.timer = timer self.code = code self.method = method - self.readall = readall self.payload_exception = payload_exception self.response_with_body = response_with_body self.read_until_eof = read_until_eof @@ -393,7 +390,6 @@ def get_content_length() -> Optional[int]: method=method, compression=msg.compression, code=self.code, - readall=self.readall, response_with_body=self.response_with_body, auto_decompress=self._auto_decompress, lax=self.lax, @@ -413,7 +409,6 @@ def get_content_length() -> Optional[int]: payload, method=msg.method, compression=msg.compression, - readall=True, auto_decompress=self._auto_decompress, lax=self.lax, ) @@ -431,7 +426,6 @@ def get_content_length() -> Optional[int]: method=method, compression=msg.compression, 
code=self.code, - readall=True, response_with_body=self.response_with_body, auto_decompress=self._auto_decompress, lax=self.lax, @@ -751,13 +745,12 @@ def __init__( compression: Optional[str] = None, code: Optional[int] = None, method: Optional[str] = None, - readall: bool = False, response_with_body: bool = True, auto_decompress: bool = True, lax: bool = False, ) -> None: self._length = 0 - self._type = ParseState.PARSE_NONE + self._type = ParseState.PARSE_UNTIL_EOF self._chunk = ChunkState.PARSE_CHUNKED_SIZE self._chunk_size = 0 self._chunk_tail = b"" @@ -779,7 +772,6 @@ def __init__( self._type = ParseState.PARSE_NONE real_payload.feed_eof() self.done = True - elif chunked: self._type = ParseState.PARSE_CHUNKED elif length is not None: @@ -788,16 +780,6 @@ def __init__( if self._length == 0: real_payload.feed_eof() self.done = True - else: - if readall and code != 204: - self._type = ParseState.PARSE_UNTIL_EOF - elif method in ("PUT", "POST"): - internal_logger.warning( # pragma: no cover - "Content-Length or Transfer-Encoding header is required" - ) - self._type = ParseState.PARSE_NONE - real_payload.feed_eof() - self.done = True self.payload = real_payload diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index 32dd0e68b57..6afef1a3df8 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -1230,8 +1230,8 @@ def test_parse_chunked_payload_chunk_extension(parser) -> None: assert payload.is_eof() -def _test_parse_no_length_or_te_on_post(loop, protocol, request_cls): - parser = request_cls(protocol, loop, readall=True) +def test_parse_no_length_or_te_on_post(loop: Any, protocol: Any, request_cls: Any): + parser = request_cls(protocol, loop, limit=2**16) text = b"POST /test HTTP/1.1\r\n\r\n" msg, payload = parser.feed_data(text)[0][0] @@ -1478,22 +1478,13 @@ async def test_parse_eof_payload(self, stream) -> None: out = aiohttp.FlowControlDataQueue( stream, 2**16, loop=asyncio.get_event_loop() ) - p = HttpPayloadParser(out, 
readall=True) + p = HttpPayloadParser(out) p.feed_data(b"data") p.feed_eof() assert out.is_eof() assert [(bytearray(b"data"), 4)] == list(out._buffer) - async def test_parse_no_body(self, stream) -> None: - out = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) - p = HttpPayloadParser(out, method="PUT") - - assert out.is_eof() - assert p.done - async def test_parse_length_payload_eof(self, stream) -> None: out = aiohttp.FlowControlDataQueue( stream, 2**16, loop=asyncio.get_event_loop() @@ -1625,7 +1616,7 @@ async def test_http_payload_parser_deflate_split(self, stream) -> None: out = aiohttp.FlowControlDataQueue( stream, 2**16, loop=asyncio.get_event_loop() ) - p = HttpPayloadParser(out, compression="deflate", readall=True) + p = HttpPayloadParser(out, compression="deflate") # Feeding one correct byte should be enough to choose exact # deflate decompressor p.feed_data(b"x", 1) @@ -1637,7 +1628,7 @@ async def test_http_payload_parser_deflate_split_err(self, stream) -> None: out = aiohttp.FlowControlDataQueue( stream, 2**16, loop=asyncio.get_event_loop() ) - p = HttpPayloadParser(out, compression="deflate", readall=True) + p = HttpPayloadParser(out, compression="deflate") # Feeding one wrong byte should be enough to choose exact # deflate decompressor p.feed_data(b"K", 1) From a9200db60f6780c2ce331fe6c43f78809ab8be92 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 22 Apr 2024 23:56:13 +0000 Subject: [PATCH 0194/1511] [PR #8369/5b5d0ce1 backport][3.10] Update Matrix link (#8371) **This is a backport of PR #8369 as merged into master (5b5d0ce13d7ef851eeac44119ea88f1a3b60252f).** Co-authored-by: Sam Bull <git@sambull.org> --- README.rst | 2 +- docs/index.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index 90b7f713577..45b647437e3 100644 --- a/README.rst +++ b/README.rst @@ -148,7 +148,7 @@ Communication channels *aio-libs 
Discussions*: https://github.com/aio-libs/aiohttp/discussions -*gitter chat* https://gitter.im/aio-libs/Lobby +*Matrix*: `#aio-libs:matrix.org <https://matrix.to/#/#aio-libs:matrix.org>`_ We support `Stack Overflow <https://stackoverflow.com/questions/tagged/aiohttp>`_. diff --git a/docs/index.rst b/docs/index.rst index 4f55c5ddf09..9692152cb99 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -174,7 +174,7 @@ Communication channels Feel free to post your questions and ideas here. -*gitter chat* https://gitter.im/aio-libs/Lobby +*Matrix*: `#aio-libs:matrix.org <https://matrix.to/#/#aio-libs:matrix.org>`_ We support `Stack Overflow <https://stackoverflow.com/questions/tagged/aiohttp>`_. From 3e0b88d3c84a71e9b8d1a131f1cb07a71cf73291 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Thu, 25 Apr 2024 19:40:25 +0100 Subject: [PATCH 0195/1511] Update .pre-commit-config.yaml (#8384) --- .pre-commit-config.yaml | 14 +++---- aiohttp/client_reqrep.py | 8 ++-- aiohttp/client_ws.py | 16 +++++--- aiohttp/compression_utils.py | 8 ++-- aiohttp/connector.py | 6 +-- aiohttp/helpers.py | 18 +++------ aiohttp/http_exceptions.py | 1 - aiohttp/tracing.py | 21 +++++----- aiohttp/web_app.py | 21 ++++------ aiohttp/web_request.py | 8 ++-- aiohttp/web_response.py | 4 +- aiohttp/web_routedef.py | 6 +-- aiohttp/web_server.py | 1 + aiohttp/web_ws.py | 16 +++++--- tests/test_circular_imports.py | 17 ++++---- tests/test_http_parser.py | 72 +++++++++------------------------- tests/test_proxy_functional.py | 7 +++- 17 files changed, 104 insertions(+), 140 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d11ab1bfa32..dc3e65cf52f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -48,24 +48,24 @@ repos: entry: ./tools/check_changes.py pass_filenames: false - repo: https://github.com/pre-commit/pre-commit-hooks - rev: 'v4.0.1' + rev: 'v4.6.0' hooks: - id: check-merge-conflict - repo: https://github.com/asottile/yesqa - rev: v1.3.0 + 
rev: v1.5.0 hooks: - id: yesqa - repo: https://github.com/PyCQA/isort - rev: '5.11.5' + rev: '5.13.2' hooks: - id: isort - repo: https://github.com/psf/black - rev: '22.3.0' + rev: '24.4.0' hooks: - id: black language_version: python3 # Should be a command that runs python - repo: https://github.com/pre-commit/pre-commit-hooks - rev: 'v4.0.1' + rev: 'v4.6.0' hooks: - id: end-of-file-fixer exclude: >- @@ -97,12 +97,12 @@ repos: - id: detect-private-key exclude: ^examples/ - repo: https://github.com/asottile/pyupgrade - rev: 'v2.29.0' + rev: 'v3.15.2' hooks: - id: pyupgrade args: ['--py37-plus'] - repo: https://github.com/PyCQA/flake8 - rev: '4.0.1' + rev: '7.0.0' hooks: - id: flake8 additional_dependencies: diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index afe719da16e..a5c711609a8 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -566,7 +566,7 @@ def update_body_from_data(self, body: Any) -> None: # copy payload headers assert body.headers - for (key, value) in body.headers.items(): + for key, value in body.headers.items(): if key in self.headers: continue if key in self.skip_auto_headers: @@ -820,9 +820,9 @@ def __init__( # work after the response has finished reading the body. if session is None: # TODO: Fix session=None in tests (see ClientRequest.__init__). 
- self._resolve_charset: Callable[ - ["ClientResponse", bytes], str - ] = lambda *_: "utf-8" + self._resolve_charset: Callable[["ClientResponse", bytes], str] = ( + lambda *_: "utf-8" + ) else: self._resolve_charset = session._resolve_charset if loop.get_debug(): diff --git a/aiohttp/client_ws.py b/aiohttp/client_ws.py index d9c74a30f52..1ba6f78f514 100644 --- a/aiohttp/client_ws.py +++ b/aiohttp/client_ws.py @@ -90,9 +90,11 @@ def _reset_heartbeat(self) -> None: self._send_heartbeat, self._heartbeat, self._loop, - timeout_ceil_threshold=self._conn._connector._timeout_ceil_threshold - if self._conn is not None - else 5, + timeout_ceil_threshold=( + self._conn._connector._timeout_ceil_threshold + if self._conn is not None + else 5 + ), ) def _send_heartbeat(self) -> None: @@ -108,9 +110,11 @@ def _send_heartbeat(self) -> None: self._pong_not_received, self._pong_heartbeat, self._loop, - timeout_ceil_threshold=self._conn._connector._timeout_ceil_threshold - if self._conn is not None - else 5, + timeout_ceil_threshold=( + self._conn._connector._timeout_ceil_threshold + if self._conn is not None + else 5 + ), ) def _pong_not_received(self) -> None: diff --git a/aiohttp/compression_utils.py b/aiohttp/compression_utils.py index 9631d377e9a..ab4a2f1cc84 100644 --- a/aiohttp/compression_utils.py +++ b/aiohttp/compression_utils.py @@ -50,9 +50,11 @@ def __init__( max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE, ): super().__init__( - mode=encoding_to_mode(encoding, suppress_deflate_header) - if wbits is None - else wbits, + mode=( + encoding_to_mode(encoding, suppress_deflate_header) + if wbits is None + else wbits + ), executor=executor, max_sync_chunk_size=max_sync_chunk_size, ) diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 90f7c3bb8c0..6e84aa45bb4 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -242,9 +242,9 @@ def __init__( self._limit = limit self._limit_per_host = limit_per_host self._acquired: Set[ResponseHandler] = set() - 
self._acquired_per_host: DefaultDict[ - ConnectionKey, Set[ResponseHandler] - ] = defaultdict(set) + self._acquired_per_host: DefaultDict[ConnectionKey, Set[ResponseHandler]] = ( + defaultdict(set) + ) self._keepalive_timeout = cast(float, keepalive_timeout) self._force_close = force_close diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index 284033b7a04..afaa0d7e3b8 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -818,8 +818,7 @@ def set_exception( self, exc: BaseException, exc_cause: BaseException = ..., - ) -> None: - ... # pragma: no cover + ) -> None: ... # pragma: no cover def set_exception( @@ -905,12 +904,10 @@ def __init_subclass__(cls) -> None: ) @overload # type: ignore[override] - def __getitem__(self, key: AppKey[_T]) -> _T: - ... + def __getitem__(self, key: AppKey[_T]) -> _T: ... @overload - def __getitem__(self, key: str) -> Any: - ... + def __getitem__(self, key: str) -> Any: ... def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any: for mapping in self._maps: @@ -921,16 +918,13 @@ def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any: raise KeyError(key) @overload # type: ignore[override] - def get(self, key: AppKey[_T], default: _S) -> Union[_T, _S]: - ... + def get(self, key: AppKey[_T], default: _S) -> Union[_T, _S]: ... @overload - def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]: - ... + def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]: ... @overload - def get(self, key: str, default: Any = ...) -> Any: - ... + def get(self, key: str, default: Any = ...) -> Any: ... 
def get(self, key: Union[str, AppKey[_T]], default: Any = None) -> Any: try: diff --git a/aiohttp/http_exceptions.py b/aiohttp/http_exceptions.py index 72eac3a3cac..c43ee0d9659 100644 --- a/aiohttp/http_exceptions.py +++ b/aiohttp/http_exceptions.py @@ -1,6 +1,5 @@ """Low-level http related exceptions.""" - from textwrap import indent from typing import Optional, Union diff --git a/aiohttp/tracing.py b/aiohttp/tracing.py index 62847a0bf7c..66007cbeb2c 100644 --- a/aiohttp/tracing.py +++ b/aiohttp/tracing.py @@ -19,8 +19,7 @@ def __call__( __client_session: ClientSession, __trace_config_ctx: SimpleNamespace, __params: _ParamT_contra, - ) -> Awaitable[None]: - ... + ) -> Awaitable[None]: ... __all__ = ( @@ -50,9 +49,9 @@ class TraceConfig: def __init__( self, trace_config_ctx_factory: Type[SimpleNamespace] = SimpleNamespace ) -> None: - self._on_request_start: Signal[ - _SignalCallback[TraceRequestStartParams] - ] = Signal(self) + self._on_request_start: Signal[_SignalCallback[TraceRequestStartParams]] = ( + Signal(self) + ) self._on_request_chunk_sent: Signal[ _SignalCallback[TraceRequestChunkSentParams] ] = Signal(self) @@ -89,12 +88,12 @@ def __init__( self._on_dns_resolvehost_end: Signal[ _SignalCallback[TraceDnsResolveHostEndParams] ] = Signal(self) - self._on_dns_cache_hit: Signal[ - _SignalCallback[TraceDnsCacheHitParams] - ] = Signal(self) - self._on_dns_cache_miss: Signal[ - _SignalCallback[TraceDnsCacheMissParams] - ] = Signal(self) + self._on_dns_cache_hit: Signal[_SignalCallback[TraceDnsCacheHitParams]] = ( + Signal(self) + ) + self._on_dns_cache_miss: Signal[_SignalCallback[TraceDnsCacheMissParams]] = ( + Signal(self) + ) self._on_request_headers_sent: Signal[ _SignalCallback[TraceRequestHeadersSentParams] ] = Signal(self) diff --git a/aiohttp/web_app.py b/aiohttp/web_app.py index b143cdc1db9..4d27714e3ba 100644 --- a/aiohttp/web_app.py +++ b/aiohttp/web_app.py @@ -184,12 +184,10 @@ def __eq__(self, other: object) -> bool: return self is other @overload 
# type: ignore[override] - def __getitem__(self, key: AppKey[_T]) -> _T: - ... + def __getitem__(self, key: AppKey[_T]) -> _T: ... @overload - def __getitem__(self, key: str) -> Any: - ... + def __getitem__(self, key: str) -> Any: ... def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any: return self._state[key] @@ -203,12 +201,10 @@ def _check_frozen(self) -> None: ) @overload # type: ignore[override] - def __setitem__(self, key: AppKey[_T], value: _T) -> None: - ... + def __setitem__(self, key: AppKey[_T], value: _T) -> None: ... @overload - def __setitem__(self, key: str, value: Any) -> None: - ... + def __setitem__(self, key: str, value: Any) -> None: ... def __setitem__(self, key: Union[str, AppKey[_T]], value: Any) -> None: self._check_frozen() @@ -233,16 +229,13 @@ def __iter__(self) -> Iterator[Union[str, AppKey[Any]]]: return iter(self._state) @overload # type: ignore[override] - def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]: - ... + def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]: ... @overload - def get(self, key: AppKey[_T], default: _U) -> Union[_T, _U]: - ... + def get(self, key: AppKey[_T], default: _U) -> Union[_T, _U]: ... @overload - def get(self, key: str, default: Any = ...) -> Any: - ... + def get(self, key: str, default: Any = ...) -> Any: ... 
def get(self, key: Union[str, AppKey[_T]], default: Any = None) -> Any: return self._state.get(key, default) diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index 4bc670a798c..7d1694584ea 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -99,10 +99,10 @@ class FileField: qdtext=_QDTEXT, quoted_pair=_QUOTED_PAIR ) -_FORWARDED_PAIR: Final[ - str -] = r"({token})=({token}|{quoted_string})(:\d{{1,4}})?".format( - token=_TOKEN, quoted_string=_QUOTED_STRING +_FORWARDED_PAIR: Final[str] = ( + r"({token})=({token}|{quoted_string})(:\d{{1,4}})?".format( + token=_TOKEN, quoted_string=_QUOTED_STRING + ) ) _QUOTED_PAIR_REPLACE_RE: Final[Pattern[str]] = re.compile(r"\\([\t !-~])") diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 07030305329..78d3fe32949 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -176,7 +176,7 @@ def enable_compression( ) -> None: """Enables response compression encoding.""" # Backwards compatibility for when force was a bool <0.17. - if type(force) == bool: + if isinstance(force, bool): force = ContentCoding.deflate if force else ContentCoding.identity warnings.warn( "Using boolean for force is deprecated #3318", DeprecationWarning @@ -674,7 +674,7 @@ def body(self, body: bytes) -> None: # copy payload headers if body.headers: - for (key, value) in body.headers.items(): + for key, value in body.headers.items(): if key not in headers: headers[key] = value diff --git a/aiohttp/web_routedef.py b/aiohttp/web_routedef.py index d79cd32a14a..93802141c56 100644 --- a/aiohttp/web_routedef.py +++ b/aiohttp/web_routedef.py @@ -162,12 +162,10 @@ def __repr__(self) -> str: return f"<RouteTableDef count={len(self._items)}>" @overload - def __getitem__(self, index: int) -> AbstractRouteDef: - ... + def __getitem__(self, index: int) -> AbstractRouteDef: ... @overload - def __getitem__(self, index: slice) -> List[AbstractRouteDef]: - ... 
+ def __getitem__(self, index: slice) -> List[AbstractRouteDef]: ... def __getitem__(self, index): # type: ignore[no-untyped-def] return self._items[index] diff --git a/aiohttp/web_server.py b/aiohttp/web_server.py index 52faacb164a..3cd31c8ff10 100644 --- a/aiohttp/web_server.py +++ b/aiohttp/web_server.py @@ -1,4 +1,5 @@ """Low level HTTP server.""" + import asyncio from typing import Any, Awaitable, Callable, Dict, List, Optional # noqa diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index 9fe66527539..c34e30e62de 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -113,9 +113,11 @@ def _reset_heartbeat(self) -> None: self._send_heartbeat, self._heartbeat, self._loop, - timeout_ceil_threshold=self._req._protocol._timeout_ceil_threshold - if self._req is not None - else 5, + timeout_ceil_threshold=( + self._req._protocol._timeout_ceil_threshold + if self._req is not None + else 5 + ), ) def _send_heartbeat(self) -> None: @@ -132,9 +134,11 @@ def _send_heartbeat(self) -> None: self._pong_not_received, self._pong_heartbeat, self._loop, - timeout_ceil_threshold=self._req._protocol._timeout_ceil_threshold - if self._req is not None - else 5, + timeout_ceil_threshold=( + self._req._protocol._timeout_ceil_threshold + if self._req is not None + else 5 + ), ) def _pong_not_received(self) -> None: diff --git a/tests/test_circular_imports.py b/tests/test_circular_imports.py index 516326444c5..d513e9bde8b 100644 --- a/tests/test_circular_imports.py +++ b/tests/test_circular_imports.py @@ -8,6 +8,7 @@ * https://github.com/pytest-dev/pytest/blob/d18c75b/testing/test_meta.py * https://twitter.com/codewithanthony/status/1229445110510735361 """ + import os import pkgutil import socket @@ -30,14 +31,16 @@ def _mark_aiohttp_worker_for_skipping( importables: List[str], ) -> List[Union[str, "ParameterSet"]]: return [ - pytest.param( - importable, - marks=pytest.mark.skipif( - not hasattr(socket, "AF_UNIX"), reason="It's a UNIX-only module" - ), + ( + pytest.param( + 
importable, + marks=pytest.mark.skipif( + not hasattr(socket, "AF_UNIX"), reason="It's a UNIX-only module" + ), + ) + if importable == "aiohttp.worker" + else importable ) - if importable == "aiohttp.worker" - else importable for importable in importables ] diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index 6afef1a3df8..187f9d27a77 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -1475,9 +1475,7 @@ def test_parse_bad_method_for_c_parser_raises(loop, protocol): class TestParsePayload: async def test_parse_eof_payload(self, stream) -> None: - out = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) + out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) p = HttpPayloadParser(out) p.feed_data(b"data") p.feed_eof() @@ -1486,9 +1484,7 @@ async def test_parse_eof_payload(self, stream) -> None: assert [(bytearray(b"data"), 4)] == list(out._buffer) async def test_parse_length_payload_eof(self, stream) -> None: - out = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) + out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) p = HttpPayloadParser(out, length=4) p.feed_data(b"da") @@ -1497,9 +1493,7 @@ async def test_parse_length_payload_eof(self, stream) -> None: p.feed_eof() async def test_parse_chunked_payload_size_error(self, stream) -> None: - out = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) + out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) p = HttpPayloadParser(out, chunked=True) with pytest.raises(http_exceptions.TransferEncodingError): p.feed_data(b"blah\r\n") @@ -1562,9 +1556,7 @@ async def test_parse_chunked_payload_split_end_trailers4(self, protocol) -> None assert b"asdf" == b"".join(out._buffer) async def test_http_payload_parser_length(self, stream) -> None: - out = aiohttp.FlowControlDataQueue( - stream, 2**16, 
loop=asyncio.get_event_loop() - ) + out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) p = HttpPayloadParser(out, length=2) eof, tail = p.feed_data(b"1245") assert eof @@ -1577,9 +1569,7 @@ async def test_http_payload_parser_deflate(self, stream) -> None: COMPRESSED = b"x\x9cKI,I\x04\x00\x04\x00\x01\x9b" length = len(COMPRESSED) - out = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) + out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) p = HttpPayloadParser(out, length=length, compression="deflate") p.feed_data(COMPRESSED) assert b"data" == b"".join(d for d, _ in out._buffer) @@ -1591,9 +1581,7 @@ async def test_http_payload_parser_deflate_no_hdrs(self, stream: Any) -> None: COMPRESSED = b"KI,I\x04\x00" length = len(COMPRESSED) - out = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) + out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) p = HttpPayloadParser(out, length=length, compression="deflate") p.feed_data(COMPRESSED) assert b"data" == b"".join(d for d, _ in out._buffer) @@ -1604,18 +1592,14 @@ async def test_http_payload_parser_deflate_light(self, stream) -> None: COMPRESSED = b"\x18\x95KI,I\x04\x00\x04\x00\x01\x9b" length = len(COMPRESSED) - out = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) + out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) p = HttpPayloadParser(out, length=length, compression="deflate") p.feed_data(COMPRESSED) assert b"data" == b"".join(d for d, _ in out._buffer) assert out.is_eof() async def test_http_payload_parser_deflate_split(self, stream) -> None: - out = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) + out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) p = HttpPayloadParser(out, compression="deflate") # Feeding one correct byte should be enough to 
choose exact # deflate decompressor @@ -1625,9 +1609,7 @@ async def test_http_payload_parser_deflate_split(self, stream) -> None: assert b"data" == b"".join(d for d, _ in out._buffer) async def test_http_payload_parser_deflate_split_err(self, stream) -> None: - out = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) + out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) p = HttpPayloadParser(out, compression="deflate") # Feeding one wrong byte should be enough to choose exact # deflate decompressor @@ -1637,9 +1619,7 @@ async def test_http_payload_parser_deflate_split_err(self, stream) -> None: assert b"data" == b"".join(d for d, _ in out._buffer) async def test_http_payload_parser_length_zero(self, stream) -> None: - out = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) + out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) p = HttpPayloadParser(out, length=0) assert p.done assert out.is_eof() @@ -1647,9 +1627,7 @@ async def test_http_payload_parser_length_zero(self, stream) -> None: @pytest.mark.skipif(brotli is None, reason="brotli is not installed") async def test_http_payload_brotli(self, stream) -> None: compressed = brotli.compress(b"brotli data") - out = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) + out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) p = HttpPayloadParser(out, length=len(compressed), compression="br") p.feed_data(compressed) assert b"brotli data" == b"".join(d for d, _ in out._buffer) @@ -1658,9 +1636,7 @@ async def test_http_payload_brotli(self, stream) -> None: class TestDeflateBuffer: async def test_feed_data(self, stream) -> None: - buf = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) + buf = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) dbuf = DeflateBuffer(buf, "deflate") dbuf.decompressor 
= mock.Mock() @@ -1671,9 +1647,7 @@ async def test_feed_data(self, stream) -> None: assert [b"line"] == list(d for d, _ in buf._buffer) async def test_feed_data_err(self, stream) -> None: - buf = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) + buf = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) dbuf = DeflateBuffer(buf, "deflate") exc = ValueError() @@ -1686,9 +1660,7 @@ async def test_feed_data_err(self, stream) -> None: dbuf.feed_data(b"xsomedata", 9) async def test_feed_eof(self, stream) -> None: - buf = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) + buf = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) dbuf = DeflateBuffer(buf, "deflate") dbuf.decompressor = mock.Mock() @@ -1699,9 +1671,7 @@ async def test_feed_eof(self, stream) -> None: assert buf._eof async def test_feed_eof_err_deflate(self, stream) -> None: - buf = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) + buf = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) dbuf = DeflateBuffer(buf, "deflate") dbuf.decompressor = mock.Mock() @@ -1712,9 +1682,7 @@ async def test_feed_eof_err_deflate(self, stream) -> None: dbuf.feed_eof() async def test_feed_eof_no_err_gzip(self, stream) -> None: - buf = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) + buf = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) dbuf = DeflateBuffer(buf, "gzip") dbuf.decompressor = mock.Mock() @@ -1725,9 +1693,7 @@ async def test_feed_eof_no_err_gzip(self, stream) -> None: assert [b"line"] == list(d for d, _ in buf._buffer) async def test_feed_eof_no_err_brotli(self, stream) -> None: - buf = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) + buf = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) dbuf = DeflateBuffer(buf, "br") 
dbuf.decompressor = mock.Mock() @@ -1738,9 +1704,7 @@ async def test_feed_eof_no_err_brotli(self, stream) -> None: assert [b"line"] == list(d for d, _ in buf._buffer) async def test_empty_body(self, stream) -> None: - buf = aiohttp.FlowControlDataQueue( - stream, 2**16, loop=asyncio.get_event_loop() - ) + buf = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) dbuf = DeflateBuffer(buf, "deflate") dbuf.feed_eof() diff --git a/tests/test_proxy_functional.py b/tests/test_proxy_functional.py index 099922ac77f..4dfa80d7db8 100644 --- a/tests/test_proxy_functional.py +++ b/tests/test_proxy_functional.py @@ -192,13 +192,16 @@ async def test_https_proxy_unsupported_tls_in_tls( r"$" ) - with pytest.warns(RuntimeWarning, match=expected_warning_text,), pytest.raises( + with pytest.warns( + RuntimeWarning, + match=expected_warning_text, + ), pytest.raises( ClientConnectionError, match=expected_exception_reason, ) as conn_err: await sess.get(url, proxy=secure_proxy_url, ssl=client_ssl_ctx) - assert type(conn_err.value.__cause__) == TypeError + assert isinstance(conn_err.value.__cause__, TypeError) assert match_regex(f"^{type_err!s}$", str(conn_err.value.__cause__)) await sess.close() From 5ea0e75f916bafe462763747e895b6ffcef8876b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 25 Apr 2024 20:09:07 +0000 Subject: [PATCH 0196/1511] Bump python-on-whales from 0.70.1 to 0.71.0 (#8373) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [python-on-whales](https://github.com/gabrieldemarmiesse/python-on-whales) from 0.70.1 to 0.71.0. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/gabrieldemarmiesse/python-on-whales/releases">python-on-whales's releases</a>.</em></p> <blockquote> <h2>v0.71.0</h2> <h2>What's Changed</h2> <ul> <li>Add stream output for pruning by <a href="https://github.com/anesmemisevic"><code>@​anesmemisevic</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/566">gabrieldemarmiesse/python-on-whales#566</a></li> <li>Improve typing for <code>DockerClient.compose.config</code> by <a href="https://github.com/einarwar"><code>@​einarwar</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/572">gabrieldemarmiesse/python-on-whales#572</a></li> <li>Function docker.compose.down does not accept optional list of service names <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/570">#570</a> by <a href="https://github.com/MisterOwlPT"><code>@​MisterOwlPT</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/571">gabrieldemarmiesse/python-on-whales#571</a></li> <li>Fix time argument formatting to include time zone by <a href="https://github.com/kamalmarhubi"><code>@​kamalmarhubi</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/574">gabrieldemarmiesse/python-on-whales#574</a></li> <li>feat: Add <code>--wait-timeout</code> option for <code>docker.compose.up</code> by <a href="https://github.com/Taragolis"><code>@​Taragolis</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/573">gabrieldemarmiesse/python-on-whales#573</a></li> <li>Emit a <code>DeprecationWarning</code> when downloading the docker client by <a href="https://github.com/LewisGaul"><code>@​LewisGaul</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/577">gabrieldemarmiesse/python-on-whales#577</a></li> <li>Support podman's 
<code>--preserve-fds</code> arg to container run/exec by <a href="https://github.com/LewisGaul"><code>@​LewisGaul</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/576">gabrieldemarmiesse/python-on-whales#576</a></li> <li>Added timezone argument to run and create by <a href="https://github.com/tjd78"><code>@​tjd78</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/579">gabrieldemarmiesse/python-on-whales#579</a></li> <li>Docker Buildx Pruning Logs Streaming by <a href="https://github.com/anesmemisevic"><code>@​anesmemisevic</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/578">gabrieldemarmiesse/python-on-whales#578</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/kamalmarhubi"><code>@​kamalmarhubi</code></a> made their first contribution in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/574">gabrieldemarmiesse/python-on-whales#574</a></li> <li><a href="https://github.com/Taragolis"><code>@​Taragolis</code></a> made their first contribution in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/573">gabrieldemarmiesse/python-on-whales#573</a></li> <li><a href="https://github.com/tjd78"><code>@​tjd78</code></a> made their first contribution in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/579">gabrieldemarmiesse/python-on-whales#579</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/gabrieldemarmiesse/python-on-whales/compare/v0.70.1...v0.71.0">https://github.com/gabrieldemarmiesse/python-on-whales/compare/v0.70.1...v0.71.0</a></p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/6ef2cc9ef29d8c388c22b0ba2ca7dfe05785ce03"><code>6ef2cc9</code></a> Bump version to 0.71.0</li> <li><a 
href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/4a74fd8f1cec2aee098872e584b0d6d4b4732c11"><code>4a74fd8</code></a> Docker Buildx Pruning Logs Streaming (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/578">#578</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/de4666793c5adb44f8737c5167120d8dbb51c2de"><code>de46667</code></a> Added timezone argument to run and create (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/579">#579</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/0a9edc57d42b5065bfe7993f8fc8887b03723702"><code>0a9edc5</code></a> :sparkles: Support podman's <code>--preserve-fds</code> arg to container run/exec (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/576">#576</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/4e0c80c8ff208ad5e1470bf1f222a37d1192400a"><code>4e0c80c</code></a> Emit a <code>DeprecationWarning</code> when downloading the docker client (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/577">#577</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/a10959c45d3af1be90b0f6f9c0a1f127955f107b"><code>a10959c</code></a> feat: Add <code>--wait-timeout</code> option for <code>docker.compose.up</code> (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/573">#573</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/ac0e53b93f03821d626a2c605d65fc3db524c9ec"><code>ac0e53b</code></a> Fix time argument formatting to include time zone (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/574">#574</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/32770a14dbf379b8bafdac592629090a841f345a"><code>32770a1</code></a> :sparkles: Make docker.compose.down 
accept an optional list of service names ...</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/a8714e427283372031026a5752d8c1d05b8b3262"><code>a8714e4</code></a> Update cli_wrapper.py (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/572">#572</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/070bf6ead954a6917d03cbb44897c4a0c0063307"><code>070bf6e</code></a> :sparkles: Add stream output for pruning containers (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/566">#566</a>)</li> <li>See full diff in <a href="https://github.com/gabrieldemarmiesse/python-on-whales/compare/v0.70.1...v0.71.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=python-on-whales&package-manager=pip&previous-version=0.70.1&new-version=0.71.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Sam Bull <git@sambull.org> --- requirements/constraints.txt | 3 +-- requirements/dev.txt | 3 +-- requirements/test.txt | 3 +-- 3 files changed, 3 insertions(+), 6 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index b7a757dfe3e..40c7a47de70 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -176,7 +176,7 @@ pytest-mock==3.14.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun -python-on-whales==0.70.1 +python-on-whales==0.71.0 # via -r requirements/test.in pytz==2023.3.post1 # via babel @@ -248,7 +248,6 @@ typer==0.6.1 # via python-on-whales typing-extensions==4.11.0 # via - # -r requirements/typing-extensions.in # aioredis # annotated-types # mypy diff --git a/requirements/dev.txt b/requirements/dev.txt index 0a7f409ad8b..0e14006c45c 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -167,7 +167,7 @@ pytest-mock==3.14.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun -python-on-whales==0.70.1 +python-on-whales==0.71.0 # via -r requirements/test.in pytz==2023.3.post1 # via babel @@ -234,7 +234,6 @@ 
typer==0.9.0 # via python-on-whales typing-extensions==4.11.0 # via - # -r requirements/typing-extensions.in # aioredis # annotated-types # mypy diff --git a/requirements/test.txt b/requirements/test.txt index a7d7c235fbf..658e8ee07a2 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -88,7 +88,7 @@ pytest-mock==3.14.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun -python-on-whales==0.70.1 +python-on-whales==0.71.0 # via -r requirements/test.in re-assert==1.1.0 # via -r requirements/test.in @@ -113,7 +113,6 @@ typer==0.9.0 # via python-on-whales typing-extensions==4.11.0 # via - # -r requirements/typing-extensions.in # annotated-types # mypy # pydantic From f1eda16015a9b819eaea78f7e8ad09901b521153 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 25 Apr 2024 20:20:05 +0000 Subject: [PATCH 0197/1511] Bump mypy from 1.9.0 to 1.10.0 (#8381) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [mypy](https://github.com/python/mypy) from 1.9.0 to 1.10.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/python/mypy/blob/master/CHANGELOG.md">mypy's changelog</a>.</em></p> <blockquote> <h1>Mypy Release Notes</h1> <h2>Next release</h2> <h2>Mypy 1.10</h2> <p>We’ve just uploaded mypy 1.10 to the Python Package Index (<a href="https://pypi.org/project/mypy/">PyPI</a>). Mypy is a static type checker for Python. This release includes new features, performance improvements and bug fixes. 
You can install it as follows:</p> <pre><code>python3 -m pip install -U mypy </code></pre> <p>You can read the full documentation for this release on <a href="http://mypy.readthedocs.io">Read the Docs</a>.</p> <h4>Support TypeIs (PEP 742)</h4> <p>Mypy now supports <code>TypeIs</code> (<a href="https://peps.python.org/pep-0742/">PEP 742</a>), which allows functions to narrow the type of a value, similar to <code>isinstance()</code>. Unlike <code>TypeGuard</code>, <code>TypeIs</code> can narrow in both the <code>if</code> and <code>else</code> branches of an if statement:</p> <pre lang="python"><code>from typing_extensions import TypeIs <p>def is_str(s: object) -> TypeIs[str]: return isinstance(s, str)</p> <p>def f(o: str | int) -> None: if is_str(o): # Type of o is 'str' ... else: # Type of o is 'int' ... </code></pre></p> <p><code>TypeIs</code> will be added to the <code>typing</code> module in Python 3.13, but it can be used on earlier Python versions by importing it from <code>typing_extensions</code>.</p> <p>This feature was contributed by Jelle Zijlstra (PR <a href="https://redirect.github.com/python/mypy/pull/16898">16898</a>).</p> <h4>Support TypeVar Defaults (PEP 696)</h4> <p><a href="https://peps.python.org/pep-0696/">PEP 696</a> adds support for type parameter defaults. Example:</p> <pre lang="python"><code>from typing import Generic from typing_extensions import TypeVar <p></tr></table> </code></pre></p> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/python/mypy/commit/3faf0fc4798ec3ee6b1cd123965193dc0a753fb0"><code>3faf0fc</code></a> Remove +dev for version for release 1.10</li> <li><a href="https://github.com/python/mypy/commit/a5998d20402515f0c0bf05c7fe1029e93aa9bfa8"><code>a5998d2</code></a> Update CHANGELOG.md (<a href="https://redirect.github.com/python/mypy/issues/17159">#17159</a>)</li> <li><a href="https://github.com/python/mypy/commit/62ea5b01f0c0c99e7db93326cb8d219eecfb3cb6"><code>62ea5b0</code></a> Various updates to changelog for 1.10 (<a href="https://redirect.github.com/python/mypy/issues/17158">#17158</a>)</li> <li><a href="https://github.com/python/mypy/commit/2f0864c4e55a74700d8ce2d97ab2d3ca2b288513"><code>2f0864c</code></a> Update CHANGELOG.md with draft for release 1.10 (<a href="https://redirect.github.com/python/mypy/issues/17150">#17150</a>)</li> <li><a href="https://github.com/python/mypy/commit/e1443bbade91118794055449cc8b4b4f7fd08b7d"><code>e1443bb</code></a> fix: incorrect returned type of access descriptors on unions of types (<a href="https://redirect.github.com/python/mypy/issues/16604">#16604</a>)</li> <li><a href="https://github.com/python/mypy/commit/5161ac2e5b73dc7597536eb4444219868317e5d9"><code>5161ac2</code></a> Sync typeshed (<a href="https://redirect.github.com/python/mypy/issues/17124">#17124</a>)</li> <li><a href="https://github.com/python/mypy/commit/e2fc1f28935806ca04b18fab277217f583b51594"><code>e2fc1f2</code></a> Fix crash when expanding invalid Unpack in a <code>Callable</code> alias (<a href="https://redirect.github.com/python/mypy/issues/17028">#17028</a>)</li> <li><a href="https://github.com/python/mypy/commit/3ff6e47c57a67e807e0b4579a816b4f66ab16824"><code>3ff6e47</code></a> Docs: docstrings in checker.py, ast_helpers.py (<a href="https://redirect.github.com/python/mypy/issues/16908">#16908</a>)</li> <li><a 
href="https://github.com/python/mypy/commit/732d98ecb2a98e4eaea14aba1ed8ac9c1f5ccdb6"><code>732d98e</code></a> Fix string formatting for string enums (<a href="https://redirect.github.com/python/mypy/issues/16555">#16555</a>)</li> <li><a href="https://github.com/python/mypy/commit/80190101f68b52e960c22572ed6cc814de078b9c"><code>8019010</code></a> Narrow individual items when matching a tuple to a sequence pattern (<a href="https://redirect.github.com/python/mypy/issues/16905">#16905</a>)</li> <li>Additional commits viewable in <a href="https://github.com/python/mypy/compare/1.9.0...v1.10.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=mypy&package-manager=pip&previous-version=1.9.0&new-version=1.10.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Sam Bull <git@sambull.org> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 3 +-- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 5 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 40c7a47de70..8927517fd8f 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -114,7 +114,7 @@ multidict==6.0.5 # -r requirements/multidict.in # -r requirements/runtime-deps.in # yarl -mypy==1.9.0 ; implementation_name == "cpython" +mypy==1.10.0 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 0e14006c45c..48f7b2a1663 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -109,7 +109,7 @@ multidict==6.0.5 # via # -r requirements/runtime-deps.in # yarl -mypy==1.9.0 ; implementation_name == "cpython" +mypy==1.10.0 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index c61afb3c20b..cc097f74df0 100644 --- 
a/requirements/lint.txt +++ b/requirements/lint.txt @@ -22,7 +22,7 @@ identify==2.5.26 # via pre-commit iniconfig==2.0.0 # via pytest -mypy==1.9.0 ; implementation_name == "cpython" +mypy==1.10.0 ; implementation_name == "cpython" # via -r requirements/lint.in mypy-extensions==1.0.0 # via mypy @@ -49,7 +49,6 @@ tomli==2.0.1 # slotscheck typing-extensions==4.11.0 # via - # -r requirements/typing-extensions.in # aioredis # mypy uvloop==0.19.0 ; platform_system != "Windows" diff --git a/requirements/test.txt b/requirements/test.txt index 658e8ee07a2..a95bb1d08ad 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -57,7 +57,7 @@ multidict==6.0.5 # via # -r requirements/runtime-deps.in # yarl -mypy==1.9.0 ; implementation_name == "cpython" +mypy==1.10.0 ; implementation_name == "cpython" # via -r requirements/test.in mypy-extensions==1.0.0 # via mypy From fd9b940a01a9fb37ac07c3d360bdb44fc1a23a3b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 25 Apr 2024 20:28:46 +0000 Subject: [PATCH 0198/1511] Bump freezegun from 1.4.0 to 1.5.0 (#8375) Bumps [freezegun](https://github.com/spulec/freezegun) from 1.4.0 to 1.5.0. 
<details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/spulec/freezegun/blob/master/CHANGELOG">freezegun's changelog</a>.</em></p> <blockquote> <h2>1.5.0</h2> <ul> <li>The default ignore list now contains the <code>queue</code>-package</li> <li>Added a missing <code>move_to</code>-function when calling <code>freeze_time(tick=True)</code></li> <li>Fixes a rounding error in <code>time.time_ns()</code></li> <li>Fixed a bug where the default ignore list could not be empty (<code>configure(default_ignore_list=[])</code>)</li> <li>All <code>tick()</code> methods now return the new datetime (instead of None)</li> <li>Type improvements</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/spulec/freezegun/commit/17ea422800fc72a6e507755f1cfbf6ec75ba0359"><code>17ea422</code></a> Admin: Release may not need to update version nr</li> <li><a href="https://github.com/spulec/freezegun/commit/3b3326001384843a1bef604b62efe4e735ac25c3"><code>3b33260</code></a> Admin: Release 1.5.0 changelog</li> <li><a href="https://github.com/spulec/freezegun/commit/45d92937673b77557aa2b2243e7e28c29ba43436"><code>45d9293</code></a> Merge pull request <a href="https://redirect.github.com/spulec/freezegun/issues/538">#538</a> from romuald/fix-time-ns-mock</li> <li><a href="https://github.com/spulec/freezegun/commit/6f14dc3333585250c544f53eb67322f16b0f081a"><code>6f14dc3</code></a> Merge branch 'master' into fix-time-ns-mock</li> <li><a href="https://github.com/spulec/freezegun/commit/5b6c4a20dc3f6512a60a4d644cf9e067738d1aff"><code>5b6c4a2</code></a> Merge pull request <a href="https://redirect.github.com/spulec/freezegun/issues/543">#543</a> from spulec/mypy</li> <li><a href="https://github.com/spulec/freezegun/commit/aabe629cc6e34fd56e047b74eed27cc710457d96"><code>aabe629</code></a> MyPy</li> <li><a 
href="https://github.com/spulec/freezegun/commit/dfd5d9e166d3149194fbd35daa07e2862c3f918c"><code>dfd5d9e</code></a> Merge pull request <a href="https://redirect.github.com/spulec/freezegun/issues/542">#542</a> from kingbuzzman/patch-1</li> <li><a href="https://github.com/spulec/freezegun/commit/4ecad0ccb728a80797430a6382b1c3e7793bbbcc"><code>4ecad0c</code></a> Merge pull request <a href="https://redirect.github.com/spulec/freezegun/issues/540">#540</a> from encukou/fix-empty-ignore</li> <li><a href="https://github.com/spulec/freezegun/commit/2436841269115d54e3abe412792b531a567ec85d"><code>2436841</code></a> Update test_datetimes.py</li> <li><a href="https://github.com/spulec/freezegun/commit/673adf0e129ffe974fb573a3d8282d6fc5a661da"><code>673adf0</code></a> Update test_datetimes.py</li> <li>Additional commits viewable in <a href="https://github.com/spulec/freezegun/compare/1.4.0...1.5.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=freezegun&package-manager=pip&previous-version=1.4.0&new-version=1.5.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 8927517fd8f..595b5c56e3f 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -72,7 +72,7 @@ exceptiongroup==1.1.2 # via pytest filelock==3.3.2 # via virtualenv 
-freezegun==1.4.0 +freezegun==1.5.0 # via -r requirements/test.in frozenlist==1.4.1 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 48f7b2a1663..b5324aac6e0 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -68,7 +68,7 @@ exceptiongroup==1.1.2 # via pytest filelock==3.12.2 # via virtualenv -freezegun==1.4.0 +freezegun==1.5.0 # via -r requirements/test.in frozenlist==1.4.1 # via diff --git a/requirements/test.txt b/requirements/test.txt index a95bb1d08ad..83cba28c140 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -38,7 +38,7 @@ cryptography==41.0.2 # via trustme exceptiongroup==1.1.2 # via pytest -freezegun==1.4.0 +freezegun==1.5.0 # via -r requirements/test.in frozenlist==1.4.1 # via From 573c5ad4e24292109de65370ec2f12027149947c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 25 Apr 2024 22:48:55 +0200 Subject: [PATCH 0199/1511] Bump coverage from 7.4.4 to 7.5.0 (#8374) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.4.4 to 7.5.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst">coverage's changelog</a>.</em></p> <blockquote> <h2>Version 7.5.0 — 2024-04-23</h2> <ul> <li> <p>Added initial support for function and class reporting in the HTML report. There are now three index pages which link to each other: files, functions, and classes. Other reports don't yet have this information, but it will be added in the future where it makes sense. Feedback gladly accepted! 
Finishes <code>issue 780</code>_.</p> </li> <li> <p>Other HTML report improvements:</p> <ul> <li> <p>There is now a "hide covered" checkbox to filter out 100% files, finishing <code>issue 1384</code>_.</p> </li> <li> <p>The index page is always sorted by one of its columns, with clearer indications of the sorting.</p> </li> <li> <p>The "previous file" shortcut key didn't work on the index page, but now it does, fixing <code>issue 1765</code>_.</p> </li> </ul> </li> <li> <p>The debug output showing which configuration files were tried now shows absolute paths to help diagnose problems where settings aren't taking effect, and is renamed from "attempted_config_files" to the more logical "config_files_attempted."</p> </li> <li> <p>Python 3.13.0a6 is supported.</p> </li> </ul> <p>.. _issue 780: <a href="https://redirect.github.com/nedbat/coveragepy/issues/780">nedbat/coveragepy#780</a> .. _issue 1384: <a href="https://redirect.github.com/nedbat/coveragepy/issues/1384">nedbat/coveragepy#1384</a> .. _issue 1765: <a href="https://redirect.github.com/nedbat/coveragepy/issues/1765">nedbat/coveragepy#1765</a></p> <p>.. 
_changes_7-4-4:</p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/nedbat/coveragepy/commit/5f4e0348da6b51bcc6171685081062f9565ec1cc"><code>5f4e034</code></a> docs: sample HTML for 7.5.0</li> <li><a href="https://github.com/nedbat/coveragepy/commit/ed97cfb375dc9a7ecb6b2f9c5e513f29b9286646"><code>ed97cfb</code></a> docs: prep for 7.5.0</li> <li><a href="https://github.com/nedbat/coveragepy/commit/41e01d3ef0f98ec256173fd1864488cc72ab6f73"><code>41e01d3</code></a> build: use macos 13 for 3.8 and 3.9 while GitHub rolls out macos 14</li> <li><a href="https://github.com/nedbat/coveragepy/commit/583f0c0deb0c0f232019521ad574ce24a61d66dc"><code>583f0c0</code></a> test: add a test for skipping covered functions</li> <li><a href="https://github.com/nedbat/coveragepy/commit/b115ed34fcf65bdf444e4d79fc15a9a987aad24d"><code>b115ed3</code></a> refactor: keep Analysis private</li> <li><a href="https://github.com/nedbat/coveragepy/commit/40a052e2fa31496e52347d880657a552ce2d642c"><code>40a052e</code></a> docs: document CodeRegion and its plugin methods</li> <li><a href="https://github.com/nedbat/coveragepy/commit/2ff9933ee74a8d8048968fef9cdec68ba372b4dc"><code>2ff9933</code></a> docs: remove comment that now explains nothing.</li> <li><a href="https://github.com/nedbat/coveragepy/commit/a6ba1c8ea6f780d8d351a237d47aff9d396ccb48"><code>a6ba1c8</code></a> fix: html report pages fully validate</li> <li><a href="https://github.com/nedbat/coveragepy/commit/74c87a821b8db7f0dc131e42756d58f3a7abc784"><code>74c87a8</code></a> fix: previous page shortcut works in index page. 
<a href="https://redirect.github.com/nedbat/coveragepy/issues/1765">#1765</a></li> <li><a href="https://github.com/nedbat/coveragepy/commit/e016967ca656414f8372f3de6cab648a2b0778ae"><code>e016967</code></a> feat: main index page links to other index pages</li> <li>Additional commits viewable in <a href="https://github.com/nedbat/coveragepy/compare/7.4.4...7.5.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=coverage&package-manager=pip&previous-version=7.4.4&new-version=7.5.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Sam Bull <git@sambull.org> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 595b5c56e3f..1bcf5015e63 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -54,7 +54,7 @@ click==8.0.3 # towncrier # typer # wait-for-it -coverage==7.4.4 +coverage==7.5.0 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/dev.txt b/requirements/dev.txt index b5324aac6e0..8ee4676fad7 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -52,7 +52,7 @@ click==8.1.6 # towncrier # typer # wait-for-it -coverage==7.4.4 +coverage==7.5.0 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/test.txt b/requirements/test.txt index 83cba28c140..66204fcf08b 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -30,7 +30,7 @@ click==8.1.6 # via # typer # wait-for-it -coverage==7.4.4 +coverage==7.5.0 # via # -r requirements/test.in # pytest-cov From 17af04701211874e46b14636893054ca748bb451 Mon Sep 17 
00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Thu, 25 Apr 2024 23:14:13 +0100 Subject: [PATCH 0200/1511] Drop old Mypy ignores (#8365) (#8383) (cherry picked from commit d26f16ce02be6b20bcd1087416878cb878d6fbc4) --- .mypy.ini | 9 ------- aiohttp/abc.py | 2 +- aiohttp/connector.py | 2 +- aiohttp/resolver.py | 9 +++---- examples/fake_server.py | 4 +-- requirements/lint.in | 2 ++ requirements/lint.txt | 48 ++++++++++++++++++++++++++++++++- tests/autobahn/test_autobahn.py | 3 ++- tests/test_resolver.py | 9 +------ 9 files changed, 60 insertions(+), 28 deletions(-) diff --git a/.mypy.ini b/.mypy.ini index 86b5c86f345..f1e6c5361be 100644 --- a/.mypy.ini +++ b/.mypy.ini @@ -26,12 +26,6 @@ warn_return_any = True disallow_untyped_calls = False disallow_untyped_defs = False -[mypy-aiodns] -ignore_missing_imports = True - -[mypy-asynctest] -ignore_missing_imports = True - [mypy-brotli] ignore_missing_imports = True @@ -40,6 +34,3 @@ ignore_missing_imports = True [mypy-gunicorn.*] ignore_missing_imports = True - -[mypy-python_on_whales] -ignore_missing_imports = True diff --git a/aiohttp/abc.py b/aiohttp/abc.py index b6c0514a615..d9e7725eab2 100644 --- a/aiohttp/abc.py +++ b/aiohttp/abc.py @@ -148,7 +148,7 @@ class AbstractResolver(ABC): @abstractmethod async def resolve( - self, host: str, port: int = 0, family: int = socket.AF_INET + self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET ) -> List[ResolveResult]: """Return IP address for given hostname""" diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 6e84aa45bb4..8dac891383c 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -766,7 +766,7 @@ def __init__( fingerprint: Optional[bytes] = None, use_dns_cache: bool = True, ttl_dns_cache: Optional[int] = 10, - family: int = 0, + family: socket.AddressFamily = socket.AddressFamily.AF_UNSPEC, ssl_context: Optional[SSLContext] = None, ssl: Union[bool, Fingerprint, SSLContext] = True, local_addr: Optional[Tuple[str, int]] = 
None, diff --git a/aiohttp/resolver.py b/aiohttp/resolver.py index ad502c5e5c8..2ac204a4e32 100644 --- a/aiohttp/resolver.py +++ b/aiohttp/resolver.py @@ -13,7 +13,7 @@ # aiodns_default = hasattr(aiodns.DNSResolver, 'getaddrinfo') except ImportError: # pragma: no cover - aiodns = None + aiodns = None # type: ignore[assignment] aiodns_default = False @@ -33,7 +33,7 @@ def __init__(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: self._loop = get_running_loop(loop) async def resolve( - self, host: str, port: int = 0, family: int = socket.AF_INET + self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET ) -> List[ResolveResult]: infos = await self._loop.getaddrinfo( host, @@ -92,15 +92,14 @@ def __init__( if aiodns is None: raise RuntimeError("Resolver requires aiodns library") - self._loop = get_running_loop(loop) - self._resolver = aiodns.DNSResolver(*args, loop=loop, **kwargs) + self._resolver = aiodns.DNSResolver(*args, **kwargs) if not hasattr(self._resolver, "gethostbyname"): # aiodns 1.1 is not available, fallback to DNSResolver.query self.resolve = self._resolve_with_query # type: ignore async def resolve( - self, host: str, port: int = 0, family: int = socket.AF_INET + self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET ) -> List[ResolveResult]: try: resp = await self._resolver.getaddrinfo( diff --git a/examples/fake_server.py b/examples/fake_server.py index 4f796d42386..2cfe3ed710e 100755 --- a/examples/fake_server.py +++ b/examples/fake_server.py @@ -3,7 +3,7 @@ import pathlib import socket import ssl -from typing import List, Union +from typing import List import aiohttp from aiohttp import web @@ -24,7 +24,7 @@ async def resolve( self, host: str, port: int = 0, - family: Union[socket.AddressFamily, int] = socket.AF_INET, + family: socket.AddressFamily = socket.AF_INET, ) -> List[ResolveResult]: fake_port = self._fakes.get(host) if fake_port is not None: diff --git a/requirements/lint.in 
b/requirements/lint.in index f1f16a99aa9..98910e21f0e 100644 --- a/requirements/lint.in +++ b/requirements/lint.in @@ -1,6 +1,8 @@ +aiodns aioredis mypy; implementation_name == "cpython" pre-commit pytest +python-on-whales slotscheck uvloop; platform_system != "Windows" diff --git a/requirements/lint.txt b/requirements/lint.txt index cc097f74df0..8867d523df9 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -4,14 +4,26 @@ # # pip-compile --allow-unsafe --output-file=requirements/lint.txt --resolver=backtracking --strip-extras requirements/lint.in # +aiodns==3.2.0 + # via -r requirements/lint.in aioredis==2.0.1 # via -r requirements/lint.in +annotated-types==0.6.0 + # via pydantic async-timeout==4.0.3 # via aioredis +certifi==2024.2.2 + # via requests +cffi==1.16.0 + # via pycares cfgv==3.3.1 # via pre-commit +charset-normalizer==3.3.2 + # via requests click==8.1.6 - # via slotscheck + # via + # slotscheck + # typer distlib==0.3.7 # via virtualenv exceptiongroup==1.1.2 @@ -20,8 +32,14 @@ filelock==3.12.2 # via virtualenv identify==2.5.26 # via pre-commit +idna==3.7 + # via requests iniconfig==2.0.0 # via pytest +markdown-it-py==3.0.0 + # via rich +mdurl==0.1.2 + # via markdown-it-py mypy==1.10.0 ; implementation_name == "cpython" # via -r requirements/lint.in mypy-extensions==1.0.0 @@ -36,10 +54,28 @@ pluggy==1.4.0 # via pytest pre-commit==3.5.0 # via -r requirements/lint.in +pycares==4.4.0 + # via aiodns +pycparser==2.22 + # via cffi +pydantic==2.7.1 + # via python-on-whales +pydantic-core==2.18.2 + # via pydantic +pygments==2.17.2 + # via rich pytest==8.1.1 # via -r requirements/lint.in +python-on-whales==0.70.1 + # via -r requirements/lint.in pyyaml==6.0.1 # via pre-commit +requests==2.31.0 + # via python-on-whales +rich==13.7.1 + # via typer +shellingham==1.5.4 + # via typer slotscheck==0.19.0 # via -r requirements/lint.in tomli==2.0.1 @@ -47,10 +83,20 @@ tomli==2.0.1 # mypy # pytest # slotscheck +tqdm==4.66.2 + # via python-on-whales 
+typer==0.12.3 + # via python-on-whales typing-extensions==4.11.0 # via # aioredis # mypy + # pydantic + # pydantic-core + # python-on-whales + # typer +urllib3==2.2.1 + # via requests uvloop==0.19.0 ; platform_system != "Windows" # via -r requirements/lint.in virtualenv==20.24.2 diff --git a/tests/autobahn/test_autobahn.py b/tests/autobahn/test_autobahn.py index f30f6afd693..651183d5f92 100644 --- a/tests/autobahn/test_autobahn.py +++ b/tests/autobahn/test_autobahn.py @@ -73,7 +73,8 @@ def test_client(report_dir: Path, request: Any) -> None: print("Stopping client and server") client.terminate() client.wait() - autobahn_container.stop() + # https://github.com/gabrieldemarmiesse/python-on-whales/pull/580 + autobahn_container.stop() # type: ignore[union-attr] failed_messages = get_failed_tests(f"{report_dir}/clients", "aiohttp") diff --git a/tests/test_resolver.py b/tests/test_resolver.py index 2650ccadd6e..fe1902180dd 100644 --- a/tests/test_resolver.py +++ b/tests/test_resolver.py @@ -20,7 +20,7 @@ getaddrinfo: Any = hasattr(aiodns.DNSResolver, "getaddrinfo") except ImportError: - aiodns = None + aiodns = None # type: ignore[assignment] getaddrinfo = False @@ -295,13 +295,6 @@ async def test_default_loop_for_threaded_resolver(loop) -> None: assert resolver._loop is loop -@pytest.mark.skipif(aiodns is None, reason="aiodns required") -async def test_default_loop_for_async_resolver(loop) -> None: - asyncio.set_event_loop(loop) - resolver = AsyncResolver() - assert resolver._loop is loop - - @pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") async def test_async_resolver_ipv6_positive_lookup(loop: Any) -> None: with patch("aiodns.DNSResolver") as mock: From d81c6f0d555b2f817654e91bb0a0b9cb7e8b1dc6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 26 Apr 2024 11:15:37 +0000 Subject: [PATCH 0201/1511] Bump python-on-whales from 0.70.1 to 0.71.0 (#8386) MIME-Version: 1.0 Content-Type: 
text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [python-on-whales](https://github.com/gabrieldemarmiesse/python-on-whales) from 0.70.1 to 0.71.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/gabrieldemarmiesse/python-on-whales/releases">python-on-whales's releases</a>.</em></p> <blockquote> <h2>v0.71.0</h2> <h2>What's Changed</h2> <ul> <li>Add stream output for pruning by <a href="https://github.com/anesmemisevic"><code>@​anesmemisevic</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/566">gabrieldemarmiesse/python-on-whales#566</a></li> <li>Improve typing for <code>DockerClient.compose.config</code> by <a href="https://github.com/einarwar"><code>@​einarwar</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/572">gabrieldemarmiesse/python-on-whales#572</a></li> <li>Function docker.compose.down does not accept optional list of service names <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/570">#570</a> by <a href="https://github.com/MisterOwlPT"><code>@​MisterOwlPT</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/571">gabrieldemarmiesse/python-on-whales#571</a></li> <li>Fix time argument formatting to include time zone by <a href="https://github.com/kamalmarhubi"><code>@​kamalmarhubi</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/574">gabrieldemarmiesse/python-on-whales#574</a></li> <li>feat: Add <code>--wait-timeout</code> option for <code>docker.compose.up</code> by <a href="https://github.com/Taragolis"><code>@​Taragolis</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/573">gabrieldemarmiesse/python-on-whales#573</a></li> <li>Emit a <code>DeprecationWarning</code> when downloading the docker client by <a 
href="https://github.com/LewisGaul"><code>@​LewisGaul</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/577">gabrieldemarmiesse/python-on-whales#577</a></li> <li>Support podman's <code>--preserve-fds</code> arg to container run/exec by <a href="https://github.com/LewisGaul"><code>@​LewisGaul</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/576">gabrieldemarmiesse/python-on-whales#576</a></li> <li>Added timezone argument to run and create by <a href="https://github.com/tjd78"><code>@​tjd78</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/579">gabrieldemarmiesse/python-on-whales#579</a></li> <li>Docker Buildx Pruning Logs Streaming by <a href="https://github.com/anesmemisevic"><code>@​anesmemisevic</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/578">gabrieldemarmiesse/python-on-whales#578</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/kamalmarhubi"><code>@​kamalmarhubi</code></a> made their first contribution in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/574">gabrieldemarmiesse/python-on-whales#574</a></li> <li><a href="https://github.com/Taragolis"><code>@​Taragolis</code></a> made their first contribution in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/573">gabrieldemarmiesse/python-on-whales#573</a></li> <li><a href="https://github.com/tjd78"><code>@​tjd78</code></a> made their first contribution in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/579">gabrieldemarmiesse/python-on-whales#579</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/gabrieldemarmiesse/python-on-whales/compare/v0.70.1...v0.71.0">https://github.com/gabrieldemarmiesse/python-on-whales/compare/v0.70.1...v0.71.0</a></p> </blockquote> </details> <details> 
<summary>Commits</summary> <ul> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/6ef2cc9ef29d8c388c22b0ba2ca7dfe05785ce03"><code>6ef2cc9</code></a> Bump version to 0.71.0</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/4a74fd8f1cec2aee098872e584b0d6d4b4732c11"><code>4a74fd8</code></a> Docker Buildx Pruning Logs Streaming (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/578">#578</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/de4666793c5adb44f8737c5167120d8dbb51c2de"><code>de46667</code></a> Added timezone argument to run and create (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/579">#579</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/0a9edc57d42b5065bfe7993f8fc8887b03723702"><code>0a9edc5</code></a> :sparkles: Support podman's <code>--preserve-fds</code> arg to container run/exec (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/576">#576</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/4e0c80c8ff208ad5e1470bf1f222a37d1192400a"><code>4e0c80c</code></a> Emit a <code>DeprecationWarning</code> when downloading the docker client (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/577">#577</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/a10959c45d3af1be90b0f6f9c0a1f127955f107b"><code>a10959c</code></a> feat: Add <code>--wait-timeout</code> option for <code>docker.compose.up</code> (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/573">#573</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/ac0e53b93f03821d626a2c605d65fc3db524c9ec"><code>ac0e53b</code></a> Fix time argument formatting to include time zone (<a 
href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/574">#574</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/32770a14dbf379b8bafdac592629090a841f345a"><code>32770a1</code></a> :sparkles: Make docker.compose.down accept an optional list of service names ...</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/a8714e427283372031026a5752d8c1d05b8b3262"><code>a8714e4</code></a> Update cli_wrapper.py (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/572">#572</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/070bf6ead954a6917d03cbb44897c4a0c0063307"><code>070bf6e</code></a> :sparkles: Add stream output for pruning containers (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/566">#566</a>)</li> <li>See full diff in <a href="https://github.com/gabrieldemarmiesse/python-on-whales/compare/v0.70.1...v0.71.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=python-on-whales&package-manager=pip&previous-version=0.70.1&new-version=0.71.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 8 ++++++-- requirements/dev.txt | 8 ++++++-- requirements/lint.txt | 4 +++- 3 files changed, 15 insertions(+), 5 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 1bcf5015e63..e983fddf40b 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -5,7 +5,9 @@ # pip-compile --allow-unsafe 
--output-file=requirements/constraints.txt --resolver=backtracking --strip-extras requirements/constraints.in # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" - # via -r requirements/runtime-deps.in + # via + # -r requirements/lint.in + # -r requirements/runtime-deps.in aiohappyeyeballs==2.3.2 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.6 @@ -177,7 +179,9 @@ pytest-mock==3.14.0 python-dateutil==2.8.2 # via freezegun python-on-whales==0.71.0 - # via -r requirements/test.in + # via + # -r requirements/lint.in + # -r requirements/test.in pytz==2023.3.post1 # via babel pyyaml==6.0.1 diff --git a/requirements/dev.txt b/requirements/dev.txt index 8ee4676fad7..ff778dcd131 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -5,7 +5,9 @@ # pip-compile --allow-unsafe --output-file=requirements/dev.txt --resolver=backtracking --strip-extras requirements/dev.in # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" - # via -r requirements/runtime-deps.in + # via + # -r requirements/lint.in + # -r requirements/runtime-deps.in aiohappyeyeballs==2.3.2 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.6 @@ -168,7 +170,9 @@ pytest-mock==3.14.0 python-dateutil==2.8.2 # via freezegun python-on-whales==0.71.0 - # via -r requirements/test.in + # via + # -r requirements/lint.in + # -r requirements/test.in pytz==2023.3.post1 # via babel pyyaml==6.0.1 diff --git a/requirements/lint.txt b/requirements/lint.txt index 8867d523df9..0582769bc21 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -66,7 +66,7 @@ pygments==2.17.2 # via rich pytest==8.1.1 # via -r requirements/lint.in -python-on-whales==0.70.1 +python-on-whales==0.71.0 # via -r requirements/lint.in pyyaml==6.0.1 # via pre-commit @@ -90,10 +90,12 @@ typer==0.12.3 typing-extensions==4.11.0 # via # aioredis + # annotated-types # mypy # pydantic # pydantic-core # python-on-whales + # rich # typer urllib3==2.2.1 # via requests From 
a43045a7fa49b47c3ef5ab019f18995e5c2e394b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Apr 2024 11:27:57 +0000 Subject: [PATCH 0202/1511] Bump pytest from 8.1.1 to 8.2.0 (#8389) Bumps [pytest](https://github.com/pytest-dev/pytest) from 8.1.1 to 8.2.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pytest-dev/pytest/releases">pytest's releases</a>.</em></p> <blockquote> <h2>8.2.0</h2> <h1>pytest 8.2.0 (2024-04-27)</h1> <h2>Deprecations</h2> <ul> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/12069">#12069</a>: A deprecation warning is now raised when implementations of one of the following hooks request a deprecated <code>py.path.local</code> parameter instead of the <code>pathlib.Path</code> parameter which replaced it:</p> <ul> <li><code>pytest_ignore_collect</code>{.interpreted-text role="hook"} - the <code>path</code> parameter - use <code>collection_path</code> instead.</li> <li><code>pytest_collect_file</code>{.interpreted-text role="hook"} - the <code>path</code> parameter - use <code>file_path</code> instead.</li> <li><code>pytest_pycollect_makemodule</code>{.interpreted-text role="hook"} - the <code>path</code> parameter - use <code>module_path</code> instead.</li> <li><code>pytest_report_header</code>{.interpreted-text role="hook"} - the <code>startdir</code> parameter - use <code>start_path</code> instead.</li> <li><code>pytest_report_collectionfinish</code>{.interpreted-text role="hook"} - the <code>startdir</code> parameter - use <code>start_path</code> instead.</li> </ul> <p>The replacement parameters are available since pytest 7.0.0. 
The old parameters will be removed in pytest 9.0.0.</p> <p>See <code>legacy-path-hooks-deprecated</code>{.interpreted-text role="ref"} for more details.</p> </li> </ul> <h2>Features</h2> <ul> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/11871">#11871</a>: Added support for reading command line arguments from a file using the prefix character <code>@</code>, like e.g.: <code>pytest @tests.txt</code>. The file must have one argument per line.</p> <p>See <code>Read arguments from file <args-from-file></code>{.interpreted-text role="ref"} for details.</p> </li> </ul> <h2>Improvements</h2> <ul> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/11523">#11523</a>: <code>pytest.importorskip</code>{.interpreted-text role="func"} will now issue a warning if the module could be found, but raised <code>ImportError</code>{.interpreted-text role="class"} instead of <code>ModuleNotFoundError</code>{.interpreted-text role="class"}.</p> <p>The warning can be suppressed by passing <code>exc_type=ImportError</code> to <code>pytest.importorskip</code>{.interpreted-text role="func"}.</p> <p>See <code>import-or-skip-import-error</code>{.interpreted-text role="ref"} for details.</p> </li> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/11728">#11728</a>: For <code>unittest</code>-based tests, exceptions during class cleanup (as raised by functions registered with <code>TestCase.addClassCleanup <unittest.TestCase.addClassCleanup></code>{.interpreted-text role="meth"}) are now reported instead of silently failing.</p> </li> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/11777">#11777</a>: Text is no longer truncated in the <code>short test summary info</code> section when <code>-vv</code> is given.</p> </li> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/12112">#12112</a>: Improved namespace packages detection when <code>consider_namespace_packages</code>{.interpreted-text 
role="confval"} is enabled, covering more situations (like editable installs).</p> </li> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/9502">#9502</a>: Added <code>PYTEST_VERSION</code>{.interpreted-text role="envvar"} environment variable which is defined at the start of the pytest session and undefined afterwards. It contains the value of <code>pytest.__version__</code>, and among other things can be used to easily check if code is running from within a pytest run.</p> </li> </ul> <h2>Bug Fixes</h2> <ul> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/12065">#12065</a>: Fixed a regression in pytest 8.0.0 where test classes containing <code>setup_method</code> and tests using <code>@staticmethod</code> or <code>@classmethod</code> would crash with <code>AttributeError: 'NoneType' object has no attribute 'setup_method'</code>.</p> <p>Now the <code>request.instance <pytest.FixtureRequest.instance></code>{.interpreted-text role="attr"} attribute of tests using <code>@staticmethod</code> and <code>@classmethod</code> is no longer <code>None</code>, but a fresh instance of the class, like in non-static methods.</p> </li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pytest-dev/pytest/commit/6bd3f313447290380cbc2db30fb9ee5cca7eb941"><code>6bd3f31</code></a> Tweak changelog for 8.2.0</li> <li><a href="https://github.com/pytest-dev/pytest/commit/9b6219b5e89af237e5bc80354d405d2b5c2fc8a0"><code>9b6219b</code></a> Prepare release version 8.2.0</li> <li><a href="https://github.com/pytest-dev/pytest/commit/835765c9d31e0a86c6028f983b28d32c82a759c4"><code>835765c</code></a> Merge pull request <a href="https://redirect.github.com/pytest-dev/pytest/issues/12130">#12130</a> from bluetech/fixtures-inline</li> <li><a href="https://github.com/pytest-dev/pytest/commit/7e7503c0b015f61d9d21d3b5f55990b7fcd683f7"><code>7e7503c</code></a> unittest: report class cleanup exceptions (<a href="https://redirect.github.com/pytest-dev/pytest/issues/12250">#12250</a>)</li> <li><a href="https://github.com/pytest-dev/pytest/commit/882c4da2f37702b00bdbd3b6c74e9821d33e0204"><code>882c4da</code></a> fixtures: inline <code>fail_fixturefunc</code></li> <li><a href="https://github.com/pytest-dev/pytest/commit/2e8fb9f1401d727e20f004326752fd1922f9c601"><code>2e8fb9f</code></a> fixtures: extract a <code>_check_fixturedef</code> method</li> <li><a href="https://github.com/pytest-dev/pytest/commit/acf2971f46a9518b3552d48ea9541a1951c2b207"><code>acf2971</code></a> fixtures: inline <code>_getnextfixturedef</code> into <code>_get_active_fixturedef</code></li> <li><a href="https://github.com/pytest-dev/pytest/commit/3c77aec1dac0894ec4ca774b71ec91c85cf91dd1"><code>3c77aec</code></a> fixtures: move "request" check early</li> <li><a href="https://github.com/pytest-dev/pytest/commit/d217d68cde0c34d619862f15c773ecc02ecdaabe"><code>d217d68</code></a> fixtures: inline <code>_compute_fixture_value</code></li> <li><a href="https://github.com/pytest-dev/pytest/commit/530be285751143febe54b8974b234eed5eb8b079"><code>530be28</code></a> fixtures: use early return in 
<code>_get_active_fixturedef</code></li> <li>Additional commits viewable in <a href="https://github.com/pytest-dev/pytest/compare/8.1.1...8.2.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pytest&package-manager=pip&previous-version=8.1.1&new-version=8.2.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 4 ++-- requirements/dev.txt | 4 ++-- requirements/lint.txt | 4 ++-- requirements/test.txt | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index e983fddf40b..d2c04cf6252 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -138,7 +138,7 @@ pip-tools==7.4.1 # via -r requirements/dev.in platformdirs==2.4.0 # via virtualenv -pluggy==1.4.0 +pluggy==1.5.0 # via pytest pre-commit==3.5.0 # via -r requirements/lint.in @@ -166,7 +166,7 @@ pyproject-hooks==1.0.0 # via # build # pip-tools -pytest==8.1.1 +pytest==8.2.0 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index ff778dcd131..fc451635c50 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -133,7 +133,7 @@ pip-tools==7.4.1 # via -r requirements/dev.in platformdirs==3.10.0 # via virtualenv -pluggy==1.4.0 +pluggy==1.5.0 # via pytest pre-commit==3.5.0 # via -r requirements/lint.in @@ -157,7 +157,7 @@ pyproject-hooks==1.0.0 # via # build # pip-tools -pytest==8.1.1 
+pytest==8.2.0 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 0582769bc21..17f71a6b917 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -50,7 +50,7 @@ packaging==23.1 # via pytest platformdirs==3.10.0 # via virtualenv -pluggy==1.4.0 +pluggy==1.5.0 # via pytest pre-commit==3.5.0 # via -r requirements/lint.in @@ -64,7 +64,7 @@ pydantic-core==2.18.2 # via pydantic pygments==2.17.2 # via rich -pytest==8.1.1 +pytest==8.2.0 # via -r requirements/lint.in python-on-whales==0.71.0 # via -r requirements/lint.in diff --git a/requirements/test.txt b/requirements/test.txt index 66204fcf08b..c7e4d6b26f5 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -65,7 +65,7 @@ packaging==23.1 # via # gunicorn # pytest -pluggy==1.4.0 +pluggy==1.5.0 # via pytest proxy-py==2.4.4rc5 # via -r requirements/test.in @@ -77,7 +77,7 @@ pydantic==2.2.0 # via python-on-whales pydantic-core==2.6.0 # via pydantic -pytest==8.1.1 +pytest==8.2.0 # via # -r requirements/test.in # pytest-cov From c215e56dc175e9da545e73b00531f743833e0de8 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Mon, 29 Apr 2024 23:19:37 +0100 Subject: [PATCH 0203/1511] Fix flaky tests with unclosed warnings (#8391) (#8392) (cherry picked from commit 0ba6cf26fa99c056e9b98a1f972a91b416850006) --- tests/test_proxy_functional.py | 88 ++++++++++++---------------------- 1 file changed, 30 insertions(+), 58 deletions(-) diff --git a/tests/test_proxy_functional.py b/tests/test_proxy_functional.py index 4dfa80d7db8..c15ca326288 100644 --- a/tests/test_proxy_functional.py +++ b/tests/test_proxy_functional.py @@ -16,18 +16,6 @@ from aiohttp.client_exceptions import ClientConnectionError from aiohttp.helpers import IS_MACOS, IS_WINDOWS -pytestmark = [ - pytest.mark.filterwarnings( - "ignore:unclosed <socket.socket fd=.*:ResourceWarning", - ), - pytest.mark.filterwarnings( - "ignore:" - "unclosed transport 
<_SelectorSocketTransport closing fd=.*" - ":ResourceWarning", - ), -] - - ASYNCIO_SUPPORTS_TLS_IN_TLS = sys.version_info >= (3, 11) @@ -121,16 +109,14 @@ async def test_secure_https_proxy_absolute_path( conn = aiohttp.TCPConnector() sess = aiohttp.ClientSession(connector=conn) - response = await sess.get( + async with sess.get( web_server_endpoint_url, proxy=secure_proxy_url, ssl=client_ssl_ctx, # used for both proxy and endpoint connections - ) - - assert response.status == 200 - assert await response.text() == web_server_endpoint_payload + ) as response: + assert response.status == 200 + assert await response.text() == web_server_endpoint_payload - response.close() await sess.close() await conn.close() @@ -199,7 +185,8 @@ async def test_https_proxy_unsupported_tls_in_tls( ClientConnectionError, match=expected_exception_reason, ) as conn_err: - await sess.get(url, proxy=secure_proxy_url, ssl=client_ssl_ctx) + async with sess.get(url, proxy=secure_proxy_url, ssl=client_ssl_ctx): + pass assert isinstance(conn_err.value.__cause__, TypeError) assert match_regex(f"^{type_err!s}$", str(conn_err.value.__cause__)) @@ -259,13 +246,11 @@ async def proxy_server(): def get_request(loop): async def _request(method="GET", *, url, trust_env=False, **kwargs): connector = aiohttp.TCPConnector(ssl=False, loop=loop) - client = aiohttp.ClientSession(connector=connector, trust_env=trust_env) - try: - resp = await client.request(method, url, **kwargs) - await resp.release() - return resp - finally: - await client.close() + async with aiohttp.ClientSession( + connector=connector, trust_env=trust_env + ) as client: + async with client.request(method, url, **kwargs) as resp: + return resp return _request @@ -405,11 +390,8 @@ async def test_proxy_http_acquired_cleanup_force(proxy_test_server, loop) -> Non assert 0 == len(conn._acquired) async def request(): - resp = await sess.get(url, proxy=proxy.url) - - assert 1 == len(conn._acquired) - - await resp.release() + async with sess.get(url, 
proxy=proxy.url): + assert 1 == len(conn._acquired) await request() @@ -433,13 +415,11 @@ async def request(pid): # process requests only one by one nonlocal current_pid - resp = await sess.get(url, proxy=proxy.url) - - current_pid = pid - await asyncio.sleep(0.2, loop=loop) - assert current_pid == pid + async with sess.get(url, proxy=proxy.url) as resp: + current_pid = pid + await asyncio.sleep(0.2, loop=loop) + assert current_pid == pid - await resp.release() return resp requests = [request(pid) for pid in range(multi_conn_num)] @@ -490,9 +470,8 @@ async def xtest_proxy_https_send_body(proxy_test_server, loop): proxy.return_value = {"status": 200, "body": b"1" * (2**20)} url = "https://www.google.com.ua/search?q=aiohttp proxy" - resp = await sess.get(url, proxy=proxy.url) - body = await resp.read() - await resp.release() + async with sess.get(url, proxy=proxy.url) as resp: + body = await resp.read() await sess.close() assert body == b"1" * (2**20) @@ -586,11 +565,8 @@ async def xtest_proxy_https_acquired_cleanup(proxy_test_server, loop): assert 0 == len(conn._acquired) async def request(): - resp = await sess.get(url, proxy=proxy.url) - - assert 1 == len(conn._acquired) - - await resp.release() + async with sess.get(url, proxy=proxy.url): + assert 1 == len(conn._acquired) await request() @@ -610,11 +586,8 @@ async def xtest_proxy_https_acquired_cleanup_force(proxy_test_server, loop): assert 0 == len(conn._acquired) async def request(): - resp = await sess.get(url, proxy=proxy.url) - - assert 1 == len(conn._acquired) - - await resp.release() + async with sess.get(url, proxy=proxy.url): + assert 1 == len(conn._acquired) await request() @@ -638,13 +611,11 @@ async def request(pid): # process requests only one by one nonlocal current_pid - resp = await sess.get(url, proxy=proxy.url) - - current_pid = pid - await asyncio.sleep(0.2, loop=loop) - assert current_pid == pid + async with sess.get(url, proxy=proxy.url) as resp: + current_pid = pid + await asyncio.sleep(0.2, 
loop=loop) + assert current_pid == pid - await resp.release() return resp requests = [request(pid) for pid in range(multi_conn_num)] @@ -850,8 +821,9 @@ async def test_proxy_auth() -> None: with pytest.raises( ValueError, match=r"proxy_auth must be None or BasicAuth\(\) tuple" ): - await session.get( + async with session.get( "http://python.org", proxy="http://proxy.example.com", proxy_auth=("user", "pass"), - ) + ): + pass From e83927932bf534914878867c965638a8ffc47101 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 8 May 2024 19:22:02 +0100 Subject: [PATCH 0204/1511] [PR #8403/06033c23 backport][3.10] Update docs for ssl param in client (#8404) **This is a backport of PR #8403 as merged into master (06033c231c3ff28bd1a2046fe23029f4cacf1e44).** Co-authored-by: Aitor Gamarra <60578201+aitor-gamarra@users.noreply.github.com> --- CHANGES/8403.doc.rst | 1 + docs/client_reference.rst | 12 ++++++------ 2 files changed, 7 insertions(+), 6 deletions(-) create mode 100644 CHANGES/8403.doc.rst diff --git a/CHANGES/8403.doc.rst b/CHANGES/8403.doc.rst new file mode 100644 index 00000000000..71618c3c99c --- /dev/null +++ b/CHANGES/8403.doc.rst @@ -0,0 +1 @@ +Improve the docs for the `ssl` params. diff --git a/docs/client_reference.rst b/docs/client_reference.rst index d25f381e03c..ef31003c201 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -373,7 +373,7 @@ The client session supports the context manager protocol for self closing. read_until_eof=True, \ read_bufsize=None, \ proxy=None, proxy_auth=None,\ - timeout=sentinel, ssl=None, \ + timeout=sentinel, ssl=True, \ verify_ssl=None, fingerprint=None, \ ssl_context=None, proxy_headers=None, \ server_hostname=None, auto_decompress=None) @@ -491,7 +491,7 @@ The client session supports the context manager protocol for self closing. If :class:`float` is passed it is a *total* timeout (in seconds). - :param ssl: SSL validation mode. 
``None`` for default SSL check + :param ssl: SSL validation mode. ``True`` for default SSL check (:func:`ssl.create_default_context` is used), ``False`` for skip SSL certificate validation, :class:`aiohttp.Fingerprint` for fingerprint @@ -696,7 +696,7 @@ The client session supports the context manager protocol for self closing. origin=None, \ params=None, \ headers=None, \ - proxy=None, proxy_auth=None, ssl=None, \ + proxy=None, proxy_auth=None, ssl=True, \ verify_ssl=None, fingerprint=None, \ ssl_context=None, proxy_headers=None, \ compress=0, max_msg_size=4194304) @@ -760,7 +760,7 @@ The client session supports the context manager protocol for self closing. :param aiohttp.BasicAuth proxy_auth: an object that represents proxy HTTP Basic Authorization (optional) - :param ssl: SSL validation mode. ``None`` for default SSL check + :param ssl: SSL validation mode. ``True`` for default SSL check (:func:`ssl.create_default_context` is used), ``False`` for skip SSL certificate validation, :class:`aiohttp.Fingerprint` for fingerprint @@ -1066,7 +1066,7 @@ is controlled by *force_close* constructor's parameter). overridden in subclasses. -.. class:: TCPConnector(*, ssl=None, verify_ssl=True, fingerprint=None, \ +.. class:: TCPConnector(*, ssl=True, verify_ssl=True, fingerprint=None, \ use_dns_cache=True, ttl_dns_cache=10, \ family=0, ssl_context=None, local_addr=None, \ resolver=None, keepalive_timeout=sentinel, \ @@ -1084,7 +1084,7 @@ is controlled by *force_close* constructor's parameter). Constructor accepts all parameters suitable for :class:`BaseConnector` plus several TCP-specific ones: - :param ssl: SSL validation mode. ``None`` for default SSL check + :param ssl: SSL validation mode. 
``True`` for default SSL check (:func:`ssl.create_default_context` is used), ``False`` for skip SSL certificate validation, :class:`aiohttp.Fingerprint` for fingerprint From c8d420bbf9b22bba71795fad9d8e836e587ac379 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 10 May 2024 11:13:46 +0000 Subject: [PATCH 0205/1511] Bump proxy-py from 2.4.4rc5 to 2.4.4 (#8407) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [proxy-py](https://github.com/abhinavsingh/proxy.py) from 2.4.4rc5 to 2.4.4. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/abhinavsingh/proxy.py/releases">proxy-py's releases</a>.</em></p> <blockquote> <h2>v2.4.4</h2> <h2>What's Changed</h2> <ul> <li>Add <code>timeout=1</code> when joining acceptors by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1196">abhinavsingh/proxy.py#1196</a></li> <li>Use only a single acceptor in tests to avoid Windows pitfalls by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1199">abhinavsingh/proxy.py#1199</a></li> <li>Avoid installing <code>!=7.0.0,!=7.0.1,!=7.0.2</code> which leads to 0.0 as wheel version by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1209">abhinavsingh/proxy.py#1209</a></li> <li>Silence brew during workflows by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1210">abhinavsingh/proxy.py#1210</a></li> <li>v2.4.4rc1 by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a 
href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1207">abhinavsingh/proxy.py#1207</a></li> <li>Reverse proxy plugin constructor now accepts client connection object by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1235">abhinavsingh/proxy.py#1235</a></li> <li>Pyre type error fixed. by <a href="https://github.com/luca-digrazia"><code>@​luca-digrazia</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1250">abhinavsingh/proxy.py#1250</a></li> <li>Document <code>--cache-by-content-type</code> flag usage by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1251">abhinavsingh/proxy.py#1251</a></li> <li>Add <code>before_routing</code> for Reverse Proxy plugins by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1252">abhinavsingh/proxy.py#1252</a></li> <li>Ability to customize path to openssl using <code>--openssl</code> flag by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1257">abhinavsingh/proxy.py#1257</a></li> <li>Honor ca-file for reverse proxy upstream connections by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1283">abhinavsingh/proxy.py#1283</a></li> <li>Revert "pip prod(deps): bump autopep8 from 1.6.0 to 1.7.0" by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1285">abhinavsingh/proxy.py#1285</a></li> <li>Fix broken build by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a 
href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1301">abhinavsingh/proxy.py#1301</a></li> <li>Log raw bytes for invalid request line by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1313">abhinavsingh/proxy.py#1313</a></li> <li>Add timeout to requests by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1321">abhinavsingh/proxy.py#1321</a></li> <li>Support plugins defined as inner classes by <a href="https://github.com/alexey-pelykh"><code>@​alexey-pelykh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1318">abhinavsingh/proxy.py#1318</a></li> <li>Remove <code>codecov</code> from dependency files by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1328">abhinavsingh/proxy.py#1328</a></li> <li>Support --hostnames by <a href="https://github.com/alexey-pelykh"><code>@​alexey-pelykh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1325">abhinavsingh/proxy.py#1325</a></li> <li>Update project test dependencies to ensure green workflow by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1371">abhinavsingh/proxy.py#1371</a></li> <li>Migrate away from setuptools_scm_git_archive by <a href="https://github.com/tjni"><code>@​tjni</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1345">abhinavsingh/proxy.py#1345</a></li> <li>fix: Bypass proxy authentication with HTTP/1.0 requests <a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1267">#1267</a> by <a href="https://github.com/dongfangtianyu"><code>@​dongfangtianyu</code></a> in <a 
href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1342">abhinavsingh/proxy.py#1342</a></li> <li>Python 3.11 support by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1384">abhinavsingh/proxy.py#1384</a></li> <li>Wait until buffer flush by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1385">abhinavsingh/proxy.py#1385</a></li> <li>Update benchmark results by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1386">abhinavsingh/proxy.py#1386</a></li> <li>Ability to override <code>--data-dir</code> for scenarios when <code>proxy.py</code> is running as a user with no home directory by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1389">abhinavsingh/proxy.py#1389</a></li> <li>Add <code>proxy.http.client</code> utility and base SSH classes by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1395">abhinavsingh/proxy.py#1395</a></li> <li>Catch <code>KeyError</code> within Threadless executors by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1396">abhinavsingh/proxy.py#1396</a></li> <li>Reverse proxy ability to return Url, memoryview or TcpServerConnection object by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1397">abhinavsingh/proxy.py#1397</a></li> <li>SSH handler/listener plugins by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a 
href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1398">abhinavsingh/proxy.py#1398</a></li> <li>Fix support for multiple ephemeral ports by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1399">abhinavsingh/proxy.py#1399</a></li> <li>Fix ssh tunnel use case by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1400">abhinavsingh/proxy.py#1400</a></li> <li><code>compress</code> option for <code>serve_static_file</code> utility method by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1405">abhinavsingh/proxy.py#1405</a></li> <li>Teardown on unhandled exceptions by work by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1406">abhinavsingh/proxy.py#1406</a></li> <li>Grout: ngrok Alternative by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1407">abhinavsingh/proxy.py#1407</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/luca-digrazia"><code>@​luca-digrazia</code></a> made their first contribution in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1250">abhinavsingh/proxy.py#1250</a></li> <li><a href="https://github.com/alexey-pelykh"><code>@​alexey-pelykh</code></a> made their first contribution in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1318">abhinavsingh/proxy.py#1318</a></li> <li><a href="https://github.com/tjni"><code>@​tjni</code></a> made their first contribution in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1345">abhinavsingh/proxy.py#1345</a></li> <li><a 
href="https://github.com/dongfangtianyu"><code>@​dongfangtianyu</code></a> made their first contribution in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1342">abhinavsingh/proxy.py#1342</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/abhinavsingh/proxy.py/compare/v2.4.3...v2.4.4">https://github.com/abhinavsingh/proxy.py/compare/v2.4.3...v2.4.4</a></p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/367205826df500bf59e1592690f3b0a976a3fe6b"><code>3672058</code></a> Grout: ngrok Alternative (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1407">#1407</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/e713752e49f8bc5fe9a3777a54f2f3e74161d773"><code>e713752</code></a> Teardown on unhandled exceptions by work (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1406">#1406</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/fce40f03a3a85aef83b4638e3ecd624711504501"><code>fce40f0</code></a> <code>compress</code> option for <code>serve_static_file</code> utility method (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1405">#1405</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/8b929f079e5bd888f0c99e1065424e05a9ef06dd"><code>8b929f0</code></a> Fix ssh tunnel use case (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1400">#1400</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/5b0c484e6ad502c47648d0b8f763f612799eb929"><code>5b0c484</code></a> Fix support for multiple ephemeral ports (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1399">#1399</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/81aa82b9a4ed7f04a139679dba3ab77c7a040218"><code>81aa82b</code></a> SSH handler/listener plugins (<a 
href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1398">#1398</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/67706ac1ef600ec4070b3494fc84e181cfd52af6"><code>67706ac</code></a> Reverse proxy ability to return Url, memoryview or TcpServerConnection object...</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/380e0cc3ce58f8e0b65f94908e707426fed9f54b"><code>380e0cc</code></a> Catch <code>KeyError</code> within Threadless executors (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1396">#1396</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/78248474bca86b3691df658694ee4d383466ff26"><code>7824847</code></a> Add <code>proxy.http.client</code> utility and base SSH classes (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1395">#1395</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/c24862ba85c9fec926ae4e6e72ab66ff53c1180a"><code>c24862b</code></a> Ability to override <code>--data-dir</code> for scenarios when <code>proxy.py</code> is running as ...</li> <li>Additional commits viewable in <a href="https://github.com/abhinavsingh/proxy.py/compare/v2.4.4rc5...v2.4.4">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=proxy-py&package-manager=pip&previous-version=2.4.4rc5&new-version=2.4.4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index d2c04cf6252..8e9bc813524 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -142,7 +142,7 @@ pluggy==1.5.0 # via pytest pre-commit==3.5.0 # via -r requirements/lint.in 
-proxy-py==2.4.4rc5 +proxy-py==2.4.4 # via -r requirements/test.in pycares==4.3.0 # via aiodns diff --git a/requirements/dev.txt b/requirements/dev.txt index fc451635c50..6583c2e4a6f 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -137,7 +137,7 @@ pluggy==1.5.0 # via pytest pre-commit==3.5.0 # via -r requirements/lint.in -proxy-py==2.4.4rc5 +proxy-py==2.4.4 # via -r requirements/test.in pycares==4.3.0 # via aiodns diff --git a/requirements/test.txt b/requirements/test.txt index c7e4d6b26f5..3974bd427cf 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -67,7 +67,7 @@ packaging==23.1 # pytest pluggy==1.5.0 # via pytest -proxy-py==2.4.4rc5 +proxy-py==2.4.4 # via -r requirements/test.in pycares==4.3.0 # via aiodns From 70e8a9f26c878f33d4ad19e4f54bc97c4f0ab501 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 10 May 2024 17:20:21 +0000 Subject: [PATCH 0206/1511] Bump coverage from 7.5.0 to 7.5.1 (#8400) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.5.0 to 7.5.1. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst">coverage's changelog</a>.</em></p> <blockquote> <h2>Version 7.5.1 — 2024-05-04</h2> <ul> <li> <p>Fix: a pragma comment on the continuation lines of a multi-line statement now excludes the statement and its body, the same as if the pragma is on the first line. This closes <code>issue 754</code><em>. The fix was contributed by <code>Daniel Diniz <pull 1773_></code></em>.</p> </li> <li> <p>Fix: very complex source files like <code>this one <resolvent_lookup_></code>_ could cause a maximum recursion error when creating an HTML report. 
This is now fixed, closing <code>issue 1774</code>_.</p> </li> <li> <p>HTML report improvements:</p> <ul> <li> <p>Support files (JavaScript and CSS) referenced by the HTML report now have hashes added to their names to ensure updated files are used instead of stale cached copies.</p> </li> <li> <p>Missing branch coverage explanations that said "the condition was never false" now read "the condition was always true" because it's easier to understand.</p> </li> <li> <p>Column sort order is remembered better as you move between the index pages, fixing <code>issue 1766</code><em>. Thanks, <code>Daniel Diniz <pull 1768_></code></em>.</p> </li> </ul> </li> </ul> <p>.. _resolvent_lookup: <a href="https://github.com/sympy/sympy/blob/130950f3e6b3f97fcc17f4599ac08f70fdd2e9d4/sympy/polys/numberfields/resolvent_lookup.py">https://github.com/sympy/sympy/blob/130950f3e6b3f97fcc17f4599ac08f70fdd2e9d4/sympy/polys/numberfields/resolvent_lookup.py</a> .. _issue 754: <a href="https://redirect.github.com/nedbat/coveragepy/issues/754">nedbat/coveragepy#754</a> .. _issue 1766: <a href="https://redirect.github.com/nedbat/coveragepy/issues/1766">nedbat/coveragepy#1766</a> .. _pull 1768: <a href="https://redirect.github.com/nedbat/coveragepy/pull/1768">nedbat/coveragepy#1768</a> .. _pull 1773: <a href="https://redirect.github.com/nedbat/coveragepy/pull/1773">nedbat/coveragepy#1773</a> .. _issue 1774: <a href="https://redirect.github.com/nedbat/coveragepy/issues/1774">nedbat/coveragepy#1774</a></p> <p>.. 
_changes_7-5-0:</p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/nedbat/coveragepy/commit/be938eaa195a52dd89f3a13aa68bb80de3425b11"><code>be938ea</code></a> docs: sample HTML for 7.5.1</li> <li><a href="https://github.com/nedbat/coveragepy/commit/02c66d76912259c8a03282d153007a569f05f495"><code>02c66d7</code></a> docs: prep for 7.5.1</li> <li><a href="https://github.com/nedbat/coveragepy/commit/5fa9f67853a7112e08185ed416de7907b8e524da"><code>5fa9f67</code></a> fix: avoid max recursion errors in ast code. <a href="https://redirect.github.com/nedbat/coveragepy/issues/1774">#1774</a></li> <li><a href="https://github.com/nedbat/coveragepy/commit/34af01dfc877e7f35b49f5ec402c39437ae7c1f1"><code>34af01d</code></a> build: easier to run metasmoke on desired python version</li> <li><a href="https://github.com/nedbat/coveragepy/commit/6b0cac5843d0cbfb68391f36397e6759e13e297d"><code>6b0cac5</code></a> perf: cache _human_key to speed html report by about 10%</li> <li><a href="https://github.com/nedbat/coveragepy/commit/fdc0ee896825334bfa13735d94b2da78da72f76b"><code>fdc0ee8</code></a> docs: oops, typo</li> <li><a href="https://github.com/nedbat/coveragepy/commit/60e6cb4267c1f25690e37198e1e55130ae94b4e1"><code>60e6cb4</code></a> docs: changelog for <a href="https://redirect.github.com/nedbat/coveragepy/issues/754">#754</a> and <a href="https://redirect.github.com/nedbat/coveragepy/issues/1773">#1773</a></li> <li><a href="https://github.com/nedbat/coveragepy/commit/277c8c43c9ee59c941ec8fd7da8ea2a49049d1e0"><code>277c8c4</code></a> fix: '# pragma: no branch' in multiline if statements. 
<a href="https://redirect.github.com/nedbat/coveragepy/issues/754">#754</a> (<a href="https://redirect.github.com/nedbat/coveragepy/issues/1773">#1773</a>)</li> <li><a href="https://github.com/nedbat/coveragepy/commit/34d3eb76b7833268019ac25e5265c2c1b192abcb"><code>34d3eb7</code></a> docs: update changelog for <a href="https://redirect.github.com/nedbat/coveragepy/issues/1786">#1786</a>. Thanks, Daniel Diniz</li> <li><a href="https://github.com/nedbat/coveragepy/commit/2bb5ef22787185fd90a525e8e26bbe360a3492f1"><code>2bb5ef2</code></a> fix(html): make HTML column sorting consistent across index pages (fix <a href="https://redirect.github.com/nedbat/coveragepy/issues/1766">#1766</a>)...</li> <li>Additional commits viewable in <a href="https://github.com/nedbat/coveragepy/compare/7.5.0...7.5.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=coverage&package-manager=pip&previous-version=7.5.0&new-version=7.5.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 8e9bc813524..f659e99701e 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -56,7 +56,7 @@ click==8.0.3 # towncrier # typer # wait-for-it -coverage==7.5.0 +coverage==7.5.1 
# via # -r requirements/test.in # pytest-cov diff --git a/requirements/dev.txt b/requirements/dev.txt index 6583c2e4a6f..76b9ef91f5e 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -54,7 +54,7 @@ click==8.1.6 # towncrier # typer # wait-for-it -coverage==7.5.0 +coverage==7.5.1 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/test.txt b/requirements/test.txt index 3974bd427cf..58c62233bcf 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -30,7 +30,7 @@ click==8.1.6 # via # typer # wait-for-it -coverage==7.5.0 +coverage==7.5.1 # via # -r requirements/test.in # pytest-cov From 64f02b17493b935acc70f5bf3387269bb1f8db8c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 10 May 2024 23:59:47 +0100 Subject: [PATCH 0207/1511] [PR #8408/17c39719 backport][3.10] Remove use of `typing.ByteString` (#8409) **This is a backport of PR #8408 as merged into master (17c39719a45a4a75550bb2c619f73ef0bb28dbe3).** ## What do these changes do? This PR removes use of `typing.ByteString` from `aiohttp`. `ByteString` has been deprecated since Python 3.12, and has already been removed on the CPython `main` branch (though that change will not be released until Python 3.14 comes out). That means that `aiohttp` currently installs, but fails to be imported, on Python 3.14: ```pytb import aiohttp env/lib/python3.14/site-packages/aiohttp/__init__.py:6: in <module> from .client import ( env/lib/python3.14/site-packages/aiohttp/client.py:38: in <module> from . import hdrs, http, payload env/lib/python3.14/site-packages/aiohttp/payload.py:10: in <module> from typing import ( E ImportError: cannot import name 'ByteString' from 'typing' (/Users/alexw/dev/cpython/Lib/typing.py) ``` ## Are there changes in behavior for the user? The user will now be able to import `aiohttp` using Python 3.14+ ## Is it a substantial burden for the maintainers to support this? 
no ## Checklist - [x] I think the code is well written - [ ] Unit tests for the changes exist - [ ] Documentation reflects the changes - [ ] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. - [ ] Add a new news fragment into the `CHANGES/` folder * name it `<issue_or_pr_num>.<type>.rst` (e.g. `588.bugfix.rst`) * if you don't have an issue number, change it to the pull request number after creating the PR * `.bugfix`: A bug fix for something the maintainers deemed an improper undesired behavior that got corrected to match pre-agreed expectations. * `.feature`: A new behavior, public APIs. That sort of stuff. * `.deprecation`: A declaration of future API removals and breaking changes in behavior. * `.breaking`: When something public is removed in a breaking way. Could be deprecated in an earlier release. * `.doc`: Notable updates to the documentation structure or build process. * `.packaging`: Notes for downstreams about unobvious side effects and tooling. Changes in the test invocation considerations and runtime assumptions. * `.contrib`: Stuff that affects the contributor experience. e.g. Running tests, building the docs, setting up the development environment. * `.misc`: Changes that are hard to assign to any of the above categories. * Make sure to use full sentences with correct case and punctuation, for example: ```rst Fixed issue with non-ascii contents in doctest text files -- by :user:`contributor-gh-handle`. ``` Use the past tense or the present tense in a non-imperative mood, referring to what's changed compared to the last released version of this project. 
Co-authored-by: Alex Waygood <Alex.Waygood@Gmail.com> --- aiohttp/payload.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/aiohttp/payload.py b/aiohttp/payload.py index 6593b05c6f7..5271393612a 100644 --- a/aiohttp/payload.py +++ b/aiohttp/payload.py @@ -11,7 +11,6 @@ IO, TYPE_CHECKING, Any, - ByteString, Dict, Final, Iterable, @@ -217,7 +216,9 @@ async def write(self, writer: AbstractStreamWriter) -> None: class BytesPayload(Payload): - def __init__(self, value: ByteString, *args: Any, **kwargs: Any) -> None: + def __init__( + self, value: Union[bytes, bytearray, memoryview], *args: Any, **kwargs: Any + ) -> None: if not isinstance(value, (bytes, bytearray, memoryview)): raise TypeError(f"value argument must be byte-ish, not {type(value)!r}") From 067df3bf9843abd22ae242e34c6746248c209483 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 13 May 2024 11:01:06 +0000 Subject: [PATCH 0208/1511] Bump pypa/cibuildwheel from 2.17.0 to 2.18.0 (#8411) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.17.0 to 2.18.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pypa/cibuildwheel/releases">pypa/cibuildwheel's releases</a>.</em></p> <blockquote> <h2>v2.18.0</h2> <ul> <li>✨ Adds CPython 3.13 support, under the prerelease flag <a href="https://cibuildwheel.pypa.io/en/stable/options/#prerelease-pythons">CIBW_PRERELEASE_PYTHONS</a>. This version of cibuildwheel uses 3.13.0b1. Free-threading mode is not available yet, waiting on official binaries (planned for beta 2) and pip support. <em>While CPython is in beta, the ABI can change, so your wheels might not be compatible with the final release. 
For this reason, we don't recommend distributing wheels until RC1, at which point 3.13 will be available in cibuildwheel without the flag.</em> (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1816">#1816</a>)</li> <li>✨ Musllinux now defaults to <code>musllinux_1_2</code>. You can set the older <code>musllinux_1_1</code> via config if needed. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1817">#1817</a>)</li> <li>🛠 No longer pre-seed setuptools/wheel in virtual environments (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1819">#1819</a>)</li> <li>🛠 Respect the constraints file when building with pip, matching build (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1818">#1818</a>)</li> <li>🛠 Use uv to compile our pinned dependencies, 10x faster and doesn't require special setup (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1778">#1778</a>)</li> <li>🐛 Fix an issue with the schema (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1788">#1788</a>)</li> <li>📚 Document the new delocate error checking macOS versions (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1766">#1766</a>)</li> <li>📚 Document Rust builds (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1816">#1816</a>)</li> <li>📚 Speed up our readthedocs builds with uv, 26 seconds -> 6 seconds to install dependencies (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1816">#1816</a>)</li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md">pypa/cibuildwheel's changelog</a>.</em></p> <blockquote> <h3>v2.18.0</h3> <p><em>12 May 2024</em></p> <ul> <li> <p>✨ Adds CPython 3.13 support, under the prerelease flag <a href="https://cibuildwheel.pypa.io/en/stable/options/#prerelease-pythons">CIBW_PRERELEASE_PYTHONS</a>. This version of cibuildwheel uses 3.13.0b1. 
Free-threading mode is not available yet, waiting on official binaries (planned for beta 2) and pip support.</p> <p><em>While CPython is in beta, the ABI can change, so your wheels might not be compatible with the final release. For this reason, we don't recommend distributing wheels until RC1, at which point 3.13 will be available in cibuildwheel without the flag.</em> (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1816">#1816</a>)</p> </li> <li> <p>✨ Musllinux now defaults to <code>musllinux_1_2</code>. You can set the older <code>manylinux_1_1</code> via config if needed. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1817">#1817</a>)</p> </li> <li> <p>🛠 No longer pre-seed setuptools/wheel in virtual environments (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1819">#1819</a>)</p> </li> <li> <p>🛠 Respect the constraints file when building with pip, matching build (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1818">#1818</a>)</p> </li> <li> <p>🛠 Use uv to compile our pinned dependencies, 10x faster and doesn't require special setup (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1778">#1778</a>)</p> </li> <li> <p>🐛 Fix an issue with the schema (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1788">#1788</a>)</p> </li> <li> <p>📚 Document the new delocate error checking macOS versions (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1766">#1766</a>)</p> </li> <li> <p>📚 Document Rust builds (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1816">#1816</a>)</p> </li> <li> <p>📚 Speed up our readthedocs builds with uv, 26 seconds -> 6 seconds to install dependencies (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1816">#1816</a>)</p> </li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/cibuildwheel/commit/711a3d017d0729f3edde18545fee967f03d65f65"><code>711a3d0</code></a> 
Bump version: v2.18.0</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/3873a1edf81e0a3ae89431bbb07801fd6fe63cf6"><code>3873a1e</code></a> chore: update bump_version script</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/6dd4f15a0ee07ba6f2b7cb51e5a14827e46a1261"><code>6dd4f15</code></a> docs: rust (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1816">#1816</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/44c47424775673b9416a4f1f4f8f6efed5e8dea9"><code>44c4742</code></a> feat: add Python 3.13 beta 1 (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1815">#1815</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/c0217440e839c9fe641b7c841016c4325bf99875"><code>c021744</code></a> feat: move default <code>musllinux</code> build to <code>musllinux_1_2</code> (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1817">#1817</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/cf18014fce969628c72a266396f72ef672e9a8d4"><code>cf18014</code></a> fix: do not pre-seed setuptools / wheel in virtual environment (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1819">#1819</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/3ea0a6c2f0219d97ff8387b87bd2448dcfb7452c"><code>3ea0a6c</code></a> fix: respect constraints when building with pip (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1818">#1818</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/30a0decb47aff80ee8909c918eb89b75ff422643"><code>30a0dec</code></a> [Bot] Update dependencies (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1812">#1812</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/988d512fb3dfde407a9e2da943902d6227e34fa0"><code>988d512</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1813">#1813</a>)</li> <li><a 
href="https://github.com/pypa/cibuildwheel/commit/98d57d9547203fa3b5676ef6960d639989295cf8"><code>98d57d9</code></a> Merge pull request <a href="https://redirect.github.com/pypa/cibuildwheel/issues/1766">#1766</a> from Czaki/delocate_info</li> <li>Additional commits viewable in <a href="https://github.com/pypa/cibuildwheel/compare/v2.17.0...v2.18.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pypa/cibuildwheel&package-manager=github_actions&previous-version=2.17.0&new-version=2.18.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 4c66aff0ec0..1bf829e326a 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -351,7 +351,7 @@ jobs: run: | make cythonize - name: Build wheels - uses: pypa/cibuildwheel@v2.17.0 + uses: pypa/cibuildwheel@v2.18.0 env: CIBW_ARCHS_MACOS: x86_64 arm64 universal2 - uses: actions/upload-artifact@v4 From 5ccbe7702dfc91f50d371550f097f6159036b7a0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 13 May 2024 11:20:26 +0000 Subject: [PATCH 0209/1511] Bump freezegun from 1.5.0 to 1.5.1 (#8413) Bumps [freezegun](https://github.com/spulec/freezegun) from 1.5.0 to 1.5.1. 
<details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/spulec/freezegun/blob/master/CHANGELOG">freezegun's changelog</a>.</em></p> <blockquote> <h2>1.5.1</h2> <ul> <li>Fix the typing of the <code>tick()</code> method, and improve it's behaviour.</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/spulec/freezegun/commit/3f9fac4852447cec9cbd8bcadc5227e7b383fbab"><code>3f9fac4</code></a> Increase version number</li> <li><a href="https://github.com/spulec/freezegun/commit/e0f2c3c71d9f2adfc36376d9601acca76a3a8ee1"><code>e0f2c3c</code></a> CHANGELOG for 1.5.1</li> <li><a href="https://github.com/spulec/freezegun/commit/ea054a3269250a39e5c406f6f9024082641ab937"><code>ea054a3</code></a> Merge pull request <a href="https://redirect.github.com/spulec/freezegun/issues/546">#546</a> from robsdedude/patch-1</li> <li><a href="https://github.com/spulec/freezegun/commit/df263dcec48f43154a5873eb0dff2d4ba94374da"><code>df263dc</code></a> Extend type checking</li> <li><a href="https://github.com/spulec/freezegun/commit/aecc78ad3dfe029ef19003622510b063b3fa6d87"><code>aecc78a</code></a> Keep <code>numbers</code> for runtime type checks, use <code>float</code> for type hints</li> <li><a href="https://github.com/spulec/freezegun/commit/023c7a382fe15ba2f680b43de0e027c460ffc1ed"><code>023c7a3</code></a> Revert runtime type-check to <code>numbers.Real</code></li> <li><a href="https://github.com/spulec/freezegun/commit/be779f4b18dc1227cc68775992f3b46c39b2a739"><code>be779f4</code></a> Add test for manually ticking StepTickTimeFactory</li> <li><a href="https://github.com/spulec/freezegun/commit/d2872d0afd5b5cce3b6b523bf74a60d0b8191e17"><code>d2872d0</code></a> Fix instance checks</li> <li><a href="https://github.com/spulec/freezegun/commit/c9636086130edad9d4fca44368995b612ebc484b"><code>c963608</code></a> Fix <code>tick</code> delta type handling</li> <li>See full diff in <a 
href="https://github.com/spulec/freezegun/compare/1.5.0...1.5.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=freezegun&package-manager=pip&previous-version=1.5.0&new-version=1.5.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index f659e99701e..8e9442eadef 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -74,7 +74,7 @@ exceptiongroup==1.1.2 # via pytest filelock==3.3.2 # via virtualenv -freezegun==1.5.0 +freezegun==1.5.1 # via -r requirements/test.in frozenlist==1.4.1 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 76b9ef91f5e..6b0f7644389 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -70,7 +70,7 @@ exceptiongroup==1.1.2 # via pytest filelock==3.12.2 # via virtualenv -freezegun==1.5.0 +freezegun==1.5.1 # via -r requirements/test.in frozenlist==1.4.1 # via diff --git a/requirements/test.txt b/requirements/test.txt index 58c62233bcf..a4cb333d325 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -38,7 +38,7 @@ cryptography==41.0.2 # via trustme exceptiongroup==1.1.2 # via pytest -freezegun==1.5.0 +freezegun==1.5.1 # via -r requirements/test.in frozenlist==1.4.1 
# via From 37f2dd141882c69bf020169bffcea30384c7483e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 20 May 2024 10:35:39 +0000 Subject: [PATCH 0210/1511] Bump pytest from 8.2.0 to 8.2.1 (#8417) Bumps [pytest](https://github.com/pytest-dev/pytest) from 8.2.0 to 8.2.1. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pytest-dev/pytest/releases">pytest's releases</a>.</em></p> <blockquote> <h2>8.2.1</h2> <h1>pytest 8.2.1 (2024-05-19)</h1> <h2>Improvements</h2> <ul> <li><a href="https://redirect.github.com/pytest-dev/pytest/issues/12334">#12334</a>: Support for Python 3.13 (beta1 at the time of writing).</li> </ul> <h2>Bug Fixes</h2> <ul> <li><a href="https://redirect.github.com/pytest-dev/pytest/issues/12120">#12120</a>: Fix [PermissionError]{.title-ref} crashes arising from directories which are not selected on the command-line.</li> <li><a href="https://redirect.github.com/pytest-dev/pytest/issues/12191">#12191</a>: Keyboard interrupts and system exits are now properly handled during the test collection.</li> <li><a href="https://redirect.github.com/pytest-dev/pytest/issues/12300">#12300</a>: Fixed handling of 'Function not implemented' error under squashfuse_ll, which is a different way to say that the mountpoint is read-only.</li> <li><a href="https://redirect.github.com/pytest-dev/pytest/issues/12308">#12308</a>: Fix a regression in pytest 8.2.0 where the permissions of automatically-created <code>.pytest_cache</code> directories became <code>rwx------</code> instead of the expected <code>rwxr-xr-x</code>.</li> </ul> <h2>Trivial/Internal Changes</h2> <ul> <li><a href="https://redirect.github.com/pytest-dev/pytest/issues/12333">#12333</a>: pytest releases are now attested using the recent <a href="https://github.blog/2024-05-02-introducing-artifact-attestations-now-in-public-beta/">Artifact Attestation</a> support from GitHub, allowing users to verify the 
provenance of pytest's sdist and wheel artifacts.</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pytest-dev/pytest/commit/66ff8dffdf9eee9b3dd6686de34542c49ff80dcd"><code>66ff8df</code></a> Prepare release version 8.2.1</li> <li><a href="https://github.com/pytest-dev/pytest/commit/3ffcfd122cf4674ac45f6233d9b50be6c49abeea"><code>3ffcfd1</code></a> Merge pull request <a href="https://redirect.github.com/pytest-dev/pytest/issues/12340">#12340</a> from pytest-dev/backport-12334-to-8.2.x</li> <li><a href="https://github.com/pytest-dev/pytest/commit/0b28313b46a04de08bddc18896b3e61312a0c5b3"><code>0b28313</code></a> [8.2.x] Add Python 3.13 (beta) support</li> <li><a href="https://github.com/pytest-dev/pytest/commit/f3dd93ad8d62eb0a260d3090f31be82aafbcff13"><code>f3dd93a</code></a> [8.2.x] Attest package provenance (<a href="https://redirect.github.com/pytest-dev/pytest/issues/12335">#12335</a>)</li> <li><a href="https://github.com/pytest-dev/pytest/commit/bb5a1257b0aafe5932377fa8e9fd92ab39418ac7"><code>bb5a125</code></a> [8.2.x] Spelling (<a href="https://redirect.github.com/pytest-dev/pytest/issues/12331">#12331</a>)</li> <li><a href="https://github.com/pytest-dev/pytest/commit/f179bf252fe2c1d0afce64b4b4bab4449e366e84"><code>f179bf2</code></a> Merge pull request <a href="https://redirect.github.com/pytest-dev/pytest/issues/12327">#12327</a> from pytest-dev/backport-12325-to-8.2.x</li> <li><a href="https://github.com/pytest-dev/pytest/commit/2b671b5f9208650e8e42e07782d95477cc41f42a"><code>2b671b5</code></a> [8.2.x] cacheprovider: fix <code>.pytest_cache</code> not being world-readable</li> <li><a href="https://github.com/pytest-dev/pytest/commit/65ab7cb96c95f83e922f21bb4a8a44eda2b79707"><code>65ab7cb</code></a> Merge pull request <a href="https://redirect.github.com/pytest-dev/pytest/issues/12324">#12324</a> from pytest-dev/backport-12320-to-8.2.x</li> <li><a 
href="https://github.com/pytest-dev/pytest/commit/4d5fb7d71ccc069e2f882bee0e4253eaf484d2a9"><code>4d5fb7d</code></a> Merge pull request <a href="https://redirect.github.com/pytest-dev/pytest/issues/12319">#12319</a> from pytest-dev/backport-12311-to-8.2.x</li> <li><a href="https://github.com/pytest-dev/pytest/commit/cbe5996cc684b00397494d9590f3179de232c3ee"><code>cbe5996</code></a> [8.2.x] changelog: document unittest 8.2 change as breaking</li> <li>Additional commits viewable in <a href="https://github.com/pytest-dev/pytest/compare/8.2.0...8.2.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pytest&package-manager=pip&previous-version=8.2.0&new-version=8.2.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 8e9442eadef..76c0e1697e0 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -166,7 +166,7 @@ pyproject-hooks==1.0.0 # via # build # pip-tools -pytest==8.2.0 +pytest==8.2.1 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 6b0f7644389..20eb3a9d478 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -157,7 +157,7 @@ pyproject-hooks==1.0.0 # via # build # pip-tools -pytest==8.2.0 +pytest==8.2.1 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 17f71a6b917..fe2e4306772 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -64,7 +64,7 @@ pydantic-core==2.18.2 # via pydantic pygments==2.17.2 # via rich -pytest==8.2.0 +pytest==8.2.1 # via -r requirements/lint.in python-on-whales==0.71.0 # via -r 
requirements/lint.in diff --git a/requirements/test.txt b/requirements/test.txt index a4cb333d325..9c4b62e1a3d 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -77,7 +77,7 @@ pydantic==2.2.0 # via python-on-whales pydantic-core==2.6.0 # via pydantic -pytest==8.2.0 +pytest==8.2.1 # via # -r requirements/test.in # pytest-cov From 4e714c787a03f1f5856b3c8695b5dd1434fe1539 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 21 May 2024 11:21:27 +0000 Subject: [PATCH 0211/1511] Bump pypa/cibuildwheel from 2.18.0 to 2.18.1 (#8422) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.18.0 to 2.18.1. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pypa/cibuildwheel/releases">pypa/cibuildwheel's releases</a>.</em></p> <blockquote> <h2>v2.18.1</h2> <ul> <li>🌟 Add free-threaded Linux and Windows builds for 3.13. New identifiers <code>cp313t-*</code>, new option <code>CIBW_FREE_THREADED_SUPPORT</code>/<code>tool.cibuildwheel.free-threaded-support</code> required to opt-in. <a href="https://cibuildwheel.pypa.io/en/stable/options/#free-threaded-support">See the docs</a> for more information. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1831">#1831</a>)</li> <li>✨ The <code>container-engine</code> is now a build (non-global) option. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1792">#1792</a>)</li> <li>🛠 The build backend for cibuildwheel is now hatchling. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1297">#1297</a>)</li> <li>🛠 Significant improvements and modernization to our noxfile. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1823">#1823</a>)</li> <li>🛠 Use pylint's new GitHub Actions reporter instead of a custom matcher. 
(<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1823">#1823</a>)</li> <li>🛠 Unpin virtualenv updates for Python 3.7+ (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1830">#1830</a>)</li> <li>🐛 Fix running linux tests from Windows or macOS ARM. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1788">#1788</a>)</li> <li>📚 Fix our documentation build. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1821">#1821</a>)</li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md">pypa/cibuildwheel's changelog</a>.</em></p> <blockquote> <h3>v2.18.1</h3> <ul> <li>🌟 Add free-threaded Linux and Windows builds for 3.13. New identifiers <code>cp313t-*</code>, new option <code>CIBW_FREE_THREADED_SUPPORT</code>/<code>tool.cibuildwheel.free-threaded-support</code> required to opt-in. <a href="https://cibuildwheel.pypa.io/en/stable/options/#free-threaded-support">See the docs</a> for more information. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1831">#1831</a>)</li> <li>✨ The <code>container-engine</code> is now a build (non-global) option. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1792">#1792</a>)</li> <li>🛠 The build backend for cibuildwheel is now hatchling. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1297">#1297</a>)</li> <li>🛠 Significant improvements and modernization to our noxfile. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1823">#1823</a>)</li> <li>🛠 Use pylint's new GitHub Actions reporter instead of a custom matcher. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1823">#1823</a>)</li> <li>🛠 Unpin virtualenv updates for Python 3.7+ (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1830">#1830</a>)</li> <li>🐛 Fix running linux tests from Windows or macOS ARM. 
(<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1788">#1788</a>)</li> <li>📚 Fix our documentation build. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1821">#1821</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/cibuildwheel/commit/ba8be0d98853f5744f24e7f902c8adef7ae2e7f3"><code>ba8be0d</code></a> Bump version: v2.18.1</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/90dd47640f7210d60b436105969d121393fa1e45"><code>90dd476</code></a> doc: add documentation for the <code>CIBW_FREE_THREADED_SUPPORT</code> option</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/b61324914b688af015b30ab2e00390e6de47e503"><code>b613249</code></a> chore: pass a PythonConfiguration to install_cpython</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/3992d5719c52159bd5afe5daf1fcb603f6bb8027"><code>3992d57</code></a> feat: add option to opt-in free-threaded builds</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/345467c6cfff27a5576ac80755eecaa9316b196d"><code>345467c</code></a> feat: add support for free-threaded (no-gil) Python 3.13</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/791e41cfece162323465328775c53443faf6390f"><code>791e41c</code></a> [Bot] Update dependencies (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1832">#1832</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/9d5f5e60b85cdf68c627f4531d3b9ab033378e2b"><code>9d5f5e6</code></a> Make <code>container-engine</code> a build (non-global) option (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1792">#1792</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/1b354cfa6382745855f4c47b84a151302f56cba8"><code>1b354cf</code></a> chore: move to hatchling (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1297">#1297</a>)</li> <li><a 
href="https://github.com/pypa/cibuildwheel/commit/d1a4c9c07436ecbd2132842167071c25cf5295ca"><code>d1a4c9c</code></a> chore: un-pin virtualenv update (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1830">#1830</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/78bca57cb40bb6ccf69eb0c7763906d010920e48"><code>78bca57</code></a> fix(tests): linux tests from macOS/Windows arm64 (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1829">#1829</a>)</li> <li>Additional commits viewable in <a href="https://github.com/pypa/cibuildwheel/compare/v2.18.0...v2.18.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pypa/cibuildwheel&package-manager=github_actions&previous-version=2.18.0&new-version=2.18.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 1bf829e326a..dad28300e29 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -351,7 +351,7 @@ jobs: run: | make cythonize - name: Build wheels - uses: pypa/cibuildwheel@v2.18.0 + uses: pypa/cibuildwheel@v2.18.1 env: CIBW_ARCHS_MACOS: x86_64 arm64 universal2 - uses: actions/upload-artifact@v4 From f8daef6cb7e959239c4df08e2828b17d568ba4c4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 May 2024 10:45:05 +0000 Subject: [PATCH 0212/1511] Bump coverage from 7.5.1 to 7.5.2 (#8424) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.5.1 to 7.5.2. 
<details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst">coverage's changelog</a>.</em></p> <blockquote> <h2>Version 7.5.2 — 2024-05-24</h2> <ul> <li> <p>Fix: nested matches of exclude patterns could exclude too much code, as reported in <code>issue 1779</code>_. This is now fixed.</p> </li> <li> <p>Changed: previously, coverage.py would consider a module docstring to be an executable statement if it appeared after line 1 in the file, but not executable if it was the first line. Now module docstrings are never counted as executable statements. This can change coverage.py's count of the number of statements in a file, which can slightly change the coverage percentage reported.</p> </li> <li> <p>In the HTML report, the filter term and "hide covered" checkbox settings are remembered between viewings, thanks to <code>Daniel Diniz <pull 1776_></code>_.</p> </li> <li> <p>Python 3.13.0b1 is supported.</p> </li> <li> <p>Fix: parsing error handling is improved to ensure bizarre source files are handled gracefully, and to unblock oss-fuzz fuzzing, thanks to <code>Liam DeVoe <pull 1788_></code><em>. Closes <code>issue 1787</code></em>.</p> </li> </ul> <p>.. _pull 1776: <a href="https://redirect.github.com/nedbat/coveragepy/pull/1776">nedbat/coveragepy#1776</a> .. _issue 1779: <a href="https://redirect.github.com/nedbat/coveragepy/issues/1779">nedbat/coveragepy#1779</a> .. _issue 1787: <a href="https://redirect.github.com/nedbat/coveragepy/issues/1787">nedbat/coveragepy#1787</a> .. _pull 1788: <a href="https://redirect.github.com/nedbat/coveragepy/pull/1788">nedbat/coveragepy#1788</a></p> <p>.. 
_changes_7-5-1:</p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/nedbat/coveragepy/commit/242adea66966b941d2e62bd261652b14f0c88e95"><code>242adea</code></a> build: don't claim pre-alpha-1 in classifiers</li> <li><a href="https://github.com/nedbat/coveragepy/commit/7f336228d78d8338239d4da4d4de3ade0abd0901"><code>7f33622</code></a> docs: sample HTML for 7.5.2</li> <li><a href="https://github.com/nedbat/coveragepy/commit/946fa3a2a6bda18b629832f1a3aa62649f45554c"><code>946fa3a</code></a> docs: prep for 7.5.2</li> <li><a href="https://github.com/nedbat/coveragepy/commit/535ddc3a77c256e163e13c574f91c269bb5610d7"><code>535ddc3</code></a> build: pylint can run in parallel</li> <li><a href="https://github.com/nedbat/coveragepy/commit/60a5d65eade29adbbb16f22a333dab1ca2e99993"><code>60a5d65</code></a> docs: explain partial coverage reports on generator expressions (<a href="https://redirect.github.com/nedbat/coveragepy/issues/1789">#1789</a>)</li> <li><a href="https://github.com/nedbat/coveragepy/commit/07000185188b0090478ca75843f3076bc02ccb0a"><code>0700018</code></a> docs: changelog for <a href="https://redirect.github.com/nedbat/coveragepy/issues/1788">#1788</a> <a href="https://redirect.github.com/nedbat/coveragepy/issues/1787">#1787</a>. 
Thanks Liam DeVoe</li> <li><a href="https://github.com/nedbat/coveragepy/commit/364282ea7db7e9515299fe8f9cc5ad9523ea1820"><code>364282e</code></a> fix: catch TokenError on parse (<a href="https://redirect.github.com/nedbat/coveragepy/issues/1788">#1788</a>)</li> <li><a href="https://github.com/nedbat/coveragepy/commit/81089de4bfc3033b976316ac96012681ae2ec36f"><code>81089de</code></a> fix: module docstrings are never counted as statements</li> <li><a href="https://github.com/nedbat/coveragepy/commit/96bd93071f9e6ce5e7fd5e5c41f234d302c8611b"><code>96bd930</code></a> fix: rework exclusion parsing to fix <a href="https://redirect.github.com/nedbat/coveragepy/issues/1779">#1779</a></li> <li><a href="https://github.com/nedbat/coveragepy/commit/75f9d51ca564bebeef4d0f143e7d966360d68c79"><code>75f9d51</code></a> test(build): when running metacov, create json report</li> <li>Additional commits viewable in <a href="https://github.com/nedbat/coveragepy/compare/7.5.1...7.5.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=coverage&package-manager=pip&previous-version=7.5.1&new-version=7.5.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 76c0e1697e0..536e7be4e5a 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -56,7 +56,7 @@ click==8.0.3 # towncrier # typer # wait-for-it -coverage==7.5.1 +coverage==7.5.2 
# via # -r requirements/test.in # pytest-cov diff --git a/requirements/dev.txt b/requirements/dev.txt index 20eb3a9d478..626261a0bc2 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -54,7 +54,7 @@ click==8.1.6 # towncrier # typer # wait-for-it -coverage==7.5.1 +coverage==7.5.2 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/test.txt b/requirements/test.txt index 9c4b62e1a3d..55d738b5379 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -30,7 +30,7 @@ click==8.1.6 # via # typer # wait-for-it -coverage==7.5.1 +coverage==7.5.2 # via # -r requirements/test.in # pytest-cov From 25e3e41ad0012381f214886ff437b924a4fda2f2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 29 May 2024 10:50:54 +0000 Subject: [PATCH 0213/1511] Bump coverage from 7.5.2 to 7.5.3 (#8428) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.5.2 to 7.5.3. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst">coverage's changelog</a>.</em></p> <blockquote> <h2>Version 7.5.3 — 2024-05-28</h2> <ul> <li> <p>Performance improvements for combining data files, especially when measuring line coverage. A few different quadratic behaviors were eliminated. In one extreme case of combining 700+ data files, the time dropped from more than three hours to seven minutes. Thanks for Kraken Tech for funding the fix.</p> </li> <li> <p>Performance improvements for generating HTML reports, with a side benefit of reducing memory use, closing <code>issue 1791</code>_. Thanks to Daniel Diniz for helping to diagnose the problem.</p> </li> </ul> <p>.. _issue 1791: <a href="https://redirect.github.com/nedbat/coveragepy/issues/1791">nedbat/coveragepy#1791</a></p> <p>.. 
_changes_7-5-2:</p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/nedbat/coveragepy/commit/f310d7e65d6d153f7e5abddccf63582ce35b66f3"><code>f310d7e</code></a> docs: sample HTML for 7.5.3</li> <li><a href="https://github.com/nedbat/coveragepy/commit/a51d52fe36b20f83de21a5a3b8a568308e08e9b1"><code>a51d52f</code></a> docs: prep for 7.5.3</li> <li><a href="https://github.com/nedbat/coveragepy/commit/b666f3af275aa499e3e9811bfb09a081dded7513"><code>b666f3a</code></a> perf: it's faster in all versions if we don't cache tokenize <a href="https://redirect.github.com/nedbat/coveragepy/issues/1791">#1791</a></li> <li><a href="https://github.com/nedbat/coveragepy/commit/a2b49290af1c5976bd0deb4ca5f7bbc2e858b7fe"><code>a2b4929</code></a> docs: changelog entry for <code>combine</code> performance improvements</li> <li><a href="https://github.com/nedbat/coveragepy/commit/b9aff50de98eb9789b6e592a9e8e01adf287fd16"><code>b9aff50</code></a> perf: don't read full line_bits table each time</li> <li><a href="https://github.com/nedbat/coveragepy/commit/c45ebac1b83656bfdd92d6675f08fa33627785e6"><code>c45ebac</code></a> perf: cache alias mapping</li> <li><a href="https://github.com/nedbat/coveragepy/commit/390cb97233c6ec8861de44e030fefc34af1db886"><code>390cb97</code></a> perf: avoid quadratic behavior when combining line coverage</li> <li><a href="https://github.com/nedbat/coveragepy/commit/d3caf539a799ca39dbbf9eba75e68e84ecef1a97"><code>d3caf53</code></a> docs(build): tweaks to howto</li> <li><a href="https://github.com/nedbat/coveragepy/commit/909e887b693befb4b84cb3cbcdd4c93f04294564"><code>909e887</code></a> build: bump version</li> <li>See full diff in <a href="https://github.com/nedbat/coveragepy/compare/7.5.2...7.5.3">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=coverage&package-manager=pip&previous-version=7.5.2&new-version=7.5.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 536e7be4e5a..7f3162c44fb 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -56,7 +56,7 @@ click==8.0.3 # towncrier # typer # wait-for-it -coverage==7.5.2 +coverage==7.5.3 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/dev.txt b/requirements/dev.txt index 626261a0bc2..76aab13223d 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -54,7 +54,7 @@ click==8.1.6 # towncrier # typer # wait-for-it -coverage==7.5.2 +coverage==7.5.3 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/test.txt b/requirements/test.txt index 55d738b5379..3fd220308af 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -30,7 +30,7 @@ click==8.1.6 # via # typer # wait-for-it -coverage==7.5.2 +coverage==7.5.3 # via # -r requirements/test.in # pytest-cov From 4f27ab9f5e31aa7afc9a45ff9215d76c1e017737 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 5 Jun 2024 10:39:32 +0000 Subject: [PATCH 0214/1511] Bump pytest from 8.2.1 to 8.2.2 (#8440) Bumps [pytest](https://github.com/pytest-dev/pytest) from 8.2.1 to 8.2.2. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pytest-dev/pytest/releases">pytest's releases</a>.</em></p> <blockquote> <h2>8.2.2</h2> <h1>pytest 8.2.2 (2024-06-04)</h1> <h2>Bug Fixes</h2> <ul> <li><a href="https://redirect.github.com/pytest-dev/pytest/issues/12355">#12355</a>: Fix possible catastrophic performance slowdown on a certain parametrization pattern involving many higher-scoped parameters.</li> <li><a href="https://redirect.github.com/pytest-dev/pytest/issues/12367">#12367</a>: Fix a regression in pytest 8.2.0 where unittest class instances (a fresh one is created for each test) were not released promptly on test teardown but only on session teardown.</li> <li><a href="https://redirect.github.com/pytest-dev/pytest/issues/12381">#12381</a>: Fix possible "Directory not empty" crashes arising from concurent cache dir (<code>.pytest_cache</code>) creation. 
Regressed in pytest 8.2.0.</li> </ul> <h2>Improved Documentation</h2> <ul> <li><a href="https://redirect.github.com/pytest-dev/pytest/issues/12290">#12290</a>: Updated Sphinx theme to use Furo instead of Flask, enabling Dark mode theme.</li> <li><a href="https://redirect.github.com/pytest-dev/pytest/issues/12356">#12356</a>: Added a subsection to the documentation for debugging flaky tests to mention lack of thread safety in pytest as a possible source of flakyness.</li> <li><a href="https://redirect.github.com/pytest-dev/pytest/issues/12363">#12363</a>: The documentation webpages now links to a canonical version to reduce outdated documentation in search engine results.</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pytest-dev/pytest/commit/329d3712146e69c471be3e30883d54bdde2f76cb"><code>329d371</code></a> Prepare release version 8.2.2</li> <li><a href="https://github.com/pytest-dev/pytest/commit/214d098fcce88940f5ce9353786b3cc8f0bd3938"><code>214d098</code></a> Merge pull request <a href="https://redirect.github.com/pytest-dev/pytest/issues/12414">#12414</a> from bluetech/backport-12409</li> <li><a href="https://github.com/pytest-dev/pytest/commit/153a436bc40c9e89d90d62255ef5a89e9a762dca"><code>153a436</code></a> [8.2.x] fixtures: fix catastrophic performance problem in <code>reorder_items</code></li> <li><a href="https://github.com/pytest-dev/pytest/commit/b41d5a52bbb808780ab310456d71e5ce509fd402"><code>b41d5a5</code></a> Merge pull request <a href="https://redirect.github.com/pytest-dev/pytest/issues/12412">#12412</a> from pytest-dev/backport-12408-to-8.2.x</li> <li><a href="https://github.com/pytest-dev/pytest/commit/9bb73d734ff40f52d7bbebd708b5e3ab1ba20798"><code>9bb73d7</code></a> [8.2.x] cacheprovider: fix "Directory not empty" crash from cache directory c...</li> <li><a href="https://github.com/pytest-dev/pytest/commit/4569a01e3d20d64811d48b0b09539596520ea5a6"><code>4569a01</code></a> [8.2.x] 
doc: Update trainings/events (<a href="https://redirect.github.com/pytest-dev/pytest/issues/12402">#12402</a>)</li> <li><a href="https://github.com/pytest-dev/pytest/commit/1d103e5cdc1cb08f332e61a5b20fb205fa5228e7"><code>1d103e5</code></a> [8.2.x] Clarify pytest_ignore_collect docs (<a href="https://redirect.github.com/pytest-dev/pytest/issues/12386">#12386</a>)</li> <li><a href="https://github.com/pytest-dev/pytest/commit/240a252d34fff26efad5b3a92e62be4c9af94b70"><code>240a252</code></a> [8.2.x] Add html_baseurl to sphinx conf.py (<a href="https://redirect.github.com/pytest-dev/pytest/issues/12372">#12372</a>)</li> <li><a href="https://github.com/pytest-dev/pytest/commit/a5ee3c41268199c2c0af59c33050326b1c4a342e"><code>a5ee3c4</code></a> Merge pull request <a href="https://redirect.github.com/pytest-dev/pytest/issues/12370">#12370</a> from pytest-dev/backport-12368-to-8.2.x</li> <li><a href="https://github.com/pytest-dev/pytest/commit/f7358aec2884720b4de4594ffd0811b46316514c"><code>f7358ae</code></a> [8.2.x] unittest: fix class instances no longer released on test teardown sin...</li> <li>Additional commits viewable in <a href="https://github.com/pytest-dev/pytest/compare/8.2.1...8.2.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pytest&package-manager=pip&previous-version=8.2.1&new-version=8.2.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 7f3162c44fb..4e61fde89af 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -166,7 +166,7 @@ pyproject-hooks==1.0.0 # via # build # pip-tools 
-pytest==8.2.1 +pytest==8.2.2 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 76aab13223d..b07d0d5da0c 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -157,7 +157,7 @@ pyproject-hooks==1.0.0 # via # build # pip-tools -pytest==8.2.1 +pytest==8.2.2 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index fe2e4306772..f581d7b545e 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -64,7 +64,7 @@ pydantic-core==2.18.2 # via pydantic pygments==2.17.2 # via rich -pytest==8.2.1 +pytest==8.2.2 # via -r requirements/lint.in python-on-whales==0.71.0 # via -r requirements/lint.in diff --git a/requirements/test.txt b/requirements/test.txt index 3fd220308af..4bf994164af 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -77,7 +77,7 @@ pydantic==2.2.0 # via python-on-whales pydantic-core==2.6.0 # via pydantic -pytest==8.2.1 +pytest==8.2.2 # via # -r requirements/test.in # pytest-cov From 53983092ec683d1b03db363120ca2cbb515f8d6b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 13 Jun 2024 10:56:41 +0000 Subject: [PATCH 0215/1511] Bump pypa/cibuildwheel from 2.18.1 to 2.19.1 (#8459) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.18.1 to 2.19.1. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pypa/cibuildwheel/releases">pypa/cibuildwheel's releases</a>.</em></p> <blockquote> <h2>Version 2.19.1</h2> <ul> <li>🐛 Don't require setup-python on GHA for Pyodide (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1868">#1868</a>)</li> <li>🐛 Specify full python path for uv (fixes issue in 0.2.10 & 0.2.11) (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1881">#1881</a>)</li> <li>🛠 Update for pip 24.1b2 on CPython 3.13. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1879">#1879</a>)</li> <li>🛠 Fix a warning in our schema generation script. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1866">#1866</a>)</li> <li>🛠 Cleaner output on pytest 8-8.2. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1865">#1865</a>)</li> </ul> <h2>Version 2.19.0</h2> <p>Release post: <a href="https://iscinumpy.dev/post/cibuildwheel-2-19-0/">https://iscinumpy.dev/post/cibuildwheel-2-19-0/</a></p> <ul> <li>🌟 Add a Pyodide platform. Set with <code>--platform pyodide</code> or <code>CIBW_PLATFORM: pyodide</code> on Linux with a host Python 3.12 to build WebAssembly wheels. Not accepted on PyPI currently, but usable directly in a website using Pyodide, for live docs, etc. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1456">#1456</a>, <a href="https://redirect.github.com/pypa/cibuildwheel/issues/1859">#1859</a>)</li> <li>🌟 Add <code>build[uv]</code> backend, which will take a pre-existing uv install (or install <code>cibuildwheel[uv]</code>) and use <code>uv</code> for all environment setup and installs on Python 3.8+. This is significantly faster in most cases. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1856">#1856</a>)</li> <li>✨ Add free-threaded macOS builds and update CPython to 3.13.0b2. 
(<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1854">#1854</a>)</li> <li>🐛 Issue copying a wheel to a non-existent output dir fixed. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1851">#1851</a>, <a href="https://redirect.github.com/pypa/cibuildwheel/issues/1862">#1862</a>)</li> <li>🐛 Better determinism for the test environment seeding. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1835">#1835</a>)</li> <li>🛠 <code>VIRTUAL_ENV</code> variable now set. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1842">#1842</a>)</li> <li>🛠 Remove a pip<21.3 workaround. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1842">#1842</a>)</li> <li>🛠 Error handling was refactored to use exceptions. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1719">#1719</a>)</li> <li>🛠 Hardcoded paths in tests avoided. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1834">#1834</a>)</li> <li>🛠 Single Python tests made more generic. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1835">#1835</a>)</li> <li>🛠 Sped up our ci by splitting up emulation tests. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1839">#1839</a>)</li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md">pypa/cibuildwheel's changelog</a>.</em></p> <blockquote> <h3>v2.19.1</h3> <ul> <li>🐛 Don't require setup-python on GHA for Pyodide (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1868">#1868</a>)</li> <li>🐛 Specify full python path for uv (fixes issue in 0.2.10 & 0.2.11) (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1881">#1881</a>)</li> <li>🛠 Update for pip 24.1b2 on CPython 3.13. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1879">#1879</a>)</li> <li>🛠 Fix a warning in our schema generation script. 
(<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1866">#1866</a>)</li> <li>🛠 Cleaner output on pytest 8-8.2. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1865">#1865</a>)</li> </ul> <h3>v2.19.0</h3> <p>See the <a href="https://iscinumpy.dev/post/cibuildwheel-2-19-0/">release post</a> for more info on new features!</p> <ul> <li>🌟 Add Pyodide platform. Set with <code>--platform pyodide</code> or <code>CIBW_PLATFORM: pyodide</code> on Linux with a host Python 3.12 to build WebAssembly wheels. Not accepted on PyPI currently, but usable directly in a website using Pyodide, for live docs, etc. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1456">#1456</a>, <a href="https://redirect.github.com/pypa/cibuildwheel/issues/1859">#1859</a>)</li> <li>🌟 Add <code>build[uv]</code> backend, which will take a pre-existing uv install (or install <code>cibuildwheel[uv]</code>) and use <code>uv</code> for all environment setup and installs on Python 3.8+. This is significantly faster in most cases. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1856">#1856</a>)</li> <li>✨ Add free-threaded macOS builds and update CPython to 3.13.0b2. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1854">#1854</a>)</li> <li>🐛 Issue copying a wheel to a non-existent output dir fixed. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1851">#1851</a>, <a href="https://redirect.github.com/pypa/cibuildwheel/issues/1862">#1862</a>)</li> <li>🐛 Better determinism for the test environment seeding. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1835">#1835</a>)</li> <li>🛠 <code>VIRTUAL_ENV</code> variable now set. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1842">#1842</a>)</li> <li>🛠 Remove a pip<21.3 workaround. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1842">#1842</a>)</li> <li>🛠 Error handling was refactored to use exceptions. 
(<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1719">#1719</a>)</li> <li>🛠 Hardcoded paths in tests avoided. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1834">#1834</a>)</li> <li>🛠 Single Python tests made more generic. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1835">#1835</a>)</li> <li>🛠 Sped up our ci by splitting up emulation tests. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1839">#1839</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/cibuildwheel/commit/932529cab190fafca8c735a551657247fa8f8eaf"><code>932529c</code></a> Bump version: v2.19.1</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/70fb1c4a1a92c1d7bb1756a4326adeeae31af60b"><code>70fb1c4</code></a> fix: specify full path to non-venv Python (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1881">#1881</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/405a475ac76720d53b519cb9dd8408c03923e94e"><code>405a475</code></a> [Bot] Update dependencies (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1879">#1879</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/78da7bafcc41b0af99c3fe002a5f9f492505a354"><code>78da7ba</code></a> docs: update README.md (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1874">#1874</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/8d86d3122b59914dc19c88eafe22e83bc3bae000"><code>8d86d31</code></a> tests: don't print xfails (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1865">#1865</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/89a5cfe2721c179f4368a2790669e697759b6644"><code>89a5cfe</code></a> DOC: Fix bug with link (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1870">#1870</a>)</li> <li><a 
href="https://github.com/pypa/cibuildwheel/commit/8d5d84e0fc10b6a93d9997e61e1f9a48bf373987"><code>8d5d84e</code></a> fix: accept current Python version if acceptable for Pyodide (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1868">#1868</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/4ada77dea40d564c9f791323f1f90c06ffbbccff"><code>4ada77d</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1869">#1869</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/829441f4a0f9bf77900e3078dbdc3a5c4e76c637"><code>829441f</code></a> fix: warning in generate schema script (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1866">#1866</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/a8d190a111314a07eb5116036c4b3fb26a4e3162"><code>a8d190a</code></a> Bump version: v2.19.0</li> <li>Additional commits viewable in <a href="https://github.com/pypa/cibuildwheel/compare/v2.18.1...v2.19.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pypa/cibuildwheel&package-manager=github_actions&previous-version=2.18.1&new-version=2.19.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index dad28300e29..f17fa805004 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -351,7 +351,7 @@ jobs: run: | make cythonize - name: Build wheels - uses: pypa/cibuildwheel@v2.18.1 + uses: pypa/cibuildwheel@v2.19.1 env: CIBW_ARCHS_MACOS: 
x86_64 arm64 universal2 - uses: actions/upload-artifact@v4 From 1b5d9f6c313e81821b173fdc76edc8c23e0cbbfe Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 13 Jun 2024 19:40:10 +0100 Subject: [PATCH 0216/1511] [PR #8457/db1b9129 backport][3.10] Use async fixture in testing docs (#8461) **This is a backport of PR #8457 as merged into master (db1b9129e5f9e3c3730d81a2ae8de059c3e38732).** Co-authored-by: Sam Bull <git@sambull.org> --- docs/testing.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/testing.rst b/docs/testing.rst index 027ba63a039..c2937b82282 100644 --- a/docs/testing.rst +++ b/docs/testing.rst @@ -111,11 +111,11 @@ app test client:: body='value: {}'.format(request.app[value]).encode('utf-8')) @pytest.fixture - def cli(loop, aiohttp_client): + async def cli(aiohttp_client): app = web.Application() app.router.add_get('/', previous) app.router.add_post('/', previous) - return loop.run_until_complete(aiohttp_client(app)) + return await aiohttp_client(app) async def test_set_value(cli): resp = await cli.post('/', data={'value': 'foo'}) From dc08517161ec2b023a3d1c24493f24f1f1c69b77 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 24 Jun 2024 11:33:25 +0000 Subject: [PATCH 0217/1511] Bump coverage from 7.5.3 to 7.5.4 (#8468) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.5.3 to 7.5.4. 
<details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst">coverage's changelog</a>.</em></p> <blockquote> <h2>Version 7.5.4 — 2024-06-22</h2> <ul> <li> <p>If you attempt to combine statement coverage data with branch coverage data, coverage.py used to fail with the message "Can't combine arc data with line data" or its reverse, "Can't combine line data with arc data." These messages used internal terminology, making it hard for people to understand the problem. They are now changed to mention "branch coverage data" and "statement coverage data."</p> </li> <li> <p>Fixed a minor branch coverage problem with wildcard match/case cases using names or guard clauses.</p> </li> <li> <p>Started testing on 3.13 free-threading (nogil) builds of Python. I'm not claiming full support yet. Closes <code>issue 1799</code>_.</p> </li> </ul> <p>.. _issue 1799: <a href="https://redirect.github.com/nedbat/coveragepy/issues/1799">nedbat/coveragepy#1799</a></p> <p>.. 
_changes_7-5-3:</p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/nedbat/coveragepy/commit/22c09c67ff321835a79e00fa234086b216e3d8d0"><code>22c09c6</code></a> docs: sample HTML for 7.5.4</li> <li><a href="https://github.com/nedbat/coveragepy/commit/9e16381103085fd10bedb290f93a5ae64cbbc586"><code>9e16381</code></a> docs: prep for 7.5.4</li> <li><a href="https://github.com/nedbat/coveragepy/commit/fba9b9e39264661b6c145ffebcff36a1a96c1598"><code>fba9b9e</code></a> docs: link issue 1799 from the changelog</li> <li><a href="https://github.com/nedbat/coveragepy/commit/f124de87bfbd8575a06c6a9eeb332f3375ad2441"><code>f124de8</code></a> build: no longer download kits to upload them</li> <li><a href="https://github.com/nedbat/coveragepy/commit/9516cf6ba29089895dc2cf32e12170605aec55ef"><code>9516cf6</code></a> build: hash-pin all actions</li> <li><a href="https://github.com/nedbat/coveragepy/commit/c6e0985d1a37f9e0d587ed35bdfa66a3f14aeb17"><code>c6e0985</code></a> build: finish up the publish action</li> <li><a href="https://github.com/nedbat/coveragepy/commit/4a49458c61dbadb359cbb6d7a99bb87af1b7ddaf"><code>4a49458</code></a> build: get the latest dist run id for publishing</li> <li><a href="https://github.com/nedbat/coveragepy/commit/fb15efad3ea08c32391a7265bb60b7f3110c3cc1"><code>fb15efa</code></a> build: pin hashes for publishing actions</li> <li><a href="https://github.com/nedbat/coveragepy/commit/c20af957ee1d6af7a17b6c062a2771354164bf3f"><code>c20af95</code></a> build: use the correct item: github.event.action</li> <li><a href="https://github.com/nedbat/coveragepy/commit/ccbab1529dd22de80e2c7ef4327c3288e265e2de"><code>ccbab15</code></a> build: dump all the github actions data</li> <li>Additional commits viewable in <a href="https://github.com/nedbat/coveragepy/compare/7.5.3...7.5.4">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=coverage&package-manager=pip&previous-version=7.5.3&new-version=7.5.4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 4e61fde89af..17a6810835b 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -56,7 +56,7 @@ click==8.0.3 # towncrier # typer # wait-for-it -coverage==7.5.3 +coverage==7.5.4 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/dev.txt b/requirements/dev.txt index b07d0d5da0c..e12ad6664ce 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -54,7 +54,7 @@ click==8.1.6 # towncrier # typer # wait-for-it -coverage==7.5.3 +coverage==7.5.4 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/test.txt b/requirements/test.txt index 4bf994164af..d4c59ddb6ef 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -30,7 +30,7 @@ click==8.1.6 # via # typer # wait-for-it -coverage==7.5.3 +coverage==7.5.4 # via # -r requirements/test.in # pytest-cov From 10e3940aff0945180b71193a45cedf927b4d448d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 25 Jun 2024 11:03:31 +0000 Subject: [PATCH 0218/1511] Bump mypy from 1.10.0 to 1.10.1 (#8471) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [mypy](https://github.com/python/mypy) from 1.10.0 to 1.10.1. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/python/mypy/blob/master/CHANGELOG.md">mypy's changelog</a>.</em></p> <blockquote> <h4>Mypy 1.10.1</h4> <ul> <li>Fix error reporting on cached run after uninstallation of third party library (Shantanu, PR <a href="https://redirect.github.com/python/mypy/pull/17420">17420</a>)</li> </ul> <h4>Acknowledgements</h4> <p>Thanks to all mypy contributors who contributed to this release:</p> <ul> <li>Alex Waygood</li> <li>Ali Hamdan</li> <li>Edward Paget</li> <li>Evgeniy Slobodkin</li> <li>Hashem</li> <li>hesam</li> <li>Hugo van Kemenade</li> <li>Ihor</li> <li>James Braza</li> <li>Jelle Zijlstra</li> <li>jhance</li> <li>Jukka Lehtosalo</li> <li>Loïc Simon</li> <li>Marc Mueller</li> <li>Matthieu Devlin</li> <li>Michael R. Crusoe</li> <li>Nikita Sobolev</li> <li>Oskari Lehto</li> <li>Riccardo Di Maio</li> <li>Richard Si</li> <li>roberfi</li> <li>Roman Solomatin</li> <li>Sam Xifaras</li> <li>Shantanu</li> <li>Spencer Brown</li> <li>Srinivas Lade</li> <li>Tamir Duberstein</li> <li>youkaichao</li> </ul> <p>I’d also like to thank my employer, Dropbox, for supporting mypy development.</p> <h2>Mypy 1.9</h2> <p>We’ve just uploaded mypy 1.9 to the Python Package Index (<a href="https://pypi.org/project/mypy/">PyPI</a>). Mypy is a static type checker for Python. This release includes new features, performance improvements and bug fixes. 
You can install it as follows:</p> <pre><code>python3 -m pip install -U mypy </code></pre> <p>You can read the full documentation for this release on <a href="http://mypy.readthedocs.io">Read the Docs</a>.</p> <h4>Breaking Changes</h4> <p>Because the version of typeshed we use in mypy 1.9 doesn't support 3.7, neither does mypy 1.9. (Jared Hance, PR <a href="https://redirect.github.com/python/mypy/pull/16883">16883</a>)</p> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/python/mypy/commit/c28b5257b528f65a028e7d0dbecbcd81c7997356"><code>c28b525</code></a> [1.10 backport] Fix error reporting on cached run after uninstallation of thi...</li> <li>See full diff in <a href="https://github.com/python/mypy/compare/v1.10.0...v1.10.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=mypy&package-manager=pip&previous-version=1.10.0&new-version=1.10.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 17a6810835b..cdaafe60ae2 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -116,7 +116,7 @@ multidict==6.0.5 # -r requirements/multidict.in # 
-r requirements/runtime-deps.in # yarl -mypy==1.10.0 ; implementation_name == "cpython" +mypy==1.10.1 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index e12ad6664ce..4d3a136e5ac 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -111,7 +111,7 @@ multidict==6.0.5 # via # -r requirements/runtime-deps.in # yarl -mypy==1.10.0 ; implementation_name == "cpython" +mypy==1.10.1 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index f581d7b545e..eb14ed5eb9a 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -40,7 +40,7 @@ markdown-it-py==3.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -mypy==1.10.0 ; implementation_name == "cpython" +mypy==1.10.1 ; implementation_name == "cpython" # via -r requirements/lint.in mypy-extensions==1.0.0 # via mypy diff --git a/requirements/test.txt b/requirements/test.txt index d4c59ddb6ef..8295d7f6854 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -57,7 +57,7 @@ multidict==6.0.5 # via # -r requirements/runtime-deps.in # yarl -mypy==1.10.0 ; implementation_name == "cpython" +mypy==1.10.1 ; implementation_name == "cpython" # via -r requirements/test.in mypy-extensions==1.0.0 # via mypy From 6d2c5cbaaa72aa99a6f395b6af841cb421747e00 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 3 Jul 2024 11:03:59 +0000 Subject: [PATCH 0219/1511] Bump pypa/cibuildwheel from 2.19.1 to 2.19.2 (#8479) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.19.1 to 2.19.2. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pypa/cibuildwheel/releases">pypa/cibuildwheel's releases</a>.</em></p> <blockquote> <h2>Version 2.19.2</h2> <ul> <li>🐛 Update manylinux2014 pins to versions that support past-EoL CentOS 7 mirrors. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1917">#1917</a>)</li> <li>🐛 Support <code>--no-isolation</code> with <code>build[uv]</code> build-frontend. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1889">#1889</a>)</li> <li>🛠 Provide attestations for releases at <a href="https://github.com/pypa/cibuildwheel/attestations">https://github.com/pypa/cibuildwheel/attestations</a>. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1916">#1916</a>)</li> <li>🛠 Provide CPython 3.13.0b3. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1913">#1913</a>)</li> <li>🛠 Remove some workarounds now that pip 24.1 is available. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1891">#1891</a>, <a href="https://redirect.github.com/pypa/cibuildwheel/issues/1892">#1892</a>)</li> <li>📚 Remove nosetest from our docs. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1821">#1821</a>)</li> <li>📚 Document the macOS ARM workaround for 3.8 on GHA. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1971">#1971</a>)</li> <li>📚 GitLab CI + macOS is now a supported platform with an example. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1911">#1911</a>)</li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md">pypa/cibuildwheel's changelog</a>.</em></p> <blockquote> <h3>v2.19.2</h3> <ul> <li>🐛 Update manylinux2014 pins to versions that support past-EoL CentOS 7 mirrors. 
(<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1917">#1917</a>)</li> <li>🐛 Support <code>--no-isolation</code> with <code>build[uv]</code> build-frontend. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1889">#1889</a>)</li> <li>🛠 Provide attestations for releases at <a href="https://github.com/pypa/cibuildwheel/attestations">https://github.com/pypa/cibuildwheel/attestations</a>. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1916">#1916</a>)</li> <li>🛠 Provide CPython 3.13.0b3. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1913">#1913</a>)</li> <li>🛠 Remove some workarounds now that pip 24.1 is available. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1891">#1891</a>, <a href="https://redirect.github.com/pypa/cibuildwheel/issues/1892">#1892</a>)</li> <li>📚 Remove nosetest from our docs. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1821">#1821</a>)</li> <li>📚 Document the macOS ARM workaround for 3.8 on GHA. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1971">#1971</a>)</li> <li>📚 GitLab CI + macOS is now a supported platform with an example. 
(<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1911">#1911</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/cibuildwheel/commit/7e5a838a63ac8128d71ab2dfd99e4634dd1bca09"><code>7e5a838</code></a> Bump version: v2.19.2</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/19e1b8a4b019a447b8c163c07b860a145181e1cb"><code>19e1b8a</code></a> chore: attestations (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1916">#1916</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/73581ae5797410fe710ba5326ee2beb2928fe482"><code>73581ae</code></a> [Bot] Update dependencies (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1917">#1917</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/f21ff5e84ca5ff1a850e19fee97b18ee6d3021bc"><code>f21ff5e</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1914">#1914</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/973946b1720a6d6e0665cc89cc786f9369652058"><code>973946b</code></a> fix: support --no-isolation with build[uv] (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1889">#1889</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/5ea40e6ff3000ff4242ed976c227f5eadbaa8906"><code>5ea40e6</code></a> [Bot] Update dependencies (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1913">#1913</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/6a36f6494bf6b5cbf151408016a1b18ca55cea5b"><code>6a36f64</code></a> feat: add macOS on GitLab (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1911">#1911</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/31de15bcd9015065fbae8f2d6622a5f6b194cd04"><code>31de15b</code></a> chore: remove nosetests from tests and docs (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1898">#1898</a>)</li> 
<li><a href="https://github.com/pypa/cibuildwheel/commit/3179fd215aa4f910fe4196260f8f6a621b0c9964"><code>3179fd2</code></a> [Bot] Update dependencies (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1900">#1900</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/184d4e12801a6db0925a44babbb2e8b8aa2cab96"><code>184d4e1</code></a> fix(ci): CircleCI / Azure Pipelines / GitLab (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1899">#1899</a>)</li> <li>Additional commits viewable in <a href="https://github.com/pypa/cibuildwheel/compare/v2.19.1...v2.19.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pypa/cibuildwheel&package-manager=github_actions&previous-version=2.19.1&new-version=2.19.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index f17fa805004..0eb640794f1 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -351,7 +351,7 @@ jobs: run: | make cythonize - name: Build wheels - uses: pypa/cibuildwheel@v2.19.1 + uses: pypa/cibuildwheel@v2.19.2 env: CIBW_ARCHS_MACOS: x86_64 arm64 universal2 - uses: actions/upload-artifact@v4 From 9562eef754ca7b611dd03f3ed6406a74eea3fd21 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 6 Jul 2024 00:52:34 +0000 Subject: [PATCH 0220/1511] [PR #8463/93868548 backport][3.10] Add full request typing signatures for session HTTP methods (#8484) **This is a backport of PR #8463 as merged into master (9386854800af026dfa60bcef80615eae1cea94ac).** Co-authored-by: Max Muoto <maxmuoto@gmail.com> --- CHANGES/8463.misc.rst | 1 + aiohttp/client.py | 176 +++++++++++++++++++++++++++++++----------- 2 files changed, 130 insertions(+), 47 deletions(-) create mode 100644 CHANGES/8463.misc.rst diff --git 
a/CHANGES/8463.misc.rst b/CHANGES/8463.misc.rst new file mode 100644 index 00000000000..1d42136ebd7 --- /dev/null +++ b/CHANGES/8463.misc.rst @@ -0,0 +1 @@ +Added a 3.11-specific overloads to ``ClientSession`` -- by :user:`max-muoto`. diff --git a/aiohttp/client.py b/aiohttp/client.py index 6288fb8f89c..d47d0facc27 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -27,6 +27,7 @@ Set, Tuple, Type, + TypedDict, TypeVar, Union, ) @@ -150,6 +151,37 @@ else: SSLContext = None +if sys.version_info >= (3, 11) and TYPE_CHECKING: + from typing import Unpack + + +class _RequestOptions(TypedDict, total=False): + params: Union[Mapping[str, str], None] + data: Any + json: Any + cookies: Union[LooseCookies, None] + headers: Union[LooseHeaders, None] + skip_auto_headers: Union[Iterable[str], None] + auth: Union[BasicAuth, None] + allow_redirects: bool + max_redirects: int + compress: Union[str, None] + chunked: Union[bool, None] + expect100: bool + raise_for_status: Union[None, bool, Callable[[ClientResponse], Awaitable[None]]] + read_until_eof: bool + proxy: Union[StrOrURL, None] + proxy_auth: Union[BasicAuth, None] + timeout: "Union[ClientTimeout, _SENTINEL, None]" + ssl: Union[SSLContext, bool, Fingerprint] + server_hostname: Union[str, None] + proxy_headers: Union[LooseHeaders, None] + trace_request_ctx: Union[SimpleNamespace, None] + read_bufsize: Union[int, None] + auto_decompress: Union[bool, None] + max_line_size: Union[int, None] + max_field_size: Union[int, None] + @attr.s(auto_attribs=True, frozen=True, slots=True) class ClientTimeout: @@ -1024,61 +1056,111 @@ def _prepare_headers(self, headers: Optional[LooseHeaders]) -> "CIMultiDict[str] added_names.add(key) return result - def get( - self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP GET request.""" - return _RequestContextManager( - self._request(hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs) - ) + if sys.version_info >= (3, 
11) and TYPE_CHECKING: + + def get( + self, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... + + def options( + self, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... + + def head( + self, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... + + def post( + self, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... + + def put( + self, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... + + def patch( + self, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... + + def delete( + self, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... + + else: + + def get( + self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP GET request.""" + return _RequestContextManager( + self._request( + hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs + ) + ) - def options( - self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP OPTIONS request.""" - return _RequestContextManager( - self._request( - hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs + def options( + self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP OPTIONS request.""" + return _RequestContextManager( + self._request( + hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs + ) ) - ) - def head( - self, url: StrOrURL, *, allow_redirects: bool = False, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP HEAD request.""" - return _RequestContextManager( - self._request( - hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs + def head( + self, url: StrOrURL, *, allow_redirects: bool = False, 
**kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP HEAD request.""" + return _RequestContextManager( + self._request( + hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs + ) ) - ) - def post( - self, url: StrOrURL, *, data: Any = None, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP POST request.""" - return _RequestContextManager( - self._request(hdrs.METH_POST, url, data=data, **kwargs) - ) + def post( + self, url: StrOrURL, *, data: Any = None, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP POST request.""" + return _RequestContextManager( + self._request(hdrs.METH_POST, url, data=data, **kwargs) + ) - def put( - self, url: StrOrURL, *, data: Any = None, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP PUT request.""" - return _RequestContextManager( - self._request(hdrs.METH_PUT, url, data=data, **kwargs) - ) + def put( + self, url: StrOrURL, *, data: Any = None, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP PUT request.""" + return _RequestContextManager( + self._request(hdrs.METH_PUT, url, data=data, **kwargs) + ) - def patch( - self, url: StrOrURL, *, data: Any = None, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP PATCH request.""" - return _RequestContextManager( - self._request(hdrs.METH_PATCH, url, data=data, **kwargs) - ) + def patch( + self, url: StrOrURL, *, data: Any = None, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP PATCH request.""" + return _RequestContextManager( + self._request(hdrs.METH_PATCH, url, data=data, **kwargs) + ) - def delete(self, url: StrOrURL, **kwargs: Any) -> "_RequestContextManager": - """Perform HTTP DELETE request.""" - return _RequestContextManager(self._request(hdrs.METH_DELETE, url, **kwargs)) + def delete(self, url: StrOrURL, **kwargs: Any) -> "_RequestContextManager": + """Perform HTTP DELETE request.""" + return _RequestContextManager( + self._request(hdrs.METH_DELETE, url, **kwargs) + 
) async def close(self) -> None: """Close underlying connector. From 143209973646b2defcaa93308a8039fbd26bf3ee Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 8 Jul 2024 13:59:50 +0000 Subject: [PATCH 0221/1511] [PR #8473/5e366198 backport][3.10] add: asgi library to third party list (#8488) **This is a backport of PR #8473 as merged into master (5e366198761d8eeba9e1e773fcf47daaea388c94).** Co-authored-by: Elias Gabriel <me@eliasfgabriel.com> --- docs/third_party.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/third_party.rst b/docs/third_party.rst index 29522e5364f..797f9f011ec 100644 --- a/docs/third_party.rst +++ b/docs/third_party.rst @@ -298,3 +298,6 @@ ask to raise the status. - `nacl_middleware <https://github.com/CosmicDNA/nacl_middleware>`_ An aiohttp middleware library for asymmetric encryption of data transmitted via http and/or websocket connections. + +- `aiohttp-asgi-connector <https://github.com/thearchitector/aiohttp-asgi-connector>`_ + An aiohttp connector for using a ``ClientSession`` to interface directly with separate ASGI applications. From 1e0d0e80ea026a3471c7744648e837024cc75105 Mon Sep 17 00:00:00 2001 From: Steve Repsher <steverep@users.noreply.github.com> Date: Wed, 10 Jul 2024 12:02:53 -0400 Subject: [PATCH 0222/1511] [3.10] Document `web.FileResponse` (#8490) Missed backport of PR #3991 by @Transfusion (cherry picked from commit 7a59a1480b7cca4779c62fe35be778415121cd71) --- CHANGES/3958.doc | 1 + docs/web_reference.rst | 25 +++++++++++++++++++++++++ 2 files changed, 26 insertions(+) create mode 100644 CHANGES/3958.doc diff --git a/CHANGES/3958.doc b/CHANGES/3958.doc new file mode 100644 index 00000000000..9f3a9de1743 --- /dev/null +++ b/CHANGES/3958.doc @@ -0,0 +1 @@ +Add documentation for ``aiohttp.web.FileResponse``. 
diff --git a/docs/web_reference.rst b/docs/web_reference.rst index 05f8085842d..f96fd59f56a 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -925,6 +925,31 @@ and :ref:`aiohttp-web-signals` handlers:: :attr:`~aiohttp.StreamResponse.body`, represented as :class:`str`. +.. class:: FileResponse(*, path, chunk_size=256*1024, status=200, reason=None, headers=None) + + The response class used to send files, inherited from :class:`StreamResponse`. + + Supports the ``Content-Range`` and ``If-Range`` HTTP Headers in requests. + + The actual :attr:`body` sending happens in overridden :meth:`~StreamResponse.prepare`. + + :param path: Path to file. Accepts both :class:`str` and :class:`pathlib.Path`. + :param int chunk_size: Chunk size in bytes which will be passed into + :meth:`io.RawIOBase.read` in the event that the + ``sendfile`` system call is not supported. + + :param int status: HTTP status code, ``200`` by default. + + :param str reason: HTTP reason. If param is ``None`` reason will be + calculated basing on *status* + parameter. Otherwise pass :class:`str` with + arbitrary *status* explanation.. + + :param collections.abc.Mapping headers: HTTP headers that should be added to + response's ones. The ``Content-Type`` response header + will be overridden if provided. + + .. 
class:: WebSocketResponse(*, timeout=10.0, receive_timeout=None, \ autoclose=True, autoping=True, heartbeat=None, \ protocols=(), compress=True, max_msg_size=4194304) From 9bfc8660ff2fcb80437e644ddbc26ebb44f921ee Mon Sep 17 00:00:00 2001 From: Steve Repsher <steverep@users.noreply.github.com> Date: Thu, 11 Jul 2024 14:01:18 -0400 Subject: [PATCH 0223/1511] [3.10] Simplify static path checks and resolve strictly (#8491) (#8492) (cherry picked from commit 20d5f6e5421b1794b1d8370ab80b9deddf24f6da) --- CHANGES/8491.misc.rst | 1 + aiohttp/web_urldispatcher.py | 13 +++++-------- tests/test_urldispatch.py | 15 +++++++++++++++ tests/test_web_sendfile_functional.py | 9 --------- 4 files changed, 21 insertions(+), 17 deletions(-) create mode 100644 CHANGES/8491.misc.rst diff --git a/CHANGES/8491.misc.rst b/CHANGES/8491.misc.rst new file mode 100644 index 00000000000..223c549b2e2 --- /dev/null +++ b/CHANGES/8491.misc.rst @@ -0,0 +1 @@ +Simplified path checks for ``UrlDispatcher.add_static()`` method -- by :user:`steverep`. 
diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index d8f148f7618..7fbe70ba6a3 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -557,14 +557,11 @@ def __init__( ) -> None: super().__init__(prefix, name=name) try: - directory = Path(directory) - if str(directory).startswith("~"): - directory = Path(os.path.expanduser(str(directory))) - directory = directory.resolve() - if not directory.is_dir(): - raise ValueError("Not a directory") - except (FileNotFoundError, ValueError) as error: - raise ValueError(f"No directory exists at '{directory}'") from error + directory = Path(directory).expanduser().resolve(strict=True) + except FileNotFoundError as error: + raise ValueError(f"'{directory}' does not exist") from error + if not directory.is_dir(): + raise ValueError(f"'{directory}' is not a directory") self._directory = directory self._show_index = show_index self._chunk_size = chunk_size diff --git a/tests/test_urldispatch.py b/tests/test_urldispatch.py index cbd6395e238..2453ab5a235 100644 --- a/tests/test_urldispatch.py +++ b/tests/test_urldispatch.py @@ -339,6 +339,21 @@ def test_route_dynamic(router) -> None: assert route is route2 +def test_add_static_path_checks(router: any, tmp_path: pathlib.Path) -> None: + """Test that static paths must exist and be directories.""" + with pytest.raises(ValueError, match="does not exist"): + router.add_static("/", tmp_path / "does-not-exist") + with pytest.raises(ValueError, match="is not a directory"): + router.add_static("/", __file__) + + +def test_add_static_path_resolution(router: any) -> None: + """Test that static paths are expanded and absolute.""" + res = router.add_static("/", "~/..") + directory = str(res.get_info()["directory"]) + assert directory == str(pathlib.Path.home().parent) + + def test_add_static(router) -> None: resource = router.add_static( "/st", pathlib.Path(aiohttp.__file__).parent, name="static" diff --git a/tests/test_web_sendfile_functional.py 
b/tests/test_web_sendfile_functional.py index 93645094ef7..be866d6f1de 100644 --- a/tests/test_web_sendfile_functional.py +++ b/tests/test_web_sendfile_functional.py @@ -595,15 +595,6 @@ async def test_static_file_directory_traversal_attack(aiohttp_client) -> None: await client.close() -def test_static_route_path_existence_check() -> None: - directory = pathlib.Path(__file__).parent - web.StaticResource("/", directory) - - nodirectory = directory / "nonexistent-uPNiOEAg5d" - with pytest.raises(ValueError): - web.StaticResource("/", nodirectory) - - async def test_static_file_huge(aiohttp_client, tmp_path) -> None: file_path = tmp_path / "huge_data.unknown_mime_type" From 31879b8e5a5cdf22cfa13f363bd7b5e24ecd4cea Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 12 Jul 2024 10:56:54 +0000 Subject: [PATCH 0224/1511] Bump coverage from 7.5.4 to 7.6.0 (#8493) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.5.4 to 7.6.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst">coverage's changelog</a>.</em></p> <blockquote> <h2>Version 7.6.0 — 2024-07-11</h2> <ul> <li> <p>Exclusion patterns can now be multi-line, thanks to <code>Daniel Diniz <pull 1807_></code><em>. This enables many interesting exclusion use-cases, including those requested in issues <code>118 <issue 118_></code></em> (entire files), <code>996 <issue 996_></code>_ (multiple lines only when appearing together), <code>1741 <issue 1741_></code>_ (remainder of a function), and <code>1803 <issue 1803_></code>_ (arbitrary sequence of marked lines). See the :ref:<code>multi_line_exclude</code> section of the docs for more details and examples.</p> </li> <li> <p>The JSON report now includes per-function and per-class coverage information. 
Thanks to <code>Daniel Diniz <pull 1809_></code>_ for getting the work started. This closes <code>issue 1793</code>_ and <code>issue 1532</code>_.</p> </li> <li> <p>Fixed an incorrect calculation of "(no class)" lines in the HTML classes report.</p> </li> <li> <p>Python 3.13.0b3 is supported.</p> </li> </ul> <p>.. _issue 118: <a href="https://redirect.github.com/nedbat/coveragepy/issues/118">nedbat/coveragepy#118</a> .. _issue 996: <a href="https://redirect.github.com/nedbat/coveragepy/issues/996">nedbat/coveragepy#996</a> .. _issue 1532: <a href="https://redirect.github.com/nedbat/coveragepy/issues/1532">nedbat/coveragepy#1532</a> .. _issue 1741: <a href="https://redirect.github.com/nedbat/coveragepy/issues/1741">nedbat/coveragepy#1741</a> .. _issue 1793: <a href="https://redirect.github.com/nedbat/coveragepy/issues/1793">nedbat/coveragepy#1793</a> .. _issue 1803: <a href="https://redirect.github.com/nedbat/coveragepy/issues/1803">nedbat/coveragepy#1803</a> .. _pull 1807: <a href="https://redirect.github.com/nedbat/coveragepy/pull/1807">nedbat/coveragepy#1807</a> .. _pull 1809: <a href="https://redirect.github.com/nedbat/coveragepy/pull/1809">nedbat/coveragepy#1809</a></p> <p>.. 
_changes_7-5-4:</p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/nedbat/coveragepy/commit/59a3cd7cecbf45378b9d1f4eda90826258233d62"><code>59a3cd7</code></a> docs: sample HTML for 7.6.0</li> <li><a href="https://github.com/nedbat/coveragepy/commit/7f27fa7810e494a75af08b54c4f97b94df7364f4"><code>7f27fa7</code></a> docs: prep for 7.6.0</li> <li><a href="https://github.com/nedbat/coveragepy/commit/6a268b059515e2768931ce6454dcd27304520d8a"><code>6a268b0</code></a> docs: issues closed by the json region reporting</li> <li><a href="https://github.com/nedbat/coveragepy/commit/5bfe9e770304e0b0b346de2441c83300f9da0edf"><code>5bfe9e7</code></a> chore: bump actions/setup-python from 5.1.0 to 5.1.1 (<a href="https://redirect.github.com/nedbat/coveragepy/issues/1814">#1814</a>)</li> <li><a href="https://github.com/nedbat/coveragepy/commit/ab609ef0fb235454050cf8383427ce5f1b0ec8e9"><code>ab609ef</code></a> docs: mention json region reporting in the changes</li> <li><a href="https://github.com/nedbat/coveragepy/commit/92d96b91b78639cdb50cbba9f7848dd9e75382d7"><code>92d96b9</code></a> fix: json report needs 'no class' and 'no function' also</li> <li><a href="https://github.com/nedbat/coveragepy/commit/e47e7e758bfc48537f0f21d40cef8e5fa2a076c6"><code>e47e7e7</code></a> refactor: move duplicate code into methods</li> <li><a href="https://github.com/nedbat/coveragepy/commit/3d6be2b3284d30d1668afeeb383430ddc402ce4d"><code>3d6be2b</code></a> fix: json format should bump for regions</li> <li><a href="https://github.com/nedbat/coveragepy/commit/a9992d2bff1f12db61c114e6d61d5f35873ae84a"><code>a9992d2</code></a> test: add a test of json regions with branches</li> <li><a href="https://github.com/nedbat/coveragepy/commit/8b8976462b8b7be74716e83efaf05e22f477ef72"><code>8b89764</code></a> test: json expectations should have explicit format number</li> <li>Additional commits viewable in <a 
href="https://github.com/nedbat/coveragepy/compare/7.5.4...7.6.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=coverage&package-manager=pip&previous-version=7.5.4&new-version=7.6.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index cdaafe60ae2..6551c88eff6 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -56,7 +56,7 @@ click==8.0.3 # towncrier # typer # wait-for-it -coverage==7.5.4 +coverage==7.6.0 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/dev.txt b/requirements/dev.txt index 4d3a136e5ac..d79bb1ffec6 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -54,7 +54,7 @@ click==8.1.6 # towncrier # typer # wait-for-it -coverage==7.5.4 +coverage==7.6.0 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/test.txt b/requirements/test.txt index 8295d7f6854..79006772a71 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -30,7 +30,7 @@ click==8.1.6 # via # typer # wait-for-it -coverage==7.5.4 +coverage==7.6.0 # via # -r requirements/test.in # pytest-cov From 37f9a0d9031b84f0b9722923252f7b9322409cc9 Mon Sep 17 00:00:00 2001 From: Steve Repsher 
<steverep@users.noreply.github.com> Date: Sat, 13 Jul 2024 15:16:59 -0400 Subject: [PATCH 0225/1511] [3.10] Fix response headers for compressed file requests (#8485) (#8497) (cherry picked from commit c086795452bc8fe9c5a476dc1d6b9d5a3120dc7c) --- CHANGES/4462.bugfix.rst | 7 ++++ aiohttp/web_fileresponse.py | 49 ++++++++++++++++++--------- tests/test_web_sendfile_functional.py | 19 ++++++++--- 3 files changed, 54 insertions(+), 21 deletions(-) create mode 100644 CHANGES/4462.bugfix.rst diff --git a/CHANGES/4462.bugfix.rst b/CHANGES/4462.bugfix.rst new file mode 100644 index 00000000000..fe897a08b39 --- /dev/null +++ b/CHANGES/4462.bugfix.rst @@ -0,0 +1,7 @@ +Fixed server response headers for ``Content-Type`` and ``Content-Encoding`` for +static compressed files -- by :user:`steverep`. + +Server will now respond with a ``Content-Type`` appropriate for the compressed +file (e.g. ``"application/gzip"``), and omit the ``Content-Encoding`` header. +Users should expect that most clients will no longer decompress such responses +by default. diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index 5b03bcc8350..a3521f2b263 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -1,9 +1,9 @@ import asyncio -import mimetypes import os import pathlib import sys from contextlib import suppress +from mimetypes import MimeTypes from types import MappingProxyType from typing import ( # noqa IO, @@ -43,14 +43,35 @@ NOSENDFILE: Final[bool] = bool(os.environ.get("AIOHTTP_NOSENDFILE")) +CONTENT_TYPES: Final[MimeTypes] = MimeTypes() + if sys.version_info < (3, 9): - mimetypes.encodings_map[".br"] = "br" + CONTENT_TYPES.encodings_map[".br"] = "br" # File extension to IANA encodings map that will be checked in the order defined. 
ENCODING_EXTENSIONS = MappingProxyType( - {ext: mimetypes.encodings_map[ext] for ext in (".br", ".gz")} + {ext: CONTENT_TYPES.encodings_map[ext] for ext in (".br", ".gz")} +) + +FALLBACK_CONTENT_TYPE = "application/octet-stream" + +# Provide additional MIME type/extension pairs to be recognized. +# https://en.wikipedia.org/wiki/List_of_archive_formats#Compression_only +ADDITIONAL_CONTENT_TYPES = MappingProxyType( + { + "application/gzip": ".gz", + "application/x-brotli": ".br", + "application/x-bzip2": ".bz2", + "application/x-compress": ".Z", + "application/x-xz": ".xz", + } ) +# Add custom pairs and clear the encodings map so guess_type ignores them. +CONTENT_TYPES.encodings_map.clear() +for content_type, extension in ADDITIONAL_CONTENT_TYPES.items(): + CONTENT_TYPES.add_type(content_type, extension) # type: ignore[attr-defined] + class FileResponse(StreamResponse): """A response object can be used to send files.""" @@ -195,15 +216,6 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter ): return await self._not_modified(request, etag_value, last_modified) - if hdrs.CONTENT_TYPE not in self.headers: - ct, encoding = mimetypes.guess_type(str(file_path)) - if not ct: - ct = "application/octet-stream" - should_set_ct = True - else: - encoding = file_encoding - should_set_ct = False - status = self._status file_size = st.st_size count = file_size @@ -278,11 +290,16 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter # return a HTTP 206 for a Range request. self.set_status(status) - if should_set_ct: - self.content_type = ct # type: ignore[assignment] - if encoding: - self.headers[hdrs.CONTENT_ENCODING] = encoding + # If the Content-Type header is not already set, guess it based on the + # extension of the request path. The encoding returned by guess_type + # can be ignored since the map was cleared above. 
+ if hdrs.CONTENT_TYPE not in self.headers: + self.content_type = ( + CONTENT_TYPES.guess_type(self._path)[0] or FALLBACK_CONTENT_TYPE + ) + if file_encoding: + self.headers[hdrs.CONTENT_ENCODING] = file_encoding self.headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING # Disable compression if we are already sending # a compressed file since we don't want to double diff --git a/tests/test_web_sendfile_functional.py b/tests/test_web_sendfile_functional.py index be866d6f1de..a3e9a1ab76f 100644 --- a/tests/test_web_sendfile_functional.py +++ b/tests/test_web_sendfile_functional.py @@ -1,4 +1,5 @@ import asyncio +import bz2 import gzip import pathlib import socket @@ -36,10 +37,12 @@ def hello_txt(request, tmp_path_factory) -> pathlib.Path: None: txt, "gzip": txt.with_suffix(f"{txt.suffix}.gz"), "br": txt.with_suffix(f"{txt.suffix}.br"), + "bzip2": txt.with_suffix(f"{txt.suffix}.bz2"), } hello[None].write_bytes(HELLO_AIOHTTP) hello["gzip"].write_bytes(gzip.compress(HELLO_AIOHTTP)) hello["br"].write_bytes(brotli.compress(HELLO_AIOHTTP)) + hello["bzip2"].write_bytes(bz2.compress(HELLO_AIOHTTP)) encoding = getattr(request, "param", None) return hello[encoding] @@ -322,10 +325,16 @@ async def handler(request): @pytest.mark.parametrize( - ("hello_txt", "expect_encoding"), [["gzip"] * 2, ["br"] * 2], indirect=["hello_txt"] + ("hello_txt", "expect_type"), + [ + ("gzip", "application/gzip"), + ("br", "application/x-brotli"), + ("bzip2", "application/x-bzip2"), + ], + indirect=["hello_txt"], ) async def test_static_file_with_content_encoding( - hello_txt: pathlib.Path, aiohttp_client: Any, sender: Any, expect_encoding: str + hello_txt: pathlib.Path, aiohttp_client: Any, sender: Any, expect_type: str ) -> None: """Test requesting static compressed files returns the correct content type and encoding.""" @@ -338,9 +347,9 @@ async def handler(request): resp = await client.get("/") assert resp.status == 200 - assert resp.headers.get("Content-Encoding") == expect_encoding - assert 
resp.headers["Content-Type"] == "text/plain" - assert await resp.read() == HELLO_AIOHTTP + assert resp.headers.get("Content-Encoding") is None + assert resp.headers["Content-Type"] == expect_type + assert await resp.read() == hello_txt.read_bytes() resp.close() await resp.release() From 1bb5b5f368a44430cd88e15914bd4eaf4518ef4f Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 14 Jul 2024 15:55:04 +0000 Subject: [PATCH 0226/1511] [PR #8499/ab141c31 backport][3.10] Fix broken link (#8500) **This is a backport of PR #8499 as merged into master (ab141c31152874a9b41f439f5a561b77bf8f0427).** Co-authored-by: Anita Hammer <166057949+anitahammer@users.noreply.github.com> --- aiohttp/connector.py | 2 +- docs/client_reference.rst | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 8dac891383c..d62e245cb5f 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -737,7 +737,7 @@ class TCPConnector(BaseConnector): fingerprint - Pass the binary sha256 digest of the expected certificate in DER format to verify that the certificate the server presents matches. See also - https://en.wikipedia.org/wiki/Transport_Layer_Security#Certificate_pinning + https://en.wikipedia.org/wiki/HTTP_Public_Key_Pinning resolver - Enable DNS lookups and use this resolver use_dns_cache - Use memory cache for DNS lookups. diff --git a/docs/client_reference.rst b/docs/client_reference.rst index ef31003c201..738892c6cc6 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -516,7 +516,7 @@ The client session supports the context manager protocol for self closing. :param bytes fingerprint: Pass the SHA256 digest of the expected certificate in DER format to verify that the certificate the server presents matches. Useful for `certificate pinning - <https://en.wikipedia.org/wiki/Transport_Layer_Security#Certificate_pinning>`_. 
+ <https://en.wikipedia.org/wiki/HTTP_Public_Key_Pinning>`_. Warning: use of MD5 or SHA1 digests is insecure and removed. @@ -785,7 +785,7 @@ The client session supports the context manager protocol for self closing. :param bytes fingerprint: Pass the SHA256 digest of the expected certificate in DER format to verify that the certificate the server presents matches. Useful for `certificate pinning - <https://en.wikipedia.org/wiki/Transport_Layer_Security#Certificate_pinning>`_. + <https://en.wikipedia.org/wiki/HTTP_Public_Key_Pinning>`_. Note: use of MD5 or SHA1 digests is insecure and deprecated. @@ -1107,7 +1107,7 @@ is controlled by *force_close* constructor's parameter). :param bytes fingerprint: pass the SHA256 digest of the expected certificate in DER format to verify that the certificate the server presents matches. Useful for `certificate pinning - <https://en.wikipedia.org/wiki/Transport_Layer_Security#Certificate_pinning>`_. + <https://en.wikipedia.org/wiki/HTTP_Public_Key_Pinning>`_. Note: use of MD5 or SHA1 digests is insecure and deprecated. From aae7ac50bee52d18bf0e21c318e3f5cadb3ef6c2 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 14 Jul 2024 17:22:21 +0000 Subject: [PATCH 0227/1511] [PR #8501/f4574887 backport][3.10] Use identity checks for all WSMsgType type compares (#8502) Co-authored-by: J. Nick Koston <nick@koston.org> This is a backport of PR #8501 as merged into master (f4574887f394c921b262de00b3cfc2411f8f8aa8). --- CHANGES/8501.misc.rst | 1 + aiohttp/client_ws.py | 14 +++++++------- aiohttp/web_ws.py | 14 +++++++------- 3 files changed, 15 insertions(+), 14 deletions(-) create mode 100644 CHANGES/8501.misc.rst diff --git a/CHANGES/8501.misc.rst b/CHANGES/8501.misc.rst new file mode 100644 index 00000000000..183c370178e --- /dev/null +++ b/CHANGES/8501.misc.rst @@ -0,0 +1 @@ +Use identity checks for all ``WSMsgType`` type compares -- by :user:`bdraco`. 
diff --git a/aiohttp/client_ws.py b/aiohttp/client_ws.py index 1ba6f78f514..0a9dc843a40 100644 --- a/aiohttp/client_ws.py +++ b/aiohttp/client_ws.py @@ -223,7 +223,7 @@ async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bo self._response.close() return True - if msg.type == WSMsgType.CLOSE: + if msg.type is WSMsgType.CLOSE: self._close_code = msg.data self._response.close() return True @@ -273,30 +273,30 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: await self.close() return WSMessage(WSMsgType.ERROR, exc, None) - if msg.type == WSMsgType.CLOSE: + if msg.type is WSMsgType.CLOSE: self._closing = True self._close_code = msg.data if not self._closed and self._autoclose: await self.close() - elif msg.type == WSMsgType.CLOSING: + elif msg.type is WSMsgType.CLOSING: self._closing = True - elif msg.type == WSMsgType.PING and self._autoping: + elif msg.type is WSMsgType.PING and self._autoping: await self.pong(msg.data) continue - elif msg.type == WSMsgType.PONG and self._autoping: + elif msg.type is WSMsgType.PONG and self._autoping: continue return msg async def receive_str(self, *, timeout: Optional[float] = None) -> str: msg = await self.receive(timeout) - if msg.type != WSMsgType.TEXT: + if msg.type is not WSMsgType.TEXT: raise TypeError(f"Received message {msg.type}:{msg.data!r} is not str") return cast(str, msg.data) async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes: msg = await self.receive(timeout) - if msg.type != WSMsgType.BINARY: + if msg.type is not WSMsgType.BINARY: raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes") return cast(bytes, msg.data) diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index c34e30e62de..590c0abd8ae 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -415,7 +415,7 @@ async def close( self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) return True - if msg.type == WSMsgType.CLOSE: + if msg.type is WSMsgType.CLOSE: 
self._set_code_close_transport(msg.data) return True @@ -482,7 +482,7 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: await self.close() return WSMessage(WSMsgType.ERROR, exc, None) - if msg.type == WSMsgType.CLOSE: + if msg.type is WSMsgType.CLOSE: self._set_closing(msg.data) # Could be closed while awaiting reader. if not self._closed and self._autoclose: @@ -491,19 +491,19 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: # want to drain any pending writes as it will # likely result writing to a broken pipe. await self.close(drain=False) - elif msg.type == WSMsgType.CLOSING: + elif msg.type is WSMsgType.CLOSING: self._set_closing(WSCloseCode.OK) - elif msg.type == WSMsgType.PING and self._autoping: + elif msg.type is WSMsgType.PING and self._autoping: await self.pong(msg.data) continue - elif msg.type == WSMsgType.PONG and self._autoping: + elif msg.type is WSMsgType.PONG and self._autoping: continue return msg async def receive_str(self, *, timeout: Optional[float] = None) -> str: msg = await self.receive(timeout) - if msg.type != WSMsgType.TEXT: + if msg.type is not WSMsgType.TEXT: raise TypeError( "Received message {}:{!r} is not WSMsgType.TEXT".format( msg.type, msg.data @@ -513,7 +513,7 @@ async def receive_str(self, *, timeout: Optional[float] = None) -> str: async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes: msg = await self.receive(timeout) - if msg.type != WSMsgType.BINARY: + if msg.type is not WSMsgType.BINARY: raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes") return cast(bytes, msg.data) From 3bc89fef7d2df86e4a03560a0b6b8bbfb2c966ee Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sun, 14 Jul 2024 12:39:23 -0500 Subject: [PATCH 0228/1511] [PR #8498/7bf6ee1 backport][3.10] Avoid creating a future on every websocket receive (#8503) Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8498.misc.rst | 1 + aiohttp/client_ws.py | 19 +++++++----- aiohttp/web_ws.py | 20 ++++++++----- tests/test_client_ws_functional.py | 39 ++++++++++++++++++++++-- tests/test_web_websocket_functional.py | 41 ++++++++++++++++++++++++++ 5 files changed, 101 insertions(+), 19 deletions(-) create mode 100644 CHANGES/8498.misc.rst diff --git a/CHANGES/8498.misc.rst b/CHANGES/8498.misc.rst new file mode 100644 index 00000000000..5fcf3efd884 --- /dev/null +++ b/CHANGES/8498.misc.rst @@ -0,0 +1 @@ +Avoid creating a future on every websocket receive -- by :user:`bdraco`. diff --git a/aiohttp/client_ws.py b/aiohttp/client_ws.py index 0a9dc843a40..608c659e543 100644 --- a/aiohttp/client_ws.py +++ b/aiohttp/client_ws.py @@ -66,7 +66,8 @@ def __init__( self._pong_heartbeat = heartbeat / 2.0 self._pong_response_cb: Optional[asyncio.TimerHandle] = None self._loop = loop - self._waiting: Optional[asyncio.Future[bool]] = None + self._waiting: bool = False + self._close_wait: Optional[asyncio.Future[None]] = None self._exception: Optional[BaseException] = None self._compress = compress self._client_notakeover = client_notakeover @@ -185,10 +186,12 @@ async def send_json( async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool: # we need to break `receive()` cycle first, # `close()` may be called from different task - if self._waiting is not None and not self._closing: + if self._waiting and not self._closing: + assert self._loop is not None + self._close_wait = self._loop.create_future() self._closing = True self._reader.feed_data(WS_CLOSING_MESSAGE, 0) - await self._waiting + await self._close_wait if not self._closed: self._cancel_heartbeat() @@ -232,7 +235,7 @@ async def close(self, *, code: int = WSCloseCode.OK, 
message: bytes = b"") -> bo async def receive(self, timeout: Optional[float] = None) -> WSMessage: while True: - if self._waiting is not None: + if self._waiting: raise RuntimeError("Concurrent call to receive() is not allowed") if self._closed: @@ -242,15 +245,15 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: return WS_CLOSED_MESSAGE try: - self._waiting = self._loop.create_future() + self._waiting = True try: async with async_timeout.timeout(timeout or self._receive_timeout): msg = await self._reader.read() self._reset_heartbeat() finally: - waiter = self._waiting - self._waiting = None - set_result(waiter, True) + self._waiting = False + if self._close_wait: + set_result(self._close_wait, None) except (asyncio.CancelledError, asyncio.TimeoutError): self._close_code = WSCloseCode.ABNORMAL_CLOSURE raise diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index 590c0abd8ae..b74bfd688c9 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -81,7 +81,8 @@ def __init__( self._conn_lost = 0 self._close_code: Optional[int] = None self._loop: Optional[asyncio.AbstractEventLoop] = None - self._waiting: Optional[asyncio.Future[bool]] = None + self._waiting: bool = False + self._close_wait: Optional[asyncio.Future[None]] = None self._exception: Optional[BaseException] = None self._timeout = timeout self._receive_timeout = receive_timeout @@ -376,9 +377,12 @@ async def close( # we need to break `receive()` cycle first, # `close()` may be called from different task - if self._waiting is not None and not self._closed: + if self._waiting and not self._closed: + if not self._close_wait: + assert self._loop is not None + self._close_wait = self._loop.create_future() reader.feed_data(WS_CLOSING_MESSAGE, 0) - await self._waiting + await self._close_wait if self._closed: return False @@ -445,7 +449,7 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: loop = self._loop assert loop is not None while True: - if self._waiting is not 
None: + if self._waiting: raise RuntimeError("Concurrent call to receive() is not allowed") if self._closed: @@ -457,15 +461,15 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: return WS_CLOSING_MESSAGE try: - self._waiting = loop.create_future() + self._waiting = True try: async with async_timeout.timeout(timeout or self._receive_timeout): msg = await self._reader.read() self._reset_heartbeat() finally: - waiter = self._waiting - set_result(waiter, True) - self._waiting = None + self._waiting = False + if self._close_wait: + set_result(self._close_wait, None) except asyncio.TimeoutError: raise except EofStream: diff --git a/tests/test_client_ws_functional.py b/tests/test_client_ws_functional.py index 6270675276e..584a8d6aa27 100644 --- a/tests/test_client_ws_functional.py +++ b/tests/test_client_ws_functional.py @@ -1,5 +1,6 @@ import asyncio import sys +from typing import Any import pytest @@ -245,7 +246,7 @@ async def handler(request): await client_ws.close() msg = await ws.receive() - assert msg.type == aiohttp.WSMsgType.CLOSE + assert msg.type is aiohttp.WSMsgType.CLOSE return ws app = web.Application() @@ -256,11 +257,43 @@ async def handler(request): await ws.send_bytes(b"ask") msg = await ws.receive() - assert msg.type == aiohttp.WSMsgType.CLOSING + assert msg.type is aiohttp.WSMsgType.CLOSING await asyncio.sleep(0.01) msg = await ws.receive() - assert msg.type == aiohttp.WSMsgType.CLOSED + assert msg.type is aiohttp.WSMsgType.CLOSED + + +async def test_concurrent_close_multiple_tasks(aiohttp_client: Any) -> None: + async def handler(request): + ws = web.WebSocketResponse() + await ws.prepare(request) + + await ws.receive_bytes() + await ws.send_str("test") + + msg = await ws.receive() + assert msg.type is aiohttp.WSMsgType.CLOSE + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + ws = await client.ws_connect("/") + + await ws.send_bytes(b"ask") + + task1 = 
asyncio.create_task(ws.close()) + task2 = asyncio.create_task(ws.close()) + + msg = await ws.receive() + assert msg.type is aiohttp.WSMsgType.CLOSED + + await task1 + await task2 + + msg = await ws.receive() + assert msg.type is aiohttp.WSMsgType.CLOSED async def test_concurrent_task_close(aiohttp_client) -> None: diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index b471b131c1e..ce338cdf92d 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -313,6 +313,47 @@ async def handler(request): assert msg.type == WSMsgType.CLOSED +async def test_concurrent_close_multiple_tasks(loop: Any, aiohttp_client: Any) -> None: + srv_ws = None + + async def handler(request): + nonlocal srv_ws + ws = srv_ws = web.WebSocketResponse(autoclose=False, protocols=("foo", "bar")) + await ws.prepare(request) + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSING + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSING + + await asyncio.sleep(0) + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSED + + return ws + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + ws = await client.ws_connect("/", autoclose=False, protocols=("eggs", "bar")) + + task1 = asyncio.create_task(srv_ws.close(code=WSCloseCode.INVALID_TEXT)) + task2 = asyncio.create_task(srv_ws.close(code=WSCloseCode.INVALID_TEXT)) + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSE + + await task1 + await task2 + + await asyncio.sleep(0) + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSED + + async def test_close_op_code_from_client(loop: Any, aiohttp_client: Any) -> None: srv_ws: Optional[web.WebSocketResponse] = None From 1edeb9d1a025b6a7d55dbdccaf866462351c4958 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 16 Jul 2024 10:42:07 +0000 Subject: [PATCH 0229/1511] 
Bump sigstore/gh-action-sigstore-python from 2.1.1 to 3.0.0 (#8508) Bumps [sigstore/gh-action-sigstore-python](https://github.com/sigstore/gh-action-sigstore-python) from 2.1.1 to 3.0.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/sigstore/gh-action-sigstore-python/releases">sigstore/gh-action-sigstore-python's releases</a>.</em></p> <blockquote> <h2>v3.0.0</h2> <h3>Added</h3> <ul> <li><code>inputs</code> now allows recursive globbing with <code>**</code> (<a href="https://redirect.github.com/sigstore/gh-action-sigstore-python/pull/106">#106</a>)</li> </ul> <h3>Removed</h3> <ul> <li>The following settings have been removed: <code>fulcio-url</code>, <code>rekor-url</code>, <code>ctfe</code>, <code>rekor-root-pubkey</code> (<a href="https://redirect.github.com/sigstore/gh-action-sigstore-python/pull/140">#140</a>)</li> <li>The following output settings have been removed: <code>signature</code>, <code>certificate</code>, <code>bundle</code> (<a href="https://redirect.github.com/sigstore/gh-action-sigstore-python/pull/146">#146</a>)</li> </ul> <h3>Changed</h3> <ul> <li> <p><code>inputs</code> is now parsed according to POSIX shell lexing rules, improving the action's consistency when used with filenames containing whitespace or other significant characters (<a href="https://redirect.github.com/sigstore/gh-action-sigstore-python/pull/104">#104</a>)</p> </li> <li> <p><code>inputs</code> is now optional <em>if</em> <code>release-signing-artifacts</code> is true <em>and</em> the action's event is a <code>release</code> event. 
In this case, the action takes no explicit inputs, but signs the source archives already attached to the associated release (<a href="https://redirect.github.com/sigstore/gh-action-sigstore-python/pull/110">#110</a>)</p> </li> <li> <p>The default suffix has changed from <code>.sigstore</code> to <code>.sigstore.json</code>, per Sigstore's client specification (<a href="https://redirect.github.com/sigstore/gh-action-sigstore-python/pull/140">#140</a>)</p> </li> <li> <p><code>release-signing-artifacts</code> now defaults to <code>true</code> (<a href="https://redirect.github.com/sigstore/gh-action-sigstore-python/pull/142">#142</a>)</p> </li> </ul> <h3>Fixed</h3> <ul> <li> <p>The <code>release-signing-artifacts</code> setting no longer causes a hard error when used under the incorrect event (<a href="https://redirect.github.com/sigstore/gh-action-sigstore-python/pull/103">#103</a>)</p> </li> <li> <p>Various deprecations present in <code>sigstore-python</code>'s 2.x series have been resolved (<a href="https://redirect.github.com/sigstore/gh-action-sigstore-python/pull/140">#140</a>)</p> </li> <li> <p>This workflow now supports CI runners that use PEP 668 to constrain global package prefixes (<a href="https://redirect.github.com/sigstore/gh-action-sigstore-python/pull/145">#145</a>)</p> </li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/sigstore/gh-action-sigstore-python/blob/main/CHANGELOG.md">sigstore/gh-action-sigstore-python's changelog</a>.</em></p> <blockquote> <h2>[3.0.0]</h2> <h3>Added</h3> <ul> <li><code>inputs</code> now allows recursive globbing with <code>**</code> (<a href="https://redirect.github.com/sigstore/gh-action-sigstore-python/pull/106">#106</a>)</li> </ul> <h3>Removed</h3> <ul> <li>The following settings have been removed: <code>fulcio-url</code>, <code>rekor-url</code>, <code>ctfe</code>, <code>rekor-root-pubkey</code> (<a href="https://redirect.github.com/sigstore/gh-action-sigstore-python/pull/140">#140</a>)</li> <li>The following output settings have been removed: <code>signature</code>, <code>certificate</code>, <code>bundle</code> (<a href="https://redirect.github.com/sigstore/gh-action-sigstore-python/pull/146">#146</a>)</li> </ul> <h3>Changed</h3> <ul> <li> <p><code>inputs</code> is now parsed according to POSIX shell lexing rules, improving the action's consistency when used with filenames containing whitespace or other significant characters (<a href="https://redirect.github.com/sigstore/gh-action-sigstore-python/pull/104">#104</a>)</p> </li> <li> <p><code>inputs</code> is now optional <em>if</em> <code>release-signing-artifacts</code> is true <em>and</em> the action's event is a <code>release</code> event. 
In this case, the action takes no explicit inputs, but signs the source archives already attached to the associated release (<a href="https://redirect.github.com/sigstore/gh-action-sigstore-python/pull/110">#110</a>)</p> </li> <li> <p>The default suffix has changed from <code>.sigstore</code> to <code>.sigstore.json</code>, per Sigstore's client specification (<a href="https://redirect.github.com/sigstore/gh-action-sigstore-python/pull/140">#140</a>)</p> </li> <li> <p><code>release-signing-artifacts</code> now defaults to <code>true</code> (<a href="https://redirect.github.com/sigstore/gh-action-sigstore-python/pull/142">#142</a>)</p> </li> </ul> <h3>Fixed</h3> <ul> <li> <p>The <code>release-signing-artifacts</code> setting no longer causes a hard error when used under the incorrect event (<a href="https://redirect.github.com/sigstore/gh-action-sigstore-python/pull/103">#103</a>)</p> </li> <li> <p>Various deprecations present in <code>sigstore-python</code>'s 2.x series have been resolved (<a href="https://redirect.github.com/sigstore/gh-action-sigstore-python/pull/140">#140</a>)</p> </li> <li> <p>This workflow now supports CI runners that use PEP 668 to constrain global package prefixes (<a href="https://redirect.github.com/sigstore/gh-action-sigstore-python/pull/145">#145</a>)</p> </li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/sigstore/gh-action-sigstore-python/commit/f514d46b907ebcd5bedc05145c03b69c1edd8b46"><code>f514d46</code></a> Prep 3.0.0 (<a href="https://redirect.github.com/sigstore/gh-action-sigstore-python/issues/143">#143</a>)</li> <li><a href="https://github.com/sigstore/gh-action-sigstore-python/commit/da238ad4806ad4bceff0a421e715ba34c3c4f962"><code>da238ad</code></a> Cleanup workflows (<a href="https://redirect.github.com/sigstore/gh-action-sigstore-python/issues/148">#148</a>)</li> <li><a href="https://github.com/sigstore/gh-action-sigstore-python/commit/551a497f0abe7bcba261fd45a195f3d17eebb0c0"><code>551a497</code></a> action: remove old output settings (<a href="https://redirect.github.com/sigstore/gh-action-sigstore-python/issues/146">#146</a>)</li> <li><a href="https://github.com/sigstore/gh-action-sigstore-python/commit/16fbe9a8d335cfde2d487c8c459707abdd1c3704"><code>16fbe9a</code></a> action: flip <code>release-signing-artifacts</code> (<a href="https://redirect.github.com/sigstore/gh-action-sigstore-python/issues/142">#142</a>)</li> <li><a href="https://github.com/sigstore/gh-action-sigstore-python/commit/1ddeb829cc81aadc391a78096478d61db0dee7e6"><code>1ddeb82</code></a> action: use a venv to prevent PEP 668 errors (<a href="https://redirect.github.com/sigstore/gh-action-sigstore-python/issues/145">#145</a>)</li> <li><a href="https://github.com/sigstore/gh-action-sigstore-python/commit/94661007ff419d4795b935732494905162e79738"><code>9466100</code></a> requirements: sigstore ~3.0 (<a href="https://redirect.github.com/sigstore/gh-action-sigstore-python/issues/140">#140</a>)</li> <li><a href="https://github.com/sigstore/gh-action-sigstore-python/commit/26de7459ab0625282c11ecbcf6e65941b2886b09"><code>26de745</code></a> schedule-selftest: reduce nagging (<a href="https://redirect.github.com/sigstore/gh-action-sigstore-python/issues/134">#134</a>)</li> <li><a 
href="https://github.com/sigstore/gh-action-sigstore-python/commit/4dde77f8178a041d4cd24f34a5624231b525513d"><code>4dde77f</code></a> build(deps): bump the actions group with 1 update (<a href="https://redirect.github.com/sigstore/gh-action-sigstore-python/issues/111">#111</a>)</li> <li><a href="https://github.com/sigstore/gh-action-sigstore-python/commit/08a568c3d1b0d7483cb913510a741887d37c57e0"><code>08a568c</code></a> Allow empty inputs with release artifacts (<a href="https://redirect.github.com/sigstore/gh-action-sigstore-python/issues/110">#110</a>)</li> <li><a href="https://github.com/sigstore/gh-action-sigstore-python/commit/8579d4832209d59081f278b17073a30dffc5da9a"><code>8579d48</code></a> build(deps): bump the actions group with 1 update (<a href="https://redirect.github.com/sigstore/gh-action-sigstore-python/issues/107">#107</a>)</li> <li>Additional commits viewable in <a href="https://github.com/sigstore/gh-action-sigstore-python/compare/v2.1.1...v3.0.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=sigstore/gh-action-sigstore-python&package-manager=github_actions&previous-version=2.1.1&new-version=3.0.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 0eb640794f1..79211d42419 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -406,7 +406,7 @@ jobs: uses: pypa/gh-action-pypi-publish@release/v1 - name: Sign the dists with Sigstore - uses: sigstore/gh-action-sigstore-python@v2.1.1 + 
uses: sigstore/gh-action-sigstore-python@v3.0.0 with: inputs: >- ./dist/*.tar.gz From 1caebc90a9949f71fd057513d9dc905907dd7b15 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Wed, 17 Jul 2024 15:15:57 +0100 Subject: [PATCH 0230/1511] Add a Request.wait_for_disconnection() method (#4200) (#8504) Co-authored-by: Andrew Svetlov <andrew.svetlov@gmail.com> Co-authored-by: Gustavo J. A. M. Carneiro <gjcarneiro@gmail.com> Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/2492.feature | 1 + aiohttp/web_protocol.py | 1 + aiohttp/web_request.py | 19 +++++++++++++++++++ docs/web_reference.rst | 12 ++++++++++++ 4 files changed, 33 insertions(+) create mode 100644 CHANGES/2492.feature diff --git a/CHANGES/2492.feature b/CHANGES/2492.feature new file mode 100644 index 00000000000..5c98dbbbcf2 --- /dev/null +++ b/CHANGES/2492.feature @@ -0,0 +1 @@ +Add a Request.wait_for_disconnection() method, as means of allowing request handlers to be notified of premature client disconnections. diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index f083b13eb0f..88df4b31d24 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -609,6 +609,7 @@ async def finish_response( can get exception information. Returns True if the client disconnects prematurely. 
""" + request._finish() if self._request_parser is not None: self._request_parser.set_upgraded(False) self._upgrade = False diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index 7d1694584ea..d059a166884 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -19,6 +19,7 @@ MutableMapping, Optional, Pattern, + Set, Tuple, Union, cast, @@ -49,6 +50,7 @@ reify, sentinel, set_exception, + set_result, ) from .http_parser import RawRequestMessage from .http_writer import HttpVersion @@ -144,6 +146,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin): "_loop", "_transport_sslcontext", "_transport_peername", + "_disconnection_waiters", ] ) @@ -191,6 +194,7 @@ def __init__( self._task = task self._client_max_size = client_max_size self._loop = loop + self._disconnection_waiters: Set[asyncio.Future[None]] = set() transport = self._protocol.transport assert transport is not None @@ -818,6 +822,21 @@ async def _prepare_hook(self, response: StreamResponse) -> None: def _cancel(self, exc: BaseException) -> None: set_exception(self._payload, exc) + for fut in self._disconnection_waiters: + set_result(fut, None) + + def _finish(self) -> None: + for fut in self._disconnection_waiters: + fut.cancel() + + async def wait_for_disconnection(self) -> None: + loop = asyncio.get_event_loop() + fut = loop.create_future() # type: asyncio.Future[None] + self._disconnection_waiters.add(fut) + try: + await fut + finally: + self._disconnection_waiters.remove(fut) class Request(BaseRequest): diff --git a/docs/web_reference.rst b/docs/web_reference.rst index f96fd59f56a..ddd5a3c264c 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -510,6 +510,18 @@ and :ref:`aiohttp-web-signals` handlers. required work will be processed by :mod:`aiohttp.web` internal machinery. + .. 
method:: wait_for_disconnection() + + Returns when the connection that sent this request closes + + If there is no client disconnection during request handling, this + coroutine gets cancelled automatically at the end of this request being + handled. + + This can be used in handlers as a means of receiving a notification of + premature client disconnection. + + .. versionadded:: 3.10 .. class:: Request From c12a143c82beb492acc5885fb12d206bcba725f3 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Wed, 17 Jul 2024 09:22:46 -0500 Subject: [PATCH 0231/1511] [PR #8482/62173be backport][3.10] Fix incorrect rejection of ws:// and wss:// urls (#8511) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: pre-commit-ci[bot] Co-authored-by: Sviatoslav Sydorenko (Святослав Сидоренко) Co-authored-by: J. Nick Koston <nick@koston.org> Co-authored-by: Sam Bull <aa6bs0@sambull.org> Co-authored-by: AraHaan <seandhunt_7@yahoo.com> --- CHANGES/8481.bugfix.rst | 2 ++ aiohttp/client.py | 4 ++- tests/conftest.py | 31 +++++++++++++++++++- tests/test_client_session.py | 56 +++++++++++++++++++++++++++++++++++- tests/test_client_ws.py | 18 ++---------- 5 files changed, 92 insertions(+), 19 deletions(-) create mode 100644 CHANGES/8481.bugfix.rst diff --git a/CHANGES/8481.bugfix.rst b/CHANGES/8481.bugfix.rst new file mode 100644 index 00000000000..b185780174e --- /dev/null +++ b/CHANGES/8481.bugfix.rst @@ -0,0 +1,2 @@ +Fixed the incorrect rejection of ``ws://`` and ``wss://`` urls +-- by :user:` AraHaan`. 
diff --git a/aiohttp/client.py b/aiohttp/client.py index d47d0facc27..b2ee5b40604 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -211,6 +211,8 @@ class ClientTimeout: # https://www.rfc-editor.org/rfc/rfc9110#section-9.2.2 IDEMPOTENT_METHODS = frozenset({"GET", "HEAD", "OPTIONS", "TRACE", "PUT", "DELETE"}) HTTP_SCHEMA_SET = frozenset({"http", "https", ""}) +WS_SCHEMA_SET = frozenset({"ws", "wss"}) +ALLOWED_PROTOCOL_SCHEMA_SET = HTTP_SCHEMA_SET | WS_SCHEMA_SET _RetType = TypeVar("_RetType") _CharsetResolver = Callable[[ClientResponse, bytes], str] @@ -505,7 +507,7 @@ async def _request( except ValueError as e: raise InvalidUrlClientError(str_or_url) from e - if url.scheme not in HTTP_SCHEMA_SET: + if url.scheme not in ALLOWED_PROTOCOL_SCHEMA_SET: raise NonHttpUrlClientError(url) skip_headers = set(self._skip_auto_headers) diff --git a/tests/conftest.py b/tests/conftest.py index fcdb482a59f..1cb64b3a6f8 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,16 +1,19 @@ import asyncio +import base64 import os import socket import ssl import sys -from hashlib import md5, sha256 +from hashlib import md5, sha1, sha256 from pathlib import Path from tempfile import TemporaryDirectory +from typing import Any from unittest import mock from uuid import uuid4 import pytest +from aiohttp.http import WS_KEY from aiohttp.test_utils import loop_context try: @@ -168,6 +171,17 @@ def pipe_name(): return name +@pytest.fixture +def create_mocked_conn(loop: Any): + def _proto_factory(conn_closing_result=None, **kwargs): + proto = mock.Mock(**kwargs) + proto.closed = loop.create_future() + proto.closed.set_result(conn_closing_result) + return proto + + yield _proto_factory + + @pytest.fixture def selector_loop(): policy = asyncio.WindowsSelectorEventLoopPolicy() @@ -208,3 +222,18 @@ def start_connection(): spec_set=True, ) as start_connection_mock: yield start_connection_mock + + +@pytest.fixture +def key_data(): + return os.urandom(16) + + +@pytest.fixture +def 
key(key_data: Any): + return base64.b64encode(key_data) + + +@pytest.fixture +def ws_key(key: Any): + return base64.b64encode(sha1(key + WS_KEY).digest()).decode() diff --git a/tests/test_client_session.py b/tests/test_client_session.py index 416b6bbce5d..52b4cb2e1c9 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -471,7 +471,61 @@ async def create_connection(req, traces, timeout): c.__del__() -async def test_cookie_jar_usage(loop, aiohttp_client) -> None: +@pytest.mark.parametrize("protocol", ["http", "https", "ws", "wss"]) +async def test_ws_connect_allowed_protocols( + create_session: Any, + create_mocked_conn: Any, + protocol: str, + ws_key: Any, + key_data: Any, +) -> None: + resp = mock.create_autospec(aiohttp.ClientResponse) + resp.status = 101 + resp.headers = { + hdrs.UPGRADE: "websocket", + hdrs.CONNECTION: "upgrade", + hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, + } + resp.url = URL(f"{protocol}://example.com") + resp.cookies = SimpleCookie() + resp.start = mock.AsyncMock() + + req = mock.create_autospec(aiohttp.ClientRequest, spec_set=True) + req_factory = mock.Mock(return_value=req) + req.send = mock.AsyncMock(return_value=resp) + + session = await create_session(request_class=req_factory) + + connections = [] + original_connect = session._connector.connect + + async def connect(req, traces, timeout): + conn = await original_connect(req, traces, timeout) + connections.append(conn) + return conn + + async def create_connection(req, traces, timeout): + return create_mocked_conn() + + connector = session._connector + with mock.patch.object(connector, "connect", connect), mock.patch.object( + connector, "_create_connection", create_connection + ), mock.patch.object(connector, "_release"), mock.patch( + "aiohttp.client.os" + ) as m_os: + m_os.urandom.return_value = key_data + await session.ws_connect(f"{protocol}://example.com") + + # normally called during garbage collection. 
triggers an exception + # if the connection wasn't already closed + for c in connections: + c.close() + del c + + await session.close() + + +async def test_cookie_jar_usage(loop: Any, aiohttp_client: Any) -> None: req_url = None jar = mock.Mock() diff --git a/tests/test_client_ws.py b/tests/test_client_ws.py index f0b7757e420..4be404f7752 100644 --- a/tests/test_client_ws.py +++ b/tests/test_client_ws.py @@ -2,6 +2,7 @@ import base64 import hashlib import os +from typing import Any from unittest import mock import pytest @@ -13,22 +14,7 @@ from aiohttp.test_utils import make_mocked_coro -@pytest.fixture -def key_data(): - return os.urandom(16) - - -@pytest.fixture -def key(key_data): - return base64.b64encode(key_data) - - -@pytest.fixture -def ws_key(key): - return base64.b64encode(hashlib.sha1(key + WS_KEY).digest()).decode() - - -async def test_ws_connect(ws_key, loop, key_data) -> None: +async def test_ws_connect(ws_key: Any, loop: Any, key_data: Any) -> None: resp = mock.Mock() resp.status = 101 resp.headers = { From 266559cb1e3bd3c28a8ece53f5f77a189229e7fe Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Wed, 17 Jul 2024 10:47:37 -0500 Subject: [PATCH 0232/1511] [PR #5278/0f0c3759 backport][3.10] Drop Python 3.6 support (#8512) Co-authored-by: Andrew Svetlov <andrew.svetlov@gmail.com> --- aiohttp/abc.py | 3 +-- aiohttp/client.py | 3 +-- aiohttp/connector.py | 4 ++-- aiohttp/helpers.py | 39 +++------------------------------ aiohttp/pytest_plugin.py | 4 ++-- aiohttp/resolver.py | 3 +-- aiohttp/web_server.py | 3 +-- tests/test_client_functional.py | 4 +--- tests/test_connector.py | 3 ++- tests/test_helpers.py | 12 ---------- 10 files changed, 14 insertions(+), 64 deletions(-) diff --git a/aiohttp/abc.py b/aiohttp/abc.py index d9e7725eab2..3fb024048a4 100644 --- a/aiohttp/abc.py +++ b/aiohttp/abc.py @@ -21,7 +21,6 @@ from multidict import CIMultiDict from yarl import URL -from .helpers import get_running_loop from .typedefs import LooseCookies if TYPE_CHECKING: @@ -170,7 +169,7 @@ class AbstractCookieJar(Sized, IterableBase): """Abstract Cookie Jar.""" def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: - self._loop = get_running_loop(loop) + self._loop = loop or asyncio.get_running_loop() @abstractmethod def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None: diff --git a/aiohttp/client.py b/aiohttp/client.py index b2ee5b40604..2541addcd06 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -88,7 +88,6 @@ TimeoutHandle, ceil_timeout, get_env_proxy_for_url, - get_running_loop, method_must_be_empty_body, sentinel, strip_auth_from_url, @@ -294,7 +293,7 @@ def __init__( if connector is not None: loop = connector._loop - loop = get_running_loop(loop) + loop = loop or asyncio.get_running_loop() if base_url is None or isinstance(base_url, URL): self._base_url: Optional[URL] = base_url diff --git a/aiohttp/connector.py b/aiohttp/connector.py index d62e245cb5f..cd89ea641d3 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -49,7 +49,7 @@ ) from .client_proto import 
ResponseHandler from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params -from .helpers import ceil_timeout, get_running_loop, is_ip_address, noop, sentinel +from .helpers import ceil_timeout, is_ip_address, noop, sentinel from .locks import EventResultOrError from .resolver import DefaultResolver @@ -231,7 +231,7 @@ def __init__( if keepalive_timeout is sentinel: keepalive_timeout = 15.0 - loop = get_running_loop(loop) + loop = loop or asyncio.get_running_loop() self._timeout_ceil_threshold = timeout_ceil_threshold self._closed = False diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index afaa0d7e3b8..b3cc1b6b6e6 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -14,7 +14,6 @@ import re import sys import time -import warnings import weakref from collections import namedtuple from contextlib import suppress @@ -52,7 +51,7 @@ from yarl import URL from . import hdrs -from .log import client_logger, internal_logger +from .log import client_logger if sys.version_info >= (3, 11): import asyncio as async_timeout @@ -287,38 +286,6 @@ def proxies_from_env() -> Dict[str, ProxyInfo]: return ret -def current_task( - loop: Optional[asyncio.AbstractEventLoop] = None, -) -> "Optional[asyncio.Task[Any]]": - return asyncio.current_task(loop=loop) - - -def get_running_loop( - loop: Optional[asyncio.AbstractEventLoop] = None, -) -> asyncio.AbstractEventLoop: - if loop is None: - loop = asyncio.get_event_loop() - if not loop.is_running(): - warnings.warn( - "The object should be created within an async function", - DeprecationWarning, - stacklevel=3, - ) - if loop.get_debug(): - internal_logger.warning( - "The object should be created within an async function", stack_info=True - ) - return loop - - -def isasyncgenfunction(obj: Any) -> bool: - func = getattr(inspect, "isasyncgenfunction", None) - if func is not None: - return func(obj) # type: ignore[no-any-return] - else: - return False - - def get_env_proxy_for_url(url: URL) -> Tuple[URL, 
Optional[BasicAuth]]: """Get a permitted proxy for the given URL from the env.""" if url.host is not None and proxy_bypass(url.host): @@ -709,7 +676,7 @@ def assert_timeout(self) -> None: raise asyncio.TimeoutError from None def __enter__(self) -> BaseTimerContext: - task = current_task(loop=self._loop) + task = asyncio.current_task(loop=self._loop) if task is None: raise RuntimeError( @@ -749,7 +716,7 @@ def ceil_timeout( if delay is None or delay <= 0: return async_timeout.timeout(None) - loop = get_running_loop() + loop = asyncio.get_running_loop() now = loop.time() when = now + delay if delay > ceil_threshold: diff --git a/aiohttp/pytest_plugin.py b/aiohttp/pytest_plugin.py index 5754747bf48..6225fdf2be0 100644 --- a/aiohttp/pytest_plugin.py +++ b/aiohttp/pytest_plugin.py @@ -1,11 +1,11 @@ import asyncio import contextlib +import inspect import warnings from typing import Any, Awaitable, Callable, Dict, Iterator, Optional, Type, Union import pytest -from aiohttp.helpers import isasyncgenfunction from aiohttp.web import Application from .test_utils import ( @@ -57,7 +57,7 @@ def pytest_fixture_setup(fixturedef): # type: ignore[no-untyped-def] """ func = fixturedef.func - if isasyncgenfunction(func): + if inspect.isasyncgenfunction(func): # async generator fixture is_async_gen = True elif asyncio.iscoroutinefunction(func): diff --git a/aiohttp/resolver.py b/aiohttp/resolver.py index 2ac204a4e32..3d14d2fcb16 100644 --- a/aiohttp/resolver.py +++ b/aiohttp/resolver.py @@ -4,7 +4,6 @@ from typing import Any, Dict, List, Optional, Tuple, Type, Union from .abc import AbstractResolver, ResolveResult -from .helpers import get_running_loop __all__ = ("ThreadedResolver", "AsyncResolver", "DefaultResolver") @@ -30,7 +29,7 @@ class ThreadedResolver(AbstractResolver): """ def __init__(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: - self._loop = get_running_loop(loop) + self._loop = loop or asyncio.get_running_loop() async def resolve( self, host: str, port: 
int = 0, family: socket.AddressFamily = socket.AF_INET diff --git a/aiohttp/web_server.py b/aiohttp/web_server.py index 3cd31c8ff10..f6bbdb89a77 100644 --- a/aiohttp/web_server.py +++ b/aiohttp/web_server.py @@ -4,7 +4,6 @@ from typing import Any, Awaitable, Callable, Dict, List, Optional # noqa from .abc import AbstractStreamWriter -from .helpers import get_running_loop from .http_parser import RawRequestMessage from .streams import StreamReader from .web_protocol import RequestHandler, _RequestFactory, _RequestHandler @@ -23,7 +22,7 @@ def __init__( loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs: Any ) -> None: - self._loop = get_running_loop(loop) + self._loop = loop or asyncio.get_running_loop() self._connections: Dict[RequestHandler, asyncio.Transport] = {} self._kwargs = kwargs self.requests_count = 0 diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 4a24196a28e..63d5c67e51e 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -579,8 +579,6 @@ async def handler(request): async def test_format_task_get(aiohttp_server) -> None: - loop = asyncio.get_event_loop() - async def handler(request): return web.Response(body=b"OK") @@ -588,7 +586,7 @@ async def handler(request): app.router.add_route("GET", "/", handler) server = await aiohttp_server(app) client = aiohttp.ClientSession() - task = loop.create_task(client.get(server.make_url("/"))) + task = asyncio.create_task(client.get(server.make_url("/"))) assert f"{task}".startswith("<Task pending") resp = await task resp.close() diff --git a/tests/test_connector.py b/tests/test_connector.py index 58f6c6a116d..2065adf7414 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -2295,7 +2295,8 @@ async def handler(request): session = aiohttp.ClientSession(connector=conn) url = srv.make_url("/") - with pytest.raises(aiohttp.ClientConnectorCertificateError) as ctx: + err = aiohttp.ClientConnectorCertificateError + with 
pytest.raises(err) as ctx: await session.get(url) assert isinstance(ctx.value, aiohttp.ClientConnectorCertificateError) diff --git a/tests/test_helpers.py b/tests/test_helpers.py index b59528d3468..67af32dc3be 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -607,18 +607,6 @@ def test_proxies_from_env_http_with_auth(url_input, expected_scheme) -> None: assert proxy_auth.encoding == "latin1" -# ------------ get_running_loop --------------------------------- - - -def test_get_running_loop_not_running(loop) -> None: - with pytest.warns(DeprecationWarning): - helpers.get_running_loop() - - -async def test_get_running_loop_ok(loop) -> None: - assert helpers.get_running_loop() is loop - - # --------------------- get_env_proxy_for_url ------------------------------ From 776ebc6530eb41212feb8cbc4176d303830b3566 Mon Sep 17 00:00:00 2001 From: Arcadiy Ivanov <arcadiy@ivanov.biz> Date: Wed, 17 Jul 2024 12:09:37 -0400 Subject: [PATCH 0233/1511] [PR #8445 backport][3.10] Fixes read_timeout on WS connection not respecting ws_connect's timeouts #8444 (#8447) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/8444.bugfix | 2 + CONTRIBUTORS.txt | 1 + aiohttp/client.py | 10 ++++ aiohttp/client_proto.py | 8 +++ tests/test_client_ws.py | 109 ++++++++++++++++++++++++++++++++++++++++ 5 files changed, 130 insertions(+) create mode 100644 CHANGES/8444.bugfix diff --git a/CHANGES/8444.bugfix b/CHANGES/8444.bugfix new file mode 100644 index 00000000000..774e13064a7 --- /dev/null +++ b/CHANGES/8444.bugfix @@ -0,0 +1,2 @@ +Fix ``ws_connect`` not respecting ``receive_timeout`` on WS(S) connection. +-- by :user:`arcivanov`.
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 4442664118f..202193375dd 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -46,6 +46,7 @@ Anes Abismail Antoine Pietri Anton Kasyanov Anton Zhdan-Pushkin +Arcadiy Ivanov Arseny Timoniq Artem Yushkovskiy Arthur Darcet diff --git a/aiohttp/client.py b/aiohttp/client.py index 2541addcd06..c70ad65c59e 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -1009,6 +1009,16 @@ async def _ws_connect( assert conn is not None conn_proto = conn.protocol assert conn_proto is not None + + # For WS connection the read_timeout must be either receive_timeout or greater + # None == no timeout, i.e. infinite timeout, so None is the max timeout possible + if receive_timeout is None: + # Reset regardless + conn_proto.read_timeout = receive_timeout + elif conn_proto.read_timeout is not None: + # If read_timeout was set check which wins + conn_proto.read_timeout = max(receive_timeout, conn_proto.read_timeout) + transport = conn.transport assert transport is not None reader: FlowControlDataQueue[WSMessage] = FlowControlDataQueue( diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index 28e9d3cd9e5..f8c83240209 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -224,6 +224,14 @@ def _reschedule_timeout(self) -> None: def start_timeout(self) -> None: self._reschedule_timeout() + @property + def read_timeout(self) -> Optional[float]: + return self._read_timeout + + @read_timeout.setter + def read_timeout(self, read_timeout: Optional[float]) -> None: + self._read_timeout = read_timeout + def _on_read_timeout(self) -> None: exc = SocketTimeoutError("Timeout on reading data from socket") self.set_exception(exc) diff --git a/tests/test_client_ws.py b/tests/test_client_ws.py index 4be404f7752..ebc9d910c1a 100644 --- a/tests/test_client_ws.py +++ b/tests/test_client_ws.py @@ -23,6 +23,7 @@ async def test_ws_connect(ws_key: Any, loop: Any, key_data: Any) -> None: hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, 
hdrs.SEC_WEBSOCKET_PROTOCOL: "chat", } + resp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: m_os.urandom.return_value = key_data @@ -38,6 +39,97 @@ async def test_ws_connect(ws_key: Any, loop: Any, key_data: Any) -> None: assert hdrs.ORIGIN not in m_req.call_args[1]["headers"] +async def test_ws_connect_read_timeout_is_reset_to_inf( + ws_key: Any, loop: Any, key_data: Any +) -> None: + resp = mock.Mock() + resp.status = 101 + resp.headers = { + hdrs.UPGRADE: "websocket", + hdrs.CONNECTION: "upgrade", + hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, + hdrs.SEC_WEBSOCKET_PROTOCOL: "chat", + } + resp.connection.protocol.read_timeout = 0.5 + with mock.patch("aiohttp.client.os") as m_os, mock.patch( + "aiohttp.client.ClientSession.request" + ) as m_req: + m_os.urandom.return_value = key_data + m_req.return_value = loop.create_future() + m_req.return_value.set_result(resp) + + res = await aiohttp.ClientSession().ws_connect( + "http://test.org", protocols=("t1", "t2", "chat") + ) + + assert isinstance(res, client.ClientWebSocketResponse) + assert res.protocol == "chat" + assert hdrs.ORIGIN not in m_req.call_args[1]["headers"] + assert resp.connection.protocol.read_timeout is None + + +async def test_ws_connect_read_timeout_stays_inf( + ws_key: Any, loop: Any, key_data: Any +) -> None: + resp = mock.Mock() + resp.status = 101 + resp.headers = { + hdrs.UPGRADE: "websocket", + hdrs.CONNECTION: "upgrade", + hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, + hdrs.SEC_WEBSOCKET_PROTOCOL: "chat", + } + resp.connection.protocol.read_timeout = None + with mock.patch("aiohttp.client.os") as m_os, mock.patch( + "aiohttp.client.ClientSession.request" + ) as m_req: + m_os.urandom.return_value = key_data + m_req.return_value = loop.create_future() + m_req.return_value.set_result(resp) + + res = await aiohttp.ClientSession().ws_connect( + "http://test.org", + protocols=("t1", "t2", "chat"), + 
receive_timeout=0.5, + ) + + assert isinstance(res, client.ClientWebSocketResponse) + assert res.protocol == "chat" + assert hdrs.ORIGIN not in m_req.call_args[1]["headers"] + assert resp.connection.protocol.read_timeout is None + + +async def test_ws_connect_read_timeout_reset_to_max( + ws_key: Any, loop: Any, key_data: Any +) -> None: + resp = mock.Mock() + resp.status = 101 + resp.headers = { + hdrs.UPGRADE: "websocket", + hdrs.CONNECTION: "upgrade", + hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, + hdrs.SEC_WEBSOCKET_PROTOCOL: "chat", + } + resp.connection.protocol.read_timeout = 0.5 + with mock.patch("aiohttp.client.os") as m_os, mock.patch( + "aiohttp.client.ClientSession.request" + ) as m_req: + m_os.urandom.return_value = key_data + m_req.return_value = loop.create_future() + m_req.return_value.set_result(resp) + + res = await aiohttp.ClientSession().ws_connect( + "http://test.org", + protocols=("t1", "t2", "chat"), + receive_timeout=1.0, + ) + + assert isinstance(res, client.ClientWebSocketResponse) + assert res.protocol == "chat" + assert hdrs.ORIGIN not in m_req.call_args[1]["headers"] + assert resp.connection.protocol.read_timeout == 1.0 + + async def test_ws_connect_with_origin(key_data, loop) -> None: resp = mock.Mock() resp.status = 403 @@ -68,6 +160,7 @@ async def test_ws_connect_with_params(ws_key, loop, key_data) -> None: hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, hdrs.SEC_WEBSOCKET_PROTOCOL: "chat", } + resp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: m_os.urandom.return_value = key_data @@ -93,6 +186,7 @@ def read(self, decode=False): hdrs.CONNECTION: "upgrade", hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, } + resp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: m_os.urandom.return_value = key_data @@ -215,6 +309,7 @@ async def mock_get(*args, **kwargs): 
hdrs.SEC_WEBSOCKET_ACCEPT: accept, hdrs.SEC_WEBSOCKET_PROTOCOL: "chat", } + resp.connection.protocol.read_timeout = None return resp with mock.patch("aiohttp.client.os") as m_os: @@ -245,6 +340,7 @@ async def test_close(loop, ws_key, key_data) -> None: hdrs.CONNECTION: "upgrade", hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, } + resp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.WebSocketWriter") as WebSocketWriter: with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: @@ -285,6 +381,7 @@ async def test_close_eofstream(loop, ws_key, key_data) -> None: hdrs.CONNECTION: "upgrade", hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, } + resp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.WebSocketWriter") as WebSocketWriter: with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: @@ -315,6 +412,7 @@ async def test_close_exc(loop, ws_key, key_data) -> None: hdrs.CONNECTION: "upgrade", hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, } + resp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.WebSocketWriter") as WebSocketWriter: with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: @@ -347,6 +445,7 @@ async def test_close_exc2(loop, ws_key, key_data) -> None: hdrs.CONNECTION: "upgrade", hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, } + resp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.WebSocketWriter") as WebSocketWriter: with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: @@ -381,6 +480,7 @@ async def test_send_data_after_close(ws_key, key_data, loop) -> None: hdrs.CONNECTION: "upgrade", hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, } + resp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: 
m_os.urandom.return_value = key_data @@ -409,6 +509,7 @@ async def test_send_data_type_errors(ws_key, key_data, loop) -> None: hdrs.CONNECTION: "upgrade", hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, } + resp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.WebSocketWriter") as WebSocketWriter: with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: @@ -437,6 +538,7 @@ async def test_reader_read_exception(ws_key, key_data, loop) -> None: hdrs.CONNECTION: "upgrade", hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, } + hresp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.WebSocketWriter") as WebSocketWriter: with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: @@ -501,6 +603,7 @@ async def test_ws_connect_non_overlapped_protocols(ws_key, loop, key_data) -> No hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, hdrs.SEC_WEBSOCKET_PROTOCOL: "other,another", } + resp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: m_os.urandom.return_value = key_data @@ -523,6 +626,7 @@ async def test_ws_connect_non_overlapped_protocols_2(ws_key, loop, key_data) -> hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, hdrs.SEC_WEBSOCKET_PROTOCOL: "other,another", } + resp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: m_os.urandom.return_value = key_data @@ -547,6 +651,7 @@ async def test_ws_connect_deflate(loop, ws_key, key_data) -> None: hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, hdrs.SEC_WEBSOCKET_EXTENSIONS: "permessage-deflate", } + resp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: m_os.urandom.return_value = key_data @@ -570,6 +675,7 @@ async def 
test_ws_connect_deflate_per_message(loop, ws_key, key_data) -> None: hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, hdrs.SEC_WEBSOCKET_EXTENSIONS: "permessage-deflate", } + resp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.WebSocketWriter") as WebSocketWriter: with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: @@ -602,6 +708,7 @@ async def test_ws_connect_deflate_server_not_support(loop, ws_key, key_data) -> hdrs.CONNECTION: "upgrade", hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, } + resp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: m_os.urandom.return_value = key_data @@ -626,6 +733,7 @@ async def test_ws_connect_deflate_notakeover(loop, ws_key, key_data) -> None: hdrs.SEC_WEBSOCKET_EXTENSIONS: "permessage-deflate; " "client_no_context_takeover", } + resp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: m_os.urandom.return_value = key_data @@ -650,6 +758,7 @@ async def test_ws_connect_deflate_client_wbits(loop, ws_key, key_data) -> None: hdrs.SEC_WEBSOCKET_EXTENSIONS: "permessage-deflate; " "client_max_window_bits=10", } + resp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: m_os.urandom.return_value = key_data From 716099439fa98af644fab2f8cf86f78f5e1741ec Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 19 Jul 2024 00:39:00 +0200 Subject: [PATCH 0234/1511] [PR #8515/43b10dcf backport][3.10] Update sphinxcontrib-towncrier entry point @ conf (#8517) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Sviatoslav Sydorenko (Святослав Сидоренко) <sviat@redhat.com> --- docs/conf.py | 
2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/conf.py b/docs/conf.py index c834296ceeb..23ac3e426ec 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -59,7 +59,7 @@ "sphinx.ext.viewcode", # Third-party extensions: "sphinxcontrib.blockdiag", - "sphinxcontrib.towncrier", # provides `towncrier-draft-entries` directive + "sphinxcontrib.towncrier.ext", # provides `towncrier-draft-entries` directive ] From fe439394f993e681df999894ddf2c0802d9f24fa Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 20 Jul 2024 08:58:30 -0500 Subject: [PATCH 0235/1511] [PR #8513/bbe90e5d backport][3.10] Small cleanup to wss/ws fix (#8519) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/6722.feature | 2 +- CHANGES/8481.bugfix.rst | 2 -- CHANGES/8481.feature.rst | 1 + CHANGES/8482.feature.rst | 1 + tests/test_client_session.py | 2 +- 5 files changed, 4 insertions(+), 4 deletions(-) delete mode 100644 CHANGES/8481.bugfix.rst create mode 120000 CHANGES/8481.feature.rst create mode 120000 CHANGES/8482.feature.rst diff --git a/CHANGES/6722.feature b/CHANGES/6722.feature index 1dd253a0997..580efa5c5e2 100644 --- a/CHANGES/6722.feature +++ b/CHANGES/6722.feature @@ -9,4 +9,4 @@ are raised instead of :py:exc:`ValueError` or :py:exc:`~aiohttp.InvalidURL` when The :py:exc:`~aiohttp.InvalidURL` now exposes a ``description`` property with the text explanation of the error details. --- by :user:`setla` +-- by :user:`setla`, :user:`AraHaan`, and :user:`bdraco` diff --git a/CHANGES/8481.bugfix.rst b/CHANGES/8481.bugfix.rst deleted file mode 100644 index b185780174e..00000000000 --- a/CHANGES/8481.bugfix.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fixed the incorrect rejection of ``ws://`` and ``wss://`` urls --- by :user:` AraHaan`. 
diff --git a/CHANGES/8481.feature.rst b/CHANGES/8481.feature.rst new file mode 120000 index 00000000000..f569cd92882 --- /dev/null +++ b/CHANGES/8481.feature.rst @@ -0,0 +1 @@ +6722.feature \ No newline at end of file diff --git a/CHANGES/8482.feature.rst b/CHANGES/8482.feature.rst new file mode 120000 index 00000000000..f569cd92882 --- /dev/null +++ b/CHANGES/8482.feature.rst @@ -0,0 +1 @@ +6722.feature \ No newline at end of file diff --git a/tests/test_client_session.py b/tests/test_client_session.py index 52b4cb2e1c9..af602b485db 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -520,7 +520,7 @@ async def create_connection(req, traces, timeout): # if the connection wasn't already closed for c in connections: c.close() - del c + c.__del__() await session.close() From cb654a283a99344ff27ee08e51446cc4d7963eae Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 21 Jul 2024 09:47:27 -0500 Subject: [PATCH 0236/1511] [PR #8507/c9d09f11 backport][3.10] Remove blocking IO for static resources and refactor exception handling (#8521) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Sviatoslav Sydorenko (Святослав Сидоренко) Co-authored-by: J. Nick Koston <nick@koston.org> Co-authored-by: Steve Repsher <steverep@users.noreply.github.com> --- CHANGES/8507.bugfix.rst | 8 +++ aiohttp/web_urldispatcher.py | 86 +++++++++++++++++++-------------- tests/test_web_urldispatcher.py | 66 ++++++++++++++++++------- 3 files changed, 106 insertions(+), 54 deletions(-) create mode 100644 CHANGES/8507.bugfix.rst diff --git a/CHANGES/8507.bugfix.rst b/CHANGES/8507.bugfix.rst new file mode 100644 index 00000000000..9739536202d --- /dev/null +++ b/CHANGES/8507.bugfix.rst @@ -0,0 +1,8 @@ +Removed blocking I/O in the event loop for static resources and refactored +exception handling -- by :user:`steverep`. 
+ +File system calls when handling requests for static routes were moved to a +separate thread to potentially improve performance. Exception handling +was tightened in order to only return 403 Forbidden or 404 Not Found responses +for expected scenarios; 500 Internal Server Error would be returned for any +unknown errors. diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 7fbe70ba6a3..7eb934848cb 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -8,6 +8,7 @@ import keyword import os import re +import sys import warnings from contextlib import contextmanager from functools import wraps @@ -78,6 +79,12 @@ else: BaseDict = dict +CIRCULAR_SYMLINK_ERROR = ( + OSError + if sys.version_info < (3, 10) and sys.platform.startswith("win32") + else RuntimeError +) + YARL_VERSION: Final[Tuple[int, ...]] = tuple(map(int, yarl_version.split(".")[:2])) HTTP_METHOD_RE: Final[Pattern[str]] = re.compile( @@ -661,59 +668,66 @@ def __iter__(self) -> Iterator[AbstractRoute]: async def _handle(self, request: Request) -> StreamResponse: rel_url = request.match_info["filename"] + filename = Path(rel_url) + if filename.anchor: + # rel_url is an absolute name like + # /static/\\machine_name\c$ or /static/D:\path + # where the static dir is totally different + raise HTTPForbidden() + + unresolved_path = self._directory.joinpath(filename) + loop = asyncio.get_running_loop() + return await loop.run_in_executor( + None, self._resolve_path_to_response, unresolved_path + ) + + def _resolve_path_to_response(self, unresolved_path: Path) -> StreamResponse: + """Take the unresolved path and query the file system to form a response.""" + # Check for access outside the root directory. For follow symlinks, URI + # cannot traverse out, but symlinks can. Otherwise, no access outside + # root is permitted. 
try: - filename = Path(rel_url) - if filename.anchor: - # rel_url is an absolute name like - # /static/\\machine_name\c$ or /static/D:\path - # where the static dir is totally different - raise HTTPForbidden() - unresolved_path = self._directory.joinpath(filename) if self._follow_symlinks: normalized_path = Path(os.path.normpath(unresolved_path)) normalized_path.relative_to(self._directory) - filepath = normalized_path.resolve() + file_path = normalized_path.resolve() else: - filepath = unresolved_path.resolve() - filepath.relative_to(self._directory) - except (ValueError, FileNotFoundError) as error: - # relatively safe - raise HTTPNotFound() from error - except HTTPForbidden: - raise - except Exception as error: - # perm error or other kind! - request.app.logger.exception(error) + file_path = unresolved_path.resolve() + file_path.relative_to(self._directory) + except (ValueError, CIRCULAR_SYMLINK_ERROR) as error: + # ValueError for relative check; RuntimeError for circular symlink. raise HTTPNotFound() from error - # on opening a dir, load its contents if allowed - if filepath.is_dir(): - if self._show_index: - try: + # if path is a directory, return the contents if permitted. Note the + # directory check will raise if a segment is not readable. + try: + if file_path.is_dir(): + if self._show_index: return Response( - text=self._directory_as_html(filepath), content_type="text/html" + text=self._directory_as_html(file_path), + content_type="text/html", ) - except PermissionError: + else: raise HTTPForbidden() - else: - raise HTTPForbidden() - elif filepath.is_file(): - return FileResponse(filepath, chunk_size=self._chunk_size) - else: - raise HTTPNotFound + except PermissionError as error: + raise HTTPForbidden() from error + + # Not a regular file or does not exist. 
+ if not file_path.is_file(): + raise HTTPNotFound() - def _directory_as_html(self, filepath: Path) -> str: - # returns directory's index as html + return FileResponse(file_path, chunk_size=self._chunk_size) - # sanity check - assert filepath.is_dir() + def _directory_as_html(self, dir_path: Path) -> str: + """returns directory's index as html.""" + assert dir_path.is_dir() - relative_path_to_dir = filepath.relative_to(self._directory).as_posix() + relative_path_to_dir = dir_path.relative_to(self._directory).as_posix() index_of = f"Index of /{html_escape(relative_path_to_dir)}" h1 = f"<h1>{index_of}</h1>" index_list = [] - dir_index = filepath.iterdir() + dir_index = dir_path.iterdir() for _file in sorted(dir_index): # show file url as relative to static path rel_path = _file.relative_to(self._directory).as_posix() diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 04f2029ebaf..26453860977 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -2,7 +2,7 @@ import functools import pathlib import sys -from typing import Optional +from typing import Generator, Optional from unittest import mock from unittest.mock import MagicMock @@ -394,31 +394,61 @@ def sync_handler(request): assert route.handler.__doc__ == "Doc" -async def test_unauthorized_folder_access( - tmp_path: pathlib.Path, aiohttp_client: AiohttpClient +@pytest.mark.skipif( + sys.platform.startswith("win32"), reason="Cannot remove read access on Windows" +) +@pytest.mark.parametrize("file_request", ["", "my_file.txt"]) +async def test_static_directory_without_read_permission( + tmp_path: pathlib.Path, aiohttp_client: AiohttpClient, file_request: str ) -> None: - # Tests the unauthorized access to a folder of static file server. - # Try to list a folder content of static file server when server does not - # have permissions to do so for the folder. 
+ """Test static directory without read permission receives forbidden response.""" my_dir = tmp_path / "my_dir" my_dir.mkdir() + my_dir.chmod(0o000) app = web.Application() + app.router.add_static("/", str(tmp_path), show_index=True) + client = await aiohttp_client(app) - with mock.patch("pathlib.Path.__new__") as path_constructor: - path = MagicMock() - path.joinpath.return_value = path - path.resolve.return_value = path - path.iterdir.return_value.__iter__.side_effect = PermissionError() - path_constructor.return_value = path + r = await client.get(f"/{my_dir.name}/{file_request}") + assert r.status == 403 - # Register global static route: - app.router.add_static("/", str(tmp_path), show_index=True) - client = await aiohttp_client(app) - # Request the root of the static directory. - r = await client.get("/" + my_dir.name) - assert r.status == 403 +@pytest.mark.parametrize("file_request", ["", "my_file.txt"]) +async def test_static_directory_with_mock_permission_error( + monkeypatch: pytest.MonkeyPatch, + tmp_path: pathlib.Path, + aiohttp_client: AiohttpClient, + file_request: str, +) -> None: + """Test static directory with mock permission errors receives forbidden response.""" + my_dir = tmp_path / "my_dir" + my_dir.mkdir() + + real_iterdir = pathlib.Path.iterdir + real_is_dir = pathlib.Path.is_dir + + def mock_iterdir(self: pathlib.Path) -> Generator[pathlib.Path, None, None]: + if my_dir.samefile(self): + raise PermissionError() + return real_iterdir(self) + + def mock_is_dir(self: pathlib.Path) -> bool: + if my_dir.samefile(self.parent): + raise PermissionError() + return real_is_dir(self) + + monkeypatch.setattr("pathlib.Path.iterdir", mock_iterdir) + monkeypatch.setattr("pathlib.Path.is_dir", mock_is_dir) + + app = web.Application() + app.router.add_static("/", str(tmp_path), show_index=True) + client = await aiohttp_client(app) + + r = await client.get("/") + assert r.status == 200 + r = await client.get(f"/{my_dir.name}/{file_request}") + assert r.status 
== 403 async def test_access_symlink_loop( From 925d5a2004a2dbc7e6ba2bd27c6c4906e7af3946 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 21 Jul 2024 10:32:05 -0500 Subject: [PATCH 0237/1511] [PR #8510/38fc33b1 backport][3.10] Avoid scheduling the writer if it can finish synchronously (#8520) --- CHANGES/8510.misc.rst | 1 + aiohttp/client_reqrep.py | 24 ++++++++++++++++--- tests/test_client_request.py | 45 +++++++++++++++++++++++++++++------- 3 files changed, 59 insertions(+), 11 deletions(-) create mode 100644 CHANGES/8510.misc.rst diff --git a/CHANGES/8510.misc.rst b/CHANGES/8510.misc.rst new file mode 100644 index 00000000000..d0a90c7388f --- /dev/null +++ b/CHANGES/8510.misc.rst @@ -0,0 +1 @@ +When using Python 3.12 or later, the writer is no longer scheduled on the event loop if it can finish synchronously. Avoiding event loop scheduling reduces latency and improves performance. -- by :user:`bdraco`. diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index a5c711609a8..37d14e107fd 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -352,7 +352,12 @@ def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None: if self.__writer is not None: self.__writer.remove_done_callback(self.__reset_writer) self.__writer = writer - if writer is not None: + if writer is None: + return + if writer.done(): + # The writer is already done, so we can reset it immediately. + self.__reset_writer() + else: writer.add_done_callback(self.__reset_writer) def is_ssl(self) -> bool: @@ -721,9 +726,17 @@ async def send(self, conn: "Connection") -> "ClientResponse": self.method, path, v=self.version ) await writer.write_headers(status_line, self.headers) + coro = self.write_bytes(writer, conn) - self._writer = self.loop.create_task(self.write_bytes(writer, conn)) + if sys.version_info >= (3, 12): + # Optimization for Python 3.12, try to write + # bytes immediately to avoid having to schedule + # the task on the event loop. 
+ task = asyncio.Task(coro, loop=self.loop, eager_start=True) + else: + task = self.loop.create_task(coro) + self._writer = task response_class = self.response_class assert response_class is not None self.response = response_class( @@ -840,7 +853,12 @@ def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None: if self.__writer is not None: self.__writer.remove_done_callback(self.__reset_writer) self.__writer = writer - if writer is not None: + if writer is None: + return + if writer.done(): + # The writer is already done, so we can reset it immediately. + self.__reset_writer() + else: writer.add_done_callback(self.__reset_writer) @reify diff --git a/tests/test_client_request.py b/tests/test_client_request.py index 6084f685405..7d9f69b52f0 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -996,8 +996,15 @@ async def gen(): req = ClientRequest("POST", URL("http://python.org/"), data=gen(), loop=loop) assert req.chunked assert req.headers["TRANSFER-ENCODING"] == "chunked" + original_write_bytes = req.write_bytes - resp = await req.send(conn) + async def _mock_write_bytes(*args, **kwargs): + # Ensure the task is scheduled + await asyncio.sleep(0) + return await original_write_bytes(*args, **kwargs) + + with mock.patch.object(req, "write_bytes", _mock_write_bytes): + resp = await req.send(conn) assert asyncio.isfuture(req._writer) await resp.wait_for_close() assert req._writer is None @@ -1020,9 +1027,7 @@ async def gen(writer): assert req.headers["TRANSFER-ENCODING"] == "chunked" resp = await req.send(conn) - assert asyncio.isfuture(req._writer) await resp.wait_for_close() - assert req._writer is None assert ( buf.split(b"\r\n\r\n", 1)[1] == b"b\r\nbinary data\r\n7\r\n result\r\n0\r\n\r\n" ) @@ -1203,14 +1208,28 @@ async def test_oserror_on_write_bytes(loop, conn) -> None: async def test_terminate(loop, conn) -> None: req = ClientRequest("get", URL("http://python.org"), loop=loop) - resp = await req.send(conn) + + async def 
_mock_write_bytes(*args, **kwargs): + # Ensure the task is scheduled + await asyncio.sleep(0) + + with mock.patch.object(req, "write_bytes", _mock_write_bytes): + resp = await req.send(conn) + assert req._writer is not None - writer = req._writer = WriterMock() + assert resp._writer is not None + await resp._writer + writer = WriterMock() + writer.done = mock.Mock(return_value=False) writer.cancel = mock.Mock() + req._writer = writer + resp._writer = writer + assert req._writer is not None + assert resp._writer is not None req.terminate() - assert req._writer is None writer.cancel.assert_called_with() + writer.done.assert_called_with() resp.close() await req.close() @@ -1222,9 +1241,19 @@ def test_terminate_with_closed_loop(loop, conn) -> None: async def go(): nonlocal req, resp, writer req = ClientRequest("get", URL("http://python.org")) - resp = await req.send(conn) + + async def _mock_write_bytes(*args, **kwargs): + # Ensure the task is scheduled + await asyncio.sleep(0) + + with mock.patch.object(req, "write_bytes", _mock_write_bytes): + resp = await req.send(conn) + assert req._writer is not None - writer = req._writer = WriterMock() + writer = WriterMock() + writer.done = mock.Mock(return_value=False) + req._writer = writer + resp._writer = writer await asyncio.sleep(0.05) From 35b5508b3d0ee2973804e717fa772d1edf8cb514 Mon Sep 17 00:00:00 2001 From: "Oleg A." <t0rr@mail.ru> Date: Sun, 21 Jul 2024 18:40:31 +0300 Subject: [PATCH 0238/1511] [PR #8338/dc457842 backport][3.10] Add some coverage (#8341) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- tests/test_client_functional.py | 28 +++++++++++++++++++++++++++- tests/test_client_session.py | 21 +++++++++++++++++++++ tests/test_client_ws_functional.py | 9 +++++++++ tests/test_test_utils.py | 2 +- tests/test_web_app.py | 2 +- tests/test_web_runner.py | 28 ++++++++++++++++------------ tests/test_web_urldispatcher.py | 6 +++--- 7 files changed, 78 insertions(+), 18 deletions(-) diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 63d5c67e51e..872876d4a32 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -10,7 +10,7 @@ import ssl import sys import time -from typing import Any, AsyncIterator +from typing import Any, AsyncIterator, Type from unittest import mock import pytest @@ -21,6 +21,7 @@ from aiohttp import Fingerprint, ServerFingerprintMismatch, hdrs, web from aiohttp.abc import AbstractResolver from aiohttp.client_exceptions import ( + InvalidURL, InvalidUrlClientError, InvalidUrlRedirectClientError, NonHttpUrlClientError, @@ -3621,3 +3622,28 @@ async def not_ok_handler(request): "/ok", timeout=aiohttp.ClientTimeout(total=0.01) ) as resp_ok: assert 200 == resp_ok.status + + +@pytest.mark.parametrize( + ("value", "exc_type"), + [(42, TypeError), ("InvalidUrl", InvalidURL)], +) +async def test_request_with_wrong_proxy( + aiohttp_client: AiohttpClient, value: Any, exc_type: Type[Exception] +) -> None: + app = web.Application() + session = await aiohttp_client(app) + + with pytest.raises(exc_type): + await session.get("/", proxy=value) # type: ignore[arg-type] + + +async def test_raise_for_status_is_none(aiohttp_client: AiohttpClient) -> None: + async def handler(_: web.Request) -> web.Response: + return web.Response() + + app = web.Application() + app.router.add_get("/", handler) + session = await aiohttp_client(app, raise_for_status=None) # type: ignore[arg-type] + + await session.get("/") diff --git a/tests/test_client_session.py 
b/tests/test_client_session.py index af602b485db..a522094a287 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -6,6 +6,7 @@ from http.cookies import SimpleCookie from typing import Any, List from unittest import mock +from uuid import uuid4 import pytest from multidict import CIMultiDict, MultiDict @@ -949,3 +950,23 @@ async def test_instantiation_with_invalid_timeout_value(loop): ClientSession(timeout=1) # should not have "Unclosed client session" warning assert not logs + + +@pytest.mark.parametrize( + ("outer_name", "inner_name"), + [ + ("skip_auto_headers", "_skip_auto_headers"), + ("auth", "_default_auth"), + ("json_serialize", "_json_serialize"), + ("connector_owner", "_connector_owner"), + ("raise_for_status", "_raise_for_status"), + ("trust_env", "_trust_env"), + ("trace_configs", "_trace_configs"), + ], +) +async def test_properties( + session: ClientSession, outer_name: str, inner_name: str +) -> None: + value = uuid4() + setattr(session, inner_name, value) + assert value == getattr(session, outer_name) diff --git a/tests/test_client_ws_functional.py b/tests/test_client_ws_functional.py index 584a8d6aa27..907a362fc7e 100644 --- a/tests/test_client_ws_functional.py +++ b/tests/test_client_ws_functional.py @@ -7,6 +7,7 @@ import aiohttp from aiohttp import hdrs, web from aiohttp.http import WSCloseCode +from aiohttp.pytest_plugin import AiohttpClient if sys.version_info >= (3, 11): import asyncio as async_timeout @@ -868,3 +869,11 @@ async def handler(request): assert "answer" == msg.data await resp.close() + + +async def test_ws_connect_with_wrong_ssl_type(aiohttp_client: AiohttpClient) -> None: + app = web.Application() + session = await aiohttp_client(app) + + with pytest.raises(TypeError, match="ssl should be SSLContext, .*"): + await session.ws_connect("/", ssl=42) diff --git a/tests/test_test_utils.py b/tests/test_test_utils.py index 1ac742f78b1..328f83c3fd4 100644 --- a/tests/test_test_utils.py +++ 
b/tests/test_test_utils.py @@ -259,7 +259,7 @@ async def test_test_client_props(loop) -> None: async def test_test_client_raw_server_props(loop) -> None: async def hello(request): - return web.Response(body=_hello_world_bytes) + return web.Response() # pragma: no cover client = _TestClient(_RawTestServer(hello, host="127.0.0.1", loop=loop), loop=loop) assert client.host == "127.0.0.1" diff --git a/tests/test_web_app.py b/tests/test_web_app.py index 3688cf2b492..3d3aa2479f6 100644 --- a/tests/test_web_app.py +++ b/tests/test_web_app.py @@ -331,7 +331,7 @@ def test_app_run_middlewares() -> None: @web.middleware async def middleware(request: web.Request, handler: Handler) -> web.StreamResponse: - return await handler(request) + return await handler(request) # pragma: no cover root = web.Application(middlewares=[middleware]) sub = web.Application() diff --git a/tests/test_web_runner.py b/tests/test_web_runner.py index c4843d298ab..c7c94263234 100644 --- a/tests/test_web_runner.py +++ b/tests/test_web_runner.py @@ -16,7 +16,7 @@ def app(): @pytest.fixture -def make_runner(loop, app): +def make_runner(loop: Any, app: Any): asyncio.set_event_loop(loop) runners = [] @@ -30,7 +30,7 @@ def go(**kwargs): loop.run_until_complete(runner.cleanup()) -async def test_site_for_nonfrozen_app(make_runner) -> None: +async def test_site_for_nonfrozen_app(make_runner: Any) -> None: runner = make_runner() with pytest.raises(RuntimeError): web.TCPSite(runner) @@ -40,7 +40,7 @@ async def test_site_for_nonfrozen_app(make_runner) -> None: @pytest.mark.skipif( platform.system() == "Windows", reason="the test is not valid for Windows" ) -async def test_runner_setup_handle_signals(make_runner) -> None: +async def test_runner_setup_handle_signals(make_runner: Any) -> None: runner = make_runner(handle_signals=True) await runner.setup() assert signal.getsignal(signal.SIGTERM) is not signal.SIG_DFL @@ -51,7 +51,7 @@ async def test_runner_setup_handle_signals(make_runner) -> None: 
@pytest.mark.skipif( platform.system() == "Windows", reason="the test is not valid for Windows" ) -async def test_runner_setup_without_signal_handling(make_runner) -> None: +async def test_runner_setup_without_signal_handling(make_runner: Any) -> None: runner = make_runner(handle_signals=False) await runner.setup() assert signal.getsignal(signal.SIGTERM) is signal.SIG_DFL @@ -59,7 +59,7 @@ async def test_runner_setup_without_signal_handling(make_runner) -> None: assert signal.getsignal(signal.SIGTERM) is signal.SIG_DFL -async def test_site_double_added(make_runner) -> None: +async def test_site_double_added(make_runner: Any) -> None: _sock = get_unused_port_socket("127.0.0.1") runner = make_runner() await runner.setup() @@ -71,7 +71,7 @@ async def test_site_double_added(make_runner) -> None: assert len(runner.sites) == 1 -async def test_site_stop_not_started(make_runner) -> None: +async def test_site_stop_not_started(make_runner: Any) -> None: runner = make_runner() await runner.setup() site = web.TCPSite(runner) @@ -81,13 +81,13 @@ async def test_site_stop_not_started(make_runner) -> None: assert len(runner.sites) == 0 -async def test_custom_log_format(make_runner) -> None: +async def test_custom_log_format(make_runner: Any) -> None: runner = make_runner(access_log_format="abc") await runner.setup() assert runner.server._kwargs["access_log_format"] == "abc" -async def test_unreg_site(make_runner) -> None: +async def test_unreg_site(make_runner: Any) -> None: runner = make_runner() await runner.setup() site = web.TCPSite(runner) @@ -95,7 +95,7 @@ async def test_unreg_site(make_runner) -> None: runner._unreg_site(site) -async def test_app_property(make_runner, app) -> None: +async def test_app_property(make_runner: Any, app: Any) -> None: runner = make_runner() assert runner.app is app @@ -121,7 +121,9 @@ async def test_addresses(make_runner, unix_sockname) -> None: @pytest.mark.skipif( platform.system() != "Windows", reason="Proactor Event loop present only in 
Windows" ) -async def test_named_pipe_runner_wrong_loop(app, selector_loop, pipe_name) -> None: +async def test_named_pipe_runner_wrong_loop( + app: Any, selector_loop: Any, pipe_name: Any +) -> None: runner = web.AppRunner(app) await runner.setup() with pytest.raises(RuntimeError): @@ -131,7 +133,9 @@ async def test_named_pipe_runner_wrong_loop(app, selector_loop, pipe_name) -> No @pytest.mark.skipif( platform.system() != "Windows", reason="Proactor Event loop present only in Windows" ) -async def test_named_pipe_runner_proactor_loop(proactor_loop, app, pipe_name) -> None: +async def test_named_pipe_runner_proactor_loop( + proactor_loop: Any, app: Any, pipe_name: Any +) -> None: runner = web.AppRunner(app) await runner.setup() pipe = web.NamedPipeSite(runner, pipe_name) @@ -139,7 +143,7 @@ async def test_named_pipe_runner_proactor_loop(proactor_loop, app, pipe_name) -> await runner.cleanup() -async def test_tcpsite_default_host(make_runner): +async def test_tcpsite_default_host(make_runner: Any) -> None: runner = make_runner() await runner.setup() site = web.TCPSite(runner) diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 26453860977..278af07f4c8 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -379,7 +379,7 @@ async def test_handler_metadata_persistence() -> None: async def async_handler(request: web.Request) -> web.Response: """Doc""" - return web.Response() + return web.Response() # pragma: no cover def sync_handler(request): """Doc""" @@ -609,7 +609,7 @@ def test_reuse_last_added_resource(path: str) -> None: app = web.Application() async def handler(request: web.Request) -> web.Response: - return web.Response() + return web.Response() # pragma: no cover app.router.add_get(path, handler, name="a") app.router.add_post(path, handler, name="a") @@ -621,7 +621,7 @@ def test_resource_raw_match() -> None: app = web.Application() async def handler(request: web.Request) -> web.Response: - return 
web.Response() + return web.Response() # pragma: no cover route = app.router.add_get("/a", handler, name="a") assert route.resource is not None From 95897b1286e7f29d4311be8d0fddc81c4359b732 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 21 Jul 2024 16:03:03 +0000 Subject: [PATCH 0239/1511] [PR #8522/5a9e5006 backport][3.10] Restore AsyncResolver to be the default resolver (#8523) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/8522.misc.rst | 5 +++++ aiohttp/resolver.py | 6 +++--- tests/test_resolver.py | 12 +++++++----- 3 files changed, 15 insertions(+), 8 deletions(-) create mode 100644 CHANGES/8522.misc.rst diff --git a/CHANGES/8522.misc.rst b/CHANGES/8522.misc.rst new file mode 100644 index 00000000000..04f7edcc92d --- /dev/null +++ b/CHANGES/8522.misc.rst @@ -0,0 +1,5 @@ +Restore :py:class:`~aiohttp.resolver.AsyncResolver` to be the default resolver. -- by :user:`bdraco`. + +:py:class:`~aiohttp.resolver.AsyncResolver` was disabled by default because +of IPv6 compatibility issues. These issues have been resolved and +:py:class:`~aiohttp.resolver.AsyncResolver` is again now the default resolver. 
diff --git a/aiohttp/resolver.py b/aiohttp/resolver.py index 3d14d2fcb16..10e36266abe 100644 --- a/aiohttp/resolver.py +++ b/aiohttp/resolver.py @@ -7,16 +7,16 @@ __all__ = ("ThreadedResolver", "AsyncResolver", "DefaultResolver") + try: import aiodns - # aiodns_default = hasattr(aiodns.DNSResolver, 'getaddrinfo') + aiodns_default = hasattr(aiodns.DNSResolver, "getaddrinfo") except ImportError: # pragma: no cover aiodns = None # type: ignore[assignment] + aiodns_default = False -aiodns_default = False - _NUMERIC_SOCKET_FLAGS = socket.AI_NUMERICHOST | socket.AI_NUMERICSERV _SUPPORTS_SCOPE_ID = sys.version_info >= (3, 9, 0) diff --git a/tests/test_resolver.py b/tests/test_resolver.py index fe1902180dd..f51506a6999 100644 --- a/tests/test_resolver.py +++ b/tests/test_resolver.py @@ -328,9 +328,11 @@ async def test_async_resolver_aiodns_not_present(loop, monkeypatch) -> None: AsyncResolver(loop=loop) -def test_default_resolver() -> None: - # if getaddrinfo: - # assert DefaultResolver is AsyncResolver - # else: - # assert DefaultResolver is ThreadedResolver +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +def test_aio_dns_is_default() -> None: + assert DefaultResolver is AsyncResolver + + +@pytest.mark.skipif(getaddrinfo, reason="aiodns <3.2.0 required") +def test_threaded_resolver_is_default() -> None: assert DefaultResolver is ThreadedResolver From 8620b237b2ca30bf188f3d74b196b954da2ad9d7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 22 Jul 2024 10:43:27 +0000 Subject: [PATCH 0240/1511] Bump python-on-whales from 0.71.0 to 0.72.0 (#8525) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [python-on-whales](https://github.com/gabrieldemarmiesse/python-on-whales) from 0.71.0 to 0.72.0. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/gabrieldemarmiesse/python-on-whales/releases">python-on-whales's releases</a>.</em></p> <blockquote> <h2>v0.72.0</h2> <h2>What's Changed</h2> <ul> <li>Fix links in images objects docs. by <a href="https://github.com/valberg"><code>@​valberg</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/582">gabrieldemarmiesse/python-on-whales#582</a></li> <li>Change DockerException message to remove mention of "docker" by <a href="https://github.com/LewisGaul"><code>@​LewisGaul</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/585">gabrieldemarmiesse/python-on-whales#585</a></li> <li>fix: wheel includes tests and docs by <a href="https://github.com/betaboon"><code>@​betaboon</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/586">gabrieldemarmiesse/python-on-whales#586</a></li> <li>Allow <code>podman exec</code> with <code>--interactive</code> and no <code>--tty</code> by <a href="https://github.com/jhc4318"><code>@​jhc4318</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/583">gabrieldemarmiesse/python-on-whales#583</a></li> <li>Fix documentation and docstring issues by <a href="https://github.com/pcharmoille"><code>@​pcharmoille</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/589">gabrieldemarmiesse/python-on-whales#589</a></li> <li>Add support for 'docker version' and 'podman version' by <a href="https://github.com/LewisGaul"><code>@​LewisGaul</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/593">gabrieldemarmiesse/python-on-whales#593</a></li> <li>Set up command construction to support generic iterables and mappings by <a href="https://github.com/LewisGaul"><code>@​LewisGaul</code></a> in <a 
href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/595">gabrieldemarmiesse/python-on-whales#595</a></li> <li>Implement support for podman pods by <a href="https://github.com/LewisGaul"><code>@​LewisGaul</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/544">gabrieldemarmiesse/python-on-whales#544</a></li> <li>Make the test_cpus() testcase non-strict xfail for podman by <a href="https://github.com/LewisGaul"><code>@​LewisGaul</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/597">gabrieldemarmiesse/python-on-whales#597</a></li> <li>Fix merge issue in CONTRIBUTING.md by <a href="https://github.com/LewisGaul"><code>@​LewisGaul</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/604">gabrieldemarmiesse/python-on-whales#604</a></li> <li>Fix test_save_load() testcase to handle the case an image has multiple tags by <a href="https://github.com/LewisGaul"><code>@​LewisGaul</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/596">gabrieldemarmiesse/python-on-whales#596</a></li> <li>Replace unrecognised unicode characters rather than raising an exception by <a href="https://github.com/LewisGaul"><code>@​LewisGaul</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/607">gabrieldemarmiesse/python-on-whales#607</a></li> <li>Support generic iterables and mappings in the image component by <a href="https://github.com/LewisGaul"><code>@​LewisGaul</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/598">gabrieldemarmiesse/python-on-whales#598</a></li> <li>:writing_hand: Made the arguments part prettier in docs by <a href="https://github.com/gabrieldemarmiesse"><code>@​gabrieldemarmiesse</code></a> in <a 
href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/610">gabrieldemarmiesse/python-on-whales#610</a></li> <li>Add test for rendering of docs by <a href="https://github.com/gabrieldemarmiesse"><code>@​gabrieldemarmiesse</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/611">gabrieldemarmiesse/python-on-whales#611</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/valberg"><code>@​valberg</code></a> made their first contribution in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/582">gabrieldemarmiesse/python-on-whales#582</a></li> <li><a href="https://github.com/pcharmoille"><code>@​pcharmoille</code></a> made their first contribution in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/589">gabrieldemarmiesse/python-on-whales#589</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/gabrieldemarmiesse/python-on-whales/compare/v0.71.0...v0.72.0">https://github.com/gabrieldemarmiesse/python-on-whales/compare/v0.71.0...v0.72.0</a></p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/bd44c6212682cc5f904c9a4f6a759d007763c4c5"><code>bd44c62</code></a> Bump version to 0.72.0</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/db2bd7ce9c8b5ba909806f62b7e4a9fa450c2b75"><code>db2bd7c</code></a> Add test for rendering of docs (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/611">#611</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/5ad01531120378da2aad0ea967a6d53e093d7258"><code>5ad0153</code></a> :writing_hand: Made the arguments part prettier in docs (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/610">#610</a>)</li> <li><a 
href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/fb58644e377879ffb03ce129dda902a749d26fec"><code>fb58644</code></a> Support generic iterables and mappings in the image component (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/598">#598</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/773725c1c3e9b4cf1f547a2e5d17a981e5818a8c"><code>773725c</code></a> Replace unrecognised unicode characters rather than raising an exception (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/607">#607</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/b8fe6abb769b7b14c30ff50663eb13e3d94c6cf2"><code>b8fe6ab</code></a> Fix test_save_load() testcase to handle the case an image has multiple tags (...</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/0bf2fb53e2a34464cc1cefd192ee40c56abf7604"><code>0bf2fb5</code></a> Fix merge issue in CONTRIBUTING.md (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/604">#604</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/5683cd8a689ef67ff18f5768788d031e1336f6fa"><code>5683cd8</code></a> Make the test_cpus() testcase non-strict xfail for podman (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/597">#597</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/d09f3245603f9ddfa42ee3e2a047be13b478d69e"><code>d09f324</code></a> Implement support for podman pods (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/544">#544</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/8b05066326ea135d9a6f10b9958677cfe63078b8"><code>8b05066</code></a> Set up command construction to support generic iterables and mappings (<a 
href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/595">#595</a>)</li> <li>Additional commits viewable in <a href="https://github.com/gabrieldemarmiesse/python-on-whales/compare/v0.71.0...v0.72.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=python-on-whales&package-manager=pip&previous-version=0.71.0&new-version=0.72.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 6551c88eff6..c6ac0cafe1a 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -178,7 +178,7 @@ pytest-mock==3.14.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun -python-on-whales==0.71.0 +python-on-whales==0.72.0 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index d79bb1ffec6..64efc459b44 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -169,7 +169,7 @@ pytest-mock==3.14.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun -python-on-whales==0.71.0 +python-on-whales==0.72.0 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index eb14ed5eb9a..eb15ce5257d 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -66,7 +66,7 @@ pygments==2.17.2 # via rich 
pytest==8.2.2 # via -r requirements/lint.in -python-on-whales==0.71.0 +python-on-whales==0.72.0 # via -r requirements/lint.in pyyaml==6.0.1 # via pre-commit diff --git a/requirements/test.txt b/requirements/test.txt index 79006772a71..5279e8b8321 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -88,7 +88,7 @@ pytest-mock==3.14.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun -python-on-whales==0.71.0 +python-on-whales==0.72.0 # via -r requirements/test.in re-assert==1.1.0 # via -r requirements/test.in From be2a8bf35a9ab85dbc48651081e54b01077fdc34 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 22 Jul 2024 11:09:57 +0000 Subject: [PATCH 0241/1511] Bump pytest from 8.2.2 to 8.3.1 (#8527) Bumps [pytest](https://github.com/pytest-dev/pytest) from 8.2.2 to 8.3.1. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pytest-dev/pytest/releases">pytest's releases</a>.</em></p> <blockquote> <h2>8.3.1</h2> <h1>pytest 8.3.1 (2024-07-20)</h1> <p>The 8.3.0 release failed to include the change notes and docs for the release. This patch release remedies this. 
There are no other changes.</p> <h2>8.3.0</h2> <h1>pytest 8.3.0 (2024-07-20)</h1> <h2>New features</h2> <ul> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/12231">#12231</a>: Added [--xfail-tb]{.title-ref} flag, which turns on traceback output for XFAIL results.</p> <ul> <li>If the [--xfail-tb]{.title-ref} flag is not given, tracebacks for XFAIL results are NOT shown.</li> <li>The style of traceback for XFAIL is set with [--tb]{.title-ref}, and can be [auto|long|short|line|native|no]{.title-ref}.</li> <li>Note: Even if you have [--xfail-tb]{.title-ref} set, you won't see them if [--tb=no]{.title-ref}.</li> </ul> <p>Some history:</p> <p>With pytest 8.0, [-rx]{.title-ref} or [-ra]{.title-ref} would not only turn on summary reports for xfail, but also report the tracebacks for xfail results. This caused issues with some projects that utilize xfail, but don't want to see all of the xfail tracebacks.</p> <p>This change detaches xfail tracebacks from [-rx]{.title-ref}, and now we turn on xfail tracebacks with [--xfail-tb]{.title-ref}. With this, the default [-rx]{.title-ref}/ [-ra]{.title-ref} behavior is identical to pre-8.0 with respect to xfail tracebacks. While this is a behavior change, it brings default behavior back to pre-8.0.0 behavior, which ultimately was considered the better course of action.</p> </li> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/12281">#12281</a>: Added support for keyword matching in marker expressions.</p> <p>Now tests can be selected by marker keyword arguments. 
Supported values are <code>int</code>{.interpreted-text role="class"}, (unescaped) <code>str</code>{.interpreted-text role="class"}, <code>bool</code>{.interpreted-text role="class"} & <code>None</code>{.interpreted-text role="data"}.</p> <p>See <code>marker examples <marker_keyword_expression_example></code>{.interpreted-text role="ref"} for more information.</p> <p>-- by <code>lovetheguitar</code>{.interpreted-text role="user"}</p> </li> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/12567">#12567</a>: Added <code>--no-fold-skipped</code> command line option.</p> <p>If this option is set, then skipped tests in short summary are no longer grouped by reason but all tests are printed individually with their nodeid in the same way as other statuses.</p> <p>-- by <code>pbrezina</code>{.interpreted-text role="user"}</p> </li> </ul> <h2>Improvements in existing functionality</h2> <ul> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/12469">#12469</a>: The console output now uses the "third-party plugins" terminology, replacing the previously established but confusing and outdated reference to <code>setuptools <setuptools:index></code>{.interpreted-text role="std:doc"} -- by <code>webknjaz</code>{.interpreted-text role="user"}.</p> </li> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/12544">#12544</a>, <a href="https://redirect.github.com/pytest-dev/pytest/issues/12545">#12545</a>: Python virtual environment detection was improved by checking for a <code>pyvenv.cfg</code>{.interpreted-text role="file"} file, ensuring reliable detection on various platforms -- by <code>zachsnickers</code>{.interpreted-text role="user"}.</p> </li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pytest-dev/pytest/commit/de98446075cc07c768387cf64ba497dd75c205de"><code>de98446</code></a> Prepare release version 8.3.1</li> <li><a href="https://github.com/pytest-dev/pytest/commit/bd0a0424037825bc23b9bf299115e92c53a67a9c"><code>bd0a042</code></a> Merge pull request <a href="https://redirect.github.com/pytest-dev/pytest/issues/12636">#12636</a> from pytest-dev/update-release-notes</li> <li><a href="https://github.com/pytest-dev/pytest/commit/664325bc9fd90217f51fe7cc0e9fff2f29a41a15"><code>664325b</code></a> doc/changelog: update 8.3.0 notes</li> <li><a href="https://github.com/pytest-dev/pytest/commit/19d225d0ab0f586a9a1fc878dff871495c12bd06"><code>19d225d</code></a> Merge pull request <a href="https://redirect.github.com/pytest-dev/pytest/issues/12635">#12635</a> from pytest-dev/release-8.3.0</li> <li><a href="https://github.com/pytest-dev/pytest/commit/bc3302850c52ec945eea1b2bbde7ec3b91fc3e44"><code>bc33028</code></a> Prepare release version 8.3.0</li> <li><a href="https://github.com/pytest-dev/pytest/commit/a7d5a8eba9addd119432fa71880b51052a89812f"><code>a7d5a8e</code></a> Merge pull request <a href="https://redirect.github.com/pytest-dev/pytest/issues/12557">#12557</a> from x612skm/maintainence/11771-pypy-3.9-bump</li> <li><a href="https://github.com/pytest-dev/pytest/commit/ced7072bb4f7653ad2f1d0d33639d87e7bc5f358"><code>ced7072</code></a> Add a change note for PR <a href="https://redirect.github.com/pytest-dev/pytest/issues/11771">#11771</a></li> <li><a href="https://github.com/pytest-dev/pytest/commit/d42b76daadb88d993ee74753766e22711a27395f"><code>d42b76d</code></a> Adjust test_errors_in_xfail_skip_expressions for PyPy</li> <li><a href="https://github.com/pytest-dev/pytest/commit/9eee45a7479cf5fa23b79057708a994a3b8d0eee"><code>9eee45a</code></a> Bump PyPy runtime to v3.9 @ GHA</li> <li><a 
href="https://github.com/pytest-dev/pytest/commit/d489247505a953885a156e61d4473497cbc167ea"><code>d489247</code></a> Fix caching of parameterized fixtures (<a href="https://redirect.github.com/pytest-dev/pytest/issues/12600">#12600</a>)</li> <li>Additional commits viewable in <a href="https://github.com/pytest-dev/pytest/compare/8.2.2...8.3.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pytest&package-manager=pip&previous-version=8.2.2&new-version=8.3.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index c6ac0cafe1a..0caeed0f7a0 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -166,7 +166,7 @@ pyproject-hooks==1.0.0 # via # build # pip-tools -pytest==8.2.2 +pytest==8.3.1 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 64efc459b44..9516acb529a 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -157,7 +157,7 @@ pyproject-hooks==1.0.0 # via # build # pip-tools -pytest==8.2.2 +pytest==8.3.1 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index eb15ce5257d..c5f67695cec 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -64,7 +64,7 @@ pydantic-core==2.18.2 # via pydantic pygments==2.17.2 # via rich -pytest==8.2.2 +pytest==8.3.1 # via -r requirements/lint.in python-on-whales==0.72.0 # via -r 
requirements/lint.in diff --git a/requirements/test.txt b/requirements/test.txt index 5279e8b8321..2bc1c1321be 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -77,7 +77,7 @@ pydantic==2.2.0 # via python-on-whales pydantic-core==2.6.0 # via pydantic -pytest==8.2.2 +pytest==8.3.1 # via # -r requirements/test.in # pytest-cov From 6f17a67c7280fcd22f6ad7216cece941b6f50ce1 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 22 Jul 2024 10:44:45 -0500 Subject: [PATCH 0242/1511] [PR #8495/549c95b9 backport][3.10] Shutdown logic: Only wait on handlers (#8530) Co-authored-by: pre-commit-ci[bot] Co-authored-by: J. Nick Koston <nick@koston.org> Co-authored-by: Sam Bull <git@sambull.org> --- aiohttp/web.py | 26 ------------------- aiohttp/web_protocol.py | 8 ++++-- aiohttp/web_runner.py | 16 ++---------- aiohttp/web_server.py | 7 ++++- tests/test_run_app.py | 43 +++++++------------------------ tests/test_web_request_handler.py | 8 +++--- 6 files changed, 29 insertions(+), 79 deletions(-) diff --git a/aiohttp/web.py b/aiohttp/web.py index e9116507f4e..8708f1fcbec 100644 --- a/aiohttp/web.py +++ b/aiohttp/web.py @@ -6,8 +6,6 @@ import warnings from argparse import ArgumentParser from collections.abc import Iterable -from contextlib import suppress -from functools import partial from importlib import import_module from typing import ( Any, @@ -21,7 +19,6 @@ Union, cast, ) -from weakref import WeakSet from .abc import AbstractAccessLogger from .helpers import AppKey as AppKey @@ -320,23 +317,6 @@ async def _run_app( reuse_port: Optional[bool] = None, handler_cancellation: bool = False, ) -> None: - async def wait( - starting_tasks: "WeakSet[asyncio.Task[object]]", shutdown_timeout: float - ) -> None: - # Wait for pending tasks for a given time limit. 
- t = asyncio.current_task() - assert t is not None - starting_tasks.add(t) - with suppress(asyncio.TimeoutError): - await asyncio.wait_for(_wait(starting_tasks), timeout=shutdown_timeout) - - async def _wait(exclude: "WeakSet[asyncio.Task[object]]") -> None: - t = asyncio.current_task() - assert t is not None - exclude.add(t) - while tasks := asyncio.all_tasks().difference(exclude): - await asyncio.wait(tasks) - # An internal function to actually do all dirty job for application running if asyncio.iscoroutine(app): app = await app @@ -355,12 +335,6 @@ async def _wait(exclude: "WeakSet[asyncio.Task[object]]") -> None: ) await runner.setup() - # On shutdown we want to avoid waiting on tasks which run forever. - # It's very likely that all tasks which run forever will have been created by - # the time we have completed the application startup (in runner.setup()), - # so we just record all running tasks here and exclude them later. - starting_tasks: "WeakSet[asyncio.Task[object]]" = WeakSet(asyncio.all_tasks()) - runner.shutdown_callback = partial(wait, starting_tasks, shutdown_timeout) sites: List[BaseSite] = [] diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index 88df4b31d24..d4ddbba55eb 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -262,7 +262,12 @@ async def shutdown(self, timeout: Optional[float] = 15.0) -> None: if self._waiter: self._waiter.cancel() - # wait for handlers + # Wait for graceful disconnection + if self._current_request is not None: + with suppress(asyncio.CancelledError, asyncio.TimeoutError): + async with ceil_timeout(timeout): + await self._current_request.wait_for_disconnection() + # Then cancel handler and wait with suppress(asyncio.CancelledError, asyncio.TimeoutError): async with ceil_timeout(timeout): if self._current_request is not None: @@ -445,7 +450,6 @@ async def _handle_request( start_time: float, request_handler: Callable[[BaseRequest], Awaitable[StreamResponse]], ) -> Tuple[StreamResponse, 
bool]: - assert self._request_handler is not None try: try: self._current_request = request diff --git a/aiohttp/web_runner.py b/aiohttp/web_runner.py index 19a4441658f..2fe229c4e50 100644 --- a/aiohttp/web_runner.py +++ b/aiohttp/web_runner.py @@ -3,7 +3,7 @@ import socket import warnings from abc import ABC, abstractmethod -from typing import Any, Awaitable, Callable, List, Optional, Set +from typing import Any, List, Optional, Set from yarl import URL @@ -238,14 +238,7 @@ async def start(self) -> None: class BaseRunner(ABC): - __slots__ = ( - "shutdown_callback", - "_handle_signals", - "_kwargs", - "_server", - "_sites", - "_shutdown_timeout", - ) + __slots__ = ("_handle_signals", "_kwargs", "_server", "_sites", "_shutdown_timeout") def __init__( self, @@ -254,7 +247,6 @@ def __init__( shutdown_timeout: float = 60.0, **kwargs: Any, ) -> None: - self.shutdown_callback: Optional[Callable[[], Awaitable[None]]] = None self._handle_signals = handle_signals self._kwargs = kwargs self._server: Optional[Server] = None @@ -312,10 +304,6 @@ async def cleanup(self) -> None: await asyncio.sleep(0) self._server.pre_shutdown() await self.shutdown() - - if self.shutdown_callback: - await self.shutdown_callback() - await self._server.shutdown(self._shutdown_timeout) await self._cleanup_server() diff --git a/aiohttp/web_server.py b/aiohttp/web_server.py index f6bbdb89a77..ffc198d5780 100644 --- a/aiohttp/web_server.py +++ b/aiohttp/web_server.py @@ -43,7 +43,12 @@ def connection_lost( self, handler: RequestHandler, exc: Optional[BaseException] = None ) -> None: if handler in self._connections: - del self._connections[handler] + if handler._task_handler: + handler._task_handler.add_done_callback( + lambda f: self._connections.pop(handler, None) + ) + else: + del self._connections[handler] def _make_request( self, diff --git a/tests/test_run_app.py b/tests/test_run_app.py index 5696928b219..eb69d620ced 100644 --- a/tests/test_run_app.py +++ b/tests/test_run_app.py @@ -15,7 +15,7 
@@ import pytest -from aiohttp import ClientConnectorError, ClientSession, WSCloseCode, web +from aiohttp import ClientConnectorError, ClientSession, ClientTimeout, WSCloseCode, web from aiohttp.test_utils import make_mocked_coro from aiohttp.web_runner import BaseRunner @@ -920,8 +920,12 @@ async def test() -> None: async with ClientSession() as sess: for _ in range(5): # pragma: no cover try: - async with sess.get(f"http://localhost:{port}/"): - pass + with pytest.raises(asyncio.TimeoutError): + async with sess.get( + f"http://localhost:{port}/", + timeout=ClientTimeout(total=0.1), + ): + pass except ClientConnectorError: await asyncio.sleep(0.5) else: @@ -941,6 +945,7 @@ async def run_test(app: web.Application) -> None: async def handler(request: web.Request) -> web.Response: nonlocal t t = asyncio.create_task(task()) + await t return web.Response(text="FOO") t = test_task = None @@ -953,7 +958,7 @@ async def handler(request: web.Request) -> web.Response: assert test_task.exception() is None return t - def test_shutdown_wait_for_task( + def test_shutdown_wait_for_handler( self, aiohttp_unused_port: Callable[[], int] ) -> None: port = aiohttp_unused_port() @@ -970,7 +975,7 @@ async def task(): assert t.done() assert not t.cancelled() - def test_shutdown_timeout_task( + def test_shutdown_timeout_handler( self, aiohttp_unused_port: Callable[[], int] ) -> None: port = aiohttp_unused_port() @@ -987,34 +992,6 @@ async def task(): assert t.done() assert t.cancelled() - def test_shutdown_wait_for_spawned_task( - self, aiohttp_unused_port: Callable[[], int] - ) -> None: - port = aiohttp_unused_port() - finished = False - finished_sub = False - sub_t = None - - async def sub_task(): - nonlocal finished_sub - await asyncio.sleep(1.5) - finished_sub = True - - async def task(): - nonlocal finished, sub_t - await asyncio.sleep(0.5) - sub_t = asyncio.create_task(sub_task()) - finished = True - - t = self.run_app(port, 3, task) - - assert finished is True - assert t.done() - 
assert not t.cancelled() - assert finished_sub is True - assert sub_t.done() - assert not sub_t.cancelled() - def test_shutdown_timeout_not_reached( self, aiohttp_unused_port: Callable[[], int] ) -> None: diff --git a/tests/test_web_request_handler.py b/tests/test_web_request_handler.py index 06f99be76c0..4837cab030e 100644 --- a/tests/test_web_request_handler.py +++ b/tests/test_web_request_handler.py @@ -22,19 +22,21 @@ async def test_connections() -> None: manager = web.Server(serve) assert manager.connections == [] - handler = object() + handler = mock.Mock(spec_set=web.RequestHandler) + handler._task_handler = None transport = object() manager.connection_made(handler, transport) # type: ignore[arg-type] assert manager.connections == [handler] - manager.connection_lost(handler, None) # type: ignore[arg-type] + manager.connection_lost(handler, None) assert manager.connections == [] async def test_shutdown_no_timeout() -> None: manager = web.Server(serve) - handler = mock.Mock() + handler = mock.Mock(spec_set=web.RequestHandler) + handler._task_handler = None handler.shutdown = make_mocked_coro(mock.Mock()) transport = mock.Mock() manager.connection_made(handler, transport) From 948a4c599161503193f822294e3b8d35b4ebe354 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Mon, 22 Jul 2024 11:22:26 -0500 Subject: [PATCH 0243/1511] Release 3.10.0b0 (#8531) --- CHANGES.rst | 270 +++++++++++++++++++++++++++++++++++++++ CHANGES/2492.feature | 1 - CHANGES/2507.feature.rst | 1 - CHANGES/3315.feature.rst | 1 - CHANGES/3958.doc | 1 - CHANGES/4462.bugfix.rst | 7 - CHANGES/6722.feature | 12 -- CHANGES/7297.feature | 1 - CHANGES/7583.feature | 14 -- CHANGES/7784.bugfix | 1 - CHANGES/7801.feature | 1 - CHANGES/7829.misc | 3 - CHANGES/7954.feature | 1 - CHANGES/8062.feature.rst | 1 - CHANGES/8088.contrib.rst | 1 - CHANGES/8270.bugfix.rst | 9 -- CHANGES/8346.misc.rst | 1 - CHANGES/8364.misc.rst | 1 - CHANGES/8403.doc.rst | 1 - CHANGES/8444.bugfix | 2 - CHANGES/8463.misc.rst | 1 - CHANGES/8481.feature.rst | 1 - CHANGES/8482.feature.rst | 1 - CHANGES/8491.misc.rst | 1 - CHANGES/8498.misc.rst | 1 - CHANGES/8501.misc.rst | 1 - CHANGES/8507.bugfix.rst | 8 -- CHANGES/8510.misc.rst | 1 - CHANGES/8522.misc.rst | 5 - aiohttp/__init__.py | 2 +- 30 files changed, 271 insertions(+), 81 deletions(-) delete mode 100644 CHANGES/2492.feature delete mode 120000 CHANGES/2507.feature.rst delete mode 120000 CHANGES/3315.feature.rst delete mode 100644 CHANGES/3958.doc delete mode 100644 CHANGES/4462.bugfix.rst delete mode 100644 CHANGES/6722.feature delete mode 100644 CHANGES/7297.feature delete mode 100644 CHANGES/7583.feature delete mode 100644 CHANGES/7784.bugfix delete mode 100644 CHANGES/7801.feature delete mode 100644 CHANGES/7829.misc delete mode 100644 CHANGES/7954.feature delete mode 100644 CHANGES/8062.feature.rst delete mode 100644 CHANGES/8088.contrib.rst delete mode 100644 CHANGES/8270.bugfix.rst delete mode 100644 CHANGES/8346.misc.rst delete mode 100644 CHANGES/8364.misc.rst delete mode 100644 CHANGES/8403.doc.rst delete mode 100644 CHANGES/8444.bugfix delete mode 100644 CHANGES/8463.misc.rst delete mode 120000 CHANGES/8481.feature.rst delete mode 120000 CHANGES/8482.feature.rst delete mode 100644 
CHANGES/8491.misc.rst delete mode 100644 CHANGES/8498.misc.rst delete mode 100644 CHANGES/8501.misc.rst delete mode 100644 CHANGES/8507.bugfix.rst delete mode 100644 CHANGES/8510.misc.rst delete mode 100644 CHANGES/8522.misc.rst diff --git a/CHANGES.rst b/CHANGES.rst index 5b02623067a..95b573b1125 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,276 @@ .. towncrier release notes start +3.10.0b0 (2024-07-22) +========================= + +Bug fixes +--------- + +- Fixed server response headers for ``Content-Type`` and ``Content-Encoding`` for + static compressed files -- by :user:`steverep`. + + Server will now respond with a ``Content-Type`` appropriate for the compressed + file (e.g. ``"application/gzip"``), and omit the ``Content-Encoding`` header. + Users should expect that most clients will no longer decompress such responses + by default. + + + *Related issues and pull requests on GitHub:* + :issue:`4462`. + + + +- Fix duplicate cookie expiration calls in the CookieJar implementation + + + *Related issues and pull requests on GitHub:* + :issue:`7784`. + + + +- Fix ``AsyncResolver`` to match ``ThreadedResolver`` behavior + -- by :user:`bdraco`. + + On system with IPv6 support, the :py:class:`~aiohttp.resolver.AsyncResolver` would not fallback + to providing A records when AAAA records were not available. + Additionally, unlike the :py:class:`~aiohttp.resolver.ThreadedResolver`, the :py:class:`~aiohttp.resolver.AsyncResolver` + did not handle link-local addresses correctly. + + This change makes the behavior consistent with the :py:class:`~aiohttp.resolver.ThreadedResolver`. + + + *Related issues and pull requests on GitHub:* + :issue:`8270`. + + + +- Fix ``ws_connect`` not respecting `receive_timeout`` on WS(S) connection. + -- by :user:`arcivanov`. + + + *Related issues and pull requests on GitHub:* + :issue:`8444`. + + + +- Removed blocking I/O in the event loop for static resources and refactored + exception handling -- by :user:`steverep`. 
+ + File system calls when handling requests for static routes were moved to a + separate thread to potentially improve performance. Exception handling + was tightened in order to only return 403 Forbidden or 404 Not Found responses + for expected scenarios; 500 Internal Server Error would be returned for any + unknown errors. + + + *Related issues and pull requests on GitHub:* + :issue:`8507`. + + + + +Features +-------- + +- Add a Request.wait_for_disconnection() method, as means of allowing request handlers to be notified of premature client disconnections. + + + *Related issues and pull requests on GitHub:* + :issue:`2492`. + + + +- Added 5 new exceptions: :py:exc:`~aiohttp.InvalidUrlClientError`, :py:exc:`~aiohttp.RedirectClientError`, + :py:exc:`~aiohttp.NonHttpUrlClientError`, :py:exc:`~aiohttp.InvalidUrlRedirectClientError`, + :py:exc:`~aiohttp.NonHttpUrlRedirectClientError` + + :py:exc:`~aiohttp.InvalidUrlRedirectClientError`, :py:exc:`~aiohttp.NonHttpUrlRedirectClientError` + are raised instead of :py:exc:`ValueError` or :py:exc:`~aiohttp.InvalidURL` when the redirect URL is invalid. Classes + :py:exc:`~aiohttp.InvalidUrlClientError`, :py:exc:`~aiohttp.RedirectClientError`, + :py:exc:`~aiohttp.NonHttpUrlClientError` are base for them. + + The :py:exc:`~aiohttp.InvalidURL` now exposes a ``description`` property with the text explanation of the error details. + + -- by :user:`setla`, :user:`AraHaan`, and :user:`bdraco` + + + *Related issues and pull requests on GitHub:* + :issue:`2507`, :issue:`3315`, :issue:`6722`, :issue:`8481`, :issue:`8482`. + + + +- Added a feature to retry closed connections automatically for idempotent methods. -- by :user:`Dreamsorcerer` + + + *Related issues and pull requests on GitHub:* + :issue:`7297`. + + + +- Implement filter_cookies() with domain-matching and path-matching on the keys, instead of testing every single cookie. + This may break existing cookies that have been saved with `CookieJar.save()`. 
Cookies can be migrated with this script:: + + import pickle + with file_path.open("rb") as f: + cookies = pickle.load(f) + + morsels = [(name, m) for c in cookies.values() for name, m in c.items()] + cookies.clear() + for name, m in morsels: + cookies[(m["domain"], m["path"].rstrip("/"))][name] = m + + with file_path.open("wb") as f: + pickle.dump(cookies, f, pickle.HIGHEST_PROTOCOL) + + + *Related issues and pull requests on GitHub:* + :issue:`7583`. + + + +- Separated connection and socket timeout errors, from ServerTimeoutError. + + + *Related issues and pull requests on GitHub:* + :issue:`7801`. + + + +- Implement happy eyeballs + + + *Related issues and pull requests on GitHub:* + :issue:`7954`. + + + +- Added server capability to check for static files with Brotli compression via a ``.br`` extension -- by :user:`steverep`. + + + *Related issues and pull requests on GitHub:* + :issue:`8062`. + + + + +Improved documentation +---------------------- + +- Add documentation for ``aiohttp.web.FileResponse``. + + + *Related issues and pull requests on GitHub:* + :issue:`3958`. + + + +- Improve the docs for the `ssl` params. + + + *Related issues and pull requests on GitHub:* + :issue:`8403`. + + + + +Contributor-facing changes +-------------------------- + +- Enabled HTTP parser tests originally intended for 3.9.2 release -- by :user:`pajod`. + + + *Related issues and pull requests on GitHub:* + :issue:`8088`. + + + + +Miscellaneous internal changes +------------------------------ + +- Improved URL handler resolution time by indexing resources in the UrlDispatcher. + For applications with a large number of handlers, this should increase performance significantly. + -- by :user:`bdraco` + + + *Related issues and pull requests on GitHub:* + :issue:`7829`. + + + +- Add `nacl_middleware <https://github.com/CosmicDNA/nacl_middleware>`_ to the list of middlewares in the third party section of the documentation. 
+ + + *Related issues and pull requests on GitHub:* + :issue:`8346`. + + + +- Minor improvements to static typing -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8364`. + + + +- Added a 3.11-specific overloads to ``ClientSession`` -- by :user:`max-muoto`. + + + *Related issues and pull requests on GitHub:* + :issue:`8463`. + + + +- Simplified path checks for ``UrlDispatcher.add_static()`` method -- by :user:`steverep`. + + + *Related issues and pull requests on GitHub:* + :issue:`8491`. + + + +- Avoid creating a future on every websocket receive -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8498`. + + + +- Use identity checks for all ``WSMsgType`` type compares -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8501`. + + + +- When using Python 3.12 or later, the writer is no longer scheduled on the event loop if it can finish synchronously. Avoiding event loop scheduling reduces latency and improves performance. -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8510`. + + + +- Restore :py:class:`~aiohttp.resolver.AsyncResolver` to be the default resolver. -- by :user:`bdraco`. + + :py:class:`~aiohttp.resolver.AsyncResolver` was disabled by default because + of IPv6 compatibility issues. These issues have been resolved and + :py:class:`~aiohttp.resolver.AsyncResolver` is again now the default resolver. + + + *Related issues and pull requests on GitHub:* + :issue:`8522`. + + + + +---- + + 3.9.5 (2024-04-16) ================== diff --git a/CHANGES/2492.feature b/CHANGES/2492.feature deleted file mode 100644 index 5c98dbbbcf2..00000000000 --- a/CHANGES/2492.feature +++ /dev/null @@ -1 +0,0 @@ -Add a Request.wait_for_disconnection() method, as means of allowing request handlers to be notified of premature client disconnections. 
diff --git a/CHANGES/2507.feature.rst b/CHANGES/2507.feature.rst deleted file mode 120000 index f569cd92882..00000000000 --- a/CHANGES/2507.feature.rst +++ /dev/null @@ -1 +0,0 @@ -6722.feature \ No newline at end of file diff --git a/CHANGES/3315.feature.rst b/CHANGES/3315.feature.rst deleted file mode 120000 index f569cd92882..00000000000 --- a/CHANGES/3315.feature.rst +++ /dev/null @@ -1 +0,0 @@ -6722.feature \ No newline at end of file diff --git a/CHANGES/3958.doc b/CHANGES/3958.doc deleted file mode 100644 index 9f3a9de1743..00000000000 --- a/CHANGES/3958.doc +++ /dev/null @@ -1 +0,0 @@ -Add documentation for ``aiohttp.web.FileResponse``. diff --git a/CHANGES/4462.bugfix.rst b/CHANGES/4462.bugfix.rst deleted file mode 100644 index fe897a08b39..00000000000 --- a/CHANGES/4462.bugfix.rst +++ /dev/null @@ -1,7 +0,0 @@ -Fixed server response headers for ``Content-Type`` and ``Content-Encoding`` for -static compressed files -- by :user:`steverep`. - -Server will now respond with a ``Content-Type`` appropriate for the compressed -file (e.g. ``"application/gzip"``), and omit the ``Content-Encoding`` header. -Users should expect that most clients will no longer decompress such responses -by default. diff --git a/CHANGES/6722.feature b/CHANGES/6722.feature deleted file mode 100644 index 580efa5c5e2..00000000000 --- a/CHANGES/6722.feature +++ /dev/null @@ -1,12 +0,0 @@ -Added 5 new exceptions: :py:exc:`~aiohttp.InvalidUrlClientError`, :py:exc:`~aiohttp.RedirectClientError`, -:py:exc:`~aiohttp.NonHttpUrlClientError`, :py:exc:`~aiohttp.InvalidUrlRedirectClientError`, -:py:exc:`~aiohttp.NonHttpUrlRedirectClientError` - -:py:exc:`~aiohttp.InvalidUrlRedirectClientError`, :py:exc:`~aiohttp.NonHttpUrlRedirectClientError` -are raised instead of :py:exc:`ValueError` or :py:exc:`~aiohttp.InvalidURL` when the redirect URL is invalid. 
Classes -:py:exc:`~aiohttp.InvalidUrlClientError`, :py:exc:`~aiohttp.RedirectClientError`, -:py:exc:`~aiohttp.NonHttpUrlClientError` are base for them. - -The :py:exc:`~aiohttp.InvalidURL` now exposes a ``description`` property with the text explanation of the error details. - --- by :user:`setla`, :user:`AraHaan`, and :user:`bdraco` diff --git a/CHANGES/7297.feature b/CHANGES/7297.feature deleted file mode 100644 index 91d769a4b32..00000000000 --- a/CHANGES/7297.feature +++ /dev/null @@ -1 +0,0 @@ -Added a feature to retry closed connections automatically for idempotent methods. -- by :user:`Dreamsorcerer` diff --git a/CHANGES/7583.feature b/CHANGES/7583.feature deleted file mode 100644 index c05e64be780..00000000000 --- a/CHANGES/7583.feature +++ /dev/null @@ -1,14 +0,0 @@ -Implement filter_cookies() with domain-matching and path-matching on the keys, instead of testing every single cookie. -This may break existing cookies that have been saved with `CookieJar.save()`. Cookies can be migrated with this script:: - - import pickle - with file_path.open("rb") as f: - cookies = pickle.load(f) - - morsels = [(name, m) for c in cookies.values() for name, m in c.items()] - cookies.clear() - for name, m in morsels: - cookies[(m["domain"], m["path"].rstrip("/"))][name] = m - - with file_path.open("wb") as f: - pickle.dump(cookies, f, pickle.HIGHEST_PROTOCOL) diff --git a/CHANGES/7784.bugfix b/CHANGES/7784.bugfix deleted file mode 100644 index 1f8ba8ddb44..00000000000 --- a/CHANGES/7784.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix duplicate cookie expiration calls in the CookieJar implementation diff --git a/CHANGES/7801.feature b/CHANGES/7801.feature deleted file mode 100644 index a6fb4d8a58e..00000000000 --- a/CHANGES/7801.feature +++ /dev/null @@ -1 +0,0 @@ -Separated connection and socket timeout errors, from ServerTimeoutError. 
diff --git a/CHANGES/7829.misc b/CHANGES/7829.misc deleted file mode 100644 index 9eb060f4713..00000000000 --- a/CHANGES/7829.misc +++ /dev/null @@ -1,3 +0,0 @@ -Improved URL handler resolution time by indexing resources in the UrlDispatcher. -For applications with a large number of handlers, this should increase performance significantly. --- by :user:`bdraco` diff --git a/CHANGES/7954.feature b/CHANGES/7954.feature deleted file mode 100644 index e536ee4b1c4..00000000000 --- a/CHANGES/7954.feature +++ /dev/null @@ -1 +0,0 @@ -Implement happy eyeballs diff --git a/CHANGES/8062.feature.rst b/CHANGES/8062.feature.rst deleted file mode 100644 index 6e9814f09a0..00000000000 --- a/CHANGES/8062.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Added server capability to check for static files with Brotli compression via a ``.br`` extension -- by :user:`steverep`. diff --git a/CHANGES/8088.contrib.rst b/CHANGES/8088.contrib.rst deleted file mode 100644 index b3aec71bdf7..00000000000 --- a/CHANGES/8088.contrib.rst +++ /dev/null @@ -1 +0,0 @@ -Enabled HTTP parser tests originally intended for 3.9.2 release -- by :user:`pajod`. diff --git a/CHANGES/8270.bugfix.rst b/CHANGES/8270.bugfix.rst deleted file mode 100644 index bda77223959..00000000000 --- a/CHANGES/8270.bugfix.rst +++ /dev/null @@ -1,9 +0,0 @@ -Fix ``AsyncResolver`` to match ``ThreadedResolver`` behavior --- by :user:`bdraco`. - -On system with IPv6 support, the :py:class:`~aiohttp.resolver.AsyncResolver` would not fallback -to providing A records when AAAA records were not available. -Additionally, unlike the :py:class:`~aiohttp.resolver.ThreadedResolver`, the :py:class:`~aiohttp.resolver.AsyncResolver` -did not handle link-local addresses correctly. - -This change makes the behavior consistent with the :py:class:`~aiohttp.resolver.ThreadedResolver`. 
diff --git a/CHANGES/8346.misc.rst b/CHANGES/8346.misc.rst deleted file mode 100644 index e3e1a309be1..00000000000 --- a/CHANGES/8346.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Add `nacl_middleware <https://github.com/CosmicDNA/nacl_middleware>`_ to the list of middlewares in the third party section of the documentation. diff --git a/CHANGES/8364.misc.rst b/CHANGES/8364.misc.rst deleted file mode 100644 index 493916f0421..00000000000 --- a/CHANGES/8364.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Minor improvements to static typing -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8403.doc.rst b/CHANGES/8403.doc.rst deleted file mode 100644 index 71618c3c99c..00000000000 --- a/CHANGES/8403.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Improve the docs for the `ssl` params. diff --git a/CHANGES/8444.bugfix b/CHANGES/8444.bugfix deleted file mode 100644 index 774e13064a7..00000000000 --- a/CHANGES/8444.bugfix +++ /dev/null @@ -1,2 +0,0 @@ -Fix ``ws_connect`` not respecting `receive_timeout`` on WS(S) connection. --- by :user:`arcivanov`. diff --git a/CHANGES/8463.misc.rst b/CHANGES/8463.misc.rst deleted file mode 100644 index 1d42136ebd7..00000000000 --- a/CHANGES/8463.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Added a 3.11-specific overloads to ``ClientSession`` -- by :user:`max-muoto`. diff --git a/CHANGES/8481.feature.rst b/CHANGES/8481.feature.rst deleted file mode 120000 index f569cd92882..00000000000 --- a/CHANGES/8481.feature.rst +++ /dev/null @@ -1 +0,0 @@ -6722.feature \ No newline at end of file diff --git a/CHANGES/8482.feature.rst b/CHANGES/8482.feature.rst deleted file mode 120000 index f569cd92882..00000000000 --- a/CHANGES/8482.feature.rst +++ /dev/null @@ -1 +0,0 @@ -6722.feature \ No newline at end of file diff --git a/CHANGES/8491.misc.rst b/CHANGES/8491.misc.rst deleted file mode 100644 index 223c549b2e2..00000000000 --- a/CHANGES/8491.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Simplified path checks for ``UrlDispatcher.add_static()`` method -- by :user:`steverep`. 
diff --git a/CHANGES/8498.misc.rst b/CHANGES/8498.misc.rst deleted file mode 100644 index 5fcf3efd884..00000000000 --- a/CHANGES/8498.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Avoid creating a future on every websocket receive -- by :user:`bdraco`. diff --git a/CHANGES/8501.misc.rst b/CHANGES/8501.misc.rst deleted file mode 100644 index 183c370178e..00000000000 --- a/CHANGES/8501.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Use identity checks for all ``WSMsgType`` type compares -- by :user:`bdraco`. diff --git a/CHANGES/8507.bugfix.rst b/CHANGES/8507.bugfix.rst deleted file mode 100644 index 9739536202d..00000000000 --- a/CHANGES/8507.bugfix.rst +++ /dev/null @@ -1,8 +0,0 @@ -Removed blocking I/O in the event loop for static resources and refactored -exception handling -- by :user:`steverep`. - -File system calls when handling requests for static routes were moved to a -separate thread to potentially improve performance. Exception handling -was tightened in order to only return 403 Forbidden or 404 Not Found responses -for expected scenarios; 500 Internal Server Error would be returned for any -unknown errors. diff --git a/CHANGES/8510.misc.rst b/CHANGES/8510.misc.rst deleted file mode 100644 index d0a90c7388f..00000000000 --- a/CHANGES/8510.misc.rst +++ /dev/null @@ -1 +0,0 @@ -When using Python 3.12 or later, the writer is no longer scheduled on the event loop if it can finish synchronously. Avoiding event loop scheduling reduces latency and improves performance. -- by :user:`bdraco`. diff --git a/CHANGES/8522.misc.rst b/CHANGES/8522.misc.rst deleted file mode 100644 index 04f7edcc92d..00000000000 --- a/CHANGES/8522.misc.rst +++ /dev/null @@ -1,5 +0,0 @@ -Restore :py:class:`~aiohttp.resolver.AsyncResolver` to be the default resolver. -- by :user:`bdraco`. - -:py:class:`~aiohttp.resolver.AsyncResolver` was disabled by default because -of IPv6 compatibility issues. 
These issues have been resolved and -:py:class:`~aiohttp.resolver.AsyncResolver` is again now the default resolver. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 5064b043006..1e4324de694 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.10.0.dev0" +__version__ = "3.10.0b0" from typing import TYPE_CHECKING, Tuple From 088f73489eab7ca053df7890913a0415ae577a00 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 22 Jul 2024 12:11:59 -0500 Subject: [PATCH 0244/1511] [3.10] Downgrade upload/download artifact to v3 to fix CI (#8532) --- .github/workflows/ci-cd.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 79211d42419..f072a12aa34 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -114,7 +114,7 @@ jobs: run: | make generate-llhttp - name: Upload llhttp generated files - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v3 with: name: llhttp path: vendor/llhttp/build @@ -179,7 +179,7 @@ jobs: python -m pip install -r requirements/test.in -c requirements/test.txt - name: Restore llhttp generated files if: ${{ matrix.no-extensions == '' }} - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v3 with: name: llhttp path: vendor/llhttp/build/ @@ -280,7 +280,7 @@ jobs: python -m pip install -r requirements/cython.in -c requirements/cython.txt - name: Restore llhttp generated files - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v3 with: name: llhttp path: vendor/llhttp/build/ @@ -291,7 +291,7 @@ jobs: run: | python -m build --sdist - name: Upload artifacts - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v3 with: name: dist path: dist @@ -343,7 +343,7 @@ jobs: python -m pip install -r requirements/cython.in -c requirements/cython.txt - name: Restore llhttp generated files - uses: 
actions/download-artifact@v4 + uses: actions/download-artifact@v3 with: name: llhttp path: vendor/llhttp/build/ @@ -354,7 +354,7 @@ jobs: uses: pypa/cibuildwheel@v2.19.2 env: CIBW_ARCHS_MACOS: x86_64 arm64 universal2 - - uses: actions/upload-artifact@v4 + - uses: actions/upload-artifact@v3 with: name: dist path: ./wheelhouse/*.whl @@ -381,7 +381,7 @@ jobs: run: | echo "${{ secrets.GITHUB_TOKEN }}" | gh auth login --with-token - name: Download distributions - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v3 with: name: dist path: dist From 9b9cec20eda00f6a2f5bb923e0fc54fcc6f6a95f Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 22 Jul 2024 12:15:47 -0500 Subject: [PATCH 0245/1511] Release 3.10.0b1 (#8533) --- CHANGES.rst | 9 +++++++++ aiohttp/__init__.py | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index 95b573b1125..1ea48374333 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,15 @@ .. towncrier release notes start +3.10.0b1 (2024-07-22) +======================== + +No significant changes. + + +---- + + 3.10.0b0 (2024-07-22) ========================= diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 1e4324de694..230b7a22888 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.10.0b0" +__version__ = "3.10.0b1" from typing import TYPE_CHECKING, Tuple From 5621ecf654af21a145cc706a434fc0c2ff697e8f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 25 Jul 2024 11:25:06 +0000 Subject: [PATCH 0246/1511] Bump pytest from 8.3.1 to 8.3.2 (#8536) Bumps [pytest](https://github.com/pytest-dev/pytest) from 8.3.1 to 8.3.2. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pytest-dev/pytest/releases">pytest's releases</a>.</em></p> <blockquote> <h2>8.3.2</h2> <h1>pytest 8.3.2 (2024-07-24)</h1> <h2>Bug fixes</h2> <ul> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/12652">#12652</a>: Resolve regression [conda]{.title-ref} environments where no longer being automatically detected.</p> <p>-- by <code>RonnyPfannschmidt</code>{.interpreted-text role="user"}</p> </li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pytest-dev/pytest/commit/bbcec9c46509c417ef58e4849847b4aa43f4591e"><code>bbcec9c</code></a> Prepare release version 8.3.2</li> <li><a href="https://github.com/pytest-dev/pytest/commit/78fe8b61fa716cd9775f1e37b395bab7679734a6"><code>78fe8b6</code></a> Merge pull request <a href="https://redirect.github.com/pytest-dev/pytest/issues/12657">#12657</a> from pytest-dev/patchback/backports/8.3.x/6c806b499...</li> <li><a href="https://github.com/pytest-dev/pytest/commit/238bad2d2a1f1dbb47d740c84b5b4f5224f8a965"><code>238bad2</code></a> Merge pull request <a href="https://redirect.github.com/pytest-dev/pytest/issues/12656">#12656</a> from RonnyPfannschmidt/fix-12652-detect-conda-env</li> <li><a href="https://github.com/pytest-dev/pytest/commit/ae6034a781a50b572fa700911e5d8e0eb074ca17"><code>ae6034a</code></a> Merge pull request <a href="https://redirect.github.com/pytest-dev/pytest/issues/12641">#12641</a> from pytest-dev/patchback/backports/8.3.x/c03989cee...</li> <li><a href="https://github.com/pytest-dev/pytest/commit/31337aba02a7698a87a6792eacf887fceff08af2"><code>31337ab</code></a> Merge pull request <a href="https://redirect.github.com/pytest-dev/pytest/issues/12640">#12640</a> from pytest-dev/update-user</li> <li><a href="https://github.com/pytest-dev/pytest/commit/ca3070b1356e5edf43f085d8c4ec1b34627061dd"><code>ca3070b</code></a> Merge pull request <a 
href="https://redirect.github.com/pytest-dev/pytest/issues/12637">#12637</a> from pytest-dev/release-8.3.1</li> <li>See full diff in <a href="https://github.com/pytest-dev/pytest/compare/8.3.1...8.3.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pytest&package-manager=pip&previous-version=8.3.1&new-version=8.3.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 0caeed0f7a0..7420ab81b9e 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -166,7 +166,7 @@ pyproject-hooks==1.0.0 # via # build # pip-tools -pytest==8.3.1 +pytest==8.3.2 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 9516acb529a..ffb3bb40ca1 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -157,7 +157,7 @@ pyproject-hooks==1.0.0 # via # build # pip-tools -pytest==8.3.1 +pytest==8.3.2 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index c5f67695cec..11ace69b225 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -64,7 +64,7 @@ pydantic-core==2.18.2 # via pydantic pygments==2.17.2 # via rich -pytest==8.3.1 +pytest==8.3.2 # via -r requirements/lint.in python-on-whales==0.72.0 # via -r 
requirements/lint.in diff --git a/requirements/test.txt b/requirements/test.txt index 2bc1c1321be..7b68ac4849c 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -77,7 +77,7 @@ pydantic==2.2.0 # via python-on-whales pydantic-core==2.6.0 # via pydantic -pytest==8.3.1 +pytest==8.3.2 # via # -r requirements/test.in # pytest-cov From 3baa6de1ec831e1b6d97127adced3f4ff1d5cb05 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 25 Jul 2024 18:43:03 +0000 Subject: [PATCH 0247/1511] [PR #8535/7108d646 backport][3.10] Small speed up to cookiejar filter_cookies (#8537) **This is a backport of PR #8535 as merged into master (7108d6469d20dd48919d312b2c654aef867ebe51).** <!-- Thank you for your contribution! --> ## What do these changes do? Small speed up to cookiejar Using `str.format` is ~16% faster than the lambda followup to https://github.com/aio-libs/aiohttp/pull/7944#discussion_r1430823536. I was hoping to use `join` there but later realized `str.format` will take `*args` ## Are there changes in behavior for the user? no ## Is it a substantial burden for the maintainers to support this? no benchmark ```python import timeit import itertools _FORMAT_PATH = "{0}/{1}".format path = "lolonglonglonglonglonglongng/path/to/a/file" print( timeit.timeit( 'itertools.accumulate(path.split("/"), _FORMAT_PATH)', globals=globals() ) ) print( timeit.timeit( 'itertools.accumulate(path.split("/"), lambda x, y: f"{x}/{y}")', globals=globals(), ) ) ``` Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/8535.misc.rst | 3 +++ aiohttp/cookiejar.py | 11 +++++++---- 2 files changed, 10 insertions(+), 4 deletions(-) create mode 100644 CHANGES/8535.misc.rst diff --git a/CHANGES/8535.misc.rst b/CHANGES/8535.misc.rst new file mode 100644 index 00000000000..e1acc438695 --- /dev/null +++ b/CHANGES/8535.misc.rst @@ -0,0 +1,3 @@ +Improve performance of filtering cookies -- by :user:`bdraco`. 
+ +This change is a followup to the improvements in :issue:`7583` diff --git a/aiohttp/cookiejar.py b/aiohttp/cookiejar.py index 3c7629c7f33..e9997ce2935 100644 --- a/aiohttp/cookiejar.py +++ b/aiohttp/cookiejar.py @@ -36,6 +36,10 @@ CookieItem = Union[str, "Morsel[str]"] +# We cache these string methods here as their use is in performance critical code. +_FORMAT_PATH = "{}/{}".format +_FORMAT_DOMAIN_REVERSED = "{1}.{0}".format + class CookieJar(AbstractCookieJar): """Implements cookie storage adhering to RFC 6265.""" @@ -274,12 +278,11 @@ def filter_cookies(self, request_url: URL = URL()) -> "BaseCookie[str]": else: # Get all the subdomains that might match a cookie (e.g. "foo.bar.com", "bar.com", "com") domains = itertools.accumulate( - reversed(hostname.split(".")), lambda x, y: f"{y}.{x}" + reversed(hostname.split(".")), _FORMAT_DOMAIN_REVERSED ) + # Get all the path prefixes that might match a cookie (e.g. "", "/foo", "/foo/bar") - paths = itertools.accumulate( - request_url.path.split("/"), lambda x, y: f"{x}/{y}" - ) + paths = itertools.accumulate(request_url.path.split("/"), _FORMAT_PATH) # Create every combination of (domain, path) pairs. pairs = itertools.product(domains, paths) From df57b9f34ed4c696d3de3f3229dc84c2f9e6ac84 Mon Sep 17 00:00:00 2001 From: Steve Repsher <steverep@users.noreply.github.com> Date: Fri, 26 Jul 2024 16:23:12 -0400 Subject: [PATCH 0248/1511] [3.10] Handle 403 and 404 issues in FileResponse class (#8538) (#8539) (cherry picked from commit 4f834b646c23f74962e181170a40d872e76f4602) <!-- Thank you for your contribution! --> ## What do these changes do? <!-- Please give a short brief about these changes. --> ## Are there changes in behavior for the user? <!-- Outline any notable behaviour for the end users. --> ## Is it a substantial burden for the maintainers to support this? <!-- Stop right there! Pause. Just for a minute... Can you think of anything obvious that would complicate the ongoing development of this project? 
Try to consider if you'd be able to maintain it throughout the next 5 years. Does it seem viable? Tell us your thoughts! We'd very much love to hear what the consequences of merging this patch might be... This will help us assess if your change is something we'd want to entertain early in the review process. Thank you in advance! --> ## Related issue number <!-- Are there any issues opened that will be resolved by merging this change? --> <!-- Remember to prefix with 'Fixes' if it should close the issue (e.g. 'Fixes #123'). --> ## Checklist - [ ] I think the code is well written - [ ] Unit tests for the changes exist - [ ] Documentation reflects the changes - [ ] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. - [ ] Add a new news fragment into the `CHANGES/` folder * name it `<issue_or_pr_num>.<type>.rst` (e.g. `588.bugfix.rst`) * if you don't have an issue number, change it to the pull request number after creating the PR * `.bugfix`: A bug fix for something the maintainers deemed an improper undesired behavior that got corrected to match pre-agreed expectations. * `.feature`: A new behavior, public APIs. That sort of stuff. * `.deprecation`: A declaration of future API removals and breaking changes in behavior. * `.breaking`: When something public is removed in a breaking way. Could be deprecated in an earlier release. * `.doc`: Notable updates to the documentation structure or build process. * `.packaging`: Notes for downstreams about unobvious side effects and tooling. Changes in the test invocation considerations and runtime assumptions. * `.contrib`: Stuff that affects the contributor experience. e.g. Running tests, building the docs, setting up the development environment. * `.misc`: Changes that are hard to assign to any of the above categories. 
* Make sure to use full sentences with correct case and punctuation, for example: ```rst Fixed issue with non-ascii contents in doctest text files -- by :user:`contributor-gh-handle`. ``` Use the past tense or the present tense a non-imperative mood, referring to what's changed compared to the last released version of this project. --- CHANGES/8182.bugfix.rst | 10 +++ aiohttp/web_fileresponse.py | 27 ++++-- aiohttp/web_urldispatcher.py | 5 +- tests/test_web_sendfile.py | 9 ++ tests/test_web_sendfile_functional.py | 2 +- tests/test_web_urldispatcher.py | 117 +++++++++++++++++++++----- 6 files changed, 138 insertions(+), 32 deletions(-) create mode 100644 CHANGES/8182.bugfix.rst diff --git a/CHANGES/8182.bugfix.rst b/CHANGES/8182.bugfix.rst new file mode 100644 index 00000000000..c960597587c --- /dev/null +++ b/CHANGES/8182.bugfix.rst @@ -0,0 +1,10 @@ +Adjusted ``FileResponse`` to check file existence and access when preparing the response -- by :user:`steverep`. + +The :py:class:`~aiohttp.web.FileResponse` class was modified to respond with + 403 Forbidden or 404 Not Found as appropriate. Previously, it would cause a + server error if the path did not exist or could not be accessed. Checks for + existence, non-regular files, and permissions were expected to be done in the + route handler. For static routes, this now permits a compressed file to exist + without its uncompressed variant and still be served. In addition, this + changes the response status for files without read permission to 403, and for + non-regular files from 404 to 403 for consistency. 
diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index a3521f2b263..7fc5b3d787f 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -4,6 +4,7 @@ import sys from contextlib import suppress from mimetypes import MimeTypes +from stat import S_ISREG from types import MappingProxyType from typing import ( # noqa IO, @@ -25,6 +26,8 @@ from .helpers import ETAG_ANY, ETag, must_be_empty_body from .typedefs import LooseHeaders, PathLike from .web_exceptions import ( + HTTPForbidden, + HTTPNotFound, HTTPNotModified, HTTPPartialContent, HTTPPreconditionFailed, @@ -180,13 +183,22 @@ def _get_file_path_stat_encoding( return file_path, file_path.stat(), None async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: - loop = asyncio.get_event_loop() + loop = asyncio.get_running_loop() # Encoding comparisons should be case-insensitive # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1 accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() - file_path, st, file_encoding = await loop.run_in_executor( - None, self._get_file_path_stat_encoding, accept_encoding - ) + try: + file_path, st, file_encoding = await loop.run_in_executor( + None, self._get_file_path_stat_encoding, accept_encoding + ) + except FileNotFoundError: + self.set_status(HTTPNotFound.status_code) + return await super().prepare(request) + + # Forbid special files like sockets, pipes, devices, etc. 
+ if not S_ISREG(st.st_mode): + self.set_status(HTTPForbidden.status_code) + return await super().prepare(request) etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}" last_modified = st.st_mtime @@ -323,7 +335,12 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter if count == 0 or must_be_empty_body(request.method, self.status): return await super().prepare(request) - fobj = await loop.run_in_executor(None, file_path.open, "rb") + try: + fobj = await loop.run_in_executor(None, file_path.open, "rb") + except PermissionError: + self.set_status(HTTPForbidden.status_code) + return await super().prepare(request) + if start: # be aware that start could be None or int=0 here. offset = start else: diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 7eb934848cb..e26fd9dc7a6 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -712,10 +712,7 @@ def _resolve_path_to_response(self, unresolved_path: Path) -> StreamResponse: except PermissionError as error: raise HTTPForbidden() from error - # Not a regular file or does not exist. - if not file_path.is_file(): - raise HTTPNotFound() - + # Return the file response, which handles all other checks. 
return FileResponse(file_path, chunk_size=self._chunk_size) def _directory_as_html(self, dir_path: Path) -> str: diff --git a/tests/test_web_sendfile.py b/tests/test_web_sendfile.py index ae4434e9ff6..0ba2861c391 100644 --- a/tests/test_web_sendfile.py +++ b/tests/test_web_sendfile.py @@ -1,10 +1,13 @@ from pathlib import Path +from stat import S_IFREG, S_IRUSR, S_IWUSR from unittest import mock from aiohttp import hdrs from aiohttp.test_utils import make_mocked_coro, make_mocked_request from aiohttp.web_fileresponse import FileResponse +MOCK_MODE = S_IFREG | S_IRUSR | S_IWUSR + def test_using_gzip_if_header_present_and_file_available(loop) -> None: request = make_mocked_request( @@ -17,6 +20,7 @@ def test_using_gzip_if_header_present_and_file_available(loop) -> None: gz_filepath = mock.create_autospec(Path, spec_set=True) gz_filepath.stat.return_value.st_size = 1024 gz_filepath.stat.return_value.st_mtime_ns = 1603733507222449291 + gz_filepath.stat.return_value.st_mode = MOCK_MODE filepath = mock.create_autospec(Path, spec_set=True) filepath.name = "logo.png" @@ -38,12 +42,14 @@ def test_gzip_if_header_not_present_and_file_available(loop) -> None: gz_filepath = mock.create_autospec(Path, spec_set=True) gz_filepath.stat.return_value.st_size = 1024 gz_filepath.stat.return_value.st_mtime_ns = 1603733507222449291 + gz_filepath.stat.return_value.st_mode = MOCK_MODE filepath = mock.create_autospec(Path, spec_set=True) filepath.name = "logo.png" filepath.with_suffix.return_value = gz_filepath filepath.stat.return_value.st_size = 1024 filepath.stat.return_value.st_mtime_ns = 1603733507222449291 + filepath.stat.return_value.st_mode = MOCK_MODE file_sender = FileResponse(filepath) file_sender._path = filepath @@ -66,6 +72,7 @@ def test_gzip_if_header_not_present_and_file_not_available(loop) -> None: filepath.with_suffix.return_value = gz_filepath filepath.stat.return_value.st_size = 1024 filepath.stat.return_value.st_mtime_ns = 1603733507222449291 + 
filepath.stat.return_value.st_mode = MOCK_MODE file_sender = FileResponse(filepath) file_sender._path = filepath @@ -90,6 +97,7 @@ def test_gzip_if_header_present_and_file_not_available(loop) -> None: filepath.with_suffix.return_value = gz_filepath filepath.stat.return_value.st_size = 1024 filepath.stat.return_value.st_mtime_ns = 1603733507222449291 + filepath.stat.return_value.st_mode = MOCK_MODE file_sender = FileResponse(filepath) file_sender._path = filepath @@ -108,6 +116,7 @@ def test_status_controlled_by_user(loop) -> None: filepath.name = "logo.png" filepath.stat.return_value.st_size = 1024 filepath.stat.return_value.st_mtime_ns = 1603733507222449291 + filepath.stat.return_value.st_mode = MOCK_MODE file_sender = FileResponse(filepath, status=203) file_sender._path = filepath diff --git a/tests/test_web_sendfile_functional.py b/tests/test_web_sendfile_functional.py index a3e9a1ab76f..e2cfb7a1f0e 100644 --- a/tests/test_web_sendfile_functional.py +++ b/tests/test_web_sendfile_functional.py @@ -39,7 +39,7 @@ def hello_txt(request, tmp_path_factory) -> pathlib.Path: "br": txt.with_suffix(f"{txt.suffix}.br"), "bzip2": txt.with_suffix(f"{txt.suffix}.bz2"), } - hello[None].write_bytes(HELLO_AIOHTTP) + # Uncompressed file is not actually written to test it is not required. 
hello["gzip"].write_bytes(gzip.compress(HELLO_AIOHTTP)) hello["br"].write_bytes(brotli.compress(HELLO_AIOHTTP)) hello["bzip2"].write_bytes(bz2.compress(HELLO_AIOHTTP)) diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 278af07f4c8..1cda0980cc0 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -1,10 +1,11 @@ import asyncio import functools +import os import pathlib +import socket import sys -from typing import Generator, Optional -from unittest import mock -from unittest.mock import MagicMock +from stat import S_IFIFO, S_IMODE +from typing import Any, Generator, Optional import pytest import yarl @@ -451,6 +452,56 @@ def mock_is_dir(self: pathlib.Path) -> bool: assert r.status == 403 +@pytest.mark.skipif( + sys.platform.startswith("win32"), reason="Cannot remove read access on Windows" +) +async def test_static_file_without_read_permission( + tmp_path: pathlib.Path, aiohttp_client: AiohttpClient +) -> None: + """Test static file without read permission receives forbidden response.""" + my_file = tmp_path / "my_file.txt" + my_file.write_text("secret") + my_file.chmod(0o000) + + app = web.Application() + app.router.add_static("/", str(tmp_path)) + client = await aiohttp_client(app) + + r = await client.get(f"/{my_file.name}") + assert r.status == 403 + + +async def test_static_file_with_mock_permission_error( + monkeypatch: pytest.MonkeyPatch, + tmp_path: pathlib.Path, + aiohttp_client: AiohttpClient, +) -> None: + """Test static file with mock permission errors receives forbidden response.""" + my_file = tmp_path / "my_file.txt" + my_file.write_text("secret") + my_readable = tmp_path / "my_readable.txt" + my_readable.write_text("info") + + real_open = pathlib.Path.open + + def mock_open(self: pathlib.Path, *args: Any, **kwargs: Any) -> Any: + if my_file.samefile(self): + raise PermissionError() + return real_open(self, *args, **kwargs) + + monkeypatch.setattr("pathlib.Path.open", mock_open) + + app 
= web.Application() + app.router.add_static("/", str(tmp_path)) + client = await aiohttp_client(app) + + # Test the mock only applies to my_file, then test the permission error. + r = await client.get(f"/{my_readable.name}") + assert r.status == 200 + r = await client.get(f"/{my_file.name}") + assert r.status == 403 + + async def test_access_symlink_loop( tmp_path: pathlib.Path, aiohttp_client: AiohttpClient ) -> None: @@ -470,32 +521,54 @@ async def test_access_symlink_loop( async def test_access_special_resource( - tmp_path: pathlib.Path, aiohttp_client: AiohttpClient + tmp_path_factory: pytest.TempPathFactory, aiohttp_client: AiohttpClient ) -> None: - # Tests the access to a resource that is neither a file nor a directory. - # Checks that if a special resource is accessed (f.e. named pipe or UNIX - # domain socket) then 404 HTTP status returned. + """Test access to non-regular files is forbidden using a UNIX domain socket.""" + if not getattr(socket, "AF_UNIX", None): + pytest.skip("UNIX domain sockets not supported") + + tmp_path = tmp_path_factory.mktemp("special") + my_special = tmp_path / "sock" + my_socket = socket.socket(socket.AF_UNIX) + my_socket.bind(str(my_special)) + assert my_special.is_socket() + app = web.Application() + app.router.add_static("/", str(tmp_path)) - with mock.patch("pathlib.Path.__new__") as path_constructor: - special = MagicMock() - special.is_dir.return_value = False - special.is_file.return_value = False + client = await aiohttp_client(app) + r = await client.get(f"/{my_special.name}") + assert r.status == 403 + my_socket.close() - path = MagicMock() - path.joinpath.side_effect = lambda p: (special if p == "special" else path) - path.resolve.return_value = path - special.resolve.return_value = special - path_constructor.return_value = path +async def test_access_mock_special_resource( + monkeypatch: pytest.MonkeyPatch, + tmp_path: pathlib.Path, + aiohttp_client: AiohttpClient, +) -> None: + """Test access to non-regular files is 
forbidden using a mock FIFO.""" + my_special = tmp_path / "my_special" + my_special.touch() + + real_result = my_special.stat() + real_stat = pathlib.Path.stat + + def mock_stat(self: pathlib.Path) -> os.stat_result: + s = real_stat(self) + if os.path.samestat(s, real_result): + mock_mode = S_IFIFO | S_IMODE(s.st_mode) + s = os.stat_result([mock_mode] + list(s)[1:]) + return s - # Register global static route: - app.router.add_static("/", str(tmp_path), show_index=True) - client = await aiohttp_client(app) + monkeypatch.setattr("pathlib.Path.stat", mock_stat) - # Request the root of the static directory. - r = await client.get("/special") - assert r.status == 403 + app = web.Application() + app.router.add_static("/", str(tmp_path)) + client = await aiohttp_client(app) + + r = await client.get(f"/{my_special.name}") + assert r.status == 403 async def test_partially_applied_handler(aiohttp_client: AiohttpClient) -> None: From ed8de3ace43be5aca78800b6652ed63fdc75766d Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 29 Jul 2024 13:33:36 +0100 Subject: [PATCH 0249/1511] [PR #8541/a561fa99 backport][3.10] Cleanup for #8495 (#8544) **This is a backport of PR #8541 as merged into master (a561fa990427383358b19dc1eabc968e03a95413).** --------- Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8495.breaking.rst | 12 ++++++ docs/web_advanced.rst | 66 ++++++++++++++++-------------- tests/test_run_app.py | 85 ++++++++++++++++++++++++++++++++++++--- 3 files changed, 127 insertions(+), 36 deletions(-) create mode 100644 CHANGES/8495.breaking.rst diff --git a/CHANGES/8495.breaking.rst b/CHANGES/8495.breaking.rst new file mode 100644 index 00000000000..d0b1da5ca47 --- /dev/null +++ b/CHANGES/8495.breaking.rst @@ -0,0 +1,12 @@ +The shutdown logic in 3.9 waited on all tasks, which caused issues with some libraries. +In 3.10 we've changed this logic to only wait on request handlers. 
This means that it's +important for developers to correctly handle the lifecycle of background tasks using a +library such as ``aiojobs``. If an application is using ``handler_cancellation=True`` then +it is also a good idea to ensure that any :func:`asyncio.shield` calls are replaced with +:func:`aiojobs.aiohttp.shield`. + +Please read the updated documentation on these points: +https://docs.aiohttp.org/en/stable/web_advanced.html#graceful-shutdown +https://docs.aiohttp.org/en/stable/web_advanced.html#web-handler-cancellation + +-- by :user:`Dreamsorcerer` diff --git a/docs/web_advanced.rst b/docs/web_advanced.rst index d2ba3013e30..dc94bea33bf 100644 --- a/docs/web_advanced.rst +++ b/docs/web_advanced.rst @@ -48,6 +48,8 @@ socket closing on the peer side without reading the full server response. except OSError: # disconnected +.. _web-handler-cancellation: + Web handler cancellation ^^^^^^^^^^^^^^^^^^^^^^^^ @@ -68,38 +70,48 @@ needed to deal with them. .. warning:: - :term:`web-handler` execution could be canceled on every ``await`` - if client drops connection without reading entire response's BODY. + :term:`web-handler` execution could be canceled on every ``await`` or + ``async with`` if client drops connection without reading entire response's BODY. Sometimes it is a desirable behavior: on processing ``GET`` request the code might fetch data from a database or other web resource, the fetching is potentially slow. -Canceling this fetch is a good idea: the peer dropped connection +Canceling this fetch is a good idea: the client dropped the connection already, so there is no reason to waste time and resources (memory etc) -by getting data from a DB without any chance to send it back to peer. +by getting data from a DB without any chance to send it back to the client. -But sometimes the cancellation is bad: on ``POST`` request very often -it is needed to save data to a DB regardless of peer closing. 
+But sometimes the cancellation is bad: on ``POST`` requests very often +it is needed to save data to a DB regardless of connection closing. Cancellation prevention could be implemented in several ways: -* Applying :func:`asyncio.shield` to a coroutine that saves data. -* Using aiojobs_ or another third party library. +* Applying :func:`aiojobs.aiohttp.shield` to a coroutine that saves data. +* Using aiojobs_ or another third party library to run a task in the background. + +:func:`aiojobs.aiohttp.shield` can work well. The only disadvantage is you +need to split the web handler into two async functions: one for the handler +itself and another for protected code. + +.. warning:: -:func:`asyncio.shield` can work well. The only disadvantage is you -need to split web handler into exactly two async functions: one -for handler itself and other for protected code. + We don't recommend using :func:`asyncio.shield` for this because the shielded + task cannot be tracked by the application and therefore there is a risk that + the task will get cancelled during application shutdown. The function provided + by aiojobs_ operates in the same way except the inner task will be tracked + by the Scheduler and will get waited on during the cleanup phase. For example the following snippet is not safe:: + from aiojobs.aiohttp import shield + async def handler(request): - await asyncio.shield(write_to_redis(request)) - await asyncio.shield(write_to_postgres(request)) + await shield(request, write_to_redis(request)) + await shield(request, write_to_postgres(request)) return web.Response(text="OK") -Cancellation might occur while saving data in REDIS, so -``write_to_postgres`` will not be called, potentially +Cancellation might occur while saving data in REDIS, so the +``write_to_postgres`` function will not be called, potentially leaving your data in an inconsistent state. 
Instead, you would need to write something like:: @@ -109,7 +121,7 @@ Instead, you would need to write something like:: await write_to_postgres(request) async def handler(request): - await asyncio.shield(write_data(request)) + await shield(request, write_data(request)) return web.Response(text="OK") Alternatively, if you want to spawn a task without waiting for @@ -160,7 +172,7 @@ restoring the default disconnection behavior only for specific handlers:: app.router.add_post("/", handler) It prevents all of the ``handler`` async function from cancellation, -so ``write_to_db`` will be never interrupted. +so ``write_to_db`` will never be interrupted. .. _aiojobs: http://aiojobs.readthedocs.io/en/latest/ @@ -936,30 +948,24 @@ always satisfactory. When aiohttp is run with :func:`run_app`, it will attempt a graceful shutdown by following these steps (if using a :ref:`runner <aiohttp-web-app-runners>`, then calling :meth:`AppRunner.cleanup` will perform these steps, excluding -steps 4 and 7). +step 7). 1. Stop each site listening on sockets, so new connections will be rejected. 2. Close idle keep-alive connections (and set active ones to close upon completion). 3. Call the :attr:`Application.on_shutdown` signal. This should be used to shutdown long-lived connections, such as websockets (see below). -4. Wait a short time for running tasks to complete. This allows any pending handlers - or background tasks to complete successfully. The timeout can be adjusted with - ``shutdown_timeout`` in :func:`run_app`. +4. Wait a short time for running handlers to complete. This allows any pending handlers + to complete successfully. The timeout can be adjusted with ``shutdown_timeout`` + in :func:`run_app`. 5. Close any remaining connections and cancel their handlers. It will wait on the canceling handlers for a short time, again adjustable with ``shutdown_timeout``. 6. Call the :attr:`Application.on_cleanup` signal. This should be used to cleanup any resources (such as DB connections). 
This includes completing the - :ref:`cleanup contexts<aiohttp-web-cleanup-ctx>`. + :ref:`cleanup contexts<aiohttp-web-cleanup-ctx>` which may be used to ensure + background tasks are completed successfully (see + :ref:`handler cancellation<web-handler-cancellation>` or aiojobs_ for examples). 7. Cancel any remaining tasks and wait on them to complete. -.. note:: - - When creating new tasks in a handler which _should_ be cancelled on server shutdown, - then it is important to keep track of those tasks and explicitly cancel them in a - :attr:`Application.on_shutdown` callback. As we can see from the above steps, - without this the server will wait on those new tasks to complete before it continues - with server shutdown. - Websocket shutdown ^^^^^^^^^^^^^^^^^^ diff --git a/tests/test_run_app.py b/tests/test_run_app.py index eb69d620ced..c1d5f8e14f4 100644 --- a/tests/test_run_app.py +++ b/tests/test_run_app.py @@ -915,6 +915,23 @@ async def stop(self, request: web.Request) -> web.Response: return web.Response() def run_app(self, port: int, timeout: int, task, extra_test=None) -> asyncio.Task: + num_connections = -1 + + class DictRecordClear(dict): + def clear(self): + nonlocal num_connections + # During Server.shutdown() we want to know how many connections still + # remained before it got cleared. If the handler completed successfully + # the connection should've been removed already. If not, this may + # indicate a memory leak. 
+ num_connections = len(self) + super().clear() + + class ServerWithRecordClear(web.Server): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._connections = DictRecordClear() + async def test() -> None: await asyncio.sleep(0.5) async with ClientSession() as sess: @@ -954,9 +971,10 @@ async def handler(request: web.Request) -> web.Response: app.router.add_get("/", handler) app.router.add_get("/stop", self.stop) - web.run_app(app, port=port, shutdown_timeout=timeout) + with mock.patch("aiohttp.web_app.Server", ServerWithRecordClear): + web.run_app(app, port=port, shutdown_timeout=timeout) assert test_task.exception() is None - return t + return t, num_connections def test_shutdown_wait_for_handler( self, aiohttp_unused_port: Callable[[], int] @@ -969,11 +987,12 @@ async def task(): await asyncio.sleep(2) finished = True - t = self.run_app(port, 3, task) + t, connection_count = self.run_app(port, 3, task) assert finished is True assert t.done() assert not t.cancelled() + assert connection_count == 0 def test_shutdown_timeout_handler( self, aiohttp_unused_port: Callable[[], int] @@ -986,11 +1005,12 @@ async def task(): await asyncio.sleep(2) finished = True - t = self.run_app(port, 1, task) + t, connection_count = self.run_app(port, 1, task) assert finished is False assert t.done() assert t.cancelled() + assert connection_count == 1 def test_shutdown_timeout_not_reached( self, aiohttp_unused_port: Callable[[], int] @@ -1004,10 +1024,11 @@ async def task(): finished = True start_time = time.time() - t = self.run_app(port, 15, task) + t, connection_count = self.run_app(port, 15, task) assert finished is True assert t.done() + assert connection_count == 0 # Verify run_app has not waited for timeout. 
assert time.time() - start_time < 10 @@ -1032,10 +1053,11 @@ async def test(sess: ClientSession) -> None: pass assert finished is False - t = self.run_app(port, 10, task, test) + t, connection_count = self.run_app(port, 10, task, test) assert finished is True assert t.done() + assert connection_count == 0 def test_shutdown_pending_handler_responds( self, aiohttp_unused_port: Callable[[], int] @@ -1168,3 +1190,54 @@ async def run_test(app: web.Application) -> None: assert time.time() - start < 5 assert client_finished assert server_finished + + def test_shutdown_handler_cancellation_suppressed( + self, aiohttp_unused_port: Callable[[], int] + ) -> None: + port = aiohttp_unused_port() + actions = [] + + async def test() -> None: + async def test_resp(sess): + t = ClientTimeout(total=0.4) + with pytest.raises(asyncio.TimeoutError): + async with sess.get(f"http://localhost:{port}/", timeout=t) as resp: + assert await resp.text() == "FOO" + actions.append("CANCELLED") + + async with ClientSession() as sess: + t = asyncio.create_task(test_resp(sess)) + await asyncio.sleep(0.5) + # Handler is in-progress while we trigger server shutdown. + actions.append("PRESTOP") + async with sess.get(f"http://localhost:{port}/stop"): + pass + + actions.append("STOPPING") + # Handler should still complete and produce a response. 
+ await t + + async def run_test(app: web.Application) -> None: + nonlocal t + t = asyncio.create_task(test()) + yield + await t + + async def handler(request: web.Request) -> web.Response: + try: + await asyncio.sleep(5) + except asyncio.CancelledError: + actions.append("SUPPRESSED") + await asyncio.sleep(2) + actions.append("DONE") + return web.Response(text="FOO") + + t = None + app = web.Application() + app.cleanup_ctx.append(run_test) + app.router.add_get("/", handler) + app.router.add_get("/stop", self.stop) + + web.run_app(app, port=port, shutdown_timeout=2, handler_cancellation=True) + assert t.exception() is None + assert actions == ["CANCELLED", "SUPPRESSED", "PRESTOP", "STOPPING", "DONE"] From 305f67a223ec69a2013ba2e887ca2d5f2c42df31 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 29 Jul 2024 08:46:43 -0500 Subject: [PATCH 0250/1511] Release 3.10.0rc0 (#8545) --- CHANGES.rst | 64 +++++++++++++++++++++++++++++++++++++++ CHANGES/8182.bugfix.rst | 10 ------ CHANGES/8495.breaking.rst | 12 -------- CHANGES/8535.misc.rst | 3 -- aiohttp/__init__.py | 2 +- 5 files changed, 65 insertions(+), 26 deletions(-) delete mode 100644 CHANGES/8182.bugfix.rst delete mode 100644 CHANGES/8495.breaking.rst delete mode 100644 CHANGES/8535.misc.rst diff --git a/CHANGES.rst b/CHANGES.rst index 1ea48374333..e2617d23ee8 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,70 @@ .. towncrier release notes start +3.10.0rc0 (2024-07-29) +======================== + +Bug fixes +--------- + +- Adjusted ``FileResponse`` to check file existence and access when preparing the response -- by :user:`steverep`. + + The :py:class:`~aiohttp.web.FileResponse` class was modified to respond with + 403 Forbidden or 404 Not Found as appropriate. Previously, it would cause a + server error if the path did not exist or could not be accessed. Checks for + existence, non-regular files, and permissions were expected to be done in the + route handler. 
For static routes, this now permits a compressed file to exist + without its uncompressed variant and still be served. In addition, this + changes the response status for files without read permission to 403, and for + non-regular files from 404 to 403 for consistency. + + + *Related issues and pull requests on GitHub:* + :issue:`8182`. + + + + +Removals and backward incompatible breaking changes +--------------------------------------------------- + +- The shutdown logic in 3.9 waited on all tasks, which caused issues with some libraries. + In 3.10 we've changed this logic to only wait on request handlers. This means that it's + important for developers to correctly handle the lifecycle of background tasks using a + library such as ``aiojobs``. If an application is using ``handler_cancellation=True`` then + it is also a good idea to ensure that any :func:`asyncio.shield` calls are replaced with + :func:`aiojobs.aiohttp.shield`. + + Please read the updated documentation on these points: + https://docs.aiohttp.org/en/stable/web_advanced.html#graceful-shutdown + https://docs.aiohttp.org/en/stable/web_advanced.html#web-handler-cancellation + + -- by :user:`Dreamsorcerer` + + + *Related issues and pull requests on GitHub:* + :issue:`8495`. + + + + +Miscellaneous internal changes +------------------------------ + +- Improve performance of filtering cookies -- by :user:`bdraco`. + + This change is a followup to the improvements in :issue:`7583` + + + *Related issues and pull requests on GitHub:* + :issue:`8535`. + + + + +---- + + 3.10.0b1 (2024-07-22) ======================== diff --git a/CHANGES/8182.bugfix.rst b/CHANGES/8182.bugfix.rst deleted file mode 100644 index c960597587c..00000000000 --- a/CHANGES/8182.bugfix.rst +++ /dev/null @@ -1,10 +0,0 @@ -Adjusted ``FileResponse`` to check file existence and access when preparing the response -- by :user:`steverep`. 
- -The :py:class:`~aiohttp.web.FileResponse` class was modified to respond with - 403 Forbidden or 404 Not Found as appropriate. Previously, it would cause a - server error if the path did not exist or could not be accessed. Checks for - existence, non-regular files, and permissions were expected to be done in the - route handler. For static routes, this now permits a compressed file to exist - without its uncompressed variant and still be served. In addition, this - changes the response status for files without read permission to 403, and for - non-regular files from 404 to 403 for consistency. diff --git a/CHANGES/8495.breaking.rst b/CHANGES/8495.breaking.rst deleted file mode 100644 index d0b1da5ca47..00000000000 --- a/CHANGES/8495.breaking.rst +++ /dev/null @@ -1,12 +0,0 @@ -The shutdown logic in 3.9 waited on all tasks, which caused issues with some libraries. -In 3.10 we've changed this logic to only wait on request handlers. This means that it's -important for developers to correctly handle the lifecycle of background tasks using a -library such as ``aiojobs``. If an application is using ``handler_cancellation=True`` then -it is also a good idea to ensure that any :func:`asyncio.shield` calls are replaced with -:func:`aiojobs.aiohttp.shield`. - -Please read the updated documentation on these points: -https://docs.aiohttp.org/en/stable/web_advanced.html#graceful-shutdown -https://docs.aiohttp.org/en/stable/web_advanced.html#web-handler-cancellation - --- by :user:`Dreamsorcerer` diff --git a/CHANGES/8535.misc.rst b/CHANGES/8535.misc.rst deleted file mode 100644 index e1acc438695..00000000000 --- a/CHANGES/8535.misc.rst +++ /dev/null @@ -1,3 +0,0 @@ -Improve performance of filtering cookies -- by :user:`bdraco`. 
- -This change is a followup to the improvements in :issue:`7583` diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 230b7a22888..5e6bd6ce387 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.10.0b1" +__version__ = "3.10.0rc0" from typing import TYPE_CHECKING, Tuple From fc201e845dd5e98c6d96bc3bd3c9ea00e8035a29 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 30 Jul 2024 08:41:24 -0500 Subject: [PATCH 0251/1511] Release 3.10.0 (#8550) --- CHANGES.rst | 91 +++++++++++++++++---------------------------- aiohttp/__init__.py | 2 +- 2 files changed, 35 insertions(+), 58 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index e2617d23ee8..bd7a45b01f5 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,7 +10,7 @@ .. towncrier release notes start -3.10.0rc0 (2024-07-29) +3.10.0 (2024-07-30) ======================== Bug fixes @@ -33,62 +33,6 @@ Bug fixes - -Removals and backward incompatible breaking changes ---------------------------------------------------- - -- The shutdown logic in 3.9 waited on all tasks, which caused issues with some libraries. - In 3.10 we've changed this logic to only wait on request handlers. This means that it's - important for developers to correctly handle the lifecycle of background tasks using a - library such as ``aiojobs``. If an application is using ``handler_cancellation=True`` then - it is also a good idea to ensure that any :func:`asyncio.shield` calls are replaced with - :func:`aiojobs.aiohttp.shield`. - - Please read the updated documentation on these points: - https://docs.aiohttp.org/en/stable/web_advanced.html#graceful-shutdown - https://docs.aiohttp.org/en/stable/web_advanced.html#web-handler-cancellation - - -- by :user:`Dreamsorcerer` - - - *Related issues and pull requests on GitHub:* - :issue:`8495`. - - - - -Miscellaneous internal changes ------------------------------- - -- Improve performance of filtering cookies -- by :user:`bdraco`. 
- - This change is a followup to the improvements in :issue:`7583` - - - *Related issues and pull requests on GitHub:* - :issue:`8535`. - - - - ----- - - -3.10.0b1 (2024-07-22) -======================== - -No significant changes. - - ----- - - -3.10.0b0 (2024-07-22) -========================= - -Bug fixes ---------- - - Fixed server response headers for ``Content-Type`` and ``Content-Encoding`` for static compressed files -- by :user:`steverep`. @@ -268,9 +212,42 @@ Contributor-facing changes +Removals and backward incompatible breaking changes +--------------------------------------------------- + +- The shutdown logic in 3.9 waited on all tasks, which caused issues with some libraries. + In 3.10 we've changed this logic to only wait on request handlers. This means that it's + important for developers to correctly handle the lifecycle of background tasks using a + library such as ``aiojobs``. If an application is using ``handler_cancellation=True`` then + it is also a good idea to ensure that any :func:`asyncio.shield` calls are replaced with + :func:`aiojobs.aiohttp.shield`. + + Please read the updated documentation on these points: + https://docs.aiohttp.org/en/stable/web_advanced.html#graceful-shutdown + https://docs.aiohttp.org/en/stable/web_advanced.html#web-handler-cancellation + + -- by :user:`Dreamsorcerer` + + + *Related issues and pull requests on GitHub:* + :issue:`8495`. + + + + Miscellaneous internal changes ------------------------------ +- Improve performance of filtering cookies -- by :user:`bdraco`. + + This change is a followup to the improvements in :issue:`7583` + + + *Related issues and pull requests on GitHub:* + :issue:`8535`. + + + - Improved URL handler resolution time by indexing resources in the UrlDispatcher. For applications with a large number of handlers, this should increase performance significantly. 
-- by :user:`bdraco` diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 5e6bd6ce387..c90ee364f5c 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.10.0rc0" +__version__ = "3.10.0" from typing import TYPE_CHECKING, Tuple From 7f298511915205d86ac12c0e79bfeda138ffb451 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 30 Jul 2024 14:24:33 -0500 Subject: [PATCH 0252/1511] Release 3.10.0 (attempt 2) (#8552) --- CHANGES.rst | 116 ++++++++++++++++++++++++---------------------------- 1 file changed, 53 insertions(+), 63 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index bd7a45b01f5..a7d46e94bd0 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -16,23 +16,6 @@ Bug fixes --------- -- Adjusted ``FileResponse`` to check file existence and access when preparing the response -- by :user:`steverep`. - - The :py:class:`~aiohttp.web.FileResponse` class was modified to respond with - 403 Forbidden or 404 Not Found as appropriate. Previously, it would cause a - server error if the path did not exist or could not be accessed. Checks for - existence, non-regular files, and permissions were expected to be done in the - route handler. For static routes, this now permits a compressed file to exist - without its uncompressed variant and still be served. In addition, this - changes the response status for files without read permission to 403, and for - non-regular files from 404 to 403 for consistency. - - - *Related issues and pull requests on GitHub:* - :issue:`8182`. - - - - Fixed server response headers for ``Content-Type`` and ``Content-Encoding`` for static compressed files -- by :user:`steverep`. 
@@ -47,7 +30,7 @@ Bug fixes -- Fix duplicate cookie expiration calls in the CookieJar implementation +- Fixed duplicate cookie expiration calls in the CookieJar implementation *Related issues and pull requests on GitHub:* @@ -55,7 +38,24 @@ Bug fixes -- Fix ``AsyncResolver`` to match ``ThreadedResolver`` behavior +- Adjusted ``FileResponse`` to check file existence and access when preparing the response -- by :user:`steverep`. + + The :py:class:`~aiohttp.web.FileResponse` class was modified to respond with + 403 Forbidden or 404 Not Found as appropriate. Previously, it would cause a + server error if the path did not exist or could not be accessed. Checks for + existence, non-regular files, and permissions were expected to be done in the + route handler. For static routes, this now permits a compressed file to exist + without its uncompressed variant and still be served. In addition, this + changes the response status for files without read permission to 403, and for + non-regular files from 404 to 403 for consistency. + + + *Related issues and pull requests on GitHub:* + :issue:`8182`. + + + +- Fixed ``AsyncResolver`` to match ``ThreadedResolver`` behavior -- by :user:`bdraco`. On system with IPv6 support, the :py:class:`~aiohttp.resolver.AsyncResolver` would not fallback @@ -71,7 +71,7 @@ Bug fixes -- Fix ``ws_connect`` not respecting `receive_timeout`` on WS(S) connection. +- Fixed ``ws_connect`` not respecting `receive_timeout`` on WS(S) connection. -- by :user:`arcivanov`. @@ -99,7 +99,7 @@ Bug fixes Features -------- -- Add a Request.wait_for_disconnection() method, as means of allowing request handlers to be notified of premature client disconnections. +- Added a Request.wait_for_disconnection() method, as means of allowing request handlers to be notified of premature client disconnections. 
*Related issues and pull requests on GitHub:* @@ -134,7 +134,7 @@ Features -- Implement filter_cookies() with domain-matching and path-matching on the keys, instead of testing every single cookie. +- Implemented filter_cookies() with domain-matching and path-matching on the keys, instead of testing every single cookie. This may break existing cookies that have been saved with `CookieJar.save()`. Cookies can be migrated with this script:: import pickle @@ -151,7 +151,7 @@ Features *Related issues and pull requests on GitHub:* - :issue:`7583`. + :issue:`7583`, :issue:`8535`. @@ -163,7 +163,7 @@ Features -- Implement happy eyeballs +- Implemented happy eyeballs *Related issues and pull requests on GitHub:* @@ -180,10 +180,33 @@ Features +Removals and backward incompatible breaking changes +--------------------------------------------------- + +- The shutdown logic in 3.9 waited on all tasks, which caused issues with some libraries. + In 3.10 we've changed this logic to only wait on request handlers. This means that it's + important for developers to correctly handle the lifecycle of background tasks using a + library such as ``aiojobs``. If an application is using ``handler_cancellation=True`` then + it is also a good idea to ensure that any :func:`asyncio.shield` calls are replaced with + :func:`aiojobs.aiohttp.shield`. + + Please read the updated documentation on these points: \ + https://docs.aiohttp.org/en/stable/web_advanced.html#graceful-shutdown \ + https://docs.aiohttp.org/en/stable/web_advanced.html#web-handler-cancellation + + -- by :user:`Dreamsorcerer` + + + *Related issues and pull requests on GitHub:* + :issue:`8495`. + + + + Improved documentation ---------------------- -- Add documentation for ``aiohttp.web.FileResponse``. +- Added documentation for ``aiohttp.web.FileResponse``. *Related issues and pull requests on GitHub:* @@ -191,7 +214,7 @@ Improved documentation -- Improve the docs for the `ssl` params. +- Improved the docs for the `ssl` params. 
*Related issues and pull requests on GitHub:* @@ -212,42 +235,9 @@ Contributor-facing changes -Removals and backward incompatible breaking changes ---------------------------------------------------- - -- The shutdown logic in 3.9 waited on all tasks, which caused issues with some libraries. - In 3.10 we've changed this logic to only wait on request handlers. This means that it's - important for developers to correctly handle the lifecycle of background tasks using a - library such as ``aiojobs``. If an application is using ``handler_cancellation=True`` then - it is also a good idea to ensure that any :func:`asyncio.shield` calls are replaced with - :func:`aiojobs.aiohttp.shield`. - - Please read the updated documentation on these points: - https://docs.aiohttp.org/en/stable/web_advanced.html#graceful-shutdown - https://docs.aiohttp.org/en/stable/web_advanced.html#web-handler-cancellation - - -- by :user:`Dreamsorcerer` - - - *Related issues and pull requests on GitHub:* - :issue:`8495`. - - - - Miscellaneous internal changes ------------------------------ -- Improve performance of filtering cookies -- by :user:`bdraco`. - - This change is a followup to the improvements in :issue:`7583` - - - *Related issues and pull requests on GitHub:* - :issue:`8535`. - - - - Improved URL handler resolution time by indexing resources in the UrlDispatcher. For applications with a large number of handlers, this should increase performance significantly. -- by :user:`bdraco` @@ -258,7 +248,7 @@ Miscellaneous internal changes -- Add `nacl_middleware <https://github.com/CosmicDNA/nacl_middleware>`_ to the list of middlewares in the third party section of the documentation. +- Added `nacl_middleware <https://github.com/CosmicDNA/nacl_middleware>`_ to the list of middlewares in the third party section of the documentation. 
*Related issues and pull requests on GitHub:* @@ -290,7 +280,7 @@ Miscellaneous internal changes -- Avoid creating a future on every websocket receive -- by :user:`bdraco`. +- Avoided creating a future on every websocket receive -- by :user:`bdraco`. *Related issues and pull requests on GitHub:* @@ -298,7 +288,7 @@ Miscellaneous internal changes -- Use identity checks for all ``WSMsgType`` type compares -- by :user:`bdraco`. +- Updated identity checks for all ``WSMsgType`` type compares -- by :user:`bdraco`. *Related issues and pull requests on GitHub:* @@ -314,7 +304,7 @@ Miscellaneous internal changes -- Restore :py:class:`~aiohttp.resolver.AsyncResolver` to be the default resolver. -- by :user:`bdraco`. +- Restored :py:class:`~aiohttp.resolver.AsyncResolver` to be the default resolver. -- by :user:`bdraco`. :py:class:`~aiohttp.resolver.AsyncResolver` was disabled by default because of IPv6 compatibility issues. These issues have been resolved and From bf5a66f171e34729ed4bc75470527808d3a2c7d8 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 30 Jul 2024 18:14:24 -0500 Subject: [PATCH 0253/1511] Bump version number to 3.10.1.dev0 (#8554) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index c90ee364f5c..61d1c77598c 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.10.0" +__version__ = "3.10.1.dev0" from typing import TYPE_CHECKING, Tuple From 482cca0a815af843e38b4c0b16448e8254b21672 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Wed, 31 Jul 2024 07:20:58 -0500 Subject: [PATCH 0254/1511] Bump version to 3.11.0.dev0 --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 61d1c77598c..79f40d6f8f3 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.10.1.dev0" +__version__ = "3.11.0.dev0" from typing import TYPE_CHECKING, Tuple From 4c81115c0318aa2c7975cb92eeb8fcde9fc84f8f Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Wed, 31 Jul 2024 07:22:01 -0500 Subject: [PATCH 0255/1511] Open 3.11 for business --- .github/dependabot.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index d1898c69e6e..deb81163faf 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -23,7 +23,7 @@ updates: directory: "/" labels: - dependencies - target-branch: "3.10" + target-branch: "3.11" schedule: interval: "daily" open-pull-requests-limit: 10 @@ -33,7 +33,7 @@ updates: directory: "/" labels: - dependencies - target-branch: "3.10" + target-branch: "3.11" schedule: interval: "daily" open-pull-requests-limit: 10 From 0e6bbbce2d19ba2b155b4c801e95365902cae6ef Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Thu, 1 Aug 2024 11:55:38 -0500 Subject: [PATCH 0256/1511] [PR #8546/a561fa99 backport][3.10] Fix WebSocket server heartbeat timeout logic (#8573) Co-authored-by: J. 
Nick Koston <nick@koston.org> Co-authored-by: Arcadiy Ivanov <arcadiy@ivanov.biz> --- CHANGES/8540.bugfix.rst | 7 +++++++ aiohttp/client_ws.py | 8 ++++++-- tests/test_client_ws_functional.py | 32 ++++++++++++++++++++++++++++-- 3 files changed, 43 insertions(+), 4 deletions(-) create mode 100644 CHANGES/8540.bugfix.rst diff --git a/CHANGES/8540.bugfix.rst b/CHANGES/8540.bugfix.rst new file mode 100644 index 00000000000..ab7c4767635 --- /dev/null +++ b/CHANGES/8540.bugfix.rst @@ -0,0 +1,7 @@ +Fixed WebSocket server heartbeat timeout logic to terminate `receive` and return :py:class:`~aiohttp.ServerTimeoutError` -- by :user:`arcivanov`. + +When a WebSocket pong message was not received, the + :py:meth:`~aiohttp.ClientWebSocketResponse.receive` operation did not terminate. + This change causes `_pong_not_received` to feed the `reader` an error message, causing + pending `receive` to terminate and return the error message. The error message contains + the exception :py:class:`~aiohttp.ServerTimeoutError`. 
diff --git a/aiohttp/client_ws.py b/aiohttp/client_ws.py index 608c659e543..c1a2c4641ba 100644 --- a/aiohttp/client_ws.py +++ b/aiohttp/client_ws.py @@ -4,7 +4,7 @@ import sys from typing import Any, Optional, cast -from .client_exceptions import ClientError +from .client_exceptions import ClientError, ServerTimeoutError from .client_reqrep import ClientResponse from .helpers import call_later, set_result from .http import ( @@ -122,8 +122,12 @@ def _pong_not_received(self) -> None: if not self._closed: self._closed = True self._close_code = WSCloseCode.ABNORMAL_CLOSURE - self._exception = asyncio.TimeoutError() + self._exception = ServerTimeoutError() self._response.close() + if self._waiting and not self._closing: + self._reader.feed_data( + WSMessage(WSMsgType.ERROR, self._exception, None) + ) @property def closed(self) -> bool: diff --git a/tests/test_client_ws_functional.py b/tests/test_client_ws_functional.py index 907a362fc7e..dc474f96c39 100644 --- a/tests/test_client_ws_functional.py +++ b/tests/test_client_ws_functional.py @@ -5,7 +5,7 @@ import pytest import aiohttp -from aiohttp import hdrs, web +from aiohttp import ServerTimeoutError, WSMsgType, hdrs, web from aiohttp.http import WSCloseCode from aiohttp.pytest_plugin import AiohttpClient @@ -624,7 +624,35 @@ async def handler(request): assert resp.close_code is WSCloseCode.ABNORMAL_CLOSURE -async def test_send_recv_compress(aiohttp_client) -> None: +async def test_heartbeat_no_pong_concurrent_receive(aiohttp_client: Any) -> None: + ping_received = False + + async def handler(request): + nonlocal ping_received + ws = web.WebSocketResponse(autoping=False) + await ws.prepare(request) + msg = await ws.receive() + ping_received = msg.type is aiohttp.WSMsgType.PING + ws._reader.feed_eof = lambda: None + await asyncio.sleep(10.0) + + app = web.Application() + app.router.add_route("GET", "/", handler) + + client = await aiohttp_client(app) + resp = await client.ws_connect("/", heartbeat=0.1) + 
resp._reader.feed_eof = lambda: None + + # Connection should be closed roughly after 1.5x heartbeat. + msg = await resp.receive(5.0) + assert ping_received + assert resp.close_code is WSCloseCode.ABNORMAL_CLOSURE + assert msg + assert msg.type is WSMsgType.ERROR + assert isinstance(msg.data, ServerTimeoutError) + + +async def test_send_recv_compress(aiohttp_client: Any) -> None: async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) From d15dbb3be3172a4fad1fd9367baf95059100f81b Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Thu, 1 Aug 2024 11:55:45 -0500 Subject: [PATCH 0257/1511] [PR #8546/a561fa99 backport][3.11] Fix WebSocket server heartbeat timeout logic (#8546) (#8574) Co-authored-by: J. Nick Koston <nick@koston.org> Co-authored-by: Arcadiy Ivanov <arcadiy@ivanov.biz> --- CHANGES/8540.bugfix.rst | 7 +++++++ aiohttp/client_ws.py | 8 ++++++-- tests/test_client_ws_functional.py | 32 ++++++++++++++++++++++++++++-- 3 files changed, 43 insertions(+), 4 deletions(-) create mode 100644 CHANGES/8540.bugfix.rst diff --git a/CHANGES/8540.bugfix.rst b/CHANGES/8540.bugfix.rst new file mode 100644 index 00000000000..ab7c4767635 --- /dev/null +++ b/CHANGES/8540.bugfix.rst @@ -0,0 +1,7 @@ +Fixed WebSocket server heartbeat timeout logic to terminate `receive` and return :py:class:`~aiohttp.ServerTimeoutError` -- by :user:`arcivanov`. + +When a WebSocket pong message was not received, the + :py:meth:`~aiohttp.ClientWebSocketResponse.receive` operation did not terminate. + This change causes `_pong_not_received` to feed the `reader` an error message, causing + pending `receive` to terminate and return the error message. The error message contains + the exception :py:class:`~aiohttp.ServerTimeoutError`. 
diff --git a/aiohttp/client_ws.py b/aiohttp/client_ws.py index 608c659e543..c1a2c4641ba 100644 --- a/aiohttp/client_ws.py +++ b/aiohttp/client_ws.py @@ -4,7 +4,7 @@ import sys from typing import Any, Optional, cast -from .client_exceptions import ClientError +from .client_exceptions import ClientError, ServerTimeoutError from .client_reqrep import ClientResponse from .helpers import call_later, set_result from .http import ( @@ -122,8 +122,12 @@ def _pong_not_received(self) -> None: if not self._closed: self._closed = True self._close_code = WSCloseCode.ABNORMAL_CLOSURE - self._exception = asyncio.TimeoutError() + self._exception = ServerTimeoutError() self._response.close() + if self._waiting and not self._closing: + self._reader.feed_data( + WSMessage(WSMsgType.ERROR, self._exception, None) + ) @property def closed(self) -> bool: diff --git a/tests/test_client_ws_functional.py b/tests/test_client_ws_functional.py index 907a362fc7e..dc474f96c39 100644 --- a/tests/test_client_ws_functional.py +++ b/tests/test_client_ws_functional.py @@ -5,7 +5,7 @@ import pytest import aiohttp -from aiohttp import hdrs, web +from aiohttp import ServerTimeoutError, WSMsgType, hdrs, web from aiohttp.http import WSCloseCode from aiohttp.pytest_plugin import AiohttpClient @@ -624,7 +624,35 @@ async def handler(request): assert resp.close_code is WSCloseCode.ABNORMAL_CLOSURE -async def test_send_recv_compress(aiohttp_client) -> None: +async def test_heartbeat_no_pong_concurrent_receive(aiohttp_client: Any) -> None: + ping_received = False + + async def handler(request): + nonlocal ping_received + ws = web.WebSocketResponse(autoping=False) + await ws.prepare(request) + msg = await ws.receive() + ping_received = msg.type is aiohttp.WSMsgType.PING + ws._reader.feed_eof = lambda: None + await asyncio.sleep(10.0) + + app = web.Application() + app.router.add_route("GET", "/", handler) + + client = await aiohttp_client(app) + resp = await client.ws_connect("/", heartbeat=0.1) + 
resp._reader.feed_eof = lambda: None + + # Connection should be closed roughly after 1.5x heartbeat. + msg = await resp.receive(5.0) + assert ping_received + assert resp.close_code is WSCloseCode.ABNORMAL_CLOSURE + assert msg + assert msg.type is WSMsgType.ERROR + assert isinstance(msg.data, ServerTimeoutError) + + +async def test_send_recv_compress(aiohttp_client: Any) -> None: async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) From 6dff116d93081673cbbf35a90ac41dddc222d991 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 1 Aug 2024 18:11:58 +0000 Subject: [PATCH 0258/1511] [PR #8558/e48acaf7 backport][3.10] Add followup changelog message for #5278 (#8576) Co-authored-by: J. Nick Koston <nick@koston.org> closes #8555 --- CHANGES/8555.breaking.rst | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 CHANGES/8555.breaking.rst diff --git a/CHANGES/8555.breaking.rst b/CHANGES/8555.breaking.rst new file mode 100644 index 00000000000..cf451fcc27a --- /dev/null +++ b/CHANGES/8555.breaking.rst @@ -0,0 +1,8 @@ +Creating :class:`aiohttp.TCPConnector`, :class:`aiohttp.ClientSession`, or + :class:`aiohttp.CookieJar` instances without a running event loop now + raises a :exc:`RuntimeError`. + + -- by :user:`asvetlov` + +Creating these objects without a running event loop was deprecated +in :issue:`3372` which was released in version 3.5.0. From 13d36efe2b3eab20f4546e633cc6fac42452c9fb Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 1 Aug 2024 18:12:27 +0000 Subject: [PATCH 0259/1511] [PR #8558/e48acaf7 backport][3.11] Add followup changelog message for #5278 (#8577) Co-authored-by: J. 
Nick Koston <nick@koston.org> closes #8555 --- CHANGES/8555.breaking.rst | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 CHANGES/8555.breaking.rst diff --git a/CHANGES/8555.breaking.rst b/CHANGES/8555.breaking.rst new file mode 100644 index 00000000000..cf451fcc27a --- /dev/null +++ b/CHANGES/8555.breaking.rst @@ -0,0 +1,8 @@ +Creating :class:`aiohttp.TCPConnector`, :class:`aiohttp.ClientSession`, or + :class:`aiohttp.CookieJar` instances without a running event loop now + raises a :exc:`RuntimeError`. + + -- by :user:`asvetlov` + +Creating these objects without a running event loop was deprecated +in :issue:`3372` which was released in version 3.5.0. From f19d920658b6d6800d28c2dc0b99423da3c3750a Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 1 Aug 2024 18:12:35 +0000 Subject: [PATCH 0260/1511] [PR #8566/f3a1afc5 backport][3.10] Fix url dispatcher index when variable is preceded by a fixed string after a slash (#8578) Co-authored-by: J. Nick Koston <nick@koston.org> fix for a regression in 3.10.x. Regressed in #7829 fixes #8567 --- CHANGES/8566.bugfix.rst | 1 + aiohttp/web_urldispatcher.py | 10 ++++++++-- tests/test_web_urldispatcher.py | 24 ++++++++++++++++++++++++ 3 files changed, 33 insertions(+), 2 deletions(-) create mode 100644 CHANGES/8566.bugfix.rst diff --git a/CHANGES/8566.bugfix.rst b/CHANGES/8566.bugfix.rst new file mode 100644 index 00000000000..61365c0bb61 --- /dev/null +++ b/CHANGES/8566.bugfix.rst @@ -0,0 +1 @@ +Fixed url dispatcher index not matching when a variable is preceded by a fixed string after a slash -- by :user:`bdraco`. 
diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index e26fd9dc7a6..a490e100ab3 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -1131,8 +1131,14 @@ def register_resource(self, resource: AbstractResource) -> None: def _get_resource_index_key(self, resource: AbstractResource) -> str: """Return a key to index the resource in the resource index.""" - # strip at the first { to allow for variables - return resource.canonical.partition("{")[0].rstrip("/") or "/" + if "{" in (index_key := resource.canonical): + # strip at the first { to allow for variables, and than + # rpartition at / to allow for variable parts in the path + # For example if the canonical path is `/core/locations{tail:.*}` + # the index key will be `/core` since index is based on the + # url parts split by `/` + index_key = index_key.partition("{")[0].rpartition("/")[0] + return index_key.rstrip("/") or "/" def index_resource(self, resource: AbstractResource) -> None: """Add a resource to the resource index.""" diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 1cda0980cc0..a799f4ba146 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -937,3 +937,27 @@ async def get(self) -> web.Response: r = await client.get("///a") assert r.status == 200 await r.release() + + +async def test_route_with_regex(aiohttp_client: AiohttpClient) -> None: + """Test a route with a regex preceded by a fixed string.""" + app = web.Application() + + async def handler(request: web.Request) -> web.Response: + assert isinstance(request.match_info._route.resource, Resource) + return web.Response(text=request.match_info._route.resource.canonical) + + app.router.add_get("/core/locations{tail:.*}", handler) + client = await aiohttp_client(app) + + r = await client.get("/core/locations/tail/here") + assert r.status == 200 + assert await r.text() == "/core/locations{tail}" + + r = await 
client.get("/core/locations_tail_here") + assert r.status == 200 + assert await r.text() == "/core/locations{tail}" + + r = await client.get("/core/locations_tail;id=abcdef") + assert r.status == 200 + assert await r.text() == "/core/locations{tail}" From 01ed18945cf331cff6c2221116d7678855a31167 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 1 Aug 2024 18:12:55 +0000 Subject: [PATCH 0261/1511] [PR #8566/f3a1afc5 backport][3.11] Fix url dispatcher index when variable is preceded by a fixed string after a slash (#8579) Co-authored-by: J. Nick Koston <nick@koston.org> fix for a regression in 3.10.x. Regressed in #7829 fixes #8567 --- CHANGES/8566.bugfix.rst | 1 + aiohttp/web_urldispatcher.py | 10 ++++++++-- tests/test_web_urldispatcher.py | 24 ++++++++++++++++++++++++ 3 files changed, 33 insertions(+), 2 deletions(-) create mode 100644 CHANGES/8566.bugfix.rst diff --git a/CHANGES/8566.bugfix.rst b/CHANGES/8566.bugfix.rst new file mode 100644 index 00000000000..61365c0bb61 --- /dev/null +++ b/CHANGES/8566.bugfix.rst @@ -0,0 +1 @@ +Fixed url dispatcher index not matching when a variable is preceded by a fixed string after a slash -- by :user:`bdraco`. 
diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index e26fd9dc7a6..a490e100ab3 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -1131,8 +1131,14 @@ def register_resource(self, resource: AbstractResource) -> None: def _get_resource_index_key(self, resource: AbstractResource) -> str: """Return a key to index the resource in the resource index.""" - # strip at the first { to allow for variables - return resource.canonical.partition("{")[0].rstrip("/") or "/" + if "{" in (index_key := resource.canonical): + # strip at the first { to allow for variables, and than + # rpartition at / to allow for variable parts in the path + # For example if the canonical path is `/core/locations{tail:.*}` + # the index key will be `/core` since index is based on the + # url parts split by `/` + index_key = index_key.partition("{")[0].rpartition("/")[0] + return index_key.rstrip("/") or "/" def index_resource(self, resource: AbstractResource) -> None: """Add a resource to the resource index.""" diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 1cda0980cc0..a799f4ba146 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -937,3 +937,27 @@ async def get(self) -> web.Response: r = await client.get("///a") assert r.status == 200 await r.release() + + +async def test_route_with_regex(aiohttp_client: AiohttpClient) -> None: + """Test a route with a regex preceded by a fixed string.""" + app = web.Application() + + async def handler(request: web.Request) -> web.Response: + assert isinstance(request.match_info._route.resource, Resource) + return web.Response(text=request.match_info._route.resource.canonical) + + app.router.add_get("/core/locations{tail:.*}", handler) + client = await aiohttp_client(app) + + r = await client.get("/core/locations/tail/here") + assert r.status == 200 + assert await r.text() == "/core/locations{tail}" + + r = await 
client.get("/core/locations_tail_here") + assert r.status == 200 + assert await r.text() == "/core/locations{tail}" + + r = await client.get("/core/locations_tail;id=abcdef") + assert r.status == 200 + assert await r.text() == "/core/locations{tail}" From 9ed3841f5d920360da1937eb440ff9cb60e7c2c6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 1 Aug 2024 19:13:31 +0000 Subject: [PATCH 0262/1511] Bump aiohappyeyeballs from 2.3.2 to 2.3.4 (#8569) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: J. Nick Koston <nick@koston.org> --- requirements/base.txt | 4 +--- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 7 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 2fc77c5455f..888f9a77899 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.2 +aiohappyeyeballs==2.3.4 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in @@ -36,8 +36,6 @@ pycares==4.3.0 # via aiodns pycparser==2.21 # via cffi -typing-extensions==4.11.0 - # via -r requirements/typing-extensions.in uvloop==0.19.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in yarl==1.9.4 diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 7420ab81b9e..c254226e3c6 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via # -r requirements/lint.in # -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.2 +aiohappyeyeballs==2.3.4 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.6 # via -r 
requirements/doc.in diff --git a/requirements/dev.txt b/requirements/dev.txt index ffb3bb40ca1..d54fcd4f50b 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via # -r requirements/lint.in # -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.2 +aiohappyeyeballs==2.3.4 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.6 # via -r requirements/doc.in diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index d1efee2aecf..5f98dceaf9c 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.2 +aiohappyeyeballs==2.3.4 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 7b68ac4849c..88b64bc868d 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.2 +aiohappyeyeballs==2.3.4 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in From f287cccdeae380eb24fea64ff4916bcb755798a6 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 2 Aug 2024 07:46:57 -0500 Subject: [PATCH 0263/1511] [PR #8583/f185dd19 backport][3.10] Add additional clarification for creating objects that require a running event loop (#8585) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/8555.breaking.rst | 15 ++++++++++----- CHANGES/8583.breaking.rst | 1 + 2 files changed, 11 insertions(+), 5 deletions(-) create mode 120000 CHANGES/8583.breaking.rst diff --git a/CHANGES/8555.breaking.rst b/CHANGES/8555.breaking.rst index cf451fcc27a..54850482c70 100644 --- a/CHANGES/8555.breaking.rst +++ b/CHANGES/8555.breaking.rst @@ -1,8 +1,13 @@ -Creating :class:`aiohttp.TCPConnector`, :class:`aiohttp.ClientSession`, or - :class:`aiohttp.CookieJar` instances without a running event loop now - raises a :exc:`RuntimeError`. - - -- by :user:`asvetlov` +Creating :py:class:`aiohttp.TCPConnector`, + :py:class:`aiohttp.ClientSession`, + :py:class:`~aiohttp.resolver.ThreadedResolver` + :py:class:`aiohttp.web.Server`, + or :py:class:`aiohttp.CookieJar` + instances without a running event loop now + raises a :exc:`RuntimeError` + -- by :user:`asvetlov`. Creating these objects without a running event loop was deprecated in :issue:`3372` which was released in version 3.5.0. + +This change first appeared in version 3.10.0 as :issue:`6378`. diff --git a/CHANGES/8583.breaking.rst b/CHANGES/8583.breaking.rst new file mode 120000 index 00000000000..f743fcc1eb0 --- /dev/null +++ b/CHANGES/8583.breaking.rst @@ -0,0 +1 @@ +8555.breaking.rst \ No newline at end of file From e190c30f3a97642a3a0bfca86374c924badae1df Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 2 Aug 2024 07:47:08 -0500 Subject: [PATCH 0264/1511] [PR #8583/f185dd19 backport][3.11] Add additional clarification for creating objects that require a running event loop (#8586) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/8555.breaking.rst | 15 ++++++++++----- CHANGES/8583.breaking.rst | 1 + 2 files changed, 11 insertions(+), 5 deletions(-) create mode 120000 CHANGES/8583.breaking.rst diff --git a/CHANGES/8555.breaking.rst b/CHANGES/8555.breaking.rst index cf451fcc27a..54850482c70 100644 --- a/CHANGES/8555.breaking.rst +++ b/CHANGES/8555.breaking.rst @@ -1,8 +1,13 @@ -Creating :class:`aiohttp.TCPConnector`, :class:`aiohttp.ClientSession`, or - :class:`aiohttp.CookieJar` instances without a running event loop now - raises a :exc:`RuntimeError`. - - -- by :user:`asvetlov` +Creating :py:class:`aiohttp.TCPConnector`, + :py:class:`aiohttp.ClientSession`, + :py:class:`~aiohttp.resolver.ThreadedResolver` + :py:class:`aiohttp.web.Server`, + or :py:class:`aiohttp.CookieJar` + instances without a running event loop now + raises a :exc:`RuntimeError` + -- by :user:`asvetlov`. Creating these objects without a running event loop was deprecated in :issue:`3372` which was released in version 3.5.0. + +This change first appeared in version 3.10.0 as :issue:`6378`. diff --git a/CHANGES/8583.breaking.rst b/CHANGES/8583.breaking.rst new file mode 120000 index 00000000000..f743fcc1eb0 --- /dev/null +++ b/CHANGES/8583.breaking.rst @@ -0,0 +1 @@ +8555.breaking.rst \ No newline at end of file From 081021994ea06ec5bc01ecec08ac1a58f5bcbba0 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Fri, 2 Aug 2024 08:31:03 -0500 Subject: [PATCH 0265/1511] Release 3.10.1 (#8587) --- CHANGES.rst | 56 +++++++++++++++++++++++++++++++++++++++ CHANGES/8540.bugfix.rst | 7 ----- CHANGES/8555.breaking.rst | 13 --------- CHANGES/8566.bugfix.rst | 1 - CHANGES/8583.breaking.rst | 1 - aiohttp/__init__.py | 2 +- 6 files changed, 57 insertions(+), 23 deletions(-) delete mode 100644 CHANGES/8540.bugfix.rst delete mode 100644 CHANGES/8555.breaking.rst delete mode 100644 CHANGES/8566.bugfix.rst delete mode 120000 CHANGES/8583.breaking.rst diff --git a/CHANGES.rst b/CHANGES.rst index a7d46e94bd0..8da40ff025c 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,62 @@ .. towncrier release notes start +3.10.1 (2024-08-02) +======================== + +Bug fixes +--------- + +- Fixed WebSocket server heartbeat timeout logic to terminate `receive` and return :py:class:`~aiohttp.ServerTimeoutError` -- by :user:`arcivanov`. + + When a WebSocket pong message was not received, the + :py:meth:`~aiohttp.ClientWebSocketResponse.receive` operation did not terminate. + This change causes `_pong_not_received` to feed the `reader` an error message, causing + pending `receive` to terminate and return the error message. The error message contains + the exception :py:class:`~aiohttp.ServerTimeoutError`. + + + *Related issues and pull requests on GitHub:* + :issue:`8540`. + + + +- Fixed url dispatcher index not matching when a variable is preceded by a fixed string after a slash -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8566`. 
+ + + + +Removals and backward incompatible breaking changes +--------------------------------------------------- + +- Creating :py:class:`aiohttp.TCPConnector`, + :py:class:`aiohttp.ClientSession`, + :py:class:`~aiohttp.resolver.ThreadedResolver` + :py:class:`aiohttp.web.Server`, + or :py:class:`aiohttp.CookieJar` + instances without a running event loop now + raises a :exc:`RuntimeError` + -- by :user:`asvetlov`. + + Creating these objects without a running event loop was deprecated + in :issue:`3372` which was released in version 3.5.0. + + This change first appeared in version 3.10.0 as :issue:`6378`. + + + *Related issues and pull requests on GitHub:* + :issue:`8555`, :issue:`8583`. + + + + +---- + + 3.10.0 (2024-07-30) ======================== diff --git a/CHANGES/8540.bugfix.rst b/CHANGES/8540.bugfix.rst deleted file mode 100644 index ab7c4767635..00000000000 --- a/CHANGES/8540.bugfix.rst +++ /dev/null @@ -1,7 +0,0 @@ -Fixed WebSocket server heartbeat timeout logic to terminate `receive` and return :py:class:`~aiohttp.ServerTimeoutError` -- by :user:`arcivanov`. - -When a WebSocket pong message was not received, the - :py:meth:`~aiohttp.ClientWebSocketResponse.receive` operation did not terminate. - This change causes `_pong_not_received` to feed the `reader` an error message, causing - pending `receive` to terminate and return the error message. The error message contains - the exception :py:class:`~aiohttp.ServerTimeoutError`. diff --git a/CHANGES/8555.breaking.rst b/CHANGES/8555.breaking.rst deleted file mode 100644 index 54850482c70..00000000000 --- a/CHANGES/8555.breaking.rst +++ /dev/null @@ -1,13 +0,0 @@ -Creating :py:class:`aiohttp.TCPConnector`, - :py:class:`aiohttp.ClientSession`, - :py:class:`~aiohttp.resolver.ThreadedResolver` - :py:class:`aiohttp.web.Server`, - or :py:class:`aiohttp.CookieJar` - instances without a running event loop now - raises a :exc:`RuntimeError` - -- by :user:`asvetlov`. 
- -Creating these objects without a running event loop was deprecated -in :issue:`3372` which was released in version 3.5.0. - -This change first appeared in version 3.10.0 as :issue:`6378`. diff --git a/CHANGES/8566.bugfix.rst b/CHANGES/8566.bugfix.rst deleted file mode 100644 index 61365c0bb61..00000000000 --- a/CHANGES/8566.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed url dispatcher index not matching when a variable is preceded by a fixed string after a slash -- by :user:`bdraco`. diff --git a/CHANGES/8583.breaking.rst b/CHANGES/8583.breaking.rst deleted file mode 120000 index f743fcc1eb0..00000000000 --- a/CHANGES/8583.breaking.rst +++ /dev/null @@ -1 +0,0 @@ -8555.breaking.rst \ No newline at end of file diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 61d1c77598c..9ec07dbe449 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.10.1.dev0" +__version__ = "3.10.1" from typing import TYPE_CHECKING, Tuple From e12da225c19808213fe53e0640e6205e0a0158b0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 2 Aug 2024 13:59:59 +0000 Subject: [PATCH 0266/1511] Bump aiohappyeyeballs from 2.3.2 to 2.3.4 (#8590) Bumps [aiohappyeyeballs](https://github.com/aio-libs/aiohappyeyeballs) from 2.3.2 to 2.3.4. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/aiohappyeyeballs/releases">aiohappyeyeballs's releases</a>.</em></p> <blockquote> <h1>v2.3.4 (2024-07-31)</h1> <h2>Fix</h2> <ul> <li>fix: add missing asyncio to fix truncated package description (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/67">#67</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/2644df179e21e4513da857f2aea2aa64a3fb6316"><code>2644df1</code></a>)</li> </ul> <h1>v2.3.3 (2024-07-31)</h1> <h2>Fix</h2> <ul> <li> <p>fix: add missing python version classifiers (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/65">#65</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/489016feb53d4fd5f9880f27dc40a5198d5b0be2"><code>489016f</code></a>)</p> </li> <li> <p>fix: update classifiers to include license (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/60">#60</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/a746c296b324407efef272f422a990587b9d6057"><code>a746c29</code></a>)</p> </li> <li> <p>fix: workaround broken <code>asyncio.staggered</code> on python < 3.8.2 (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/61">#61</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/b16f107d9493817247c27ab83522901f086a13b5"><code>b16f107</code></a>)</p> </li> <li> <p>fix: include tests in the source distribution package (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/62">#62</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/53053b6a38ef868e0170940ced5e0611ebd1be4c"><code>53053b6</code></a>)</p> </li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/aiohappyeyeballs/blob/main/CHANGELOG.md">aiohappyeyeballs's changelog</a>.</em></p> <blockquote> <h2>v2.3.4 (2024-07-31)</h2> <h3>Fix</h3> <ul> <li>Add missing 
asyncio to fix truncated package description (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/67">#67</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/2644df179e21e4513da857f2aea2aa64a3fb6316"><code>2644df1</code></a>)</li> </ul> <h2>v2.3.3 (2024-07-31)</h2> <h3>Fix</h3> <ul> <li>Add missing python version classifiers (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/65">#65</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/489016feb53d4fd5f9880f27dc40a5198d5b0be2"><code>489016f</code></a>)</li> <li>Update classifiers to include license (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/60">#60</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/a746c296b324407efef272f422a990587b9d6057"><code>a746c29</code></a>)</li> <li>Workaround broken <code>asyncio.staggered</code> on python < 3.8.2 (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/61">#61</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/b16f107d9493817247c27ab83522901f086a13b5"><code>b16f107</code></a>)</li> <li>Include tests in the source distribution package (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/62">#62</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/53053b6a38ef868e0170940ced5e0611ebd1be4c"><code>53053b6</code></a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/c8fecf6d70dcd5ccc2d11b3e269dacea69f77052"><code>c8fecf6</code></a> 2.3.4</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/2644df179e21e4513da857f2aea2aa64a3fb6316"><code>2644df1</code></a> fix: add missing asyncio to fix truncated package description (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/67">#67</a>)</li> <li><a 
href="https://github.com/aio-libs/aiohappyeyeballs/commit/0501abcd081ef63c7da7e40adfb09b5562e47618"><code>0501abc</code></a> chore: fix publishing package to github (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/66">#66</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/58b08459d322898fedf38c0d1f0d316aebc0a498"><code>58b0845</code></a> 2.3.3</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/489016feb53d4fd5f9880f27dc40a5198d5b0be2"><code>489016f</code></a> fix: add missing python version classifiers (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/65">#65</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/6ff0742de6ce7b763b298b26f84b4c8e614b0b1b"><code>6ff0742</code></a> chore: fix permissions in release CI workflow (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/64">#64</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/92c2daf5ffbf94f4433712eb4c3053ac543c5f58"><code>92c2daf</code></a> chore: bump release actions to fix release workflow (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/63">#63</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/a746c296b324407efef272f422a990587b9d6057"><code>a746c29</code></a> fix: update classifiers to include license (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/60">#60</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/b16f107d9493817247c27ab83522901f086a13b5"><code>b16f107</code></a> fix: workaround broken <code>asyncio.staggered</code> on python < 3.8.2 (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/61">#61</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/53053b6a38ef868e0170940ced5e0611ebd1be4c"><code>53053b6</code></a> fix: include tests in the source distribution package (<a 
href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/62">#62</a>)</li> <li>Additional commits viewable in <a href="https://github.com/aio-libs/aiohappyeyeballs/compare/v2.3.2...v2.3.4">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=aiohappyeyeballs&package-manager=pip&previous-version=2.3.2&new-version=2.3.4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 4 +--- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 7 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 2fc77c5455f..888f9a77899 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.2 +aiohappyeyeballs==2.3.4 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in @@ -36,8 +36,6 @@ pycares==4.3.0 # via aiodns pycparser==2.21 # via cffi -typing-extensions==4.11.0 - # via -r requirements/typing-extensions.in uvloop==0.19.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in yarl==1.9.4 diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 7420ab81b9e..c254226e3c6 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or 
sys_platform == "darwin" # via # -r requirements/lint.in # -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.2 +aiohappyeyeballs==2.3.4 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.6 # via -r requirements/doc.in diff --git a/requirements/dev.txt b/requirements/dev.txt index ffb3bb40ca1..d54fcd4f50b 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via # -r requirements/lint.in # -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.2 +aiohappyeyeballs==2.3.4 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.6 # via -r requirements/doc.in diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index d1efee2aecf..5f98dceaf9c 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.2 +aiohappyeyeballs==2.3.4 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 7b68ac4849c..88b64bc868d 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.2 +aiohappyeyeballs==2.3.4 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in From 0547f808589334ad0725087ed4b221f1b6d7eec8 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 2 Aug 2024 23:56:45 +0000 Subject: [PATCH 0267/1511] [PR #8594/3b4cf01f backport][3.11] Close stale issues without info (#8595) **This is a backport of PR #8594 as merged into master (3b4cf01fcb464322ec3bdf84e8e232015fcae673).** Co-authored-by: Sam Bull <git@sambull.org> --- .github/workflows/stale.yml | 
17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 .github/workflows/stale.yml diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml new file mode 100644 index 00000000000..ef1b86cfa69 --- /dev/null +++ b/.github/workflows/stale.yml @@ -0,0 +1,17 @@ +name: 'Close stale issues' +on: + schedule: + - cron: '50 5 * * *' + +permissions: + issues: write + +jobs: + stale: + runs-on: ubuntu-latest + steps: + - uses: actions/stale@v9 + with: + days-before-stale: 30 + any-of-labels: needs-info + labels-to-remove-when-unstale: needs-info From f1e4213fb06634584f8d7a1eb90f5397736a18cc Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 4 Aug 2024 16:17:26 -0500 Subject: [PATCH 0268/1511] Release 3.10.1 (attempt 2) (#8598) --- CHANGES.rst | 24 ++++++------------------ 1 file changed, 6 insertions(+), 18 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 8da40ff025c..b1331a7fe9f 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,19 +10,15 @@ .. towncrier release notes start -3.10.1 (2024-08-02) +3.10.1 (2024-08-03) ======================== Bug fixes --------- -- Fixed WebSocket server heartbeat timeout logic to terminate `receive` and return :py:class:`~aiohttp.ServerTimeoutError` -- by :user:`arcivanov`. +- Fixed WebSocket server heartbeat timeout logic to terminate :py:meth:`~aiohttp.ClientWebSocketResponse.receive` and return :py:class:`~aiohttp.ServerTimeoutError` -- by :user:`arcivanov`. - When a WebSocket pong message was not received, the - :py:meth:`~aiohttp.ClientWebSocketResponse.receive` operation did not terminate. - This change causes `_pong_not_received` to feed the `reader` an error message, causing - pending `receive` to terminate and return the error message. The error message contains - the exception :py:class:`~aiohttp.ServerTimeoutError`. + When a WebSocket pong message was not received, the :py:meth:`~aiohttp.ClientWebSocketResponse.receive` operation did not terminate. 
This change causes ``_pong_not_received`` to feed the ``reader`` an error message, causing pending :py:meth:`~aiohttp.ClientWebSocketResponse.receive` to terminate and return the error message. The error message contains the exception :py:class:`~aiohttp.ServerTimeoutError`. *Related issues and pull requests on GitHub:* @@ -42,17 +38,9 @@ Bug fixes Removals and backward incompatible breaking changes --------------------------------------------------- -- Creating :py:class:`aiohttp.TCPConnector`, - :py:class:`aiohttp.ClientSession`, - :py:class:`~aiohttp.resolver.ThreadedResolver` - :py:class:`aiohttp.web.Server`, - or :py:class:`aiohttp.CookieJar` - instances without a running event loop now - raises a :exc:`RuntimeError` - -- by :user:`asvetlov`. - - Creating these objects without a running event loop was deprecated - in :issue:`3372` which was released in version 3.5.0. +- Creating :py:class:`aiohttp.TCPConnector`, :py:class:`aiohttp.ClientSession`, :py:class:`~aiohttp.resolver.ThreadedResolver` :py:class:`aiohttp.web.Server`, or :py:class:`aiohttp.CookieJar` instances without a running event loop now raises a :exc:`RuntimeError` -- by :user:`asvetlov`. + + Creating these objects without a running event loop was deprecated in :issue:`3372` which was released in version 3.5.0. This change first appeared in version 3.10.0 as :issue:`6378`. From 840640928a51e8ab897fbbdb7faec077d79cabba Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Aug 2024 11:36:55 +0000 Subject: [PATCH 0269/1511] Bump pypa/cibuildwheel from 2.19.2 to 2.20.0 (#8605) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.19.2 to 2.20.0. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pypa/cibuildwheel/releases">pypa/cibuildwheel's releases</a>.</em></p> <blockquote> <h2>Version 2.20.0</h2> <ul> <li>🌟 CPython 3.13 wheels are now built by default - without the <code>CIBW_PRERELEASE_PYTHONS</code> flag. It's time to build and upload these wheels to PyPI! This release includes CPython 3.13.0rc1, which is guaranteed to be ABI compatible with the final release. Free-threading is still behind a flag/config option. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1950">#1950</a>)</li> <li>✨ Provide a <code>CIBW_ALLOW_EMPTY</code> environment variable as an alternative to the command line flag. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1937">#1937</a>)</li> <li>🐛 Don't use uv on PyPy3.8 on Windows, it stopped working starting in 0.2.25. Note that PyPy 3.8 is EoL. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1868">#1868</a>)</li> <li>🛠 Set the <code>VSCMD_ARG_TGT_ARCH</code> variable based on target arch. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1876">#1876</a>)</li> <li>🛠 Undo cleaner output on pytest 8-8.2 now that 8.3 is out. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1943">#1943</a>)</li> <li>📚 Update examples to use Python 3.12 on host (cibuildwheel will require Python 3.11+ on the host machine starting in October 2024) (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1919">#1919</a>)</li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md">pypa/cibuildwheel's changelog</a>.</em></p> <blockquote> <h3>v2.20.0</h3> <ul> <li>🌟 CPython 3.13 wheels are now built by default - without the <code>CIBW_PRERELEASE_PYTHONS</code> flag. It's time to build and upload these wheels to PyPI! 
This release includes CPython 3.13.0rc1, which is guaranteed to be ABI compatible with the final release. Free-threading is still behind a flag/config option. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1950">#1950</a>)</li> <li>✨ Provide a <code>CIBW_ALLOW_EMPTY</code> environment variable as an alternative to the command line flag. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1937">#1937</a>)</li> <li>🐛 Don't use uv on PyPy3.8 on Windows, it stopped working starting in 0.2.25. Note that PyPy 3.8 is EoL. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1868">#1868</a>)</li> <li>🛠 Set the <code>VSCMD_ARG_TGT_ARCH</code> variable based on target arch. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1876">#1876</a>)</li> <li>🛠 Undo cleaner output on pytest 8-8.2 now that 8.3 is out. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1943">#1943</a>)</li> <li>📚 Update examples to use Python 3.12 on host (cibuildwheel will require Python 3.11+ on the host machine starting in October 2024) (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1919">#1919</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/cibuildwheel/commit/bd033a44476646b606efccdd5eed92d5ea1d77ad"><code>bd033a4</code></a> Bump version: v2.20.0</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/9d023cf77aa0e0d52ac3aeb8db21f8137eed2469"><code>9d023cf</code></a> Build CPython 3.13 by default (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1950">#1950</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/8a69dbd9d4dcfb315e60a3b42f79365b35857210"><code>8a69dbd</code></a> [Bot] Update dependencies (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1949">#1949</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/147de6f4f7bba00e694321b7cf3a519441a444fa"><code>147de6f</code></a> 
[pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1947">#1947</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/e217e446612356e930d6e8aec01a901cc5bc846f"><code>e217e44</code></a> [Bot] Update dependencies (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1946">#1946</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/871ef6b506b01f5ba2770bf835130110d231643c"><code>871ef6b</code></a> tests: show xfail traceback summaries again (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1943">#1943</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/fac72f024a3fe6fbbfb15c00911e7587fbca089e"><code>fac72f0</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1944">#1944</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/d4c332104d5d20ed728e6616cfef4fd6d179ba7b"><code>d4c3321</code></a> feat: add <code>CIBW_ALLOW_EMPTY</code> environment variable (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1937">#1937</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/5af5df4e4fb8b50198e0b8c6182903cd6e349c8a"><code>5af5df4</code></a> [Bot] Update dependencies (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1935">#1935</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/6c5cf878718641c3cbf21d69c670ff2e53a8263c"><code>6c5cf87</code></a> docs: fix nox docs command (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1938">#1938</a>)</li> <li>Additional commits viewable in <a href="https://github.com/pypa/cibuildwheel/compare/v2.19.2...v2.20.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pypa/cibuildwheel&package-manager=github_actions&previous-version=2.19.2&new-version=2.20.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index f072a12aa34..5d5d6fa8c62 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -351,7 +351,7 @@ jobs: run: | make cythonize - name: Build wheels - uses: pypa/cibuildwheel@v2.19.2 + uses: pypa/cibuildwheel@v2.20.0 env: CIBW_ARCHS_MACOS: x86_64 arm64 universal2 - uses: actions/upload-artifact@v3 From 09044ddf4dbddf70d83b2fba0126f1ca70dfce97 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Aug 2024 11:41:01 +0000 Subject: [PATCH 0270/1511] Bump coverage from 7.6.0 to 7.6.1 (#8606) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.6.0 to 7.6.1. 
<details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst">coverage's changelog</a>.</em></p> <blockquote> <h2>Version 7.6.1 — 2024-08-04</h2> <ul> <li> <p>Fix: coverage used to fail when measuring code using :func:<code>runpy.run_path <python:runpy.run_path></code> with a :class:<code>Path <python:pathlib.Path></code> argument. This is now fixed, thanks to <code>Ask Hjorth Larsen <pull 1819_></code>_.</p> </li> <li> <p>Fix: backslashes preceding a multi-line backslashed string could confuse the HTML report. This is now fixed, thanks to <code>LiuYinCarl <pull 1828_></code>_.</p> </li> <li> <p>Now we publish wheels for Python 3.13, both regular and free-threaded.</p> </li> </ul> <p>.. _pull 1819: <a href="https://redirect.github.com/nedbat/coveragepy/pull/1819">nedbat/coveragepy#1819</a> .. _pull 1828: <a href="https://redirect.github.com/nedbat/coveragepy/pull/1828">nedbat/coveragepy#1828</a></p> <p>.. _changes_7-6-0:</p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/nedbat/coveragepy/commit/29f5898c571aac9d6380203179ba9d6c28195c62"><code>29f5898</code></a> docs: sample HTML for 7.6.1</li> <li><a href="https://github.com/nedbat/coveragepy/commit/9b829f1dcb57e724054e153d2ab0a2fec5d52122"><code>9b829f1</code></a> docs: prep for 7.6.1</li> <li><a href="https://github.com/nedbat/coveragepy/commit/ebbb6a2d334f67925b78200d9da2e5fb2b0e02fb"><code>ebbb6a2</code></a> build: wheels for 3.13rc1</li> <li><a href="https://github.com/nedbat/coveragepy/commit/3872525ab8d0757b4b38f210c61977f319f094b3"><code>3872525</code></a> chore: make upgrade</li> <li><a href="https://github.com/nedbat/coveragepy/commit/7a27f401d10a5a55ba46f4bc726cac261aa7708e"><code>7a27f40</code></a> test: fix a test on free-threading, use abiflags to get site-packages path co...</li> <li><a 
href="https://github.com/nedbat/coveragepy/commit/2b53664329903a80af78a1f5de1a40ecbd8f6cfb"><code>2b53664</code></a> build: include gil/nogil in the version banner</li> <li><a href="https://github.com/nedbat/coveragepy/commit/da1682fdbedba7dd99d5b7c635c47e33ce8f207c"><code>da1682f</code></a> docs: changelog and contributor for <a href="https://redirect.github.com/nedbat/coveragepy/issues/1828">#1828</a></li> <li><a href="https://github.com/nedbat/coveragepy/commit/dc819ff4d2333dfbf01c37406bb73ff1aceb8b3a"><code>dc819ff</code></a> test: two tests for <a href="https://redirect.github.com/nedbat/coveragepy/issues/1828">#1828</a></li> <li><a href="https://github.com/nedbat/coveragepy/commit/9aaa404e9606e9f9784eac1400d3b2ce9ef2902d"><code>9aaa404</code></a> fix: properly handle backslash before multi-line string (<a href="https://redirect.github.com/nedbat/coveragepy/issues/1828">#1828</a>)</li> <li><a href="https://github.com/nedbat/coveragepy/commit/9c5027077e478d0996639445ea1e2554edaf3295"><code>9c50270</code></a> chore: make upgrade</li> <li>Additional commits viewable in <a href="https://github.com/nedbat/coveragepy/compare/7.6.0...7.6.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=coverage&package-manager=pip&previous-version=7.6.0&new-version=7.6.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index c254226e3c6..b9975696662 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -56,7 +56,7 @@ click==8.0.3 # towncrier # typer # wait-for-it -coverage==7.6.0 +coverage==7.6.1 
# via # -r requirements/test.in # pytest-cov diff --git a/requirements/dev.txt b/requirements/dev.txt index d54fcd4f50b..fb7819ba43b 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -54,7 +54,7 @@ click==8.1.6 # towncrier # typer # wait-for-it -coverage==7.6.0 +coverage==7.6.1 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/test.txt b/requirements/test.txt index 88b64bc868d..dca8273b4d4 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -30,7 +30,7 @@ click==8.1.6 # via # typer # wait-for-it -coverage==7.6.0 +coverage==7.6.1 # via # -r requirements/test.in # pytest-cov From e3775a359ea169c32ef6b6b8e5f43f58f8336379 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Aug 2024 11:58:34 +0000 Subject: [PATCH 0271/1511] Bump attrs from 23.2.0 to 24.1.0 (#8607) Bumps [attrs](https://github.com/sponsors/hynek) from 23.2.0 to 24.1.0. <details> <summary>Commits</summary> <ul> <li>See full diff in <a href="https://github.com/sponsors/hynek/commits">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=attrs&package-manager=pip&previous-version=23.2.0&new-version=24.1.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 888f9a77899..39d376eba12 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -12,7 +12,7 @@ aiosignal==1.3.1 # via -r 
requirements/runtime-deps.in async-timeout==4.0.3 ; python_version < "3.11" # via -r requirements/runtime-deps.in -attrs==23.2.0 +attrs==24.1.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index b9975696662..4e1d52f9f91 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -24,7 +24,7 @@ async-timeout==4.0.3 ; python_version < "3.11" # via # -r requirements/runtime-deps.in # aioredis -attrs==23.2.0 +attrs==24.1.0 # via -r requirements/runtime-deps.in babel==2.9.1 # via sphinx diff --git a/requirements/dev.txt b/requirements/dev.txt index fb7819ba43b..71d7054f1f7 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -24,7 +24,7 @@ async-timeout==4.0.3 ; python_version < "3.11" # via # -r requirements/runtime-deps.in # aioredis -attrs==23.2.0 +attrs==24.1.0 # via -r requirements/runtime-deps.in babel==2.12.1 # via sphinx diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 5f98dceaf9c..cc28de5da47 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -12,7 +12,7 @@ aiosignal==1.3.1 # via -r requirements/runtime-deps.in async-timeout==4.0.3 ; python_version < "3.11" # via -r requirements/runtime-deps.in -attrs==23.2.0 +attrs==24.1.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index dca8273b4d4..c18df4a7ee3 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -14,7 +14,7 @@ annotated-types==0.5.0 # via pydantic async-timeout==4.0.3 ; python_version < "3.11" # via -r requirements/runtime-deps.in -attrs==23.2.0 +attrs==24.1.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in 
From 89dfa1e7f65ca4e174c2851af2af1a6fad7bd2e1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 6 Aug 2024 11:22:39 +0000 Subject: [PATCH 0272/1511] Bump proxy-py from 2.4.4 to 2.4.5 (#8617) Bumps [proxy-py](https://github.com/abhinavsingh/proxy.py) from 2.4.4 to 2.4.5. <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/6ac5aedd2631985abc7630bba2b6974e30956707"><code>6ac5aed</code></a> <code>Grout Wildcard</code> documentation (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1441">#1441</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/b9fa0d5a4c690693eb57b3a020e098e81fcf518d"><code>b9fa0d5</code></a> Renable Codecov (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1440">#1440</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/2dbc8af0bb96314ae17b208c804ead137ece5c5a"><code>2dbc8af</code></a> Support Grout Wildcards (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1439">#1439</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/84c36b60c2a0e44c9dd700e27101222d070e44ff"><code>84c36b6</code></a> Static route reverse proxy always <code>needs_upstream</code> (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1434">#1434</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/a7077cf8db3bb66a6667a9d968a401e8f805e092"><code>a7077cf</code></a> Add <code>ModifyRequestHeaderPlugin</code> (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1420">#1420</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/e34da54323fe52553e96d7571990337c0cff1f37"><code>e34da54</code></a> Grout (ngrok alternative) using Docker doc</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/afa89bc74975cd683f7767a36bbd6864e55db220"><code>afa89bc</code></a> Grout (ngrok alternative) using Docker doc</li> 
<li><a href="https://github.com/abhinavsingh/proxy.py/commit/7bb04c020a5b407d0f0385ac36584fcb3fc3d68b"><code>7bb04c0</code></a> Include <code>openssl</code>, <code>cryptography</code> and <code>paramiko</code> in default DockerHub image (...</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/d124d4ec6ec5047db50ece6e15bdaa4dd538dc71"><code>d124d4e</code></a> Use <code>base</code> name for base docker image</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/f19db0ce1be5b53b16439a187227e1f10d893f76"><code>f19db0c</code></a> Optimize base docker image size</li> <li>Additional commits viewable in <a href="https://github.com/abhinavsingh/proxy.py/compare/v2.4.4...v2.4.5">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=proxy-py&package-manager=pip&previous-version=2.4.4&new-version=2.4.5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 4e1d52f9f91..4ef1551a210 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -142,7 +142,7 @@ pluggy==1.5.0 # via pytest pre-commit==3.5.0 # via -r requirements/lint.in -proxy-py==2.4.4 +proxy-py==2.4.5 # via -r requirements/test.in pycares==4.3.0 # via aiodns diff --git a/requirements/dev.txt b/requirements/dev.txt index 71d7054f1f7..af5682183d6 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -137,7 +137,7 @@ pluggy==1.5.0 # via pytest pre-commit==3.5.0 # via -r requirements/lint.in -proxy-py==2.4.4 +proxy-py==2.4.5 # via -r requirements/test.in pycares==4.3.0 # via aiodns diff --git a/requirements/test.txt b/requirements/test.txt index c18df4a7ee3..1a00d3bd939 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -67,7 +67,7 @@ packaging==23.1 # pytest pluggy==1.5.0 # via pytest -proxy-py==2.4.4 +proxy-py==2.4.5 # via -r requirements/test.in pycares==4.3.0 # 
via aiodns From 200d313f7994947fe1260e03c711465f7a0bc454 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 6 Aug 2024 11:30:35 +0000 Subject: [PATCH 0273/1511] Bump cython from 3.0.10 to 3.0.11 (#8618) Bumps [cython](https://github.com/cython/cython) from 3.0.10 to 3.0.11. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/cython/cython/blob/master/CHANGES.rst">cython's changelog</a>.</em></p> <blockquote> <h1>3.0.11 (2024-08-05)</h1> <h2>Features added</h2> <ul> <li>The C++11 <code>emplace*</code> methods were added to <code>libcpp.deque</code>. Patch by Somin An. (Github issue :issue:<code>6159</code>)</li> </ul> <h2>Bugs fixed</h2> <ul> <li> <p>The exception check value of functions declared in pxd files was not always applied in 3.0.10. (Github issue :issue:<code>6122</code>)</p> </li> <li> <p>A crash on exception deallocations was fixed. (Github issue :issue:<code>6022</code>)</p> </li> <li> <p>A crash was fixed when assigning a zero-length slice to a memoryview. Patch by Michael Man. (Github issue :issue:<code>6227</code>)</p> </li> <li> <p><code>libcpp.optional.value()</code> could crash if it raised a C++ exception. Patch by Alexander Condello. (Github issue :issue:<code>6190</code>)</p> </li> <li> <p>The return type of <code>str()</code> was mishandled, leading to crashes with <code>language_level=3</code>. (Github issue :issue:<code>6166</code>)</p> </li> <li> <p><code>bytes.startswith/endswith()</code> failed for non-bytes substrings (e.g. <code>bytearray</code>). (Github issue :issue:<code>6168</code>)</p> </li> <li> <p>Fused ctuples crashed Cython. (Github issue :issue:<code>6068</code>)</p> </li> <li> <p>A compiler crash was fixed when using extension types in fused types. (Github issue :issue:<code>6204</code>)</p> </li> <li> <p>The module cleanup code was incorrect for globally defined memory view slices. 
(Github issue :issue:<code>6276</code>)</p> </li> <li> <p>Some adaptations were made to enable compilation in Python 3.13. (Github issues :issue:<code>5997</code>, :issue:<code>6182</code>, :issue:<code>6251</code>)</p> </li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/cython/cython/commit/bb4180ad2451845d4187f6a1b6441bc2cda81af8"><code>bb4180a</code></a> Build: Make Python wheel also on regular push.</li> <li><a href="https://github.com/cython/cython/commit/cde2a8d31c83efb4be0122a90b8d67b9d2453190"><code>cde2a8d</code></a> Fix test in Py2.</li> <li><a href="https://github.com/cython/cython/commit/0fb0f581dfdb370df97c1e062cc297f6db290963"><code>0fb0f58</code></a> Prepare release of 3.0.11.</li> <li><a href="https://github.com/cython/cython/commit/5170f6678b30d0eb992edbf457044203724dab2c"><code>5170f66</code></a> Update changelog.</li> <li><a href="https://github.com/cython/cython/commit/0dfcb7026c105edc97e090784ae17699b2283444"><code>0dfcb70</code></a> Add c++11 emplace functions to deque.pxd (<a href="https://redirect.github.com/cython/cython/issues/6159">#6159</a>)</li> <li><a href="https://github.com/cython/cython/commit/6d712958841f827072ab91be7220d27e96841e29"><code>6d71295</code></a> Mark libcpp.optional.value as except + (<a href="https://redirect.github.com/cython/cython/issues/6190">#6190</a>)</li> <li><a href="https://github.com/cython/cython/commit/54dc84749bee3220564f5e8cb11d3e2860a72eab"><code>54dc847</code></a> Fix typo.</li> <li><a href="https://github.com/cython/cython/commit/0dfbd3184451a017c77852b607b659f79a00a01f"><code>0dfbd31</code></a> Fix segfault on zero-length slice assignment on memoryview (<a href="https://redirect.github.com/cython/cython/issues/6230">GH-6230</a>)</li> <li><a href="https://github.com/cython/cython/commit/3984b76c5afad4be4f9cad3f85ff420e87dce917"><code>3984b76</code></a> Test runner: Make thread-ID printing work in Py2.7.</li> <li><a 
href="https://github.com/cython/cython/commit/ad71a9ccdae41288514924301871f81d2e497887"><code>ad71a9c</code></a> Update changelog.</li> <li>Additional commits viewable in <a href="https://github.com/cython/cython/compare/3.0.10...3.0.11">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=cython&package-manager=pip&previous-version=3.0.10&new-version=3.0.11)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/cython.txt | 4 +--- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 4ef1551a210..5f46cf30107 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -64,7 +64,7 @@ cryptography==41.0.2 # via # pyjwt # trustme -cython==3.0.10 +cython==3.0.11 # via -r requirements/cython.in distlib==0.3.3 # via virtualenv diff --git a/requirements/cython.txt b/requirements/cython.txt index 72b9a67af98..ae232fdd2ee 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -4,9 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/cython.txt --resolver=backtracking --strip-extras requirements/cython.in # -cython==3.0.10 +cython==3.0.11 # via -r requirements/cython.in multidict==6.0.5 # via -r requirements/multidict.in -typing-extensions==4.11.0 - # via -r requirements/typing-extensions.in From 3e9fd716725150d7d399d127b7e04021161c738b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 6 Aug 2024 22:18:16 +0000 Subject: 
[PATCH 0274/1511] Bump mypy from 1.10.1 to 1.11.1 (#8593) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [mypy](https://github.com/python/mypy) from 1.10.1 to 1.11.1. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/python/mypy/blob/master/CHANGELOG.md">mypy's changelog</a>.</em></p> <blockquote> <h1>Mypy Release Notes</h1> <h2>Next release</h2> <h2>Mypy 1.11</h2> <p>We’ve just uploaded mypy 1.11 to the Python Package Index (<a href="https://pypi.org/project/mypy/">PyPI</a>). Mypy is a static type checker for Python. This release includes new features, performance improvements and bug fixes. You can install it as follows:</p> <pre><code>python3 -m pip install -U mypy </code></pre> <p>You can read the full documentation for this release on <a href="http://mypy.readthedocs.io">Read the Docs</a>.</p> <h4>Support Python 3.12 Syntax for Generics (PEP 695)</h4> <p>Mypy now supports the new type parameter syntax introduced in Python 3.12 (<a href="https://peps.python.org/pep-0695/">PEP 695</a>). This feature is still experimental and must be enabled with the <code>--enable-incomplete-feature=NewGenericSyntax</code> flag, or with <code>enable_incomplete_feature = NewGenericSyntax</code> in the mypy configuration file. We plan to enable this by default in the next mypy feature release.</p> <p>This example demonstrates the new syntax:</p> <pre lang="python"><code># Generic function def f[T](https://github.com/python/mypy/blob/master/x: T) -> T: ... <p>reveal_type(f(1)) # Revealed type is 'int'</p> <h1>Generic class</h1> <p>class C[T]: def <strong>init</strong>(self, x: T) -> None: self.x = x</p> <p>c = C('a') reveal_type(c.x) # Revealed type is 'str'</p> <h1>Type alias</h1> <p>type A[T] = C[list[T]] </code></pre></p> <p>This feature was contributed by Jukka Lehtosalo.</p> <h4>Support for <code>functools.partial</code></h4> <p>Mypy now type checks uses of <code>functools.partial</code>. 
Previously mypy would accept arbitrary arguments.</p> <p>This example will now produce an error:</p> <pre lang="python"><code>from functools import partial </tr></table> </code></pre> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/python/mypy/commit/570b90a7a368f04c64f60af339d0ac1808c49c15"><code>570b90a</code></a> Bump version to 1.11</li> <li><a href="https://github.com/python/mypy/commit/b3a102ef31f63a8a8ba32c8dbe160ddef3c43054"><code>b3a102e</code></a> Fix <code>RawExpressionType.accept</code> crash with <code>--cache-fine-grained</code> (<a href="https://redirect.github.com/python/mypy/issues/17588">#17588</a>)</li> <li><a href="https://github.com/python/mypy/commit/aec04c74488d46a81a95ed3553b8e953a6ec59a7"><code>aec04c7</code></a> Fix PEP 604 isinstance caching (<a href="https://redirect.github.com/python/mypy/issues/17563">#17563</a>)</li> <li><a href="https://github.com/python/mypy/commit/cb44e4d8f18b9bc874f1076b33eec7ad67de165c"><code>cb44e4d</code></a> Fix <code>typing.TypeAliasType</code> being undefined on python < 3.12 (<a href="https://redirect.github.com/python/mypy/issues/17558">#17558</a>)</li> <li><a href="https://github.com/python/mypy/commit/6cf9180e1411dab2ee91b57374f696d391eb24f4"><code>6cf9180</code></a> Fix types.GenericAlias lookup crash (<a href="https://redirect.github.com/python/mypy/issues/17543">#17543</a>)</li> <li><a href="https://github.com/python/mypy/commit/64c1ebf7cff51c13b1771174e3bb6bce9fe0d5dc"><code>64c1ebf</code></a> Bump version to 1.11.1+dev</li> <li><a href="https://github.com/python/mypy/commit/dbd5f5cdb62b4dcd1e498c3a91c204b812609fdf"><code>dbd5f5c</code></a> Remove +dev from version for 1.11 release</li> <li><a href="https://github.com/python/mypy/commit/f0a8c6931485364d918f7b4920e5f2832a6be22f"><code>f0a8c69</code></a> Update CHANGELOG for mypy 1.11 (<a href="https://redirect.github.com/python/mypy/issues/17540">#17540</a>)</li> <li><a 
href="https://github.com/python/mypy/commit/371f7801e9bff13803a228e6cc8dd4cee6c8e472"><code>371f780</code></a> CHANGELOG.md update for 1.11 (<a href="https://redirect.github.com/python/mypy/issues/17539">#17539</a>)</li> <li><a href="https://github.com/python/mypy/commit/2563da0c721a89725bfd009da12dd6378554bfc6"><code>2563da0</code></a> Fix daemon crash on invalid type in TypedDict (<a href="https://redirect.github.com/python/mypy/issues/17495">#17495</a>)</li> <li>Additional commits viewable in <a href="https://github.com/python/mypy/compare/v1.10.1...v1.11.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=mypy&package-manager=pip&previous-version=1.10.1&new-version=1.11.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Sam Bull <git@sambull.org> Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .mypy.ini | 2 ++ aiohttp/typedefs.py | 9 ++++++++- aiohttp/web_app.py | 2 +- aiohttp/web_urldispatcher.py | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 8 files changed, 16 insertions(+), 7 deletions(-) diff --git a/.mypy.ini b/.mypy.ini index f1e6c5361be..78001c36e8f 100644 --- a/.mypy.ini +++ b/.mypy.ini @@ -12,6 +12,8 @@ disallow_untyped_defs = True extra_checks = True implicit_reexport = False no_implicit_optional = True +pretty = True +show_column_numbers = True show_error_codes = True show_error_code_links = True strict_equality = True diff --git a/aiohttp/typedefs.py b/aiohttp/typedefs.py index 5e963e1a10e..80dd26e80bd 100644 --- a/aiohttp/typedefs.py +++ b/aiohttp/typedefs.py @@ -7,6 +7,7 @@ Callable, Iterable, Mapping, + Protocol, Tuple, Union, ) @@ -49,6 +50,12 @@ ] Handler = Callable[["Request"], Awaitable["StreamResponse"]] -Middleware = Callable[["Request", Handler], Awaitable["StreamResponse"]] + + 
+class Middleware(Protocol): + def __call__( + self, request: "Request", handler: Handler + ) -> Awaitable["StreamResponse"]: ... + PathLike = Union[str, "os.PathLike[str]"] diff --git a/aiohttp/web_app.py b/aiohttp/web_app.py index 4d27714e3ba..3b4b6489e60 100644 --- a/aiohttp/web_app.py +++ b/aiohttp/web_app.py @@ -529,7 +529,7 @@ async def _handle(self, request: Request) -> StreamResponse: for m, new_style in app._middlewares_handlers: # type: ignore[union-attr] if new_style: handler = update_wrapper( - partial(m, handler=handler), handler + partial(m, handler=handler), handler # type: ignore[misc] ) else: handler = await m(app, handler) # type: ignore[arg-type,assignment] diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index a490e100ab3..688946626fd 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -206,7 +206,7 @@ def __init__( @wraps(handler) async def handler_wrapper(request: Request) -> StreamResponse: - result = old_handler(request) + result = old_handler(request) # type: ignore[call-arg] if asyncio.iscoroutine(result): result = await result assert isinstance(result, StreamResponse) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 5f46cf30107..b72ba3d10d9 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -116,7 +116,7 @@ multidict==6.0.5 # -r requirements/multidict.in # -r requirements/runtime-deps.in # yarl -mypy==1.10.1 ; implementation_name == "cpython" +mypy==1.11.1 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index af5682183d6..96737a3f133 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -111,7 +111,7 @@ multidict==6.0.5 # via # -r requirements/runtime-deps.in # yarl -mypy==1.10.1 ; implementation_name == "cpython" +mypy==1.11.1 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/test.in 
diff --git a/requirements/lint.txt b/requirements/lint.txt index 11ace69b225..85b96964c05 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -40,7 +40,7 @@ markdown-it-py==3.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -mypy==1.10.1 ; implementation_name == "cpython" +mypy==1.11.1 ; implementation_name == "cpython" # via -r requirements/lint.in mypy-extensions==1.0.0 # via mypy diff --git a/requirements/test.txt b/requirements/test.txt index 1a00d3bd939..cd09dc2a229 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -57,7 +57,7 @@ multidict==6.0.5 # via # -r requirements/runtime-deps.in # yarl -mypy==1.10.1 ; implementation_name == "cpython" +mypy==1.11.1 ; implementation_name == "cpython" # via -r requirements/test.in mypy-extensions==1.0.0 # via mypy From 06eedff870fed152d434b8e8a55fb2630132758d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 6 Aug 2024 22:18:23 +0000 Subject: [PATCH 0275/1511] Bump mypy from 1.10.1 to 1.11.1 (#8561) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [mypy](https://github.com/python/mypy) from 1.10.1 to 1.11.1. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/python/mypy/blob/master/CHANGELOG.md">mypy's changelog</a>.</em></p> <blockquote> <h1>Mypy Release Notes</h1> <h2>Next release</h2> <h2>Mypy 1.11</h2> <p>We’ve just uploaded mypy 1.11 to the Python Package Index (<a href="https://pypi.org/project/mypy/">PyPI</a>). Mypy is a static type checker for Python. This release includes new features, performance improvements and bug fixes. 
You can install it as follows:</p> <pre><code>python3 -m pip install -U mypy </code></pre> <p>You can read the full documentation for this release on <a href="http://mypy.readthedocs.io">Read the Docs</a>.</p> <h4>Support Python 3.12 Syntax for Generics (PEP 695)</h4> <p>Mypy now supports the new type parameter syntax introduced in Python 3.12 (<a href="https://peps.python.org/pep-0695/">PEP 695</a>). This feature is still experimental and must be enabled with the <code>--enable-incomplete-feature=NewGenericSyntax</code> flag, or with <code>enable_incomplete_feature = NewGenericSyntax</code> in the mypy configuration file. We plan to enable this by default in the next mypy feature release.</p> <p>This example demonstrates the new syntax:</p> <pre lang="python"><code># Generic function def f[T](https://github.com/python/mypy/blob/master/x: T) -> T: ... <p>reveal_type(f(1)) # Revealed type is 'int'</p> <h1>Generic class</h1> <p>class C[T]: def <strong>init</strong>(self, x: T) -> None: self.x = x</p> <p>c = C('a') reveal_type(c.x) # Revealed type is 'str'</p> <h1>Type alias</h1> <p>type A[T] = C[list[T]] </code></pre></p> <p>This feature was contributed by Jukka Lehtosalo.</p> <h4>Support for <code>functools.partial</code></h4> <p>Mypy now type checks uses of <code>functools.partial</code>. Previously mypy would accept arbitrary arguments.</p> <p>This example will now produce an error:</p> <pre lang="python"><code>from functools import partial </tr></table> </code></pre> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/python/mypy/commit/570b90a7a368f04c64f60af339d0ac1808c49c15"><code>570b90a</code></a> Bump version to 1.11</li> <li><a href="https://github.com/python/mypy/commit/b3a102ef31f63a8a8ba32c8dbe160ddef3c43054"><code>b3a102e</code></a> Fix <code>RawExpressionType.accept</code> crash with <code>--cache-fine-grained</code> (<a href="https://redirect.github.com/python/mypy/issues/17588">#17588</a>)</li> <li><a href="https://github.com/python/mypy/commit/aec04c74488d46a81a95ed3553b8e953a6ec59a7"><code>aec04c7</code></a> Fix PEP 604 isinstance caching (<a href="https://redirect.github.com/python/mypy/issues/17563">#17563</a>)</li> <li><a href="https://github.com/python/mypy/commit/cb44e4d8f18b9bc874f1076b33eec7ad67de165c"><code>cb44e4d</code></a> Fix <code>typing.TypeAliasType</code> being undefined on python < 3.12 (<a href="https://redirect.github.com/python/mypy/issues/17558">#17558</a>)</li> <li><a href="https://github.com/python/mypy/commit/6cf9180e1411dab2ee91b57374f696d391eb24f4"><code>6cf9180</code></a> Fix types.GenericAlias lookup crash (<a href="https://redirect.github.com/python/mypy/issues/17543">#17543</a>)</li> <li><a href="https://github.com/python/mypy/commit/64c1ebf7cff51c13b1771174e3bb6bce9fe0d5dc"><code>64c1ebf</code></a> Bump version to 1.11.1+dev</li> <li><a href="https://github.com/python/mypy/commit/dbd5f5cdb62b4dcd1e498c3a91c204b812609fdf"><code>dbd5f5c</code></a> Remove +dev from version for 1.11 release</li> <li><a href="https://github.com/python/mypy/commit/f0a8c6931485364d918f7b4920e5f2832a6be22f"><code>f0a8c69</code></a> Update CHANGELOG for mypy 1.11 (<a href="https://redirect.github.com/python/mypy/issues/17540">#17540</a>)</li> <li><a href="https://github.com/python/mypy/commit/371f7801e9bff13803a228e6cc8dd4cee6c8e472"><code>371f780</code></a> CHANGELOG.md update for 1.11 (<a 
href="https://redirect.github.com/python/mypy/issues/17539">#17539</a>)</li> <li><a href="https://github.com/python/mypy/commit/2563da0c721a89725bfd009da12dd6378554bfc6"><code>2563da0</code></a> Fix daemon crash on invalid type in TypedDict (<a href="https://redirect.github.com/python/mypy/issues/17495">#17495</a>)</li> <li>Additional commits viewable in <a href="https://github.com/python/mypy/compare/v1.10.1...v1.11.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=mypy&package-manager=pip&previous-version=1.10.1&new-version=1.11.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Sam Bull <git@sambull.org> Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .mypy.ini | 2 ++ aiohttp/typedefs.py | 9 ++++++++- aiohttp/web_app.py | 2 +- aiohttp/web_urldispatcher.py | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 8 files changed, 16 insertions(+), 7 deletions(-) diff --git a/.mypy.ini b/.mypy.ini index f1e6c5361be..78001c36e8f 100644 --- a/.mypy.ini +++ b/.mypy.ini @@ -12,6 +12,8 @@ disallow_untyped_defs = True extra_checks = True implicit_reexport = False no_implicit_optional = True +pretty = True +show_column_numbers = True show_error_codes = True show_error_code_links = True strict_equality = True diff --git a/aiohttp/typedefs.py b/aiohttp/typedefs.py index 5e963e1a10e..80dd26e80bd 100644 --- a/aiohttp/typedefs.py +++ b/aiohttp/typedefs.py @@ -7,6 +7,7 @@ Callable, Iterable, Mapping, + Protocol, Tuple, Union, ) @@ -49,6 +50,12 @@ ] Handler = Callable[["Request"], Awaitable["StreamResponse"]] -Middleware = Callable[["Request", Handler], Awaitable["StreamResponse"]] + + 
+class Middleware(Protocol): + def __call__( + self, request: "Request", handler: Handler + ) -> Awaitable["StreamResponse"]: ... + PathLike = Union[str, "os.PathLike[str]"] diff --git a/aiohttp/web_app.py b/aiohttp/web_app.py index 4d27714e3ba..3b4b6489e60 100644 --- a/aiohttp/web_app.py +++ b/aiohttp/web_app.py @@ -529,7 +529,7 @@ async def _handle(self, request: Request) -> StreamResponse: for m, new_style in app._middlewares_handlers: # type: ignore[union-attr] if new_style: handler = update_wrapper( - partial(m, handler=handler), handler + partial(m, handler=handler), handler # type: ignore[misc] ) else: handler = await m(app, handler) # type: ignore[arg-type,assignment] diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index a490e100ab3..688946626fd 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -206,7 +206,7 @@ def __init__( @wraps(handler) async def handler_wrapper(request: Request) -> StreamResponse: - result = old_handler(request) + result = old_handler(request) # type: ignore[call-arg] if asyncio.iscoroutine(result): result = await result assert isinstance(result, StreamResponse) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index c254226e3c6..b40b4440ae0 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -116,7 +116,7 @@ multidict==6.0.5 # -r requirements/multidict.in # -r requirements/runtime-deps.in # yarl -mypy==1.10.1 ; implementation_name == "cpython" +mypy==1.11.1 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index d54fcd4f50b..3ad4f54b209 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -111,7 +111,7 @@ multidict==6.0.5 # via # -r requirements/runtime-deps.in # yarl -mypy==1.10.1 ; implementation_name == "cpython" +mypy==1.11.1 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/test.in 
diff --git a/requirements/lint.txt b/requirements/lint.txt index 11ace69b225..85b96964c05 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -40,7 +40,7 @@ markdown-it-py==3.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -mypy==1.10.1 ; implementation_name == "cpython" +mypy==1.11.1 ; implementation_name == "cpython" # via -r requirements/lint.in mypy-extensions==1.0.0 # via mypy diff --git a/requirements/test.txt b/requirements/test.txt index 88b64bc868d..803705f6da0 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -57,7 +57,7 @@ multidict==6.0.5 # via # -r requirements/runtime-deps.in # yarl -mypy==1.10.1 ; implementation_name == "cpython" +mypy==1.11.1 ; implementation_name == "cpython" # via -r requirements/test.in mypy-extensions==1.0.0 # via mypy From cc83856925df8cb45e2bd10c31ac5ae4b6ab2d8c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 7 Aug 2024 11:25:57 +0000 Subject: [PATCH 0276/1511] Bump attrs from 24.1.0 to 24.2.0 (#8622) Bumps [attrs](https://github.com/sponsors/hynek) from 24.1.0 to 24.2.0. <details> <summary>Commits</summary> <ul> <li>See full diff in <a href="https://github.com/sponsors/hynek/commits">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=attrs&package-manager=pip&previous-version=24.1.0&new-version=24.2.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 39d376eba12..dbfeaf48ab8 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -12,7 +12,7 @@ aiosignal==1.3.1 # via -r 
requirements/runtime-deps.in async-timeout==4.0.3 ; python_version < "3.11" # via -r requirements/runtime-deps.in -attrs==24.1.0 +attrs==24.2.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index b72ba3d10d9..2e543bfff5b 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -24,7 +24,7 @@ async-timeout==4.0.3 ; python_version < "3.11" # via # -r requirements/runtime-deps.in # aioredis -attrs==24.1.0 +attrs==24.2.0 # via -r requirements/runtime-deps.in babel==2.9.1 # via sphinx diff --git a/requirements/dev.txt b/requirements/dev.txt index 96737a3f133..a924fa6cf23 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -24,7 +24,7 @@ async-timeout==4.0.3 ; python_version < "3.11" # via # -r requirements/runtime-deps.in # aioredis -attrs==24.1.0 +attrs==24.2.0 # via -r requirements/runtime-deps.in babel==2.12.1 # via sphinx diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index cc28de5da47..d9f33f4cb84 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -12,7 +12,7 @@ aiosignal==1.3.1 # via -r requirements/runtime-deps.in async-timeout==4.0.3 ; python_version < "3.11" # via -r requirements/runtime-deps.in -attrs==24.1.0 +attrs==24.2.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index cd09dc2a229..a093c71ebbf 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -14,7 +14,7 @@ annotated-types==0.5.0 # via pydantic async-timeout==4.0.3 ; python_version < "3.11" # via -r requirements/runtime-deps.in -attrs==24.1.0 +attrs==24.2.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in 
From 6e6fe0e5551477e076691d918d31ec01b32aa35f Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 7 Aug 2024 07:53:44 -0500 Subject: [PATCH 0277/1511] [PR #8619/d1c8dfbb backport][3.11] Fix monkey patches for pathlib changes in Python 3.13 (#8624) Co-authored-by: Steve Repsher <steverep@users.noreply.github.com> --- CHANGES/8551.contrib.rst | 1 + tests/test_web_urldispatcher.py | 8 ++++---- 2 files changed, 5 insertions(+), 4 deletions(-) create mode 100644 CHANGES/8551.contrib.rst diff --git a/CHANGES/8551.contrib.rst b/CHANGES/8551.contrib.rst new file mode 100644 index 00000000000..3505b483ca8 --- /dev/null +++ b/CHANGES/8551.contrib.rst @@ -0,0 +1 @@ +Fixed monkey patches for ``Path.stat()`` and ``Path.is_dir()`` for python 3.13 compatibility -- by :user:`steverep`. diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index a799f4ba146..de44ea0648c 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -434,10 +434,10 @@ def mock_iterdir(self: pathlib.Path) -> Generator[pathlib.Path, None, None]: raise PermissionError() return real_iterdir(self) - def mock_is_dir(self: pathlib.Path) -> bool: + def mock_is_dir(self: pathlib.Path, **kwargs: Any) -> bool: if my_dir.samefile(self.parent): raise PermissionError() - return real_is_dir(self) + return real_is_dir(self, **kwargs) monkeypatch.setattr("pathlib.Path.iterdir", mock_iterdir) monkeypatch.setattr("pathlib.Path.is_dir", mock_is_dir) @@ -554,8 +554,8 @@ async def test_access_mock_special_resource( real_result = my_special.stat() real_stat = pathlib.Path.stat - def mock_stat(self: pathlib.Path) -> os.stat_result: - s = real_stat(self) + def mock_stat(self: pathlib.Path, **kwargs: Any) -> os.stat_result: + s = real_stat(self, **kwargs) if os.path.samestat(s, real_result): mock_mode = S_IFIFO | S_IMODE(s.st_mode) s = os.stat_result([mock_mode] + list(s)[1:]) From 
a55d3e0cff81bcd02464393150c9afb38d295ec4 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 7 Aug 2024 07:54:07 -0500 Subject: [PATCH 0278/1511] [PR #8619/d1c8dfbb backport][3.10] Fix monkey patches for pathlib changes in Python 3.13 (#8623) Co-authored-by: Steve Repsher <steverep@users.noreply.github.com> --- CHANGES/8551.contrib.rst | 1 + tests/test_web_urldispatcher.py | 8 ++++---- 2 files changed, 5 insertions(+), 4 deletions(-) create mode 100644 CHANGES/8551.contrib.rst diff --git a/CHANGES/8551.contrib.rst b/CHANGES/8551.contrib.rst new file mode 100644 index 00000000000..3505b483ca8 --- /dev/null +++ b/CHANGES/8551.contrib.rst @@ -0,0 +1 @@ +Fixed monkey patches for ``Path.stat()`` and ``Path.is_dir()`` for python 3.13 compatibility -- by :user:`steverep`. diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index a799f4ba146..de44ea0648c 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -434,10 +434,10 @@ def mock_iterdir(self: pathlib.Path) -> Generator[pathlib.Path, None, None]: raise PermissionError() return real_iterdir(self) - def mock_is_dir(self: pathlib.Path) -> bool: + def mock_is_dir(self: pathlib.Path, **kwargs: Any) -> bool: if my_dir.samefile(self.parent): raise PermissionError() - return real_is_dir(self) + return real_is_dir(self, **kwargs) monkeypatch.setattr("pathlib.Path.iterdir", mock_iterdir) monkeypatch.setattr("pathlib.Path.is_dir", mock_is_dir) @@ -554,8 +554,8 @@ async def test_access_mock_special_resource( real_result = my_special.stat() real_stat = pathlib.Path.stat - def mock_stat(self: pathlib.Path) -> os.stat_result: - s = real_stat(self) + def mock_stat(self: pathlib.Path, **kwargs: Any) -> os.stat_result: + s = real_stat(self, **kwargs) if os.path.samestat(s, real_result): mock_mode = S_IFIFO | S_IMODE(s.st_mode) s = os.stat_result([mock_mode] + list(s)[1:]) From 266608d2e4105a654d9fdcc9b7bfcd7990712a94 Mon 
Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 7 Aug 2024 13:51:03 +0000 Subject: [PATCH 0279/1511] [PR #8611/1fcef940 backport][3.10] Fix handler waiting on shutdown (#8627) Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8611.bugfix.rst | 1 + aiohttp/web_protocol.py | 11 ++++++++--- 2 files changed, 9 insertions(+), 3 deletions(-) create mode 100644 CHANGES/8611.bugfix.rst diff --git a/CHANGES/8611.bugfix.rst b/CHANGES/8611.bugfix.rst new file mode 100644 index 00000000000..2cd795cc14e --- /dev/null +++ b/CHANGES/8611.bugfix.rst @@ -0,0 +1 @@ +Fixed an edge case where shutdown would wait for timeout when handler was already completed -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index d4ddbba55eb..9ba05a08e75 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -148,6 +148,7 @@ class RequestHandler(BaseProtocol): "_lingering_time", "_messages", "_message_tail", + "_handler_waiter", "_waiter", "_task_handler", "_upgrade", @@ -204,6 +205,7 @@ def __init__( self._message_tail = b"" self._waiter: Optional[asyncio.Future[None]] = None + self._handler_waiter: Optional[asyncio.Future[None]] = None self._task_handler: Optional[asyncio.Task[None]] = None self._upgrade = False @@ -262,11 +264,11 @@ async def shutdown(self, timeout: Optional[float] = 15.0) -> None: if self._waiter: self._waiter.cancel() - # Wait for graceful disconnection - if self._current_request is not None: + # Wait for graceful handler completion + if self._handler_waiter is not None: with suppress(asyncio.CancelledError, asyncio.TimeoutError): async with ceil_timeout(timeout): - await self._current_request.wait_for_disconnection() + await self._handler_waiter # Then cancel handler and wait with suppress(asyncio.CancelledError, asyncio.TimeoutError): async with ceil_timeout(timeout): @@ -450,6 +452,7 @@ async def _handle_request( start_time: float, request_handler: 
Callable[[BaseRequest], Awaitable[StreamResponse]], ) -> Tuple[StreamResponse, bool]: + self._handler_waiter = self._loop.create_future() try: try: self._current_request = request @@ -479,6 +482,8 @@ async def _handle_request( ) reset = await self.finish_response(request, resp, start_time) + finally: + self._handler_waiter.set_result(None) return resp, reset From 6fbc74c079f85102ff874f7914495dca5454a446 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 7 Aug 2024 13:51:05 +0000 Subject: [PATCH 0280/1511] [PR #8611/1fcef940 backport][3.11] Fix handler waiting on shutdown (#8628) Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8611.bugfix.rst | 1 + aiohttp/web_protocol.py | 11 ++++++++--- 2 files changed, 9 insertions(+), 3 deletions(-) create mode 100644 CHANGES/8611.bugfix.rst diff --git a/CHANGES/8611.bugfix.rst b/CHANGES/8611.bugfix.rst new file mode 100644 index 00000000000..2cd795cc14e --- /dev/null +++ b/CHANGES/8611.bugfix.rst @@ -0,0 +1 @@ +Fixed an edge case where shutdown would wait for timeout when handler was already completed -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index d4ddbba55eb..9ba05a08e75 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -148,6 +148,7 @@ class RequestHandler(BaseProtocol): "_lingering_time", "_messages", "_message_tail", + "_handler_waiter", "_waiter", "_task_handler", "_upgrade", @@ -204,6 +205,7 @@ def __init__( self._message_tail = b"" self._waiter: Optional[asyncio.Future[None]] = None + self._handler_waiter: Optional[asyncio.Future[None]] = None self._task_handler: Optional[asyncio.Task[None]] = None self._upgrade = False @@ -262,11 +264,11 @@ async def shutdown(self, timeout: Optional[float] = 15.0) -> None: if self._waiter: self._waiter.cancel() - # Wait for graceful disconnection - if self._current_request is not None: + # Wait for graceful handler completion + if self._handler_waiter is not None: with suppress(asyncio.CancelledError, asyncio.TimeoutError): async with ceil_timeout(timeout): - await self._current_request.wait_for_disconnection() + await self._handler_waiter # Then cancel handler and wait with suppress(asyncio.CancelledError, asyncio.TimeoutError): async with ceil_timeout(timeout): @@ -450,6 +452,7 @@ async def _handle_request( start_time: float, request_handler: Callable[[BaseRequest], Awaitable[StreamResponse]], ) -> Tuple[StreamResponse, bool]: + self._handler_waiter = self._loop.create_future() try: try: self._current_request = request @@ -479,6 +482,8 @@ async def _handle_request( ) reset = await self.finish_response(request, resp, start_time) + finally: + self._handler_waiter.set_result(None) return resp, reset From e41592d2a739098d9e5e11a6c69fba4a8db880c5 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 7 Aug 2024 13:55:18 +0000 Subject: [PATCH 0281/1511] [PR #8597/c99a1e27 backport][3.11] Fix reading of body when ignoring an upgrade request (#8630) **This is a backport of PR #8597 as merged into master 
(c99a1e27375285149ea82cbdcc2f2c40e57596dc).** Fixes #8414. Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8597.bugfix.rst | 1 + aiohttp/_http_parser.pyx | 19 +++++++++++-------- tests/test_http_parser.py | 20 ++++++++++++++++++++ tests/test_web_server.py | 8 +------- 4 files changed, 33 insertions(+), 15 deletions(-) create mode 100644 CHANGES/8597.bugfix.rst diff --git a/CHANGES/8597.bugfix.rst b/CHANGES/8597.bugfix.rst new file mode 100644 index 00000000000..27186bb52d1 --- /dev/null +++ b/CHANGES/8597.bugfix.rst @@ -0,0 +1 @@ +Fixed request body not being read when ignoring an Upgrade request -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/_http_parser.pyx b/aiohttp/_http_parser.pyx index 7ea9b32ca55..dd317edaf79 100644 --- a/aiohttp/_http_parser.pyx +++ b/aiohttp/_http_parser.pyx @@ -47,6 +47,7 @@ include "_headers.pxi" from aiohttp cimport _find_header +ALLOWED_UPGRADES = frozenset({"websocket"}) DEF DEFAULT_FREELIST_SIZE = 250 cdef extern from "Python.h": @@ -417,7 +418,6 @@ cdef class HttpParser: cdef _on_headers_complete(self): self._process_header() - method = http_method_str(self._cparser.method) should_close = not cparser.llhttp_should_keep_alive(self._cparser) upgrade = self._cparser.upgrade chunked = self._cparser.flags & cparser.F_CHUNKED @@ -425,8 +425,13 @@ cdef class HttpParser: raw_headers = tuple(self._raw_headers) headers = CIMultiDictProxy(self._headers) - if upgrade or self._cparser.method == cparser.HTTP_CONNECT: - self._upgraded = True + if self._cparser.type == cparser.HTTP_REQUEST: + allowed = upgrade and headers.get("upgrade", "").lower() in ALLOWED_UPGRADES + if allowed or self._cparser.method == cparser.HTTP_CONNECT: + self._upgraded = True + else: + if upgrade and self._cparser.status_code == 101: + self._upgraded = True # do not support old websocket spec if SEC_WEBSOCKET_KEY1 in headers: @@ -441,6 +446,7 @@ cdef class HttpParser: encoding = enc if self._cparser.type == cparser.HTTP_REQUEST: + method = 
http_method_str(self._cparser.method) msg = _new_request_message( method, self._path, self.http_version(), headers, raw_headers, @@ -565,7 +571,7 @@ cdef class HttpParser: if self._upgraded: return messages, True, data[nb:] else: - return messages, False, b'' + return messages, False, b"" def set_upgraded(self, val): self._upgraded = val @@ -748,10 +754,7 @@ cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1: pyparser._last_error = exc return -1 else: - if ( - pyparser._cparser.upgrade or - pyparser._cparser.method == cparser.HTTP_CONNECT - ): + if pyparser._upgraded or pyparser._cparser.method == cparser.HTTP_CONNECT: return 2 else: return 0 diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index 187f9d27a77..0e9aff68dc2 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -823,6 +823,26 @@ def test_http_request_upgrade(parser: Any) -> None: assert tail == b"some raw data" +async def test_http_request_upgrade_unknown(parser: Any) -> None: + text = ( + b"POST / HTTP/1.1\r\n" + b"Connection: Upgrade\r\n" + b"Content-Length: 2\r\n" + b"Upgrade: unknown\r\n" + b"Content-Type: application/json\r\n\r\n" + b"{}" + ) + messages, upgrade, tail = parser.feed_data(text) + + msg = messages[0][0] + assert not msg.should_close + assert msg.upgrade + assert not upgrade + assert not msg.chunked + assert tail == b"" + assert await messages[0][-1].read() == b"{}" + + @pytest.fixture def xfail_c_parser_url(request) -> None: if isinstance(request.getfixturevalue("parser"), HttpRequestParserPy): diff --git a/tests/test_web_server.py b/tests/test_web_server.py index d0fd95acdb4..14d78e23a85 100644 --- a/tests/test_web_server.py +++ b/tests/test_web_server.py @@ -4,7 +4,7 @@ import pytest -from aiohttp import client, helpers, web +from aiohttp import client, web async def test_simple_server(aiohttp_raw_server, aiohttp_client) -> None: @@ -19,12 +19,6 @@ async def handler(request): assert txt == "/path/to" -@pytest.mark.xfail( - not 
helpers.NO_EXTENSIONS, - raises=client.ServerDisconnectedError, - reason="The behavior of C-extensions differs from pure-Python: " - "https://github.com/aio-libs/aiohttp/issues/6446", -) async def test_unsupported_upgrade(aiohttp_raw_server, aiohttp_client) -> None: # don't fail if a client probes for an unsupported protocol upgrade # https://github.com/aio-libs/aiohttp/issues/6446#issuecomment-999032039 From 4815765a6b8632e1fd485b4a87e7356acb37413a Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 7 Aug 2024 14:32:15 +0000 Subject: [PATCH 0282/1511] [PR #8597/c99a1e27 backport][3.10] Fix reading of body when ignoring an upgrade request (#8629) Co-authored-by: Sam Bull <git@sambull.org> Co-authored-by: J. Nick Koston <nick@koston.org> Fixes #8414. --- CHANGES/8597.bugfix.rst | 1 + aiohttp/_http_parser.pyx | 19 +++++++++++-------- tests/test_http_parser.py | 20 ++++++++++++++++++++ tests/test_web_server.py | 8 +------- 4 files changed, 33 insertions(+), 15 deletions(-) create mode 100644 CHANGES/8597.bugfix.rst diff --git a/CHANGES/8597.bugfix.rst b/CHANGES/8597.bugfix.rst new file mode 100644 index 00000000000..27186bb52d1 --- /dev/null +++ b/CHANGES/8597.bugfix.rst @@ -0,0 +1 @@ +Fixed request body not being read when ignoring an Upgrade request -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/_http_parser.pyx b/aiohttp/_http_parser.pyx index 7ea9b32ca55..dd317edaf79 100644 --- a/aiohttp/_http_parser.pyx +++ b/aiohttp/_http_parser.pyx @@ -47,6 +47,7 @@ include "_headers.pxi" from aiohttp cimport _find_header +ALLOWED_UPGRADES = frozenset({"websocket"}) DEF DEFAULT_FREELIST_SIZE = 250 cdef extern from "Python.h": @@ -417,7 +418,6 @@ cdef class HttpParser: cdef _on_headers_complete(self): self._process_header() - method = http_method_str(self._cparser.method) should_close = not cparser.llhttp_should_keep_alive(self._cparser) upgrade = self._cparser.upgrade chunked = self._cparser.flags & cparser.F_CHUNKED @@ -425,8 +425,13 @@ cdef class HttpParser: raw_headers = tuple(self._raw_headers) headers = CIMultiDictProxy(self._headers) - if upgrade or self._cparser.method == cparser.HTTP_CONNECT: - self._upgraded = True + if self._cparser.type == cparser.HTTP_REQUEST: + allowed = upgrade and headers.get("upgrade", "").lower() in ALLOWED_UPGRADES + if allowed or self._cparser.method == cparser.HTTP_CONNECT: + self._upgraded = True + else: + if upgrade and self._cparser.status_code == 101: + self._upgraded = True # do not support old websocket spec if SEC_WEBSOCKET_KEY1 in headers: @@ -441,6 +446,7 @@ cdef class HttpParser: encoding = enc if self._cparser.type == cparser.HTTP_REQUEST: + method = http_method_str(self._cparser.method) msg = _new_request_message( method, self._path, self.http_version(), headers, raw_headers, @@ -565,7 +571,7 @@ cdef class HttpParser: if self._upgraded: return messages, True, data[nb:] else: - return messages, False, b'' + return messages, False, b"" def set_upgraded(self, val): self._upgraded = val @@ -748,10 +754,7 @@ cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1: pyparser._last_error = exc return -1 else: - if ( - pyparser._cparser.upgrade or - pyparser._cparser.method == cparser.HTTP_CONNECT - ): + if pyparser._upgraded or pyparser._cparser.method == cparser.HTTP_CONNECT: return 2 else: 
return 0 diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index 187f9d27a77..0e9aff68dc2 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -823,6 +823,26 @@ def test_http_request_upgrade(parser: Any) -> None: assert tail == b"some raw data" +async def test_http_request_upgrade_unknown(parser: Any) -> None: + text = ( + b"POST / HTTP/1.1\r\n" + b"Connection: Upgrade\r\n" + b"Content-Length: 2\r\n" + b"Upgrade: unknown\r\n" + b"Content-Type: application/json\r\n\r\n" + b"{}" + ) + messages, upgrade, tail = parser.feed_data(text) + + msg = messages[0][0] + assert not msg.should_close + assert msg.upgrade + assert not upgrade + assert not msg.chunked + assert tail == b"" + assert await messages[0][-1].read() == b"{}" + + @pytest.fixture def xfail_c_parser_url(request) -> None: if isinstance(request.getfixturevalue("parser"), HttpRequestParserPy): diff --git a/tests/test_web_server.py b/tests/test_web_server.py index d0fd95acdb4..14d78e23a85 100644 --- a/tests/test_web_server.py +++ b/tests/test_web_server.py @@ -4,7 +4,7 @@ import pytest -from aiohttp import client, helpers, web +from aiohttp import client, web async def test_simple_server(aiohttp_raw_server, aiohttp_client) -> None: @@ -19,12 +19,6 @@ async def handler(request): assert txt == "/path/to" -@pytest.mark.xfail( - not helpers.NO_EXTENSIONS, - raises=client.ServerDisconnectedError, - reason="The behavior of C-extensions differs from pure-Python: " - "https://github.com/aio-libs/aiohttp/issues/6446", -) async def test_unsupported_upgrade(aiohttp_raw_server, aiohttp_client) -> None: # don't fail if a client probes for an unsupported protocol upgrade # https://github.com/aio-libs/aiohttp/issues/6446#issuecomment-999032039 From c7293e1963feec47ab237afc8d12a2a252386828 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Wed, 7 Aug 2024 17:10:36 +0100 Subject: [PATCH 0283/1511] Backport #8620 as improvements to various type annotations (#8634) --- .coveragerc 
| 3 + .github/workflows/ci-cd.yml | 2 +- CHANGES/8634.misc.rst | 1 + aiohttp/client.py | 29 ++++--- aiohttp/client_exceptions.py | 2 +- aiohttp/client_reqrep.py | 13 ++- aiohttp/connector.py | 5 +- aiohttp/pytest_plugin.py | 34 +++++++- aiohttp/test_utils.py | 151 +++++++++++++++++++++++------------ aiohttp/tracing.py | 4 +- aiohttp/typedefs.py | 8 +- aiohttp/web_request.py | 3 +- requirements/lint.in | 3 + requirements/lint.txt | 6 ++ 14 files changed, 188 insertions(+), 76 deletions(-) create mode 100644 CHANGES/8634.misc.rst diff --git a/.coveragerc b/.coveragerc index 0b5d5bf0ad4..7792266b114 100644 --- a/.coveragerc +++ b/.coveragerc @@ -6,3 +6,6 @@ omit = site-packages [report] exclude_also = if TYPE_CHECKING + assert False + : \.\.\.(\s*#.*)?$ + ^ +\.\.\.$ diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 5d5d6fa8c62..a6a58cef9c2 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -45,7 +45,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v5 with: - python-version: 3.9 + python-version: 3.11 - name: Cache PyPI uses: actions/cache@v4.0.2 with: diff --git a/CHANGES/8634.misc.rst b/CHANGES/8634.misc.rst new file mode 100644 index 00000000000..cf4c68d5119 --- /dev/null +++ b/CHANGES/8634.misc.rst @@ -0,0 +1 @@ +Minor improvements to various type annotations -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/client.py b/aiohttp/client.py index c70ad65c59e..1d4ccc0814a 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -9,7 +9,7 @@ import traceback import warnings from contextlib import suppress -from types import SimpleNamespace, TracebackType +from types import TracebackType from typing import ( TYPE_CHECKING, Any, @@ -155,7 +155,7 @@ class _RequestOptions(TypedDict, total=False): - params: Union[Mapping[str, str], None] + params: Union[Mapping[str, Union[str, int]], str, None] data: Any json: Any cookies: Union[LooseCookies, None] @@ -175,7 +175,7 @@ class _RequestOptions(TypedDict, total=False): ssl: Union[SSLContext, bool, Fingerprint] server_hostname: Union[str, None] proxy_headers: Union[LooseHeaders, None] - trace_request_ctx: Union[SimpleNamespace, None] + trace_request_ctx: Union[Mapping[str, str], None] read_bufsize: Union[int, None] auto_decompress: Union[bool, None] max_line_size: Union[int, None] @@ -422,11 +422,22 @@ def __del__(self, _warnings: Any = warnings) -> None: context["source_traceback"] = self._source_traceback self._loop.call_exception_handler(context) - def request( - self, method: str, url: StrOrURL, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP request.""" - return _RequestContextManager(self._request(method, url, **kwargs)) + if sys.version_info >= (3, 11) and TYPE_CHECKING: + + def request( + self, + method: str, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... 
+ + else: + + def request( + self, method: str, url: StrOrURL, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP request.""" + return _RequestContextManager(self._request(method, url, **kwargs)) def _build_url(self, str_or_url: StrOrURL) -> URL: url = URL(str_or_url) @@ -466,7 +477,7 @@ async def _request( ssl: Union[SSLContext, bool, Fingerprint] = True, server_hostname: Optional[str] = None, proxy_headers: Optional[LooseHeaders] = None, - trace_request_ctx: Optional[SimpleNamespace] = None, + trace_request_ctx: Optional[Mapping[str, str]] = None, read_bufsize: Optional[int] = None, auto_decompress: Optional[bool] = None, max_line_size: Optional[int] = None, diff --git a/aiohttp/client_exceptions.py b/aiohttp/client_exceptions.py index f15a9ee3d3e..ff29b3d3ca9 100644 --- a/aiohttp/client_exceptions.py +++ b/aiohttp/client_exceptions.py @@ -100,7 +100,7 @@ def __str__(self) -> str: return "{}, message={!r}, url={!r}".format( self.status, self.message, - self.request_info.real_url, + str(self.request_info.real_url), ) def __repr__(self) -> str: diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 37d14e107fd..2c10da4ff81 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -245,7 +245,8 @@ class ClientRequest: hdrs.ACCEPT_ENCODING: _gen_default_accept_encoding(), } - body = b"" + # Type of body depends on PAYLOAD_REGISTRY, which is dynamic. 
+ body: Any = b"" auth = None response = None @@ -441,7 +442,7 @@ def update_headers(self, headers: Optional[LooseHeaders]) -> None: if headers: if isinstance(headers, (dict, MultiDictProxy, MultiDict)): - headers = headers.items() # type: ignore[assignment] + headers = headers.items() for key, value in headers: # type: ignore[misc] # A special case for Host header @@ -597,6 +598,10 @@ def update_proxy( raise ValueError("proxy_auth must be None or BasicAuth() tuple") self.proxy = proxy self.proxy_auth = proxy_auth + if proxy_headers is not None and not isinstance( + proxy_headers, (MultiDict, MultiDictProxy) + ): + proxy_headers = CIMultiDict(proxy_headers) self.proxy_headers = proxy_headers def keep_alive(self) -> bool: @@ -632,10 +637,10 @@ async def write_bytes( await self.body.write(writer) else: if isinstance(self.body, (bytes, bytearray)): - self.body = (self.body,) # type: ignore[assignment] + self.body = (self.body,) for chunk in self.body: - await writer.write(chunk) # type: ignore[arg-type] + await writer.write(chunk) except OSError as underlying_exc: reraised_exc = underlying_exc diff --git a/aiohttp/connector.py b/aiohttp/connector.py index cd89ea641d3..2e07395aece 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -23,6 +23,7 @@ List, Literal, Optional, + Sequence, Set, Tuple, Type, @@ -833,7 +834,7 @@ def clear_dns_cache( self._cached_hosts.clear() async def _resolve_host( - self, host: str, port: int, traces: Optional[List["Trace"]] = None + self, host: str, port: int, traces: Optional[Sequence["Trace"]] = None ) -> List[ResolveResult]: """Resolve host and return list of addresses.""" if is_ip_address(host): @@ -902,7 +903,7 @@ async def _resolve_host_with_throttle( key: Tuple[str, int], host: str, port: int, - traces: Optional[List["Trace"]], + traces: Optional[Sequence["Trace"]], ) -> List[ResolveResult]: """Resolve host with a dns events throttle.""" if key in self._throttle_dns_events: diff --git a/aiohttp/pytest_plugin.py 
b/aiohttp/pytest_plugin.py index 6225fdf2be0..c862b409566 100644 --- a/aiohttp/pytest_plugin.py +++ b/aiohttp/pytest_plugin.py @@ -2,7 +2,17 @@ import contextlib import inspect import warnings -from typing import Any, Awaitable, Callable, Dict, Iterator, Optional, Type, Union +from typing import ( + Any, + Awaitable, + Callable, + Dict, + Iterator, + Optional, + Protocol, + Type, + Union, +) import pytest @@ -24,9 +34,23 @@ except ImportError: # pragma: no cover uvloop = None # type: ignore[assignment] -AiohttpClient = Callable[[Union[Application, BaseTestServer]], Awaitable[TestClient]] AiohttpRawServer = Callable[[Application], Awaitable[RawTestServer]] -AiohttpServer = Callable[[Application], Awaitable[TestServer]] + + +class AiohttpClient(Protocol): + def __call__( + self, + __param: Union[Application, BaseTestServer], + *, + server_kwargs: Optional[Dict[str, Any]] = None, + **kwargs: Any + ) -> Awaitable[TestClient]: ... + + +class AiohttpServer(Protocol): + def __call__( + self, app: Application, *, port: Optional[int] = None, **kwargs: Any + ) -> Awaitable[TestServer]: ... 
def pytest_addoption(parser): # type: ignore[no-untyped-def] @@ -262,7 +286,9 @@ def aiohttp_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpServer]: """ servers = [] - async def go(app, *, port=None, **kwargs): # type: ignore[no-untyped-def] + async def go( + app: Application, *, port: Optional[int] = None, **kwargs: Any + ) -> TestServer: server = TestServer(app, port=port) await server.start_server(loop=loop, **kwargs) servers.append(server) diff --git a/aiohttp/test_utils.py b/aiohttp/test_utils.py index a36e8599689..97c1469dd2a 100644 --- a/aiohttp/test_utils.py +++ b/aiohttp/test_utils.py @@ -11,17 +11,7 @@ import warnings from abc import ABC, abstractmethod from types import TracebackType -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Iterator, - List, - Optional, - Type, - Union, - cast, -) +from typing import TYPE_CHECKING, Any, Callable, Iterator, List, Optional, Type, cast from unittest import IsolatedAsyncioTestCase, mock from aiosignal import Signal @@ -29,7 +19,11 @@ from yarl import URL import aiohttp -from aiohttp.client import _RequestContextManager, _WSRequestContextManager +from aiohttp.client import ( + _RequestContextManager, + _RequestOptions, + _WSRequestContextManager, +) from . 
import ClientSession, hdrs from .abc import AbstractCookieJar @@ -55,6 +49,9 @@ else: SSLContext = None +if sys.version_info >= (3, 11) and TYPE_CHECKING: + from typing import Unpack + REUSE_ADDRESS = os.name == "posix" and sys.platform != "cygwin" @@ -90,7 +87,7 @@ class BaseTestServer(ABC): def __init__( self, *, - scheme: Union[str, object] = sentinel, + scheme: str = "", loop: Optional[asyncio.AbstractEventLoop] = None, host: str = "127.0.0.1", port: Optional[int] = None, @@ -135,12 +132,8 @@ async def start_server( sockets = server.sockets # type: ignore[attr-defined] assert sockets is not None self.port = sockets[0].getsockname()[1] - if self.scheme is sentinel: - if self._ssl: - scheme = "https" - else: - scheme = "http" - self.scheme = scheme + if not self.scheme: + self.scheme = "https" if self._ssl else "http" self._root = URL(f"{self.scheme}://{self.host}:{self.port}") @abstractmethod # pragma: no cover @@ -222,7 +215,7 @@ def __init__( self, app: Application, *, - scheme: Union[str, object] = sentinel, + scheme: str = "", host: str = "127.0.0.1", port: Optional[int] = None, **kwargs: Any, @@ -239,7 +232,7 @@ def __init__( self, handler: _RequestHandler, *, - scheme: Union[str, object] = sentinel, + scheme: str = "", host: str = "127.0.0.1", port: Optional[int] = None, **kwargs: Any, @@ -324,45 +317,101 @@ async def _request( self._responses.append(resp) return resp - def request( - self, method: str, path: StrOrURL, **kwargs: Any - ) -> _RequestContextManager: - """Routes a request to tested http server. + if sys.version_info >= (3, 11) and TYPE_CHECKING: + + def request( + self, method: str, path: StrOrURL, **kwargs: Unpack[_RequestOptions] + ) -> _RequestContextManager: ... + + def get( + self, + path: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> _RequestContextManager: ... + + def options( + self, + path: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> _RequestContextManager: ... 
+ + def head( + self, + path: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> _RequestContextManager: ... + + def post( + self, + path: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> _RequestContextManager: ... + + def put( + self, + path: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> _RequestContextManager: ... + + def patch( + self, + path: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> _RequestContextManager: ... + + def delete( + self, + path: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> _RequestContextManager: ... - The interface is identical to aiohttp.ClientSession.request, - except the loop kwarg is overridden by the instance used by the - test server. + else: - """ - return _RequestContextManager(self._request(method, path, **kwargs)) + def request( + self, method: str, path: StrOrURL, **kwargs: Any + ) -> _RequestContextManager: + """Routes a request to tested http server. - def get(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP GET request.""" - return _RequestContextManager(self._request(hdrs.METH_GET, path, **kwargs)) + The interface is identical to aiohttp.ClientSession.request, + except the loop kwarg is overridden by the instance used by the + test server. 
- def post(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP POST request.""" - return _RequestContextManager(self._request(hdrs.METH_POST, path, **kwargs)) + """ + return _RequestContextManager(self._request(method, path, **kwargs)) - def options(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP OPTIONS request.""" - return _RequestContextManager(self._request(hdrs.METH_OPTIONS, path, **kwargs)) + def get(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP GET request.""" + return _RequestContextManager(self._request(hdrs.METH_GET, path, **kwargs)) - def head(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP HEAD request.""" - return _RequestContextManager(self._request(hdrs.METH_HEAD, path, **kwargs)) + def post(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP POST request.""" + return _RequestContextManager(self._request(hdrs.METH_POST, path, **kwargs)) - def put(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP PUT request.""" - return _RequestContextManager(self._request(hdrs.METH_PUT, path, **kwargs)) + def options(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP OPTIONS request.""" + return _RequestContextManager( + self._request(hdrs.METH_OPTIONS, path, **kwargs) + ) + + def head(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP HEAD request.""" + return _RequestContextManager(self._request(hdrs.METH_HEAD, path, **kwargs)) - def patch(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP PATCH request.""" - return _RequestContextManager(self._request(hdrs.METH_PATCH, path, **kwargs)) + def put(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP PUT request.""" + return _RequestContextManager(self._request(hdrs.METH_PUT, 
path, **kwargs)) - def delete(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP PATCH request.""" - return _RequestContextManager(self._request(hdrs.METH_DELETE, path, **kwargs)) + def patch(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP PATCH request.""" + return _RequestContextManager( + self._request(hdrs.METH_PATCH, path, **kwargs) + ) + + def delete(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP PATCH request.""" + return _RequestContextManager( + self._request(hdrs.METH_DELETE, path, **kwargs) + ) def ws_connect(self, path: StrOrURL, **kwargs: Any) -> _WSRequestContextManager: """Initiate websocket connection. diff --git a/aiohttp/tracing.py b/aiohttp/tracing.py index 66007cbeb2c..012ed7bdaf6 100644 --- a/aiohttp/tracing.py +++ b/aiohttp/tracing.py @@ -1,5 +1,5 @@ from types import SimpleNamespace -from typing import TYPE_CHECKING, Awaitable, Optional, Protocol, Type, TypeVar +from typing import TYPE_CHECKING, Awaitable, Mapping, Optional, Protocol, Type, TypeVar import attr from aiosignal import Signal @@ -101,7 +101,7 @@ def __init__( self._trace_config_ctx_factory = trace_config_ctx_factory def trace_config_ctx( - self, trace_request_ctx: Optional[SimpleNamespace] = None + self, trace_request_ctx: Optional[Mapping[str, str]] = None ) -> SimpleNamespace: """Return a new trace_config_ctx instance""" return self._trace_config_ctx_factory(trace_request_ctx=trace_request_ctx) diff --git a/aiohttp/typedefs.py b/aiohttp/typedefs.py index 80dd26e80bd..9fb21c15f83 100644 --- a/aiohttp/typedefs.py +++ b/aiohttp/typedefs.py @@ -35,7 +35,13 @@ Byteish = Union[bytes, bytearray, memoryview] JSONEncoder = Callable[[Any], str] JSONDecoder = Callable[[str], Any] -LooseHeaders = Union[Mapping[Union[str, istr], str], _CIMultiDict, _CIMultiDictProxy] +LooseHeaders = Union[ + Mapping[str, str], + Mapping[istr, str], + _CIMultiDict, + _CIMultiDictProxy, + 
Iterable[Tuple[Union[str, istr], str]], +] RawHeaders = Tuple[Tuple[bytes, bytes], ...] StrOrURL = Union[str, URL] diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index d059a166884..28d9ef3d10b 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -239,7 +239,8 @@ def clone( # a copy semantic dct["headers"] = CIMultiDictProxy(CIMultiDict(headers)) dct["raw_headers"] = tuple( - (k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items() + (k.encode("utf-8"), v.encode("utf-8")) + for k, v in dct["headers"].items() ) message = self._message._replace(**dct) diff --git a/requirements/lint.in b/requirements/lint.in index 98910e21f0e..0d46809a083 100644 --- a/requirements/lint.in +++ b/requirements/lint.in @@ -1,8 +1,11 @@ aiodns aioredis +freezegun mypy; implementation_name == "cpython" pre-commit pytest +pytest-mock python-on-whales slotscheck +trustme uvloop; platform_system != "Windows" diff --git a/requirements/lint.txt b/requirements/lint.txt index 85b96964c05..97809fe3dde 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -30,6 +30,8 @@ exceptiongroup==1.1.2 # via pytest filelock==3.12.2 # via virtualenv +freezegun==1.5.1 + # via -r requirements/lint.in identify==2.5.26 # via pre-commit idna==3.7 @@ -66,6 +68,8 @@ pygments==2.17.2 # via rich pytest==8.3.2 # via -r requirements/lint.in +pytest-mock==3.14.0 + # via -r requirements/lint.in python-on-whales==0.72.0 # via -r requirements/lint.in pyyaml==6.0.1 @@ -85,6 +89,8 @@ tomli==2.0.1 # slotscheck tqdm==4.66.2 # via python-on-whales +trustme==1.1.0 + # via -r requirements/lint.in typer==0.12.3 # via python-on-whales typing-extensions==4.11.0 From bf83dbe19ec02199fe187d71f5d6f72cf540fb0e Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 7 Aug 2024 17:38:04 +0100 Subject: [PATCH 0284/1511] [PR #8634/c7293e19 backport][3.10] Backport #8620 as improvements to various type annotations (#8635) **This is a backport 
of PR #8634 as merged into 3.11 (c7293e1963feec47ab237afc8d12a2a252386828).** Co-authored-by: Sam Bull <git@sambull.org> --- .coveragerc | 3 + .github/workflows/ci-cd.yml | 2 +- CHANGES/8634.misc.rst | 1 + aiohttp/client.py | 29 ++++--- aiohttp/client_exceptions.py | 2 +- aiohttp/client_reqrep.py | 13 ++- aiohttp/connector.py | 5 +- aiohttp/pytest_plugin.py | 34 +++++++- aiohttp/test_utils.py | 151 +++++++++++++++++++++++------------ aiohttp/tracing.py | 4 +- aiohttp/typedefs.py | 8 +- aiohttp/web_request.py | 3 +- requirements/lint.in | 3 + requirements/lint.txt | 6 ++ 14 files changed, 188 insertions(+), 76 deletions(-) create mode 100644 CHANGES/8634.misc.rst diff --git a/.coveragerc b/.coveragerc index 0b5d5bf0ad4..7792266b114 100644 --- a/.coveragerc +++ b/.coveragerc @@ -6,3 +6,6 @@ omit = site-packages [report] exclude_also = if TYPE_CHECKING + assert False + : \.\.\.(\s*#.*)?$ + ^ +\.\.\.$ diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index f072a12aa34..93d4575da2d 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -45,7 +45,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v5 with: - python-version: 3.9 + python-version: 3.11 - name: Cache PyPI uses: actions/cache@v4.0.2 with: diff --git a/CHANGES/8634.misc.rst b/CHANGES/8634.misc.rst new file mode 100644 index 00000000000..cf4c68d5119 --- /dev/null +++ b/CHANGES/8634.misc.rst @@ -0,0 +1 @@ +Minor improvements to various type annotations -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/client.py b/aiohttp/client.py index c70ad65c59e..1d4ccc0814a 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -9,7 +9,7 @@ import traceback import warnings from contextlib import suppress -from types import SimpleNamespace, TracebackType +from types import TracebackType from typing import ( TYPE_CHECKING, Any, @@ -155,7 +155,7 @@ class _RequestOptions(TypedDict, total=False): - params: Union[Mapping[str, str], None] + params: Union[Mapping[str, Union[str, int]], str, None] data: Any json: Any cookies: Union[LooseCookies, None] @@ -175,7 +175,7 @@ class _RequestOptions(TypedDict, total=False): ssl: Union[SSLContext, bool, Fingerprint] server_hostname: Union[str, None] proxy_headers: Union[LooseHeaders, None] - trace_request_ctx: Union[SimpleNamespace, None] + trace_request_ctx: Union[Mapping[str, str], None] read_bufsize: Union[int, None] auto_decompress: Union[bool, None] max_line_size: Union[int, None] @@ -422,11 +422,22 @@ def __del__(self, _warnings: Any = warnings) -> None: context["source_traceback"] = self._source_traceback self._loop.call_exception_handler(context) - def request( - self, method: str, url: StrOrURL, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP request.""" - return _RequestContextManager(self._request(method, url, **kwargs)) + if sys.version_info >= (3, 11) and TYPE_CHECKING: + + def request( + self, + method: str, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... 
+ + else: + + def request( + self, method: str, url: StrOrURL, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP request.""" + return _RequestContextManager(self._request(method, url, **kwargs)) def _build_url(self, str_or_url: StrOrURL) -> URL: url = URL(str_or_url) @@ -466,7 +477,7 @@ async def _request( ssl: Union[SSLContext, bool, Fingerprint] = True, server_hostname: Optional[str] = None, proxy_headers: Optional[LooseHeaders] = None, - trace_request_ctx: Optional[SimpleNamespace] = None, + trace_request_ctx: Optional[Mapping[str, str]] = None, read_bufsize: Optional[int] = None, auto_decompress: Optional[bool] = None, max_line_size: Optional[int] = None, diff --git a/aiohttp/client_exceptions.py b/aiohttp/client_exceptions.py index f15a9ee3d3e..ff29b3d3ca9 100644 --- a/aiohttp/client_exceptions.py +++ b/aiohttp/client_exceptions.py @@ -100,7 +100,7 @@ def __str__(self) -> str: return "{}, message={!r}, url={!r}".format( self.status, self.message, - self.request_info.real_url, + str(self.request_info.real_url), ) def __repr__(self) -> str: diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 37d14e107fd..2c10da4ff81 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -245,7 +245,8 @@ class ClientRequest: hdrs.ACCEPT_ENCODING: _gen_default_accept_encoding(), } - body = b"" + # Type of body depends on PAYLOAD_REGISTRY, which is dynamic. 
+ body: Any = b"" auth = None response = None @@ -441,7 +442,7 @@ def update_headers(self, headers: Optional[LooseHeaders]) -> None: if headers: if isinstance(headers, (dict, MultiDictProxy, MultiDict)): - headers = headers.items() # type: ignore[assignment] + headers = headers.items() for key, value in headers: # type: ignore[misc] # A special case for Host header @@ -597,6 +598,10 @@ def update_proxy( raise ValueError("proxy_auth must be None or BasicAuth() tuple") self.proxy = proxy self.proxy_auth = proxy_auth + if proxy_headers is not None and not isinstance( + proxy_headers, (MultiDict, MultiDictProxy) + ): + proxy_headers = CIMultiDict(proxy_headers) self.proxy_headers = proxy_headers def keep_alive(self) -> bool: @@ -632,10 +637,10 @@ async def write_bytes( await self.body.write(writer) else: if isinstance(self.body, (bytes, bytearray)): - self.body = (self.body,) # type: ignore[assignment] + self.body = (self.body,) for chunk in self.body: - await writer.write(chunk) # type: ignore[arg-type] + await writer.write(chunk) except OSError as underlying_exc: reraised_exc = underlying_exc diff --git a/aiohttp/connector.py b/aiohttp/connector.py index cd89ea641d3..2e07395aece 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -23,6 +23,7 @@ List, Literal, Optional, + Sequence, Set, Tuple, Type, @@ -833,7 +834,7 @@ def clear_dns_cache( self._cached_hosts.clear() async def _resolve_host( - self, host: str, port: int, traces: Optional[List["Trace"]] = None + self, host: str, port: int, traces: Optional[Sequence["Trace"]] = None ) -> List[ResolveResult]: """Resolve host and return list of addresses.""" if is_ip_address(host): @@ -902,7 +903,7 @@ async def _resolve_host_with_throttle( key: Tuple[str, int], host: str, port: int, - traces: Optional[List["Trace"]], + traces: Optional[Sequence["Trace"]], ) -> List[ResolveResult]: """Resolve host with a dns events throttle.""" if key in self._throttle_dns_events: diff --git a/aiohttp/pytest_plugin.py 
b/aiohttp/pytest_plugin.py index 6225fdf2be0..c862b409566 100644 --- a/aiohttp/pytest_plugin.py +++ b/aiohttp/pytest_plugin.py @@ -2,7 +2,17 @@ import contextlib import inspect import warnings -from typing import Any, Awaitable, Callable, Dict, Iterator, Optional, Type, Union +from typing import ( + Any, + Awaitable, + Callable, + Dict, + Iterator, + Optional, + Protocol, + Type, + Union, +) import pytest @@ -24,9 +34,23 @@ except ImportError: # pragma: no cover uvloop = None # type: ignore[assignment] -AiohttpClient = Callable[[Union[Application, BaseTestServer]], Awaitable[TestClient]] AiohttpRawServer = Callable[[Application], Awaitable[RawTestServer]] -AiohttpServer = Callable[[Application], Awaitable[TestServer]] + + +class AiohttpClient(Protocol): + def __call__( + self, + __param: Union[Application, BaseTestServer], + *, + server_kwargs: Optional[Dict[str, Any]] = None, + **kwargs: Any + ) -> Awaitable[TestClient]: ... + + +class AiohttpServer(Protocol): + def __call__( + self, app: Application, *, port: Optional[int] = None, **kwargs: Any + ) -> Awaitable[TestServer]: ... 
def pytest_addoption(parser): # type: ignore[no-untyped-def] @@ -262,7 +286,9 @@ def aiohttp_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpServer]: """ servers = [] - async def go(app, *, port=None, **kwargs): # type: ignore[no-untyped-def] + async def go( + app: Application, *, port: Optional[int] = None, **kwargs: Any + ) -> TestServer: server = TestServer(app, port=port) await server.start_server(loop=loop, **kwargs) servers.append(server) diff --git a/aiohttp/test_utils.py b/aiohttp/test_utils.py index a36e8599689..97c1469dd2a 100644 --- a/aiohttp/test_utils.py +++ b/aiohttp/test_utils.py @@ -11,17 +11,7 @@ import warnings from abc import ABC, abstractmethod from types import TracebackType -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Iterator, - List, - Optional, - Type, - Union, - cast, -) +from typing import TYPE_CHECKING, Any, Callable, Iterator, List, Optional, Type, cast from unittest import IsolatedAsyncioTestCase, mock from aiosignal import Signal @@ -29,7 +19,11 @@ from yarl import URL import aiohttp -from aiohttp.client import _RequestContextManager, _WSRequestContextManager +from aiohttp.client import ( + _RequestContextManager, + _RequestOptions, + _WSRequestContextManager, +) from . 
import ClientSession, hdrs from .abc import AbstractCookieJar @@ -55,6 +49,9 @@ else: SSLContext = None +if sys.version_info >= (3, 11) and TYPE_CHECKING: + from typing import Unpack + REUSE_ADDRESS = os.name == "posix" and sys.platform != "cygwin" @@ -90,7 +87,7 @@ class BaseTestServer(ABC): def __init__( self, *, - scheme: Union[str, object] = sentinel, + scheme: str = "", loop: Optional[asyncio.AbstractEventLoop] = None, host: str = "127.0.0.1", port: Optional[int] = None, @@ -135,12 +132,8 @@ async def start_server( sockets = server.sockets # type: ignore[attr-defined] assert sockets is not None self.port = sockets[0].getsockname()[1] - if self.scheme is sentinel: - if self._ssl: - scheme = "https" - else: - scheme = "http" - self.scheme = scheme + if not self.scheme: + self.scheme = "https" if self._ssl else "http" self._root = URL(f"{self.scheme}://{self.host}:{self.port}") @abstractmethod # pragma: no cover @@ -222,7 +215,7 @@ def __init__( self, app: Application, *, - scheme: Union[str, object] = sentinel, + scheme: str = "", host: str = "127.0.0.1", port: Optional[int] = None, **kwargs: Any, @@ -239,7 +232,7 @@ def __init__( self, handler: _RequestHandler, *, - scheme: Union[str, object] = sentinel, + scheme: str = "", host: str = "127.0.0.1", port: Optional[int] = None, **kwargs: Any, @@ -324,45 +317,101 @@ async def _request( self._responses.append(resp) return resp - def request( - self, method: str, path: StrOrURL, **kwargs: Any - ) -> _RequestContextManager: - """Routes a request to tested http server. + if sys.version_info >= (3, 11) and TYPE_CHECKING: + + def request( + self, method: str, path: StrOrURL, **kwargs: Unpack[_RequestOptions] + ) -> _RequestContextManager: ... + + def get( + self, + path: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> _RequestContextManager: ... + + def options( + self, + path: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> _RequestContextManager: ... 
+ + def head( + self, + path: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> _RequestContextManager: ... + + def post( + self, + path: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> _RequestContextManager: ... + + def put( + self, + path: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> _RequestContextManager: ... + + def patch( + self, + path: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> _RequestContextManager: ... + + def delete( + self, + path: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> _RequestContextManager: ... - The interface is identical to aiohttp.ClientSession.request, - except the loop kwarg is overridden by the instance used by the - test server. + else: - """ - return _RequestContextManager(self._request(method, path, **kwargs)) + def request( + self, method: str, path: StrOrURL, **kwargs: Any + ) -> _RequestContextManager: + """Routes a request to tested http server. - def get(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP GET request.""" - return _RequestContextManager(self._request(hdrs.METH_GET, path, **kwargs)) + The interface is identical to aiohttp.ClientSession.request, + except the loop kwarg is overridden by the instance used by the + test server. 
- def post(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP POST request.""" - return _RequestContextManager(self._request(hdrs.METH_POST, path, **kwargs)) + """ + return _RequestContextManager(self._request(method, path, **kwargs)) - def options(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP OPTIONS request.""" - return _RequestContextManager(self._request(hdrs.METH_OPTIONS, path, **kwargs)) + def get(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP GET request.""" + return _RequestContextManager(self._request(hdrs.METH_GET, path, **kwargs)) - def head(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP HEAD request.""" - return _RequestContextManager(self._request(hdrs.METH_HEAD, path, **kwargs)) + def post(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP POST request.""" + return _RequestContextManager(self._request(hdrs.METH_POST, path, **kwargs)) - def put(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP PUT request.""" - return _RequestContextManager(self._request(hdrs.METH_PUT, path, **kwargs)) + def options(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP OPTIONS request.""" + return _RequestContextManager( + self._request(hdrs.METH_OPTIONS, path, **kwargs) + ) + + def head(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP HEAD request.""" + return _RequestContextManager(self._request(hdrs.METH_HEAD, path, **kwargs)) - def patch(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP PATCH request.""" - return _RequestContextManager(self._request(hdrs.METH_PATCH, path, **kwargs)) + def put(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP PUT request.""" + return _RequestContextManager(self._request(hdrs.METH_PUT, 
path, **kwargs)) - def delete(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP PATCH request.""" - return _RequestContextManager(self._request(hdrs.METH_DELETE, path, **kwargs)) + def patch(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP PATCH request.""" + return _RequestContextManager( + self._request(hdrs.METH_PATCH, path, **kwargs) + ) + + def delete(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP PATCH request.""" + return _RequestContextManager( + self._request(hdrs.METH_DELETE, path, **kwargs) + ) def ws_connect(self, path: StrOrURL, **kwargs: Any) -> _WSRequestContextManager: """Initiate websocket connection. diff --git a/aiohttp/tracing.py b/aiohttp/tracing.py index 66007cbeb2c..012ed7bdaf6 100644 --- a/aiohttp/tracing.py +++ b/aiohttp/tracing.py @@ -1,5 +1,5 @@ from types import SimpleNamespace -from typing import TYPE_CHECKING, Awaitable, Optional, Protocol, Type, TypeVar +from typing import TYPE_CHECKING, Awaitable, Mapping, Optional, Protocol, Type, TypeVar import attr from aiosignal import Signal @@ -101,7 +101,7 @@ def __init__( self._trace_config_ctx_factory = trace_config_ctx_factory def trace_config_ctx( - self, trace_request_ctx: Optional[SimpleNamespace] = None + self, trace_request_ctx: Optional[Mapping[str, str]] = None ) -> SimpleNamespace: """Return a new trace_config_ctx instance""" return self._trace_config_ctx_factory(trace_request_ctx=trace_request_ctx) diff --git a/aiohttp/typedefs.py b/aiohttp/typedefs.py index 80dd26e80bd..9fb21c15f83 100644 --- a/aiohttp/typedefs.py +++ b/aiohttp/typedefs.py @@ -35,7 +35,13 @@ Byteish = Union[bytes, bytearray, memoryview] JSONEncoder = Callable[[Any], str] JSONDecoder = Callable[[str], Any] -LooseHeaders = Union[Mapping[Union[str, istr], str], _CIMultiDict, _CIMultiDictProxy] +LooseHeaders = Union[ + Mapping[str, str], + Mapping[istr, str], + _CIMultiDict, + _CIMultiDictProxy, + 
Iterable[Tuple[Union[str, istr], str]], +] RawHeaders = Tuple[Tuple[bytes, bytes], ...] StrOrURL = Union[str, URL] diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index d059a166884..28d9ef3d10b 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -239,7 +239,8 @@ def clone( # a copy semantic dct["headers"] = CIMultiDictProxy(CIMultiDict(headers)) dct["raw_headers"] = tuple( - (k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items() + (k.encode("utf-8"), v.encode("utf-8")) + for k, v in dct["headers"].items() ) message = self._message._replace(**dct) diff --git a/requirements/lint.in b/requirements/lint.in index 98910e21f0e..0d46809a083 100644 --- a/requirements/lint.in +++ b/requirements/lint.in @@ -1,8 +1,11 @@ aiodns aioredis +freezegun mypy; implementation_name == "cpython" pre-commit pytest +pytest-mock python-on-whales slotscheck +trustme uvloop; platform_system != "Windows" diff --git a/requirements/lint.txt b/requirements/lint.txt index 85b96964c05..97809fe3dde 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -30,6 +30,8 @@ exceptiongroup==1.1.2 # via pytest filelock==3.12.2 # via virtualenv +freezegun==1.5.1 + # via -r requirements/lint.in identify==2.5.26 # via pre-commit idna==3.7 @@ -66,6 +68,8 @@ pygments==2.17.2 # via rich pytest==8.3.2 # via -r requirements/lint.in +pytest-mock==3.14.0 + # via -r requirements/lint.in python-on-whales==0.72.0 # via -r requirements/lint.in pyyaml==6.0.1 @@ -85,6 +89,8 @@ tomli==2.0.1 # slotscheck tqdm==4.66.2 # via python-on-whales +trustme==1.1.0 + # via -r requirements/lint.in typer==0.12.3 # via python-on-whales typing-extensions==4.11.0 From 5eb18a665570d9a6d7238fefb79359d570bc19a5 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Wed, 7 Aug 2024 16:08:04 -0500 Subject: [PATCH 0285/1511] [PR #8632/b2691f2 backport][3.11] Fix connecting to npipe://, tcp://, and unix:// urls (#8638) Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8632.bugfix.rst | 1 + aiohttp/client.py | 10 ++--- aiohttp/connector.py | 16 ++++++++ tests/test_client_session.py | 75 +++++++++++++++++++++++++++++++++--- tests/test_connector.py | 49 +++++++++++++++++++++-- 5 files changed, 137 insertions(+), 14 deletions(-) create mode 100644 CHANGES/8632.bugfix.rst diff --git a/CHANGES/8632.bugfix.rst b/CHANGES/8632.bugfix.rst new file mode 100644 index 00000000000..c6da81d7ab3 --- /dev/null +++ b/CHANGES/8632.bugfix.rst @@ -0,0 +1 @@ +Fixed connecting to ``npipe://``, ``tcp://``, and ``unix://`` urls -- by :user:`bdraco`. diff --git a/aiohttp/client.py b/aiohttp/client.py index 1d4ccc0814a..3d1045f355a 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -75,6 +75,7 @@ ) from .client_ws import ClientWebSocketResponse as ClientWebSocketResponse from .connector import ( + HTTP_AND_EMPTY_SCHEMA_SET, BaseConnector as BaseConnector, NamedPipeConnector as NamedPipeConnector, TCPConnector as TCPConnector, @@ -209,9 +210,6 @@ class ClientTimeout: # https://www.rfc-editor.org/rfc/rfc9110#section-9.2.2 IDEMPOTENT_METHODS = frozenset({"GET", "HEAD", "OPTIONS", "TRACE", "PUT", "DELETE"}) -HTTP_SCHEMA_SET = frozenset({"http", "https", ""}) -WS_SCHEMA_SET = frozenset({"ws", "wss"}) -ALLOWED_PROTOCOL_SCHEMA_SET = HTTP_SCHEMA_SET | WS_SCHEMA_SET _RetType = TypeVar("_RetType") _CharsetResolver = Callable[[ClientResponse, bytes], str] @@ -517,7 +515,8 @@ async def _request( except ValueError as e: raise InvalidUrlClientError(str_or_url) from e - if url.scheme not in ALLOWED_PROTOCOL_SCHEMA_SET: + assert self._connector is not None + if url.scheme not in self._connector.allowed_protocol_schema_set: raise NonHttpUrlClientError(url) skip_headers = set(self._skip_auto_headers) @@ -655,7 +654,6 @@ async 
def _request( real_timeout.connect, ceil_threshold=real_timeout.ceil_threshold, ): - assert self._connector is not None conn = await self._connector.connect( req, traces=traces, timeout=real_timeout ) @@ -752,7 +750,7 @@ async def _request( ) from e scheme = parsed_redirect_url.scheme - if scheme not in HTTP_SCHEMA_SET: + if scheme not in HTTP_AND_EMPTY_SCHEMA_SET: resp.close() raise NonHttpUrlRedirectClientError(r_url) elif not scheme: diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 2e07395aece..d4691b10e6e 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -63,6 +63,14 @@ SSLContext = object # type: ignore[misc,assignment] +EMPTY_SCHEMA_SET = frozenset({""}) +HTTP_SCHEMA_SET = frozenset({"http", "https"}) +WS_SCHEMA_SET = frozenset({"ws", "wss"}) + +HTTP_AND_EMPTY_SCHEMA_SET = HTTP_SCHEMA_SET | EMPTY_SCHEMA_SET +HIGH_LEVEL_SCHEMA_SET = HTTP_AND_EMPTY_SCHEMA_SET | WS_SCHEMA_SET + + __all__ = ("BaseConnector", "TCPConnector", "UnixConnector", "NamedPipeConnector") @@ -211,6 +219,8 @@ class BaseConnector: # abort transport after 2 seconds (cleanup broken connections) _cleanup_closed_period = 2.0 + allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET + def __init__( self, *, @@ -760,6 +770,8 @@ class TCPConnector(BaseConnector): loop - Optional event loop. """ + allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"tcp"}) + def __init__( self, *, @@ -1458,6 +1470,8 @@ class UnixConnector(BaseConnector): loop - Optional event loop. """ + allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"unix"}) + def __init__( self, path: str, @@ -1514,6 +1528,8 @@ class NamedPipeConnector(BaseConnector): loop - Optional event loop. 
""" + allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"npipe"}) + def __init__( self, path: str, diff --git a/tests/test_client_session.py b/tests/test_client_session.py index a522094a287..051c0aeba24 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -4,7 +4,7 @@ import io import json from http.cookies import SimpleCookie -from typing import Any, List +from typing import Any, Awaitable, Callable, List from unittest import mock from uuid import uuid4 @@ -16,10 +16,12 @@ import aiohttp from aiohttp import client, hdrs, web from aiohttp.client import ClientSession +from aiohttp.client_proto import ResponseHandler from aiohttp.client_reqrep import ClientRequest -from aiohttp.connector import BaseConnector, TCPConnector +from aiohttp.connector import BaseConnector, Connection, TCPConnector, UnixConnector from aiohttp.helpers import DEBUG from aiohttp.test_utils import make_mocked_coro +from aiohttp.tracing import Trace @pytest.fixture @@ -487,15 +489,17 @@ async def test_ws_connect_allowed_protocols( hdrs.CONNECTION: "upgrade", hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, } - resp.url = URL(f"{protocol}://example.com") + resp.url = URL(f"{protocol}://example") resp.cookies = SimpleCookie() resp.start = mock.AsyncMock() req = mock.create_autospec(aiohttp.ClientRequest, spec_set=True) req_factory = mock.Mock(return_value=req) req.send = mock.AsyncMock(return_value=resp) + # BaseConnector allows all high level protocols by default + connector = BaseConnector() - session = await create_session(request_class=req_factory) + session = await create_session(connector=connector, request_class=req_factory) connections = [] original_connect = session._connector.connect @@ -515,7 +519,68 @@ async def create_connection(req, traces, timeout): "aiohttp.client.os" ) as m_os: m_os.urandom.return_value = key_data - await session.ws_connect(f"{protocol}://example.com") + await session.ws_connect(f"{protocol}://example") + + # normally called during 
garbage collection. triggers an exception + # if the connection wasn't already closed + for c in connections: + c.close() + c.__del__() + + await session.close() + + +@pytest.mark.parametrize("protocol", ["http", "https", "ws", "wss", "unix"]) +async def test_ws_connect_unix_socket_allowed_protocols( + create_session: Callable[..., Awaitable[ClientSession]], + create_mocked_conn: Callable[[], ResponseHandler], + protocol: str, + ws_key: bytes, + key_data: bytes, +) -> None: + resp = mock.create_autospec(aiohttp.ClientResponse) + resp.status = 101 + resp.headers = { + hdrs.UPGRADE: "websocket", + hdrs.CONNECTION: "upgrade", + hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, + } + resp.url = URL(f"{protocol}://example") + resp.cookies = SimpleCookie() + resp.start = mock.AsyncMock() + + req = mock.create_autospec(aiohttp.ClientRequest, spec_set=True) + req_factory = mock.Mock(return_value=req) + req.send = mock.AsyncMock(return_value=resp) + # UnixConnector allows all high level protocols by default and unix sockets + session = await create_session( + connector=UnixConnector(path=""), request_class=req_factory + ) + + connections = [] + assert session._connector is not None + original_connect = session._connector.connect + + async def connect( + req: ClientRequest, traces: List[Trace], timeout: aiohttp.ClientTimeout + ) -> Connection: + conn = await original_connect(req, traces, timeout) + connections.append(conn) + return conn + + async def create_connection( + req: object, traces: object, timeout: object + ) -> ResponseHandler: + return create_mocked_conn() + + connector = session._connector + with mock.patch.object(connector, "connect", connect), mock.patch.object( + connector, "_create_connection", create_connection + ), mock.patch.object(connector, "_release"), mock.patch( + "aiohttp.client.os" + ) as m_os: + m_os.urandom.return_value = key_data + await session.ws_connect(f"{protocol}://example") # normally called during garbage collection. 
triggers an exception # if the connection wasn't already closed diff --git a/tests/test_connector.py b/tests/test_connector.py index 2065adf7414..d146fb4ee51 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -1481,7 +1481,19 @@ async def test_tcp_connector_ctor() -> None: assert conn.family == 0 -async def test_tcp_connector_ctor_fingerprint_valid(loop) -> None: +async def test_tcp_connector_allowed_protocols(loop: asyncio.AbstractEventLoop) -> None: + conn = aiohttp.TCPConnector() + assert conn.allowed_protocol_schema_set == {"", "tcp", "http", "https", "ws", "wss"} + + +async def test_invalid_ssl_param() -> None: + with pytest.raises(TypeError): + aiohttp.TCPConnector(ssl=object()) # type: ignore[arg-type] + + +async def test_tcp_connector_ctor_fingerprint_valid( + loop: asyncio.AbstractEventLoop, +) -> None: valid = aiohttp.Fingerprint(hashlib.sha256(b"foo").digest()) conn = aiohttp.TCPConnector(ssl=valid, loop=loop) assert conn._ssl is valid @@ -1639,8 +1651,23 @@ async def test_ctor_with_default_loop(loop) -> None: assert loop is conn._loop -async def test_connect_with_limit(loop, key) -> None: - proto = mock.Mock() +async def test_base_connector_allows_high_level_protocols( + loop: asyncio.AbstractEventLoop, +) -> None: + conn = aiohttp.BaseConnector() + assert conn.allowed_protocol_schema_set == { + "", + "http", + "https", + "ws", + "wss", + } + + +async def test_connect_with_limit( + loop: asyncio.AbstractEventLoop, key: ConnectionKey +) -> None: + proto = create_mocked_conn(loop) proto.is_connected.return_value = True req = ClientRequest( @@ -2412,6 +2439,14 @@ async def handler(request): connector = aiohttp.UnixConnector(unix_sockname) assert unix_sockname == connector.path + assert connector.allowed_protocol_schema_set == { + "", + "http", + "https", + "ws", + "wss", + "unix", + } session = client.ClientSession(connector=connector) r = await session.get(url) @@ -2437,6 +2472,14 @@ async def handler(request): connector = 
aiohttp.NamedPipeConnector(pipe_name) assert pipe_name == connector.path + assert connector.allowed_protocol_schema_set == { + "", + "http", + "https", + "ws", + "wss", + "npipe", + } session = client.ClientSession(connector=connector) r = await session.get(url) From 72f41aab593d21fc16074c6ee358ec1f546c26a7 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Wed, 7 Aug 2024 16:08:14 -0500 Subject: [PATCH 0286/1511] [PR #8632/b2691f2 backport][3.10] Fix connecting to npipe://, tcp://, and unix:// urls (#8637) Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8632.bugfix.rst | 1 + aiohttp/client.py | 10 ++--- aiohttp/connector.py | 16 ++++++++ tests/test_client_session.py | 75 +++++++++++++++++++++++++++++++++--- tests/test_connector.py | 49 +++++++++++++++++++++-- 5 files changed, 137 insertions(+), 14 deletions(-) create mode 100644 CHANGES/8632.bugfix.rst diff --git a/CHANGES/8632.bugfix.rst b/CHANGES/8632.bugfix.rst new file mode 100644 index 00000000000..c6da81d7ab3 --- /dev/null +++ b/CHANGES/8632.bugfix.rst @@ -0,0 +1 @@ +Fixed connecting to ``npipe://``, ``tcp://``, and ``unix://`` urls -- by :user:`bdraco`. 
diff --git a/aiohttp/client.py b/aiohttp/client.py index 1d4ccc0814a..3d1045f355a 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -75,6 +75,7 @@ ) from .client_ws import ClientWebSocketResponse as ClientWebSocketResponse from .connector import ( + HTTP_AND_EMPTY_SCHEMA_SET, BaseConnector as BaseConnector, NamedPipeConnector as NamedPipeConnector, TCPConnector as TCPConnector, @@ -209,9 +210,6 @@ class ClientTimeout: # https://www.rfc-editor.org/rfc/rfc9110#section-9.2.2 IDEMPOTENT_METHODS = frozenset({"GET", "HEAD", "OPTIONS", "TRACE", "PUT", "DELETE"}) -HTTP_SCHEMA_SET = frozenset({"http", "https", ""}) -WS_SCHEMA_SET = frozenset({"ws", "wss"}) -ALLOWED_PROTOCOL_SCHEMA_SET = HTTP_SCHEMA_SET | WS_SCHEMA_SET _RetType = TypeVar("_RetType") _CharsetResolver = Callable[[ClientResponse, bytes], str] @@ -517,7 +515,8 @@ async def _request( except ValueError as e: raise InvalidUrlClientError(str_or_url) from e - if url.scheme not in ALLOWED_PROTOCOL_SCHEMA_SET: + assert self._connector is not None + if url.scheme not in self._connector.allowed_protocol_schema_set: raise NonHttpUrlClientError(url) skip_headers = set(self._skip_auto_headers) @@ -655,7 +654,6 @@ async def _request( real_timeout.connect, ceil_threshold=real_timeout.ceil_threshold, ): - assert self._connector is not None conn = await self._connector.connect( req, traces=traces, timeout=real_timeout ) @@ -752,7 +750,7 @@ async def _request( ) from e scheme = parsed_redirect_url.scheme - if scheme not in HTTP_SCHEMA_SET: + if scheme not in HTTP_AND_EMPTY_SCHEMA_SET: resp.close() raise NonHttpUrlRedirectClientError(r_url) elif not scheme: diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 2e07395aece..d4691b10e6e 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -63,6 +63,14 @@ SSLContext = object # type: ignore[misc,assignment] +EMPTY_SCHEMA_SET = frozenset({""}) +HTTP_SCHEMA_SET = frozenset({"http", "https"}) +WS_SCHEMA_SET = frozenset({"ws", "wss"}) + 
+HTTP_AND_EMPTY_SCHEMA_SET = HTTP_SCHEMA_SET | EMPTY_SCHEMA_SET +HIGH_LEVEL_SCHEMA_SET = HTTP_AND_EMPTY_SCHEMA_SET | WS_SCHEMA_SET + + __all__ = ("BaseConnector", "TCPConnector", "UnixConnector", "NamedPipeConnector") @@ -211,6 +219,8 @@ class BaseConnector: # abort transport after 2 seconds (cleanup broken connections) _cleanup_closed_period = 2.0 + allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET + def __init__( self, *, @@ -760,6 +770,8 @@ class TCPConnector(BaseConnector): loop - Optional event loop. """ + allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"tcp"}) + def __init__( self, *, @@ -1458,6 +1470,8 @@ class UnixConnector(BaseConnector): loop - Optional event loop. """ + allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"unix"}) + def __init__( self, path: str, @@ -1514,6 +1528,8 @@ class NamedPipeConnector(BaseConnector): loop - Optional event loop. """ + allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"npipe"}) + def __init__( self, path: str, diff --git a/tests/test_client_session.py b/tests/test_client_session.py index a522094a287..051c0aeba24 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -4,7 +4,7 @@ import io import json from http.cookies import SimpleCookie -from typing import Any, List +from typing import Any, Awaitable, Callable, List from unittest import mock from uuid import uuid4 @@ -16,10 +16,12 @@ import aiohttp from aiohttp import client, hdrs, web from aiohttp.client import ClientSession +from aiohttp.client_proto import ResponseHandler from aiohttp.client_reqrep import ClientRequest -from aiohttp.connector import BaseConnector, TCPConnector +from aiohttp.connector import BaseConnector, Connection, TCPConnector, UnixConnector from aiohttp.helpers import DEBUG from aiohttp.test_utils import make_mocked_coro +from aiohttp.tracing import Trace @pytest.fixture @@ -487,15 +489,17 @@ async def test_ws_connect_allowed_protocols( hdrs.CONNECTION: "upgrade", 
hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, } - resp.url = URL(f"{protocol}://example.com") + resp.url = URL(f"{protocol}://example") resp.cookies = SimpleCookie() resp.start = mock.AsyncMock() req = mock.create_autospec(aiohttp.ClientRequest, spec_set=True) req_factory = mock.Mock(return_value=req) req.send = mock.AsyncMock(return_value=resp) + # BaseConnector allows all high level protocols by default + connector = BaseConnector() - session = await create_session(request_class=req_factory) + session = await create_session(connector=connector, request_class=req_factory) connections = [] original_connect = session._connector.connect @@ -515,7 +519,68 @@ async def create_connection(req, traces, timeout): "aiohttp.client.os" ) as m_os: m_os.urandom.return_value = key_data - await session.ws_connect(f"{protocol}://example.com") + await session.ws_connect(f"{protocol}://example") + + # normally called during garbage collection. triggers an exception + # if the connection wasn't already closed + for c in connections: + c.close() + c.__del__() + + await session.close() + + +@pytest.mark.parametrize("protocol", ["http", "https", "ws", "wss", "unix"]) +async def test_ws_connect_unix_socket_allowed_protocols( + create_session: Callable[..., Awaitable[ClientSession]], + create_mocked_conn: Callable[[], ResponseHandler], + protocol: str, + ws_key: bytes, + key_data: bytes, +) -> None: + resp = mock.create_autospec(aiohttp.ClientResponse) + resp.status = 101 + resp.headers = { + hdrs.UPGRADE: "websocket", + hdrs.CONNECTION: "upgrade", + hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, + } + resp.url = URL(f"{protocol}://example") + resp.cookies = SimpleCookie() + resp.start = mock.AsyncMock() + + req = mock.create_autospec(aiohttp.ClientRequest, spec_set=True) + req_factory = mock.Mock(return_value=req) + req.send = mock.AsyncMock(return_value=resp) + # UnixConnector allows all high level protocols by default and unix sockets + session = await create_session( + connector=UnixConnector(path=""), 
request_class=req_factory + ) + + connections = [] + assert session._connector is not None + original_connect = session._connector.connect + + async def connect( + req: ClientRequest, traces: List[Trace], timeout: aiohttp.ClientTimeout + ) -> Connection: + conn = await original_connect(req, traces, timeout) + connections.append(conn) + return conn + + async def create_connection( + req: object, traces: object, timeout: object + ) -> ResponseHandler: + return create_mocked_conn() + + connector = session._connector + with mock.patch.object(connector, "connect", connect), mock.patch.object( + connector, "_create_connection", create_connection + ), mock.patch.object(connector, "_release"), mock.patch( + "aiohttp.client.os" + ) as m_os: + m_os.urandom.return_value = key_data + await session.ws_connect(f"{protocol}://example") # normally called during garbage collection. triggers an exception # if the connection wasn't already closed diff --git a/tests/test_connector.py b/tests/test_connector.py index 2065adf7414..d146fb4ee51 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -1481,7 +1481,19 @@ async def test_tcp_connector_ctor() -> None: assert conn.family == 0 -async def test_tcp_connector_ctor_fingerprint_valid(loop) -> None: +async def test_tcp_connector_allowed_protocols(loop: asyncio.AbstractEventLoop) -> None: + conn = aiohttp.TCPConnector() + assert conn.allowed_protocol_schema_set == {"", "tcp", "http", "https", "ws", "wss"} + + +async def test_invalid_ssl_param() -> None: + with pytest.raises(TypeError): + aiohttp.TCPConnector(ssl=object()) # type: ignore[arg-type] + + +async def test_tcp_connector_ctor_fingerprint_valid( + loop: asyncio.AbstractEventLoop, +) -> None: valid = aiohttp.Fingerprint(hashlib.sha256(b"foo").digest()) conn = aiohttp.TCPConnector(ssl=valid, loop=loop) assert conn._ssl is valid @@ -1639,8 +1651,23 @@ async def test_ctor_with_default_loop(loop) -> None: assert loop is conn._loop -async def 
test_connect_with_limit(loop, key) -> None: - proto = mock.Mock() +async def test_base_connector_allows_high_level_protocols( + loop: asyncio.AbstractEventLoop, +) -> None: + conn = aiohttp.BaseConnector() + assert conn.allowed_protocol_schema_set == { + "", + "http", + "https", + "ws", + "wss", + } + + +async def test_connect_with_limit( + loop: asyncio.AbstractEventLoop, key: ConnectionKey +) -> None: + proto = create_mocked_conn(loop) proto.is_connected.return_value = True req = ClientRequest( @@ -2412,6 +2439,14 @@ async def handler(request): connector = aiohttp.UnixConnector(unix_sockname) assert unix_sockname == connector.path + assert connector.allowed_protocol_schema_set == { + "", + "http", + "https", + "ws", + "wss", + "unix", + } session = client.ClientSession(connector=connector) r = await session.get(url) @@ -2437,6 +2472,14 @@ async def handler(request): connector = aiohttp.NamedPipeConnector(pipe_name) assert pipe_name == connector.path + assert connector.allowed_protocol_schema_set == { + "", + "http", + "https", + "ws", + "wss", + "npipe", + } session = client.ClientSession(connector=connector) r = await session.get(url) From 68e84968de04c0073e784d19485e220c625bb9dd Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Wed, 7 Aug 2024 17:33:23 -0500 Subject: [PATCH 0287/1511] [PR #8608/c4acabc backport][3.10] Fix timer handle churn in websocket heartbeat (#8639) Co-authored-by: Sam Bull <git@sambull.org> (cherry picked from commit c4acabc836ab969e95199aa976e85c01df720a27) --- CHANGES/8608.misc.rst | 3 + aiohttp/client_ws.py | 115 ++++++++++++++++--------- aiohttp/helpers.py | 23 +++-- aiohttp/web_ws.py | 100 ++++++++++++--------- tests/test_client_ws.py | 48 +++++++++-- tests/test_client_ws_functional.py | 81 ++++++++++++++++- tests/test_web_websocket_functional.py | 59 ++++++++++++- 7 files changed, 331 insertions(+), 98 deletions(-) create mode 100644 CHANGES/8608.misc.rst diff --git a/CHANGES/8608.misc.rst b/CHANGES/8608.misc.rst new file mode 100644 index 00000000000..76e845bf997 --- /dev/null +++ b/CHANGES/8608.misc.rst @@ -0,0 +1,3 @@ +Improved websocket performance when messages are sent or received frequently -- by :user:`bdraco`. + +The WebSocket heartbeat scheduling algorithm was improved to reduce the ``asyncio`` scheduling overhead by decreasing the number of ``asyncio.TimerHandle`` creations and cancellations. 
diff --git a/aiohttp/client_ws.py b/aiohttp/client_ws.py index c1a2c4641ba..516ad586f70 100644 --- a/aiohttp/client_ws.py +++ b/aiohttp/client_ws.py @@ -6,7 +6,7 @@ from .client_exceptions import ClientError, ServerTimeoutError from .client_reqrep import ClientResponse -from .helpers import call_later, set_result +from .helpers import calculate_timeout_when, set_result from .http import ( WS_CLOSED_MESSAGE, WS_CLOSING_MESSAGE, @@ -62,6 +62,7 @@ def __init__( self._autoping = autoping self._heartbeat = heartbeat self._heartbeat_cb: Optional[asyncio.TimerHandle] = None + self._heartbeat_when: float = 0.0 if heartbeat is not None: self._pong_heartbeat = heartbeat / 2.0 self._pong_response_cb: Optional[asyncio.TimerHandle] = None @@ -75,52 +76,64 @@ def __init__( self._reset_heartbeat() def _cancel_heartbeat(self) -> None: - if self._pong_response_cb is not None: - self._pong_response_cb.cancel() - self._pong_response_cb = None - + self._cancel_pong_response_cb() if self._heartbeat_cb is not None: self._heartbeat_cb.cancel() self._heartbeat_cb = None - def _reset_heartbeat(self) -> None: - self._cancel_heartbeat() + def _cancel_pong_response_cb(self) -> None: + if self._pong_response_cb is not None: + self._pong_response_cb.cancel() + self._pong_response_cb = None - if self._heartbeat is not None: - self._heartbeat_cb = call_later( - self._send_heartbeat, - self._heartbeat, - self._loop, - timeout_ceil_threshold=( - self._conn._connector._timeout_ceil_threshold - if self._conn is not None - else 5 - ), - ) + def _reset_heartbeat(self) -> None: + if self._heartbeat is None: + return + self._cancel_pong_response_cb() + loop = self._loop + assert loop is not None + conn = self._conn + timeout_ceil_threshold = ( + conn._connector._timeout_ceil_threshold if conn is not None else 5 + ) + now = loop.time() + when = calculate_timeout_when(now, self._heartbeat, timeout_ceil_threshold) + self._heartbeat_when = when + if self._heartbeat_cb is None: + # We do not cancel the 
previous heartbeat_cb here because + # it generates a significant amount of TimerHandle churn + # which causes asyncio to rebuild the heap frequently. + # Instead _send_heartbeat() will reschedule the next + # heartbeat if it fires too early. + self._heartbeat_cb = loop.call_at(when, self._send_heartbeat) def _send_heartbeat(self) -> None: - if self._heartbeat is not None and not self._closed: - # fire-and-forget a task is not perfect but maybe ok for - # sending ping. Otherwise we need a long-living heartbeat - # task in the class. - self._loop.create_task(self._writer.ping()) - - if self._pong_response_cb is not None: - self._pong_response_cb.cancel() - self._pong_response_cb = call_later( - self._pong_not_received, - self._pong_heartbeat, - self._loop, - timeout_ceil_threshold=( - self._conn._connector._timeout_ceil_threshold - if self._conn is not None - else 5 - ), + self._heartbeat_cb = None + loop = self._loop + now = loop.time() + if now < self._heartbeat_when: + # Heartbeat fired too early, reschedule + self._heartbeat_cb = loop.call_at( + self._heartbeat_when, self._send_heartbeat ) + return + + # fire-and-forget a task is not perfect but maybe ok for + # sending ping. Otherwise we need a long-living heartbeat + # task in the class. 
+ loop.create_task(self._writer.ping()) # type: ignore[unused-awaitable] + + conn = self._conn + timeout_ceil_threshold = ( + conn._connector._timeout_ceil_threshold if conn is not None else 5 + ) + when = calculate_timeout_when(now, self._pong_heartbeat, timeout_ceil_threshold) + self._cancel_pong_response_cb() + self._pong_response_cb = loop.call_at(when, self._pong_not_received) def _pong_not_received(self) -> None: if not self._closed: - self._closed = True + self._set_closed() self._close_code = WSCloseCode.ABNORMAL_CLOSURE self._exception = ServerTimeoutError() self._response.close() @@ -129,6 +142,22 @@ def _pong_not_received(self) -> None: WSMessage(WSMsgType.ERROR, self._exception, None) ) + def _set_closed(self) -> None: + """Set the connection to closed. + + Cancel any heartbeat timers and set the closed flag. + """ + self._closed = True + self._cancel_heartbeat() + + def _set_closing(self) -> None: + """Set the connection to closing. + + Cancel any heartbeat timers and set the closing flag. 
+ """ + self._closing = True + self._cancel_heartbeat() + @property def closed(self) -> bool: return self._closed @@ -193,13 +222,12 @@ async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bo if self._waiting and not self._closing: assert self._loop is not None self._close_wait = self._loop.create_future() - self._closing = True + self._set_closing() self._reader.feed_data(WS_CLOSING_MESSAGE, 0) await self._close_wait if not self._closed: - self._cancel_heartbeat() - self._closed = True + self._set_closed() try: await self._writer.close(code, message) except asyncio.CancelledError: @@ -266,7 +294,8 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: await self.close() return WSMessage(WSMsgType.CLOSED, None, None) except ClientError: - self._closed = True + # Likely ServerDisconnectedError when connection is lost + self._set_closed() self._close_code = WSCloseCode.ABNORMAL_CLOSURE return WS_CLOSED_MESSAGE except WebSocketError as exc: @@ -275,18 +304,18 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: return WSMessage(WSMsgType.ERROR, exc, None) except Exception as exc: self._exception = exc - self._closing = True + self._set_closing() self._close_code = WSCloseCode.ABNORMAL_CLOSURE await self.close() return WSMessage(WSMsgType.ERROR, exc, None) if msg.type is WSMsgType.CLOSE: - self._closing = True + self._set_closing() self._close_code = msg.data if not self._closed and self._autoclose: await self.close() elif msg.type is WSMsgType.CLOSING: - self._closing = True + self._set_closing() elif msg.type is WSMsgType.PING and self._autoping: await self.pong(msg.data) continue diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index b3cc1b6b6e6..437c871e8f7 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -586,12 +586,23 @@ def call_later( loop: asyncio.AbstractEventLoop, timeout_ceil_threshold: float = 5, ) -> Optional[asyncio.TimerHandle]: - if timeout is not None and timeout > 0: - 
when = loop.time() + timeout - if timeout > timeout_ceil_threshold: - when = ceil(when) - return loop.call_at(when, cb) - return None + if timeout is None or timeout <= 0: + return None + now = loop.time() + when = calculate_timeout_when(now, timeout, timeout_ceil_threshold) + return loop.call_at(when, cb) + + +def calculate_timeout_when( + loop_time: float, + timeout: float, + timeout_ceiling_threshold: float, +) -> float: + """Calculate when to execute a timeout.""" + when = loop_time + timeout + if timeout > timeout_ceiling_threshold: + return ceil(when) + return when class TimeoutHandle: diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index b74bfd688c9..9f71d147997 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -11,7 +11,7 @@ from . import hdrs from .abc import AbstractStreamWriter -from .helpers import call_later, set_exception, set_result +from .helpers import calculate_timeout_when, set_exception, set_result from .http import ( WS_CLOSED_MESSAGE, WS_CLOSING_MESSAGE, @@ -89,6 +89,7 @@ def __init__( self._autoclose = autoclose self._autoping = autoping self._heartbeat = heartbeat + self._heartbeat_when = 0.0 self._heartbeat_cb: Optional[asyncio.TimerHandle] = None if heartbeat is not None: self._pong_heartbeat = heartbeat / 2.0 @@ -97,57 +98,76 @@ def __init__( self._max_msg_size = max_msg_size def _cancel_heartbeat(self) -> None: - if self._pong_response_cb is not None: - self._pong_response_cb.cancel() - self._pong_response_cb = None - + self._cancel_pong_response_cb() if self._heartbeat_cb is not None: self._heartbeat_cb.cancel() self._heartbeat_cb = None - def _reset_heartbeat(self) -> None: - self._cancel_heartbeat() + def _cancel_pong_response_cb(self) -> None: + if self._pong_response_cb is not None: + self._pong_response_cb.cancel() + self._pong_response_cb = None - if self._heartbeat is not None: - assert self._loop is not None - self._heartbeat_cb = call_later( - self._send_heartbeat, - self._heartbeat, - self._loop, - 
timeout_ceil_threshold=( - self._req._protocol._timeout_ceil_threshold - if self._req is not None - else 5 - ), - ) + def _reset_heartbeat(self) -> None: + if self._heartbeat is None: + return + self._cancel_pong_response_cb() + req = self._req + timeout_ceil_threshold = ( + req._protocol._timeout_ceil_threshold if req is not None else 5 + ) + loop = self._loop + assert loop is not None + now = loop.time() + when = calculate_timeout_when(now, self._heartbeat, timeout_ceil_threshold) + self._heartbeat_when = when + if self._heartbeat_cb is None: + # We do not cancel the previous heartbeat_cb here because + # it generates a significant amount of TimerHandle churn + # which causes asyncio to rebuild the heap frequently. + # Instead _send_heartbeat() will reschedule the next + # heartbeat if it fires too early. + self._heartbeat_cb = loop.call_at(when, self._send_heartbeat) def _send_heartbeat(self) -> None: - if self._heartbeat is not None and not self._closed: - assert self._loop is not None - # fire-and-forget a task is not perfect but maybe ok for - # sending ping. Otherwise we need a long-living heartbeat - # task in the class. - self._loop.create_task(self._writer.ping()) # type: ignore[union-attr] - - if self._pong_response_cb is not None: - self._pong_response_cb.cancel() - self._pong_response_cb = call_later( - self._pong_not_received, - self._pong_heartbeat, - self._loop, - timeout_ceil_threshold=( - self._req._protocol._timeout_ceil_threshold - if self._req is not None - else 5 - ), + self._heartbeat_cb = None + loop = self._loop + assert loop is not None and self._writer is not None + now = loop.time() + if now < self._heartbeat_when: + # Heartbeat fired too early, reschedule + self._heartbeat_cb = loop.call_at( + self._heartbeat_when, self._send_heartbeat ) + return + + # fire-and-forget a task is not perfect but maybe ok for + # sending ping. Otherwise we need a long-living heartbeat + # task in the class. 
+ loop.create_task(self._writer.ping()) # type: ignore[unused-awaitable] + + req = self._req + timeout_ceil_threshold = ( + req._protocol._timeout_ceil_threshold if req is not None else 5 + ) + when = calculate_timeout_when(now, self._pong_heartbeat, timeout_ceil_threshold) + self._cancel_pong_response_cb() + self._pong_response_cb = loop.call_at(when, self._pong_not_received) def _pong_not_received(self) -> None: if self._req is not None and self._req.transport is not None: - self._closed = True + self._set_closed() self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) self._exception = asyncio.TimeoutError() + def _set_closed(self) -> None: + """Set the connection to closed. + + Cancel any heartbeat timers and set the closed flag. + """ + self._closed = True + self._cancel_heartbeat() + async def prepare(self, request: BaseRequest) -> AbstractStreamWriter: # make pre-check to don't hide it by do_handshake() exceptions if self._payload_writer is not None: @@ -387,7 +407,7 @@ async def close( if self._closed: return False - self._closed = True + self._set_closed() try: await self._writer.close(code, message) writer = self._payload_writer @@ -431,6 +451,7 @@ def _set_closing(self, code: WSCloseCode) -> None: """Set the close code and mark the connection as closing.""" self._closing = True self._close_code = code + self._cancel_heartbeat() def _set_code_close_transport(self, code: WSCloseCode) -> None: """Set the close code and close the transport.""" @@ -543,5 +564,6 @@ def _cancel(self, exc: BaseException) -> None: # web_protocol calls this from connection_lost # or when the server is shutting down. 
self._closing = True + self._cancel_heartbeat() if self._reader is not None: set_exception(self._reader, exc) diff --git a/tests/test_client_ws.py b/tests/test_client_ws.py index ebc9d910c1a..a790fba43ec 100644 --- a/tests/test_client_ws.py +++ b/tests/test_client_ws.py @@ -9,6 +9,7 @@ import aiohttp from aiohttp import client, hdrs +from aiohttp.client_exceptions import ServerDisconnectedError from aiohttp.http import WS_KEY from aiohttp.streams import EofStream from aiohttp.test_utils import make_mocked_coro @@ -404,21 +405,56 @@ async def test_close_eofstream(loop, ws_key, key_data) -> None: await session.close() -async def test_close_exc(loop, ws_key, key_data) -> None: - resp = mock.Mock() - resp.status = 101 - resp.headers = { +async def test_close_connection_lost( + loop: asyncio.AbstractEventLoop, ws_key: bytes, key_data: bytes +) -> None: + """Test the websocket client handles the connection being closed out from under it.""" + mresp = mock.Mock(spec_set=client.ClientResponse) + mresp.status = 101 + mresp.headers = { hdrs.UPGRADE: "websocket", hdrs.CONNECTION: "upgrade", hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, } - resp.connection.protocol.read_timeout = None + mresp.connection.protocol.read_timeout = None + with mock.patch("aiohttp.client.WebSocketWriter"), mock.patch( + "aiohttp.client.os" + ) as m_os, mock.patch("aiohttp.client.ClientSession.request") as m_req: + m_os.urandom.return_value = key_data + m_req.return_value = loop.create_future() + m_req.return_value.set_result(mresp) + + session = aiohttp.ClientSession() + resp = await session.ws_connect("http://test.org") + assert not resp.closed + + exc = ServerDisconnectedError() + resp._reader.set_exception(exc) + + msg = await resp.receive() + assert msg.type is aiohttp.WSMsgType.CLOSED + assert resp.closed + + await session.close() + + +async def test_close_exc( + loop: asyncio.AbstractEventLoop, ws_key: bytes, key_data: bytes +) -> None: + mresp = mock.Mock() + mresp.status = 101 + mresp.headers = { + 
hdrs.UPGRADE: "websocket", + hdrs.CONNECTION: "upgrade", + hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, + } + mresp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.WebSocketWriter") as WebSocketWriter: with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() - m_req.return_value.set_result(resp) + m_req.return_value.set_result(mresp) writer = mock.Mock() WebSocketWriter.return_value = writer writer.close = make_mocked_coro() diff --git a/tests/test_client_ws_functional.py b/tests/test_client_ws_functional.py index dc474f96c39..5abaf0fefbf 100644 --- a/tests/test_client_ws_functional.py +++ b/tests/test_client_ws_functional.py @@ -1,6 +1,6 @@ import asyncio import sys -from typing import Any +from typing import Any, NoReturn import pytest @@ -599,7 +599,8 @@ async def handler(request): assert ping_received -async def test_heartbeat_no_pong(aiohttp_client) -> None: +async def test_heartbeat_no_pong(aiohttp_client: AiohttpClient) -> None: + """Test that the connection is closed if no pong is received without sending messages.""" ping_received = False async def handler(request): @@ -624,7 +625,81 @@ async def handler(request): assert resp.close_code is WSCloseCode.ABNORMAL_CLOSURE -async def test_heartbeat_no_pong_concurrent_receive(aiohttp_client: Any) -> None: +async def test_heartbeat_no_pong_after_receive_many_messages( + aiohttp_client: AiohttpClient, +) -> None: + """Test that the connection is closed if no pong is received after receiving many messages.""" + ping_received = False + + async def handler(request: web.Request) -> NoReturn: + nonlocal ping_received + ws = web.WebSocketResponse(autoping=False) + await ws.prepare(request) + for _ in range(5): + await ws.send_str("test") + await asyncio.sleep(0.05) + for _ in range(5): + await ws.send_str("test") + msg = await ws.receive() + ping_received = msg.type is 
aiohttp.WSMsgType.PING + await ws.receive() + assert False + + app = web.Application() + app.router.add_route("GET", "/", handler) + + client = await aiohttp_client(app) + resp = await client.ws_connect("/", heartbeat=0.1) + + for _ in range(10): + test_msg = await resp.receive() + assert test_msg.data == "test" + # Connection should be closed roughly after 1.5x heartbeat. + + await asyncio.sleep(0.2) + assert ping_received + assert resp.close_code is WSCloseCode.ABNORMAL_CLOSURE + + +async def test_heartbeat_no_pong_after_send_many_messages( + aiohttp_client: AiohttpClient, +) -> None: + """Test that the connection is closed if no pong is received after sending many messages.""" + ping_received = False + + async def handler(request: web.Request) -> NoReturn: + nonlocal ping_received + ws = web.WebSocketResponse(autoping=False) + await ws.prepare(request) + for _ in range(10): + msg = await ws.receive() + assert msg.data == "test" + assert msg.type is aiohttp.WSMsgType.TEXT + msg = await ws.receive() + ping_received = msg.type is aiohttp.WSMsgType.PING + await ws.receive() + assert False + + app = web.Application() + app.router.add_route("GET", "/", handler) + + client = await aiohttp_client(app) + resp = await client.ws_connect("/", heartbeat=0.1) + + for _ in range(5): + await resp.send_str("test") + await asyncio.sleep(0.05) + for _ in range(5): + await resp.send_str("test") + # Connection should be closed roughly after 1.5x heartbeat. 
+ await asyncio.sleep(0.2) + assert ping_received + assert resp.close_code is WSCloseCode.ABNORMAL_CLOSURE + + +async def test_heartbeat_no_pong_concurrent_receive( + aiohttp_client: AiohttpClient, +) -> None: ping_received = False async def handler(request): diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index ce338cdf92d..15ef33e3648 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -722,7 +722,64 @@ async def handler(request): await ws.close() -async def test_server_ws_async_for(loop, aiohttp_server) -> None: +async def test_heartbeat_no_pong_send_many_messages( + loop: Any, aiohttp_client: Any +) -> None: + """Test no pong after sending many messages.""" + + async def handler(request): + ws = web.WebSocketResponse(heartbeat=0.05) + await ws.prepare(request) + for _ in range(10): + await ws.send_str("test") + + await ws.receive() + return ws + + app = web.Application() + app.router.add_get("/", handler) + + client = await aiohttp_client(app) + ws = await client.ws_connect("/", autoping=False) + for _ in range(10): + msg = await ws.receive() + assert msg.type is aiohttp.WSMsgType.TEXT + assert msg.data == "test" + + msg = await ws.receive() + assert msg.type is aiohttp.WSMsgType.PING + await ws.close() + + +async def test_heartbeat_no_pong_receive_many_messages( + loop: Any, aiohttp_client: Any +) -> None: + """Test no pong after receiving many messages.""" + + async def handler(request): + ws = web.WebSocketResponse(heartbeat=0.05) + await ws.prepare(request) + for _ in range(10): + server_msg = await ws.receive() + assert server_msg.type is aiohttp.WSMsgType.TEXT + + await ws.receive() + return ws + + app = web.Application() + app.router.add_get("/", handler) + + client = await aiohttp_client(app) + ws = await client.ws_connect("/", autoping=False) + for _ in range(10): + await ws.send_str("test") + + msg = await ws.receive() + assert msg.type is 
aiohttp.WSMsgType.PING + await ws.close() + + +async def test_server_ws_async_for(loop: Any, aiohttp_server: Any) -> None: closed = loop.create_future() async def handler(request): From 13ef3c1d1a6a9d2a15acf72034f6107f0ae74a28 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Wed, 7 Aug 2024 17:33:44 -0500 Subject: [PATCH 0288/1511] [PR #8608/c4acabc backport][3.11] Fix timer handle churn in websocket heartbeat (#8640) Co-authored-by: Sam Bull <git@sambull.org> (cherry picked from commit c4acabc836ab969e95199aa976e85c01df720a27) --- CHANGES/8608.misc.rst | 3 + aiohttp/client_ws.py | 115 ++++++++++++++++--------- aiohttp/helpers.py | 23 +++-- aiohttp/web_ws.py | 100 ++++++++++++--------- tests/test_client_ws.py | 48 +++++++++-- tests/test_client_ws_functional.py | 81 ++++++++++++++++- tests/test_web_websocket_functional.py | 59 ++++++++++++- 7 files changed, 331 insertions(+), 98 deletions(-) create mode 100644 CHANGES/8608.misc.rst diff --git a/CHANGES/8608.misc.rst b/CHANGES/8608.misc.rst new file mode 100644 index 00000000000..76e845bf997 --- /dev/null +++ b/CHANGES/8608.misc.rst @@ -0,0 +1,3 @@ +Improved websocket performance when messages are sent or received frequently -- by :user:`bdraco`. + +The WebSocket heartbeat scheduling algorithm was improved to reduce the ``asyncio`` scheduling overhead by decreasing the number of ``asyncio.TimerHandle`` creations and cancellations. 
diff --git a/aiohttp/client_ws.py b/aiohttp/client_ws.py index c1a2c4641ba..516ad586f70 100644 --- a/aiohttp/client_ws.py +++ b/aiohttp/client_ws.py @@ -6,7 +6,7 @@ from .client_exceptions import ClientError, ServerTimeoutError from .client_reqrep import ClientResponse -from .helpers import call_later, set_result +from .helpers import calculate_timeout_when, set_result from .http import ( WS_CLOSED_MESSAGE, WS_CLOSING_MESSAGE, @@ -62,6 +62,7 @@ def __init__( self._autoping = autoping self._heartbeat = heartbeat self._heartbeat_cb: Optional[asyncio.TimerHandle] = None + self._heartbeat_when: float = 0.0 if heartbeat is not None: self._pong_heartbeat = heartbeat / 2.0 self._pong_response_cb: Optional[asyncio.TimerHandle] = None @@ -75,52 +76,64 @@ def __init__( self._reset_heartbeat() def _cancel_heartbeat(self) -> None: - if self._pong_response_cb is not None: - self._pong_response_cb.cancel() - self._pong_response_cb = None - + self._cancel_pong_response_cb() if self._heartbeat_cb is not None: self._heartbeat_cb.cancel() self._heartbeat_cb = None - def _reset_heartbeat(self) -> None: - self._cancel_heartbeat() + def _cancel_pong_response_cb(self) -> None: + if self._pong_response_cb is not None: + self._pong_response_cb.cancel() + self._pong_response_cb = None - if self._heartbeat is not None: - self._heartbeat_cb = call_later( - self._send_heartbeat, - self._heartbeat, - self._loop, - timeout_ceil_threshold=( - self._conn._connector._timeout_ceil_threshold - if self._conn is not None - else 5 - ), - ) + def _reset_heartbeat(self) -> None: + if self._heartbeat is None: + return + self._cancel_pong_response_cb() + loop = self._loop + assert loop is not None + conn = self._conn + timeout_ceil_threshold = ( + conn._connector._timeout_ceil_threshold if conn is not None else 5 + ) + now = loop.time() + when = calculate_timeout_when(now, self._heartbeat, timeout_ceil_threshold) + self._heartbeat_when = when + if self._heartbeat_cb is None: + # We do not cancel the 
previous heartbeat_cb here because + # it generates a significant amount of TimerHandle churn + # which causes asyncio to rebuild the heap frequently. + # Instead _send_heartbeat() will reschedule the next + # heartbeat if it fires too early. + self._heartbeat_cb = loop.call_at(when, self._send_heartbeat) def _send_heartbeat(self) -> None: - if self._heartbeat is not None and not self._closed: - # fire-and-forget a task is not perfect but maybe ok for - # sending ping. Otherwise we need a long-living heartbeat - # task in the class. - self._loop.create_task(self._writer.ping()) - - if self._pong_response_cb is not None: - self._pong_response_cb.cancel() - self._pong_response_cb = call_later( - self._pong_not_received, - self._pong_heartbeat, - self._loop, - timeout_ceil_threshold=( - self._conn._connector._timeout_ceil_threshold - if self._conn is not None - else 5 - ), + self._heartbeat_cb = None + loop = self._loop + now = loop.time() + if now < self._heartbeat_when: + # Heartbeat fired too early, reschedule + self._heartbeat_cb = loop.call_at( + self._heartbeat_when, self._send_heartbeat ) + return + + # fire-and-forget a task is not perfect but maybe ok for + # sending ping. Otherwise we need a long-living heartbeat + # task in the class. 
+ loop.create_task(self._writer.ping()) # type: ignore[unused-awaitable] + + conn = self._conn + timeout_ceil_threshold = ( + conn._connector._timeout_ceil_threshold if conn is not None else 5 + ) + when = calculate_timeout_when(now, self._pong_heartbeat, timeout_ceil_threshold) + self._cancel_pong_response_cb() + self._pong_response_cb = loop.call_at(when, self._pong_not_received) def _pong_not_received(self) -> None: if not self._closed: - self._closed = True + self._set_closed() self._close_code = WSCloseCode.ABNORMAL_CLOSURE self._exception = ServerTimeoutError() self._response.close() @@ -129,6 +142,22 @@ def _pong_not_received(self) -> None: WSMessage(WSMsgType.ERROR, self._exception, None) ) + def _set_closed(self) -> None: + """Set the connection to closed. + + Cancel any heartbeat timers and set the closed flag. + """ + self._closed = True + self._cancel_heartbeat() + + def _set_closing(self) -> None: + """Set the connection to closing. + + Cancel any heartbeat timers and set the closing flag. 
+ """ + self._closing = True + self._cancel_heartbeat() + @property def closed(self) -> bool: return self._closed @@ -193,13 +222,12 @@ async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bo if self._waiting and not self._closing: assert self._loop is not None self._close_wait = self._loop.create_future() - self._closing = True + self._set_closing() self._reader.feed_data(WS_CLOSING_MESSAGE, 0) await self._close_wait if not self._closed: - self._cancel_heartbeat() - self._closed = True + self._set_closed() try: await self._writer.close(code, message) except asyncio.CancelledError: @@ -266,7 +294,8 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: await self.close() return WSMessage(WSMsgType.CLOSED, None, None) except ClientError: - self._closed = True + # Likely ServerDisconnectedError when connection is lost + self._set_closed() self._close_code = WSCloseCode.ABNORMAL_CLOSURE return WS_CLOSED_MESSAGE except WebSocketError as exc: @@ -275,18 +304,18 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: return WSMessage(WSMsgType.ERROR, exc, None) except Exception as exc: self._exception = exc - self._closing = True + self._set_closing() self._close_code = WSCloseCode.ABNORMAL_CLOSURE await self.close() return WSMessage(WSMsgType.ERROR, exc, None) if msg.type is WSMsgType.CLOSE: - self._closing = True + self._set_closing() self._close_code = msg.data if not self._closed and self._autoclose: await self.close() elif msg.type is WSMsgType.CLOSING: - self._closing = True + self._set_closing() elif msg.type is WSMsgType.PING and self._autoping: await self.pong(msg.data) continue diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index b3cc1b6b6e6..437c871e8f7 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -586,12 +586,23 @@ def call_later( loop: asyncio.AbstractEventLoop, timeout_ceil_threshold: float = 5, ) -> Optional[asyncio.TimerHandle]: - if timeout is not None and timeout > 0: - 
when = loop.time() + timeout - if timeout > timeout_ceil_threshold: - when = ceil(when) - return loop.call_at(when, cb) - return None + if timeout is None or timeout <= 0: + return None + now = loop.time() + when = calculate_timeout_when(now, timeout, timeout_ceil_threshold) + return loop.call_at(when, cb) + + +def calculate_timeout_when( + loop_time: float, + timeout: float, + timeout_ceiling_threshold: float, +) -> float: + """Calculate when to execute a timeout.""" + when = loop_time + timeout + if timeout > timeout_ceiling_threshold: + return ceil(when) + return when class TimeoutHandle: diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index b74bfd688c9..9f71d147997 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -11,7 +11,7 @@ from . import hdrs from .abc import AbstractStreamWriter -from .helpers import call_later, set_exception, set_result +from .helpers import calculate_timeout_when, set_exception, set_result from .http import ( WS_CLOSED_MESSAGE, WS_CLOSING_MESSAGE, @@ -89,6 +89,7 @@ def __init__( self._autoclose = autoclose self._autoping = autoping self._heartbeat = heartbeat + self._heartbeat_when = 0.0 self._heartbeat_cb: Optional[asyncio.TimerHandle] = None if heartbeat is not None: self._pong_heartbeat = heartbeat / 2.0 @@ -97,57 +98,76 @@ def __init__( self._max_msg_size = max_msg_size def _cancel_heartbeat(self) -> None: - if self._pong_response_cb is not None: - self._pong_response_cb.cancel() - self._pong_response_cb = None - + self._cancel_pong_response_cb() if self._heartbeat_cb is not None: self._heartbeat_cb.cancel() self._heartbeat_cb = None - def _reset_heartbeat(self) -> None: - self._cancel_heartbeat() + def _cancel_pong_response_cb(self) -> None: + if self._pong_response_cb is not None: + self._pong_response_cb.cancel() + self._pong_response_cb = None - if self._heartbeat is not None: - assert self._loop is not None - self._heartbeat_cb = call_later( - self._send_heartbeat, - self._heartbeat, - self._loop, - 
timeout_ceil_threshold=( - self._req._protocol._timeout_ceil_threshold - if self._req is not None - else 5 - ), - ) + def _reset_heartbeat(self) -> None: + if self._heartbeat is None: + return + self._cancel_pong_response_cb() + req = self._req + timeout_ceil_threshold = ( + req._protocol._timeout_ceil_threshold if req is not None else 5 + ) + loop = self._loop + assert loop is not None + now = loop.time() + when = calculate_timeout_when(now, self._heartbeat, timeout_ceil_threshold) + self._heartbeat_when = when + if self._heartbeat_cb is None: + # We do not cancel the previous heartbeat_cb here because + # it generates a significant amount of TimerHandle churn + # which causes asyncio to rebuild the heap frequently. + # Instead _send_heartbeat() will reschedule the next + # heartbeat if it fires too early. + self._heartbeat_cb = loop.call_at(when, self._send_heartbeat) def _send_heartbeat(self) -> None: - if self._heartbeat is not None and not self._closed: - assert self._loop is not None - # fire-and-forget a task is not perfect but maybe ok for - # sending ping. Otherwise we need a long-living heartbeat - # task in the class. - self._loop.create_task(self._writer.ping()) # type: ignore[union-attr] - - if self._pong_response_cb is not None: - self._pong_response_cb.cancel() - self._pong_response_cb = call_later( - self._pong_not_received, - self._pong_heartbeat, - self._loop, - timeout_ceil_threshold=( - self._req._protocol._timeout_ceil_threshold - if self._req is not None - else 5 - ), + self._heartbeat_cb = None + loop = self._loop + assert loop is not None and self._writer is not None + now = loop.time() + if now < self._heartbeat_when: + # Heartbeat fired too early, reschedule + self._heartbeat_cb = loop.call_at( + self._heartbeat_when, self._send_heartbeat ) + return + + # fire-and-forget a task is not perfect but maybe ok for + # sending ping. Otherwise we need a long-living heartbeat + # task in the class. 
+ loop.create_task(self._writer.ping()) # type: ignore[unused-awaitable] + + req = self._req + timeout_ceil_threshold = ( + req._protocol._timeout_ceil_threshold if req is not None else 5 + ) + when = calculate_timeout_when(now, self._pong_heartbeat, timeout_ceil_threshold) + self._cancel_pong_response_cb() + self._pong_response_cb = loop.call_at(when, self._pong_not_received) def _pong_not_received(self) -> None: if self._req is not None and self._req.transport is not None: - self._closed = True + self._set_closed() self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) self._exception = asyncio.TimeoutError() + def _set_closed(self) -> None: + """Set the connection to closed. + + Cancel any heartbeat timers and set the closed flag. + """ + self._closed = True + self._cancel_heartbeat() + async def prepare(self, request: BaseRequest) -> AbstractStreamWriter: # make pre-check to don't hide it by do_handshake() exceptions if self._payload_writer is not None: @@ -387,7 +407,7 @@ async def close( if self._closed: return False - self._closed = True + self._set_closed() try: await self._writer.close(code, message) writer = self._payload_writer @@ -431,6 +451,7 @@ def _set_closing(self, code: WSCloseCode) -> None: """Set the close code and mark the connection as closing.""" self._closing = True self._close_code = code + self._cancel_heartbeat() def _set_code_close_transport(self, code: WSCloseCode) -> None: """Set the close code and close the transport.""" @@ -543,5 +564,6 @@ def _cancel(self, exc: BaseException) -> None: # web_protocol calls this from connection_lost # or when the server is shutting down. 
self._closing = True + self._cancel_heartbeat() if self._reader is not None: set_exception(self._reader, exc) diff --git a/tests/test_client_ws.py b/tests/test_client_ws.py index ebc9d910c1a..a790fba43ec 100644 --- a/tests/test_client_ws.py +++ b/tests/test_client_ws.py @@ -9,6 +9,7 @@ import aiohttp from aiohttp import client, hdrs +from aiohttp.client_exceptions import ServerDisconnectedError from aiohttp.http import WS_KEY from aiohttp.streams import EofStream from aiohttp.test_utils import make_mocked_coro @@ -404,21 +405,56 @@ async def test_close_eofstream(loop, ws_key, key_data) -> None: await session.close() -async def test_close_exc(loop, ws_key, key_data) -> None: - resp = mock.Mock() - resp.status = 101 - resp.headers = { +async def test_close_connection_lost( + loop: asyncio.AbstractEventLoop, ws_key: bytes, key_data: bytes +) -> None: + """Test the websocket client handles the connection being closed out from under it.""" + mresp = mock.Mock(spec_set=client.ClientResponse) + mresp.status = 101 + mresp.headers = { hdrs.UPGRADE: "websocket", hdrs.CONNECTION: "upgrade", hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, } - resp.connection.protocol.read_timeout = None + mresp.connection.protocol.read_timeout = None + with mock.patch("aiohttp.client.WebSocketWriter"), mock.patch( + "aiohttp.client.os" + ) as m_os, mock.patch("aiohttp.client.ClientSession.request") as m_req: + m_os.urandom.return_value = key_data + m_req.return_value = loop.create_future() + m_req.return_value.set_result(mresp) + + session = aiohttp.ClientSession() + resp = await session.ws_connect("http://test.org") + assert not resp.closed + + exc = ServerDisconnectedError() + resp._reader.set_exception(exc) + + msg = await resp.receive() + assert msg.type is aiohttp.WSMsgType.CLOSED + assert resp.closed + + await session.close() + + +async def test_close_exc( + loop: asyncio.AbstractEventLoop, ws_key: bytes, key_data: bytes +) -> None: + mresp = mock.Mock() + mresp.status = 101 + mresp.headers = { + 
hdrs.UPGRADE: "websocket", + hdrs.CONNECTION: "upgrade", + hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, + } + mresp.connection.protocol.read_timeout = None with mock.patch("aiohttp.client.WebSocketWriter") as WebSocketWriter: with mock.patch("aiohttp.client.os") as m_os: with mock.patch("aiohttp.client.ClientSession.request") as m_req: m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() - m_req.return_value.set_result(resp) + m_req.return_value.set_result(mresp) writer = mock.Mock() WebSocketWriter.return_value = writer writer.close = make_mocked_coro() diff --git a/tests/test_client_ws_functional.py b/tests/test_client_ws_functional.py index dc474f96c39..5abaf0fefbf 100644 --- a/tests/test_client_ws_functional.py +++ b/tests/test_client_ws_functional.py @@ -1,6 +1,6 @@ import asyncio import sys -from typing import Any +from typing import Any, NoReturn import pytest @@ -599,7 +599,8 @@ async def handler(request): assert ping_received -async def test_heartbeat_no_pong(aiohttp_client) -> None: +async def test_heartbeat_no_pong(aiohttp_client: AiohttpClient) -> None: + """Test that the connection is closed if no pong is received without sending messages.""" ping_received = False async def handler(request): @@ -624,7 +625,81 @@ async def handler(request): assert resp.close_code is WSCloseCode.ABNORMAL_CLOSURE -async def test_heartbeat_no_pong_concurrent_receive(aiohttp_client: Any) -> None: +async def test_heartbeat_no_pong_after_receive_many_messages( + aiohttp_client: AiohttpClient, +) -> None: + """Test that the connection is closed if no pong is received after receiving many messages.""" + ping_received = False + + async def handler(request: web.Request) -> NoReturn: + nonlocal ping_received + ws = web.WebSocketResponse(autoping=False) + await ws.prepare(request) + for _ in range(5): + await ws.send_str("test") + await asyncio.sleep(0.05) + for _ in range(5): + await ws.send_str("test") + msg = await ws.receive() + ping_received = msg.type is 
aiohttp.WSMsgType.PING + await ws.receive() + assert False + + app = web.Application() + app.router.add_route("GET", "/", handler) + + client = await aiohttp_client(app) + resp = await client.ws_connect("/", heartbeat=0.1) + + for _ in range(10): + test_msg = await resp.receive() + assert test_msg.data == "test" + # Connection should be closed roughly after 1.5x heartbeat. + + await asyncio.sleep(0.2) + assert ping_received + assert resp.close_code is WSCloseCode.ABNORMAL_CLOSURE + + +async def test_heartbeat_no_pong_after_send_many_messages( + aiohttp_client: AiohttpClient, +) -> None: + """Test that the connection is closed if no pong is received after sending many messages.""" + ping_received = False + + async def handler(request: web.Request) -> NoReturn: + nonlocal ping_received + ws = web.WebSocketResponse(autoping=False) + await ws.prepare(request) + for _ in range(10): + msg = await ws.receive() + assert msg.data == "test" + assert msg.type is aiohttp.WSMsgType.TEXT + msg = await ws.receive() + ping_received = msg.type is aiohttp.WSMsgType.PING + await ws.receive() + assert False + + app = web.Application() + app.router.add_route("GET", "/", handler) + + client = await aiohttp_client(app) + resp = await client.ws_connect("/", heartbeat=0.1) + + for _ in range(5): + await resp.send_str("test") + await asyncio.sleep(0.05) + for _ in range(5): + await resp.send_str("test") + # Connection should be closed roughly after 1.5x heartbeat. 
+ await asyncio.sleep(0.2) + assert ping_received + assert resp.close_code is WSCloseCode.ABNORMAL_CLOSURE + + +async def test_heartbeat_no_pong_concurrent_receive( + aiohttp_client: AiohttpClient, +) -> None: ping_received = False async def handler(request): diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index ce338cdf92d..15ef33e3648 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -722,7 +722,64 @@ async def handler(request): await ws.close() -async def test_server_ws_async_for(loop, aiohttp_server) -> None: +async def test_heartbeat_no_pong_send_many_messages( + loop: Any, aiohttp_client: Any +) -> None: + """Test no pong after sending many messages.""" + + async def handler(request): + ws = web.WebSocketResponse(heartbeat=0.05) + await ws.prepare(request) + for _ in range(10): + await ws.send_str("test") + + await ws.receive() + return ws + + app = web.Application() + app.router.add_get("/", handler) + + client = await aiohttp_client(app) + ws = await client.ws_connect("/", autoping=False) + for _ in range(10): + msg = await ws.receive() + assert msg.type is aiohttp.WSMsgType.TEXT + assert msg.data == "test" + + msg = await ws.receive() + assert msg.type is aiohttp.WSMsgType.PING + await ws.close() + + +async def test_heartbeat_no_pong_receive_many_messages( + loop: Any, aiohttp_client: Any +) -> None: + """Test no pong after receiving many messages.""" + + async def handler(request): + ws = web.WebSocketResponse(heartbeat=0.05) + await ws.prepare(request) + for _ in range(10): + server_msg = await ws.receive() + assert server_msg.type is aiohttp.WSMsgType.TEXT + + await ws.receive() + return ws + + app = web.Application() + app.router.add_get("/", handler) + + client = await aiohttp_client(app) + ws = await client.ws_connect("/", autoping=False) + for _ in range(10): + await ws.send_str("test") + + msg = await ws.receive() + assert msg.type is 
aiohttp.WSMsgType.PING + await ws.close() + + +async def test_server_ws_async_for(loop: Any, aiohttp_server: Any) -> None: closed = loop.create_future() async def handler(request): From a42aa3c87737060ae77fd104938254b183106373 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 8 Aug 2024 10:52:38 +0000 Subject: [PATCH 0289/1511] Bump aiohappyeyeballs from 2.3.4 to 2.3.5 (#8644) Bumps [aiohappyeyeballs](https://github.com/aio-libs/aiohappyeyeballs) from 2.3.4 to 2.3.5. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/aiohappyeyeballs/releases">aiohappyeyeballs's releases</a>.</em></p> <blockquote> <h1>v2.3.5 (2024-08-07)</h1> <h2>Documentation</h2> <ul> <li>docs: add link to Happy Eyeballs explanation (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/73">#73</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/077710c150b6c762ffe408e0ad418c506a2d6f31"><code>077710c</code></a>)</li> </ul> <h2>Fix</h2> <ul> <li>fix: remove upper bound on python requirement (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/74">#74</a>)</li> </ul> <p>Co-authored-by: J. 
Nick Koston <<a href="mailto:nick@koston.org">nick@koston.org</a>> (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/0de1e534fc5b7526e11bf203ab09b95b13f3070b"><code>0de1e53</code></a>)</p> <ul> <li> <p>fix: preserve errno if all exceptions have the same errno (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/77">#77</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/7bbb2bf0feb3994953a52a1d606e682acad49cb8"><code>7bbb2bf</code></a>)</p> </li> <li> <p>fix: adjust license classifier to better reflect license terms (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/78">#78</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/56e7ba612c5029364bb960b07022a2b720f0a967"><code>56e7ba6</code></a>)</p> </li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/aiohappyeyeballs/blob/main/CHANGELOG.md">aiohappyeyeballs's changelog</a>.</em></p> <blockquote> <h2>v2.3.5 (2024-08-07)</h2> <h3>Fix</h3> <ul> <li>Remove upper bound on python requirement (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/74">#74</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/0de1e534fc5b7526e11bf203ab09b95b13f3070b"><code>0de1e53</code></a>)</li> <li>Preserve errno if all exceptions have the same errno (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/77">#77</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/7bbb2bf0feb3994953a52a1d606e682acad49cb8"><code>7bbb2bf</code></a>)</li> <li>Adjust license classifier to better reflect license terms (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/78">#78</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/56e7ba612c5029364bb960b07022a2b720f0a967"><code>56e7ba6</code></a>)</li> </ul> <h3>Documentation</h3> <ul> <li>Add link to happy eyeballs explanation (<a 
href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/73">#73</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/077710c150b6c762ffe408e0ad418c506a2d6f31"><code>077710c</code></a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/01595bbda3380154cc4e72702a1f82502a15940a"><code>01595bb</code></a> 2.3.5</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/0de1e534fc5b7526e11bf203ab09b95b13f3070b"><code>0de1e53</code></a> fix: remove upper bound on python requirement (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/74">#74</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/7bbb2bf0feb3994953a52a1d606e682acad49cb8"><code>7bbb2bf</code></a> fix: preserve errno if all exceptions have the same errno (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/77">#77</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/56e7ba612c5029364bb960b07022a2b720f0a967"><code>56e7ba6</code></a> fix: adjust license classifier to better reflect license terms (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/78">#78</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/ef944493d46655fe4b78dc519a359a06305cc9b9"><code>ef94449</code></a> chore(pre-commit.ci): pre-commit autoupdate (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/75">#75</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/077710c150b6c762ffe408e0ad418c506a2d6f31"><code>077710c</code></a> docs: add link to Happy Eyeballs explanation (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/73">#73</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/d69d8e05612b6b659c80fd1089075889a10e1948"><code>d69d8e0</code></a> chore: remove redundant asyncio import (<a 
href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/72">#72</a>)</li> <li>See full diff in <a href="https://github.com/aio-libs/aiohappyeyeballs/compare/v2.3.4...v2.3.5">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=aiohappyeyeballs&package-manager=pip&previous-version=2.3.4&new-version=2.3.5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 14 ++++++++++---- requirements/dev.txt | 14 ++++++++++---- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 23 insertions(+), 11 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index dbfeaf48ab8..797d8fe353f 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.4 +aiohappyeyeballs==2.3.5 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 2e543bfff5b..3eefb01e60d 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via # -r requirements/lint.in # -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.4 +aiohappyeyeballs==2.3.5 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.6 # via -r requirements/doc.in @@ -75,7 +75,9 @@ 
exceptiongroup==1.1.2 filelock==3.3.2 # via virtualenv freezegun==1.5.1 - # via -r requirements/test.in + # via + # -r requirements/lint.in + # -r requirements/test.in frozenlist==1.4.1 # via # -r requirements/runtime-deps.in @@ -175,7 +177,9 @@ pytest==8.3.2 pytest-cov==5.0.0 # via -r requirements/test.in pytest-mock==3.14.0 - # via -r requirements/test.in + # via + # -r requirements/lint.in + # -r requirements/test.in python-dateutil==2.8.2 # via freezegun python-on-whales==0.72.0 @@ -247,7 +251,9 @@ towncrier==23.11.0 tqdm==4.62.3 # via python-on-whales trustme==1.1.0 ; platform_machine != "i686" - # via -r requirements/test.in + # via + # -r requirements/lint.in + # -r requirements/test.in typer==0.6.1 # via python-on-whales typing-extensions==4.11.0 diff --git a/requirements/dev.txt b/requirements/dev.txt index a924fa6cf23..19b6a90251e 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via # -r requirements/lint.in # -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.4 +aiohappyeyeballs==2.3.5 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.6 # via -r requirements/doc.in @@ -71,7 +71,9 @@ exceptiongroup==1.1.2 filelock==3.12.2 # via virtualenv freezegun==1.5.1 - # via -r requirements/test.in + # via + # -r requirements/lint.in + # -r requirements/test.in frozenlist==1.4.1 # via # -r requirements/runtime-deps.in @@ -166,7 +168,9 @@ pytest==8.3.2 pytest-cov==5.0.0 # via -r requirements/test.in pytest-mock==3.14.0 - # via -r requirements/test.in + # via + # -r requirements/lint.in + # -r requirements/test.in python-dateutil==2.8.2 # via freezegun python-on-whales==0.72.0 @@ -233,7 +237,9 @@ towncrier==23.11.0 tqdm==4.65.0 # via python-on-whales trustme==1.1.0 ; platform_machine != "i686" - # via -r requirements/test.in + # via + # -r requirements/lint.in + # -r requirements/test.in typer==0.9.0 # via python-on-whales typing-extensions==4.11.0 diff --git 
a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index d9f33f4cb84..a96f1981f81 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.4 +aiohappyeyeballs==2.3.5 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index a093c71ebbf..d5efe1e2218 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.4 +aiohappyeyeballs==2.3.5 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in From 2ef14a6631f6e4c715fee5403e0a81b5e8d5fc83 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Thu, 8 Aug 2024 09:55:01 -0500 Subject: [PATCH 0290/1511] [PR #8641/0a88bab backport][3.10] Fix WebSocket ping tasks being prematurely garbage collected (#8646) --- CHANGES/8641.bugfix.rst | 3 ++ aiohttp/client_ws.py | 25 ++++++++++++--- aiohttp/web_ws.py | 25 ++++++++++++--- tests/test_client_ws_functional.py | 50 ++++++++++++++++++++++++++++-- 4 files changed, 91 insertions(+), 12 deletions(-) create mode 100644 CHANGES/8641.bugfix.rst diff --git a/CHANGES/8641.bugfix.rst b/CHANGES/8641.bugfix.rst new file mode 100644 index 00000000000..9c85ac04419 --- /dev/null +++ b/CHANGES/8641.bugfix.rst @@ -0,0 +1,3 @@ +Fixed WebSocket ping tasks being prematurely garbage collected -- by :user:`bdraco`. + +There was a small risk that WebSocket ping tasks would be prematurely garbage collected because the event loop only holds a weak reference to the task. The garbage collection risk has been fixed by holding a strong reference to the task. 
Additionally, the task is now scheduled eagerly with Python 3.12+ to increase the chance it can be completed immediately and avoid having to hold any references to the task. diff --git a/aiohttp/client_ws.py b/aiohttp/client_ws.py index 516ad586f70..247f62c758e 100644 --- a/aiohttp/client_ws.py +++ b/aiohttp/client_ws.py @@ -72,6 +72,7 @@ def __init__( self._exception: Optional[BaseException] = None self._compress = compress self._client_notakeover = client_notakeover + self._ping_task: Optional[asyncio.Task[None]] = None self._reset_heartbeat() @@ -80,6 +81,9 @@ def _cancel_heartbeat(self) -> None: if self._heartbeat_cb is not None: self._heartbeat_cb.cancel() self._heartbeat_cb = None + if self._ping_task is not None: + self._ping_task.cancel() + self._ping_task = None def _cancel_pong_response_cb(self) -> None: if self._pong_response_cb is not None: @@ -118,11 +122,6 @@ def _send_heartbeat(self) -> None: ) return - # fire-and-forget a task is not perfect but maybe ok for - # sending ping. Otherwise we need a long-living heartbeat - # task in the class. - loop.create_task(self._writer.ping()) # type: ignore[unused-awaitable] - conn = self._conn timeout_ceil_threshold = ( conn._connector._timeout_ceil_threshold if conn is not None else 5 @@ -131,6 +130,22 @@ def _send_heartbeat(self) -> None: self._cancel_pong_response_cb() self._pong_response_cb = loop.call_at(when, self._pong_not_received) + if sys.version_info >= (3, 12): + # Optimization for Python 3.12, try to send the ping + # immediately to avoid having to schedule + # the task on the event loop. 
+ ping_task = asyncio.Task(self._writer.ping(), loop=loop, eager_start=True) + else: + ping_task = loop.create_task(self._writer.ping()) + + if not ping_task.done(): + self._ping_task = ping_task + ping_task.add_done_callback(self._ping_task_done) + + def _ping_task_done(self, task: "asyncio.Task[None]") -> None: + """Callback for when the ping task completes.""" + self._ping_task = None + def _pong_not_received(self) -> None: if not self._closed: self._set_closed() diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index 9f71d147997..ba3332715a6 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -96,12 +96,16 @@ def __init__( self._pong_response_cb: Optional[asyncio.TimerHandle] = None self._compress = compress self._max_msg_size = max_msg_size + self._ping_task: Optional[asyncio.Task[None]] = None def _cancel_heartbeat(self) -> None: self._cancel_pong_response_cb() if self._heartbeat_cb is not None: self._heartbeat_cb.cancel() self._heartbeat_cb = None + if self._ping_task is not None: + self._ping_task.cancel() + self._ping_task = None def _cancel_pong_response_cb(self) -> None: if self._pong_response_cb is not None: @@ -141,11 +145,6 @@ def _send_heartbeat(self) -> None: ) return - # fire-and-forget a task is not perfect but maybe ok for - # sending ping. Otherwise we need a long-living heartbeat - # task in the class. - loop.create_task(self._writer.ping()) # type: ignore[unused-awaitable] - req = self._req timeout_ceil_threshold = ( req._protocol._timeout_ceil_threshold if req is not None else 5 @@ -154,6 +153,22 @@ def _send_heartbeat(self) -> None: self._cancel_pong_response_cb() self._pong_response_cb = loop.call_at(when, self._pong_not_received) + if sys.version_info >= (3, 12): + # Optimization for Python 3.12, try to send the ping + # immediately to avoid having to schedule + # the task on the event loop. 
+ ping_task = asyncio.Task(self._writer.ping(), loop=loop, eager_start=True) + else: + ping_task = loop.create_task(self._writer.ping()) + + if not ping_task.done(): + self._ping_task = ping_task + ping_task.add_done_callback(self._ping_task_done) + + def _ping_task_done(self, task: "asyncio.Task[None]") -> None: + """Callback for when the ping task completes.""" + self._ping_task = None + def _pong_not_received(self) -> None: if self._req is not None and self._req.transport is not None: self._set_closed() diff --git a/tests/test_client_ws_functional.py b/tests/test_client_ws_functional.py index 5abaf0fefbf..907ae232e9a 100644 --- a/tests/test_client_ws_functional.py +++ b/tests/test_client_ws_functional.py @@ -1,6 +1,7 @@ import asyncio import sys from typing import Any, NoReturn +from unittest import mock import pytest @@ -727,8 +728,53 @@ async def handler(request): assert isinstance(msg.data, ServerTimeoutError) -async def test_send_recv_compress(aiohttp_client: Any) -> None: - async def handler(request): +async def test_close_websocket_while_ping_inflight( + aiohttp_client: AiohttpClient, +) -> None: + """Test closing the websocket while a ping is in-flight.""" + ping_received = False + + async def handler(request: web.Request) -> NoReturn: + nonlocal ping_received + ws = web.WebSocketResponse(autoping=False) + await ws.prepare(request) + msg = await ws.receive() + assert msg.type is aiohttp.WSMsgType.BINARY + msg = await ws.receive() + ping_received = msg.type is aiohttp.WSMsgType.PING + await ws.receive() + assert False + + app = web.Application() + app.router.add_route("GET", "/", handler) + + client = await aiohttp_client(app) + resp = await client.ws_connect("/", heartbeat=0.1) + await resp.send_bytes(b"ask") + + cancelled = False + ping_stated = False + + async def delayed_ping() -> None: + nonlocal cancelled, ping_stated + ping_stated = True + try: + await asyncio.sleep(1) + except asyncio.CancelledError: + cancelled = True + raise + + with 
mock.patch.object(resp._writer, "ping", delayed_ping): + await asyncio.sleep(0.1) + + await resp.close() + await asyncio.sleep(0) + assert ping_stated is True + assert cancelled is True + + +async def test_send_recv_compress(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.WebSocketResponse: ws = web.WebSocketResponse() await ws.prepare(request) From 80969d2c061b1661505c11aa30c8ab661fffcb20 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Thu, 8 Aug 2024 10:01:49 -0500 Subject: [PATCH 0291/1511] [PR #8641/0a88bab backport][3.11] Fix WebSocket ping tasks being prematurely garbage collected (#8647) --- CHANGES/8641.bugfix.rst | 3 ++ aiohttp/client_ws.py | 25 ++++++++++++--- aiohttp/web_ws.py | 25 ++++++++++++--- tests/test_client_ws_functional.py | 50 ++++++++++++++++++++++++++++-- 4 files changed, 91 insertions(+), 12 deletions(-) create mode 100644 CHANGES/8641.bugfix.rst diff --git a/CHANGES/8641.bugfix.rst b/CHANGES/8641.bugfix.rst new file mode 100644 index 00000000000..9c85ac04419 --- /dev/null +++ b/CHANGES/8641.bugfix.rst @@ -0,0 +1,3 @@ +Fixed WebSocket ping tasks being prematurely garbage collected -- by :user:`bdraco`. + +There was a small risk that WebSocket ping tasks would be prematurely garbage collected because the event loop only holds a weak reference to the task. The garbage collection risk has been fixed by holding a strong reference to the task. Additionally, the task is now scheduled eagerly with Python 3.12+ to increase the chance it can be completed immediately and avoid having to hold any references to the task. 
diff --git a/aiohttp/client_ws.py b/aiohttp/client_ws.py index 516ad586f70..247f62c758e 100644 --- a/aiohttp/client_ws.py +++ b/aiohttp/client_ws.py @@ -72,6 +72,7 @@ def __init__( self._exception: Optional[BaseException] = None self._compress = compress self._client_notakeover = client_notakeover + self._ping_task: Optional[asyncio.Task[None]] = None self._reset_heartbeat() @@ -80,6 +81,9 @@ def _cancel_heartbeat(self) -> None: if self._heartbeat_cb is not None: self._heartbeat_cb.cancel() self._heartbeat_cb = None + if self._ping_task is not None: + self._ping_task.cancel() + self._ping_task = None def _cancel_pong_response_cb(self) -> None: if self._pong_response_cb is not None: @@ -118,11 +122,6 @@ def _send_heartbeat(self) -> None: ) return - # fire-and-forget a task is not perfect but maybe ok for - # sending ping. Otherwise we need a long-living heartbeat - # task in the class. - loop.create_task(self._writer.ping()) # type: ignore[unused-awaitable] - conn = self._conn timeout_ceil_threshold = ( conn._connector._timeout_ceil_threshold if conn is not None else 5 @@ -131,6 +130,22 @@ def _send_heartbeat(self) -> None: self._cancel_pong_response_cb() self._pong_response_cb = loop.call_at(when, self._pong_not_received) + if sys.version_info >= (3, 12): + # Optimization for Python 3.12, try to send the ping + # immediately to avoid having to schedule + # the task on the event loop. 
+ ping_task = asyncio.Task(self._writer.ping(), loop=loop, eager_start=True) + else: + ping_task = loop.create_task(self._writer.ping()) + + if not ping_task.done(): + self._ping_task = ping_task + ping_task.add_done_callback(self._ping_task_done) + + def _ping_task_done(self, task: "asyncio.Task[None]") -> None: + """Callback for when the ping task completes.""" + self._ping_task = None + def _pong_not_received(self) -> None: if not self._closed: self._set_closed() diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index 9f71d147997..ba3332715a6 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -96,12 +96,16 @@ def __init__( self._pong_response_cb: Optional[asyncio.TimerHandle] = None self._compress = compress self._max_msg_size = max_msg_size + self._ping_task: Optional[asyncio.Task[None]] = None def _cancel_heartbeat(self) -> None: self._cancel_pong_response_cb() if self._heartbeat_cb is not None: self._heartbeat_cb.cancel() self._heartbeat_cb = None + if self._ping_task is not None: + self._ping_task.cancel() + self._ping_task = None def _cancel_pong_response_cb(self) -> None: if self._pong_response_cb is not None: @@ -141,11 +145,6 @@ def _send_heartbeat(self) -> None: ) return - # fire-and-forget a task is not perfect but maybe ok for - # sending ping. Otherwise we need a long-living heartbeat - # task in the class. - loop.create_task(self._writer.ping()) # type: ignore[unused-awaitable] - req = self._req timeout_ceil_threshold = ( req._protocol._timeout_ceil_threshold if req is not None else 5 @@ -154,6 +153,22 @@ def _send_heartbeat(self) -> None: self._cancel_pong_response_cb() self._pong_response_cb = loop.call_at(when, self._pong_not_received) + if sys.version_info >= (3, 12): + # Optimization for Python 3.12, try to send the ping + # immediately to avoid having to schedule + # the task on the event loop. 
+ ping_task = asyncio.Task(self._writer.ping(), loop=loop, eager_start=True) + else: + ping_task = loop.create_task(self._writer.ping()) + + if not ping_task.done(): + self._ping_task = ping_task + ping_task.add_done_callback(self._ping_task_done) + + def _ping_task_done(self, task: "asyncio.Task[None]") -> None: + """Callback for when the ping task completes.""" + self._ping_task = None + def _pong_not_received(self) -> None: if self._req is not None and self._req.transport is not None: self._set_closed() diff --git a/tests/test_client_ws_functional.py b/tests/test_client_ws_functional.py index 5abaf0fefbf..907ae232e9a 100644 --- a/tests/test_client_ws_functional.py +++ b/tests/test_client_ws_functional.py @@ -1,6 +1,7 @@ import asyncio import sys from typing import Any, NoReturn +from unittest import mock import pytest @@ -727,8 +728,53 @@ async def handler(request): assert isinstance(msg.data, ServerTimeoutError) -async def test_send_recv_compress(aiohttp_client: Any) -> None: - async def handler(request): +async def test_close_websocket_while_ping_inflight( + aiohttp_client: AiohttpClient, +) -> None: + """Test closing the websocket while a ping is in-flight.""" + ping_received = False + + async def handler(request: web.Request) -> NoReturn: + nonlocal ping_received + ws = web.WebSocketResponse(autoping=False) + await ws.prepare(request) + msg = await ws.receive() + assert msg.type is aiohttp.WSMsgType.BINARY + msg = await ws.receive() + ping_received = msg.type is aiohttp.WSMsgType.PING + await ws.receive() + assert False + + app = web.Application() + app.router.add_route("GET", "/", handler) + + client = await aiohttp_client(app) + resp = await client.ws_connect("/", heartbeat=0.1) + await resp.send_bytes(b"ask") + + cancelled = False + ping_stated = False + + async def delayed_ping() -> None: + nonlocal cancelled, ping_stated + ping_stated = True + try: + await asyncio.sleep(1) + except asyncio.CancelledError: + cancelled = True + raise + + with 
mock.patch.object(resp._writer, "ping", delayed_ping): + await asyncio.sleep(0.1) + + await resp.close() + await asyncio.sleep(0) + assert ping_stated is True + assert cancelled is True + + +async def test_send_recv_compress(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.WebSocketResponse: ws = web.WebSocketResponse() await ws.prepare(request) From 1f92213c3e0be8111a55391a86d03710c518f352 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 8 Aug 2024 15:39:30 +0000 Subject: [PATCH 0292/1511] [PR #8642/e4942771 backport][3.10] Fix response to circular symlinks with Python v3.13 (#8648) Co-authored-by: Steve Repsher <steverep@users.noreply.github.com> --- CHANGES/8565.bugfix.rst | 1 + aiohttp/web_fileresponse.py | 4 +++- aiohttp/web_urldispatcher.py | 9 +++++---- 3 files changed, 9 insertions(+), 5 deletions(-) create mode 100644 CHANGES/8565.bugfix.rst diff --git a/CHANGES/8565.bugfix.rst b/CHANGES/8565.bugfix.rst new file mode 100644 index 00000000000..35e7c4dc71a --- /dev/null +++ b/CHANGES/8565.bugfix.rst @@ -0,0 +1 @@ +Fixed server checks for circular symbolic links to be compatible with Python 3.13 -- by :user:`steverep`. diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index 7fc5b3d787f..d8bbbe08993 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -191,7 +191,9 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter file_path, st, file_encoding = await loop.run_in_executor( None, self._get_file_path_stat_encoding, accept_encoding ) - except FileNotFoundError: + except OSError: + # Most likely to be FileNotFoundError or OSError for circular + # symlinks in python >= 3.13, so respond with 404. 
self.set_status(HTTPNotFound.status_code) return await super().prepare(request) diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 688946626fd..558fb7d0c9b 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -80,9 +80,9 @@ BaseDict = dict CIRCULAR_SYMLINK_ERROR = ( - OSError + (OSError,) if sys.version_info < (3, 10) and sys.platform.startswith("win32") - else RuntimeError + else (RuntimeError,) if sys.version_info < (3, 13) else () ) YARL_VERSION: Final[Tuple[int, ...]] = tuple(map(int, yarl_version.split(".")[:2])) @@ -694,8 +694,9 @@ def _resolve_path_to_response(self, unresolved_path: Path) -> StreamResponse: else: file_path = unresolved_path.resolve() file_path.relative_to(self._directory) - except (ValueError, CIRCULAR_SYMLINK_ERROR) as error: - # ValueError for relative check; RuntimeError for circular symlink. + except (ValueError, *CIRCULAR_SYMLINK_ERROR) as error: + # ValueError is raised for the relative check. Circular symlinks + # raise here on resolving for python < 3.13. raise HTTPNotFound() from error # if path is a directory, return the contents if permitted. 
Note the From 61a0c77ec8ef62d551dff6623a50e4036a6d737e Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 8 Aug 2024 15:45:26 +0000 Subject: [PATCH 0293/1511] [PR #8642/e4942771 backport][3.11] Fix response to circular symlinks with Python v3.13 (#8649) Co-authored-by: Steve Repsher <steverep@users.noreply.github.com> --- CHANGES/8565.bugfix.rst | 1 + aiohttp/web_fileresponse.py | 4 +++- aiohttp/web_urldispatcher.py | 9 +++++---- 3 files changed, 9 insertions(+), 5 deletions(-) create mode 100644 CHANGES/8565.bugfix.rst diff --git a/CHANGES/8565.bugfix.rst b/CHANGES/8565.bugfix.rst new file mode 100644 index 00000000000..35e7c4dc71a --- /dev/null +++ b/CHANGES/8565.bugfix.rst @@ -0,0 +1 @@ +Fixed server checks for circular symbolic links to be compatible with Python 3.13 -- by :user:`steverep`. diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index 7fc5b3d787f..d8bbbe08993 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -191,7 +191,9 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter file_path, st, file_encoding = await loop.run_in_executor( None, self._get_file_path_stat_encoding, accept_encoding ) - except FileNotFoundError: + except OSError: + # Most likely to be FileNotFoundError or OSError for circular + # symlinks in python >= 3.13, so respond with 404. 
self.set_status(HTTPNotFound.status_code) return await super().prepare(request) diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 688946626fd..558fb7d0c9b 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -80,9 +80,9 @@ BaseDict = dict CIRCULAR_SYMLINK_ERROR = ( - OSError + (OSError,) if sys.version_info < (3, 10) and sys.platform.startswith("win32") - else RuntimeError + else (RuntimeError,) if sys.version_info < (3, 13) else () ) YARL_VERSION: Final[Tuple[int, ...]] = tuple(map(int, yarl_version.split(".")[:2])) @@ -694,8 +694,9 @@ def _resolve_path_to_response(self, unresolved_path: Path) -> StreamResponse: else: file_path = unresolved_path.resolve() file_path.relative_to(self._directory) - except (ValueError, CIRCULAR_SYMLINK_ERROR) as error: - # ValueError for relative check; RuntimeError for circular symlink. + except (ValueError, *CIRCULAR_SYMLINK_ERROR) as error: + # ValueError is raised for the relative check. Circular symlinks + # raise here on resolving for python < 3.13. raise HTTPNotFound() from error # if path is a directory, return the contents if permitted. Note the From 6a778061eb3d66146012ceef760c8d84b7be6cf3 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Thu, 8 Aug 2024 10:46:18 -0500 Subject: [PATCH 0294/1511] [PR #8636/51d872e backport][3.10] Remove Request.wait_for_disconnection() method (#8650) Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8636.breaking.rst | 1 + aiohttp/web_request.py | 27 ++++++++++----------------- docs/web_reference.rst | 13 ------------- 3 files changed, 11 insertions(+), 30 deletions(-) create mode 100644 CHANGES/8636.breaking.rst diff --git a/CHANGES/8636.breaking.rst b/CHANGES/8636.breaking.rst new file mode 100644 index 00000000000..ae3d599bf7a --- /dev/null +++ b/CHANGES/8636.breaking.rst @@ -0,0 +1 @@ +Removed ``Request.wait_for_disconnection()`` which was mistakenly added briefly in 3.10.0 -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index 28d9ef3d10b..a485f0dcea6 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -19,7 +19,6 @@ MutableMapping, Optional, Pattern, - Set, Tuple, Union, cast, @@ -50,7 +49,6 @@ reify, sentinel, set_exception, - set_result, ) from .http_parser import RawRequestMessage from .http_writer import HttpVersion @@ -146,7 +144,6 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin): "_loop", "_transport_sslcontext", "_transport_peername", - "_disconnection_waiters", ] ) @@ -194,7 +191,6 @@ def __init__( self._task = task self._client_max_size = client_max_size self._loop = loop - self._disconnection_waiters: Set[asyncio.Future[None]] = set() transport = self._protocol.transport assert transport is not None @@ -823,21 +819,18 @@ async def _prepare_hook(self, response: StreamResponse) -> None: def _cancel(self, exc: BaseException) -> None: set_exception(self._payload, exc) - for fut in self._disconnection_waiters: - set_result(fut, None) def _finish(self) -> None: - for fut in self._disconnection_waiters: - fut.cancel() - - async def wait_for_disconnection(self) -> None: - loop = asyncio.get_event_loop() - fut = loop.create_future() # type: asyncio.Future[None] - self._disconnection_waiters.add(fut) - try: - await fut - finally: - self._disconnection_waiters.remove(fut) + if self._post is None or self.content_type != "multipart/form-data": + return + + # NOTE: Release file descriptors for the + # NOTE: `tempfile.Temporaryfile`-created `_io.BufferedRandom` + # NOTE: instances of files sent within multipart request body + # NOTE: via HTTP POST request. 
+ for file_name, file_field_object in self._post.items(): + if isinstance(file_field_object, FileField): + file_field_object.file.close() class Request(BaseRequest): diff --git a/docs/web_reference.rst b/docs/web_reference.rst index ddd5a3c264c..bb22cfd6369 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -510,19 +510,6 @@ and :ref:`aiohttp-web-signals` handlers. required work will be processed by :mod:`aiohttp.web` internal machinery. - .. method:: wait_for_disconnection() - - Returns when the connection that sent this request closes - - If there is no client disconnection during request handling, this - coroutine gets cancelled automatically at the end of this request being - handled. - - This can be used in handlers as a means of receiving a notification of - premature client disconnection. - - .. versionadded:: 3.10 - .. class:: Request A request used for receiving request's information by *web handler*. From 0de1c26a7ae740fbc3e2c2a32fbfbbbd1d7b26b9 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Thu, 8 Aug 2024 10:57:52 -0500 Subject: [PATCH 0295/1511] [PR #8636/51d872e backport][3.11] Remove Request.wait_for_disconnection() method (#8651) Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8636.breaking.rst | 1 + aiohttp/web_request.py | 27 ++++++++++----------------- docs/web_reference.rst | 13 ------------- 3 files changed, 11 insertions(+), 30 deletions(-) create mode 100644 CHANGES/8636.breaking.rst diff --git a/CHANGES/8636.breaking.rst b/CHANGES/8636.breaking.rst new file mode 100644 index 00000000000..ae3d599bf7a --- /dev/null +++ b/CHANGES/8636.breaking.rst @@ -0,0 +1 @@ +Removed ``Request.wait_for_disconnection()`` which was mistakenly added briefly in 3.10.0 -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index 28d9ef3d10b..a485f0dcea6 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -19,7 +19,6 @@ MutableMapping, Optional, Pattern, - Set, Tuple, Union, cast, @@ -50,7 +49,6 @@ reify, sentinel, set_exception, - set_result, ) from .http_parser import RawRequestMessage from .http_writer import HttpVersion @@ -146,7 +144,6 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin): "_loop", "_transport_sslcontext", "_transport_peername", - "_disconnection_waiters", ] ) @@ -194,7 +191,6 @@ def __init__( self._task = task self._client_max_size = client_max_size self._loop = loop - self._disconnection_waiters: Set[asyncio.Future[None]] = set() transport = self._protocol.transport assert transport is not None @@ -823,21 +819,18 @@ async def _prepare_hook(self, response: StreamResponse) -> None: def _cancel(self, exc: BaseException) -> None: set_exception(self._payload, exc) - for fut in self._disconnection_waiters: - set_result(fut, None) def _finish(self) -> None: - for fut in self._disconnection_waiters: - fut.cancel() - - async def wait_for_disconnection(self) -> None: - loop = asyncio.get_event_loop() - fut = loop.create_future() # type: asyncio.Future[None] - self._disconnection_waiters.add(fut) - try: - await fut - finally: - self._disconnection_waiters.remove(fut) + if self._post is None or self.content_type != "multipart/form-data": + return + + # NOTE: Release file descriptors for the + # NOTE: `tempfile.Temporaryfile`-created `_io.BufferedRandom` + # NOTE: instances of files sent within multipart request body + # NOTE: via HTTP POST request. 
+ for file_name, file_field_object in self._post.items(): + if isinstance(file_field_object, FileField): + file_field_object.file.close() class Request(BaseRequest): diff --git a/docs/web_reference.rst b/docs/web_reference.rst index ddd5a3c264c..bb22cfd6369 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -510,19 +510,6 @@ and :ref:`aiohttp-web-signals` handlers. required work will be processed by :mod:`aiohttp.web` internal machinery. - .. method:: wait_for_disconnection() - - Returns when the connection that sent this request closes - - If there is no client disconnection during request handling, this - coroutine gets cancelled automatically at the end of this request being - handled. - - This can be used in handlers as a means of receiving a notification of - premature client disconnection. - - .. versionadded:: 3.10 - .. class:: Request A request used for receiving request's information by *web handler*. From ce2e9758814527589b10759a20783fb03b98339f Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 8 Aug 2024 16:36:43 +0000 Subject: [PATCH 0296/1511] [PR #8652/b0536ae6 backport][3.10] Do not follow symlinks for compressed file variants (#8653) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/8652.bugfix.rst | 1 + aiohttp/web_fileresponse.py | 5 ++++- tests/test_web_sendfile.py | 14 +++++++------- tests/test_web_urldispatcher.py | 32 ++++++++++++++++++++++++++++++++ 4 files changed, 44 insertions(+), 8 deletions(-) create mode 100644 CHANGES/8652.bugfix.rst diff --git a/CHANGES/8652.bugfix.rst b/CHANGES/8652.bugfix.rst new file mode 100644 index 00000000000..3a1003e50ad --- /dev/null +++ b/CHANGES/8652.bugfix.rst @@ -0,0 +1 @@ +Fixed incorrectly following symlinks for compressed file variants -- by :user:`steverep`. 
diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index d8bbbe08993..0c23e375d25 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -177,7 +177,10 @@ def _get_file_path_stat_encoding( compressed_path = file_path.with_suffix(file_path.suffix + file_extension) with suppress(OSError): - return compressed_path, compressed_path.stat(), file_encoding + # Do not follow symlinks and ignore any non-regular files. + st = compressed_path.lstat() + if S_ISREG(st.st_mode): + return compressed_path, st, file_encoding # Fallback to the uncompressed file return file_path, file_path.stat(), None diff --git a/tests/test_web_sendfile.py b/tests/test_web_sendfile.py index 0ba2861c391..58a46ec602c 100644 --- a/tests/test_web_sendfile.py +++ b/tests/test_web_sendfile.py @@ -18,9 +18,9 @@ def test_using_gzip_if_header_present_and_file_available(loop) -> None: ) gz_filepath = mock.create_autospec(Path, spec_set=True) - gz_filepath.stat.return_value.st_size = 1024 - gz_filepath.stat.return_value.st_mtime_ns = 1603733507222449291 - gz_filepath.stat.return_value.st_mode = MOCK_MODE + gz_filepath.lstat.return_value.st_size = 1024 + gz_filepath.lstat.return_value.st_mtime_ns = 1603733507222449291 + gz_filepath.lstat.return_value.st_mode = MOCK_MODE filepath = mock.create_autospec(Path, spec_set=True) filepath.name = "logo.png" @@ -40,9 +40,9 @@ def test_gzip_if_header_not_present_and_file_available(loop) -> None: request = make_mocked_request("GET", "http://python.org/logo.png", headers={}) gz_filepath = mock.create_autospec(Path, spec_set=True) - gz_filepath.stat.return_value.st_size = 1024 - gz_filepath.stat.return_value.st_mtime_ns = 1603733507222449291 - gz_filepath.stat.return_value.st_mode = MOCK_MODE + gz_filepath.lstat.return_value.st_size = 1024 + gz_filepath.lstat.return_value.st_mtime_ns = 1603733507222449291 + gz_filepath.lstat.return_value.st_mode = MOCK_MODE filepath = mock.create_autospec(Path, spec_set=True) filepath.name = 
"logo.png" @@ -90,7 +90,7 @@ def test_gzip_if_header_present_and_file_not_available(loop) -> None: ) gz_filepath = mock.create_autospec(Path, spec_set=True) - gz_filepath.stat.side_effect = OSError(2, "No such file or directory") + gz_filepath.lstat.side_effect = OSError(2, "No such file or directory") filepath = mock.create_autospec(Path, spec_set=True) filepath.name = "logo.png" diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index de44ea0648c..3a45b9355f5 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -520,6 +520,38 @@ async def test_access_symlink_loop( assert r.status == 404 +async def test_access_compressed_file_as_symlink( + tmp_path: pathlib.Path, aiohttp_client: AiohttpClient +) -> None: + """Test that compressed file variants as symlinks are ignored.""" + private_file = tmp_path / "private.txt" + private_file.write_text("private info") + www_dir = tmp_path / "www" + www_dir.mkdir() + gz_link = www_dir / "file.txt.gz" + gz_link.symlink_to(f"../{private_file.name}") + + app = web.Application() + app.router.add_static("/", www_dir) + client = await aiohttp_client(app) + + # Symlink should be ignored; response reflects missing uncompressed file. + resp = await client.get(f"/{gz_link.stem}", auto_decompress=False) + assert resp.status == 404 + resp.release() + + # Again symlin is ignored, and then uncompressed is served. 
+ txt_file = gz_link.with_suffix("") + txt_file.write_text("public data") + resp = await client.get(f"/{txt_file.name}") + assert resp.status == 200 + assert resp.headers.get("Content-Encoding") is None + assert resp.content_type == "text/plain" + assert await resp.text() == "public data" + resp.release() + await client.close() + + async def test_access_special_resource( tmp_path_factory: pytest.TempPathFactory, aiohttp_client: AiohttpClient ) -> None: From 3d41df0e8f0473c8e553f2f0446c7a35646320ea Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 8 Aug 2024 16:43:36 +0000 Subject: [PATCH 0297/1511] [PR #8652/b0536ae6 backport][3.11] Do not follow symlinks for compressed file variants (#8654) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/8652.bugfix.rst | 1 + aiohttp/web_fileresponse.py | 5 ++++- tests/test_web_sendfile.py | 14 +++++++------- tests/test_web_urldispatcher.py | 32 ++++++++++++++++++++++++++++++++ 4 files changed, 44 insertions(+), 8 deletions(-) create mode 100644 CHANGES/8652.bugfix.rst diff --git a/CHANGES/8652.bugfix.rst b/CHANGES/8652.bugfix.rst new file mode 100644 index 00000000000..3a1003e50ad --- /dev/null +++ b/CHANGES/8652.bugfix.rst @@ -0,0 +1 @@ +Fixed incorrectly following symlinks for compressed file variants -- by :user:`steverep`. diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index d8bbbe08993..0c23e375d25 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -177,7 +177,10 @@ def _get_file_path_stat_encoding( compressed_path = file_path.with_suffix(file_path.suffix + file_extension) with suppress(OSError): - return compressed_path, compressed_path.stat(), file_encoding + # Do not follow symlinks and ignore any non-regular files. 
+ st = compressed_path.lstat() + if S_ISREG(st.st_mode): + return compressed_path, st, file_encoding # Fallback to the uncompressed file return file_path, file_path.stat(), None diff --git a/tests/test_web_sendfile.py b/tests/test_web_sendfile.py index 0ba2861c391..58a46ec602c 100644 --- a/tests/test_web_sendfile.py +++ b/tests/test_web_sendfile.py @@ -18,9 +18,9 @@ def test_using_gzip_if_header_present_and_file_available(loop) -> None: ) gz_filepath = mock.create_autospec(Path, spec_set=True) - gz_filepath.stat.return_value.st_size = 1024 - gz_filepath.stat.return_value.st_mtime_ns = 1603733507222449291 - gz_filepath.stat.return_value.st_mode = MOCK_MODE + gz_filepath.lstat.return_value.st_size = 1024 + gz_filepath.lstat.return_value.st_mtime_ns = 1603733507222449291 + gz_filepath.lstat.return_value.st_mode = MOCK_MODE filepath = mock.create_autospec(Path, spec_set=True) filepath.name = "logo.png" @@ -40,9 +40,9 @@ def test_gzip_if_header_not_present_and_file_available(loop) -> None: request = make_mocked_request("GET", "http://python.org/logo.png", headers={}) gz_filepath = mock.create_autospec(Path, spec_set=True) - gz_filepath.stat.return_value.st_size = 1024 - gz_filepath.stat.return_value.st_mtime_ns = 1603733507222449291 - gz_filepath.stat.return_value.st_mode = MOCK_MODE + gz_filepath.lstat.return_value.st_size = 1024 + gz_filepath.lstat.return_value.st_mtime_ns = 1603733507222449291 + gz_filepath.lstat.return_value.st_mode = MOCK_MODE filepath = mock.create_autospec(Path, spec_set=True) filepath.name = "logo.png" @@ -90,7 +90,7 @@ def test_gzip_if_header_present_and_file_not_available(loop) -> None: ) gz_filepath = mock.create_autospec(Path, spec_set=True) - gz_filepath.stat.side_effect = OSError(2, "No such file or directory") + gz_filepath.lstat.side_effect = OSError(2, "No such file or directory") filepath = mock.create_autospec(Path, spec_set=True) filepath.name = "logo.png" diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py 
index de44ea0648c..3a45b9355f5 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -520,6 +520,38 @@ async def test_access_symlink_loop( assert r.status == 404 +async def test_access_compressed_file_as_symlink( + tmp_path: pathlib.Path, aiohttp_client: AiohttpClient +) -> None: + """Test that compressed file variants as symlinks are ignored.""" + private_file = tmp_path / "private.txt" + private_file.write_text("private info") + www_dir = tmp_path / "www" + www_dir.mkdir() + gz_link = www_dir / "file.txt.gz" + gz_link.symlink_to(f"../{private_file.name}") + + app = web.Application() + app.router.add_static("/", www_dir) + client = await aiohttp_client(app) + + # Symlink should be ignored; response reflects missing uncompressed file. + resp = await client.get(f"/{gz_link.stem}", auto_decompress=False) + assert resp.status == 404 + resp.release() + + # Again symlin is ignored, and then uncompressed is served. + txt_file = gz_link.with_suffix("") + txt_file.write_text("public data") + resp = await client.get(f"/{txt_file.name}") + assert resp.status == 200 + assert resp.headers.get("Content-Encoding") is None + assert resp.content_type == "text/plain" + assert await resp.text() == "public data" + resp.release() + await client.close() + + async def test_access_special_resource( tmp_path_factory: pytest.TempPathFactory, aiohttp_client: AiohttpClient ) -> None: From 491106e65a96910f5a652594cc9aa5eb8457e311 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Thu, 8 Aug 2024 12:24:04 -0500 Subject: [PATCH 0298/1511] Release 3.10.2 (#8655) --- CHANGES.rst | 106 ++++++++++++++++++++++++++++++++++++++ CHANGES/8551.contrib.rst | 1 - CHANGES/8565.bugfix.rst | 1 - CHANGES/8597.bugfix.rst | 1 - CHANGES/8608.misc.rst | 3 -- CHANGES/8611.bugfix.rst | 1 - CHANGES/8632.bugfix.rst | 1 - CHANGES/8634.misc.rst | 1 - CHANGES/8636.breaking.rst | 1 - CHANGES/8641.bugfix.rst | 3 -- CHANGES/8652.bugfix.rst | 1 - aiohttp/__init__.py | 2 +- 12 files changed, 107 insertions(+), 15 deletions(-) delete mode 100644 CHANGES/8551.contrib.rst delete mode 100644 CHANGES/8565.bugfix.rst delete mode 100644 CHANGES/8597.bugfix.rst delete mode 100644 CHANGES/8608.misc.rst delete mode 100644 CHANGES/8611.bugfix.rst delete mode 100644 CHANGES/8632.bugfix.rst delete mode 100644 CHANGES/8634.misc.rst delete mode 100644 CHANGES/8636.breaking.rst delete mode 100644 CHANGES/8641.bugfix.rst delete mode 100644 CHANGES/8652.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index b1331a7fe9f..0150c95494c 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,112 @@ .. towncrier release notes start +3.10.2 (2024-08-08) +=================== + +Bug fixes +--------- + +- Fixed server checks for circular symbolic links to be compatible with Python 3.13 -- by :user:`steverep`. + + + *Related issues and pull requests on GitHub:* + :issue:`8565`. + + + +- Fixed request body not being read when ignoring an Upgrade request -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8597`. + + + +- Fixed an edge case where shutdown would wait for timeout when the handler was already completed -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8611`. + + + +- Fixed connecting to ``npipe://``, ``tcp://``, and ``unix://`` urls -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8632`. 
+ + + +- Fixed WebSocket ping tasks being prematurely garbage collected -- by :user:`bdraco`. + + There was a small risk that WebSocket ping tasks would be prematurely garbage collected because the event loop only holds a weak reference to the task. The garbage collection risk has been fixed by holding a strong reference to the task. Additionally, the task is now scheduled eagerly with Python 3.12+ to increase the chance it can be completed immediately and avoid having to hold any references to the task. + + + *Related issues and pull requests on GitHub:* + :issue:`8641`. + + + +- Fixed incorrectly following symlinks for compressed file variants -- by :user:`steverep`. + + + *Related issues and pull requests on GitHub:* + :issue:`8652`. + + + + +Removals and backward incompatible breaking changes +--------------------------------------------------- + +- Removed ``Request.wait_for_disconnection()``, which was mistakenly added briefly in 3.10.0 -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8636`. + + + + +Contributor-facing changes +-------------------------- + +- Fixed monkey patches for ``Path.stat()`` and ``Path.is_dir()`` for Python 3.13 compatibility -- by :user:`steverep`. + + + *Related issues and pull requests on GitHub:* + :issue:`8551`. + + + + +Miscellaneous internal changes +------------------------------ + +- Improved WebSocket performance when messages are sent or received frequently -- by :user:`bdraco`. + + The WebSocket heartbeat scheduling algorithm was improved to reduce the ``asyncio`` scheduling overhead by decreasing the number of ``asyncio.TimerHandle`` creations and cancellations. + + + *Related issues and pull requests on GitHub:* + :issue:`8608`. + + + +- Minor improvements to various type annotations -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8634`. 
+ + + + +---- + + 3.10.1 (2024-08-03) ======================== diff --git a/CHANGES/8551.contrib.rst b/CHANGES/8551.contrib.rst deleted file mode 100644 index 3505b483ca8..00000000000 --- a/CHANGES/8551.contrib.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed monkey patches for ``Path.stat()`` and ``Path.is_dir()`` for python 3.13 compatibility -- by :user:`steverep`. diff --git a/CHANGES/8565.bugfix.rst b/CHANGES/8565.bugfix.rst deleted file mode 100644 index 35e7c4dc71a..00000000000 --- a/CHANGES/8565.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed server checks for circular symbolic links to be compatible with Python 3.13 -- by :user:`steverep`. diff --git a/CHANGES/8597.bugfix.rst b/CHANGES/8597.bugfix.rst deleted file mode 100644 index 27186bb52d1..00000000000 --- a/CHANGES/8597.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed request body not being read when ignoring an Upgrade request -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8608.misc.rst b/CHANGES/8608.misc.rst deleted file mode 100644 index 76e845bf997..00000000000 --- a/CHANGES/8608.misc.rst +++ /dev/null @@ -1,3 +0,0 @@ -Improved websocket performance when messages are sent or received frequently -- by :user:`bdraco`. - -The WebSocket heartbeat scheduling algorithm was improved to reduce the ``asyncio`` scheduling overhead by decreasing the number of ``asyncio.TimerHandle`` creations and cancellations. diff --git a/CHANGES/8611.bugfix.rst b/CHANGES/8611.bugfix.rst deleted file mode 100644 index 2cd795cc14e..00000000000 --- a/CHANGES/8611.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed an edge case where shutdown would wait for timeout when handler was already completed -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8632.bugfix.rst b/CHANGES/8632.bugfix.rst deleted file mode 100644 index c6da81d7ab3..00000000000 --- a/CHANGES/8632.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed connecting to ``npipe://``, ``tcp://``, and ``unix://`` urls -- by :user:`bdraco`. 
diff --git a/CHANGES/8634.misc.rst b/CHANGES/8634.misc.rst deleted file mode 100644 index cf4c68d5119..00000000000 --- a/CHANGES/8634.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Minor improvements to various type annotations -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8636.breaking.rst b/CHANGES/8636.breaking.rst deleted file mode 100644 index ae3d599bf7a..00000000000 --- a/CHANGES/8636.breaking.rst +++ /dev/null @@ -1 +0,0 @@ -Removed ``Request.wait_for_disconnection()`` which was mistakenly added briefly in 3.10.0 -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8641.bugfix.rst b/CHANGES/8641.bugfix.rst deleted file mode 100644 index 9c85ac04419..00000000000 --- a/CHANGES/8641.bugfix.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fixed WebSocket ping tasks being prematurely garbage collected -- by :user:`bdraco`. - -There was a small risk that WebSocket ping tasks would be prematurely garbage collected because the event loop only holds a weak reference to the task. The garbage collection risk has been fixed by holding a strong reference to the task. Additionally, the task is now scheduled eagerly with Python 3.12+ to increase the chance it can be completed immediately and avoid having to hold any references to the task. diff --git a/CHANGES/8652.bugfix.rst b/CHANGES/8652.bugfix.rst deleted file mode 100644 index 3a1003e50ad..00000000000 --- a/CHANGES/8652.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed incorrectly following symlinks for compressed file variants -- by :user:`steverep`. 
diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 9ec07dbe449..f050229f008 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.10.1" +__version__ = "3.10.2" from typing import TYPE_CHECKING, Tuple From 1bc8d53210512afa044fbea0b676d1575aed7baa Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 8 Aug 2024 21:59:10 +0100 Subject: [PATCH 0299/1511] [PR #8657/6c6ecfaf backport][3.10] Fix multipart reading with split boundary (#8658) **This is a backport of PR #8657 as merged into master (6c6ecfaf320b27eb9f86066c4bfb1f3947c3362d).** --------- Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8653.bugfix.rst | 1 + aiohttp/multipart.py | 19 ++++++++++--- tests/test_multipart.py | 61 +++++++++++++++++++++++++++++++++++++++++ 3 files changed, 77 insertions(+), 4 deletions(-) create mode 100644 CHANGES/8653.bugfix.rst diff --git a/CHANGES/8653.bugfix.rst b/CHANGES/8653.bugfix.rst new file mode 100644 index 00000000000..5c4d66c181f --- /dev/null +++ b/CHANGES/8653.bugfix.rst @@ -0,0 +1 @@ +Fixed multipart reading when stream buffer splits the boundary over several read() calls -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index 71fc2654a1c..26780e3060c 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -266,6 +266,7 @@ def __init__( ) -> None: self.headers = headers self._boundary = boundary + self._boundary_len = len(boundary) + 2 # Boundary + \r\n self._content = content self._default_charset = default_charset self._at_eof = False @@ -346,15 +347,25 @@ async def _read_chunk_from_stream(self, size: int) -> bytes: # Reads content chunk of body part with unknown length. # The Content-Length header for body part is not necessary. 
assert ( - size >= len(self._boundary) + 2 + size >= self._boundary_len ), "Chunk size must be greater or equal than boundary length + 2" first_chunk = self._prev_chunk is None if first_chunk: self._prev_chunk = await self._content.read(size) - chunk = await self._content.read(size) - self._content_eof += int(self._content.at_eof()) - assert self._content_eof < 3, "Reading after EOF" + chunk = b"" + # content.read() may return less than size, so we need to loop to ensure + # we have enough data to detect the boundary. + while len(chunk) < self._boundary_len: + chunk += await self._content.read(size) + self._content_eof += int(self._content.at_eof()) + assert self._content_eof < 3, "Reading after EOF" + if self._content_eof: + break + if len(chunk) > size: + self._content.unread_data(chunk[size:]) + chunk = chunk[:size] + assert self._prev_chunk is not None window = self._prev_chunk + chunk sub = b"\r\n" + self._boundary diff --git a/tests/test_multipart.py b/tests/test_multipart.py index 436b70957fa..6fc9fe573ec 100644 --- a/tests/test_multipart.py +++ b/tests/test_multipart.py @@ -2,6 +2,7 @@ import io import json import pathlib +import sys import zlib from unittest import mock @@ -754,6 +755,66 @@ async def test_invalid_boundary(self) -> None: with pytest.raises(ValueError): await reader.next() + @pytest.mark.skipif(sys.version_info < (3, 10), reason="Needs anext()") + async def test_read_boundary_across_chunks(self) -> None: + class SplitBoundaryStream: + def __init__(self) -> None: + self.content = [ + b"--foobar\r\n\r\n", + b"Hello,\r\n-", + b"-fo", + b"ob", + b"ar\r\n", + b"\r\nwor", + b"ld!", + b"\r\n--f", + b"oobar--", + ] + + async def read(self, size=None) -> bytes: + chunk = self.content.pop(0) + assert len(chunk) <= size + return chunk + + def at_eof(self) -> bool: + return not self.content + + async def readline(self) -> bytes: + line = b"" + while self.content and b"\n" not in line: + line += self.content.pop(0) + line, *extra = line.split(b"\n", 
maxsplit=1) + if extra and extra[0]: + self.content.insert(0, extra[0]) + return line + b"\n" + + def unread_data(self, data: bytes) -> None: + if self.content: + self.content[0] = data + self.content[0] + else: + self.content.append(data) + + stream = SplitBoundaryStream() + reader = aiohttp.MultipartReader( + {CONTENT_TYPE: 'multipart/related;boundary="foobar"'}, stream + ) + part = await anext(reader) + result = await part.read_chunk(10) + assert result == b"Hello," + result = await part.read_chunk(10) + assert result == b"" + assert part.at_eof() + + part = await anext(reader) + result = await part.read_chunk(10) + assert result == b"world!" + result = await part.read_chunk(10) + assert result == b"" + assert part.at_eof() + + with pytest.raises(StopAsyncIteration): + await anext(reader) + async def test_release(self) -> None: with Stream( newline.join( From dbaf17479c8a7fdaf85dc093963c99f740e118fe Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 8 Aug 2024 23:58:29 +0100 Subject: [PATCH 0300/1511] [PR #8657/6c6ecfaf backport][3.11] Fix multipart reading with split boundary (#8659) **This is a backport of PR #8657 as merged into master (6c6ecfaf320b27eb9f86066c4bfb1f3947c3362d).** --------- Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8653.bugfix.rst | 1 + aiohttp/multipart.py | 19 ++++++++++--- tests/test_multipart.py | 61 +++++++++++++++++++++++++++++++++++++++++ 3 files changed, 77 insertions(+), 4 deletions(-) create mode 100644 CHANGES/8653.bugfix.rst diff --git a/CHANGES/8653.bugfix.rst b/CHANGES/8653.bugfix.rst new file mode 100644 index 00000000000..5c4d66c181f --- /dev/null +++ b/CHANGES/8653.bugfix.rst @@ -0,0 +1 @@ +Fixed multipart reading when stream buffer splits the boundary over several read() calls -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index 71fc2654a1c..26780e3060c 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -266,6 +266,7 @@ def __init__( ) -> None: self.headers = headers self._boundary = boundary + self._boundary_len = len(boundary) + 2 # Boundary + \r\n self._content = content self._default_charset = default_charset self._at_eof = False @@ -346,15 +347,25 @@ async def _read_chunk_from_stream(self, size: int) -> bytes: # Reads content chunk of body part with unknown length. # The Content-Length header for body part is not necessary. assert ( - size >= len(self._boundary) + 2 + size >= self._boundary_len ), "Chunk size must be greater or equal than boundary length + 2" first_chunk = self._prev_chunk is None if first_chunk: self._prev_chunk = await self._content.read(size) - chunk = await self._content.read(size) - self._content_eof += int(self._content.at_eof()) - assert self._content_eof < 3, "Reading after EOF" + chunk = b"" + # content.read() may return less than size, so we need to loop to ensure + # we have enough data to detect the boundary. 
+ while len(chunk) < self._boundary_len: + chunk += await self._content.read(size) + self._content_eof += int(self._content.at_eof()) + assert self._content_eof < 3, "Reading after EOF" + if self._content_eof: + break + if len(chunk) > size: + self._content.unread_data(chunk[size:]) + chunk = chunk[:size] + assert self._prev_chunk is not None window = self._prev_chunk + chunk sub = b"\r\n" + self._boundary diff --git a/tests/test_multipart.py b/tests/test_multipart.py index 436b70957fa..6fc9fe573ec 100644 --- a/tests/test_multipart.py +++ b/tests/test_multipart.py @@ -2,6 +2,7 @@ import io import json import pathlib +import sys import zlib from unittest import mock @@ -754,6 +755,66 @@ async def test_invalid_boundary(self) -> None: with pytest.raises(ValueError): await reader.next() + @pytest.mark.skipif(sys.version_info < (3, 10), reason="Needs anext()") + async def test_read_boundary_across_chunks(self) -> None: + class SplitBoundaryStream: + def __init__(self) -> None: + self.content = [ + b"--foobar\r\n\r\n", + b"Hello,\r\n-", + b"-fo", + b"ob", + b"ar\r\n", + b"\r\nwor", + b"ld!", + b"\r\n--f", + b"oobar--", + ] + + async def read(self, size=None) -> bytes: + chunk = self.content.pop(0) + assert len(chunk) <= size + return chunk + + def at_eof(self) -> bool: + return not self.content + + async def readline(self) -> bytes: + line = b"" + while self.content and b"\n" not in line: + line += self.content.pop(0) + line, *extra = line.split(b"\n", maxsplit=1) + if extra and extra[0]: + self.content.insert(0, extra[0]) + return line + b"\n" + + def unread_data(self, data: bytes) -> None: + if self.content: + self.content[0] = data + self.content[0] + else: + self.content.append(data) + + stream = SplitBoundaryStream() + reader = aiohttp.MultipartReader( + {CONTENT_TYPE: 'multipart/related;boundary="foobar"'}, stream + ) + part = await anext(reader) + result = await part.read_chunk(10) + assert result == b"Hello," + result = await part.read_chunk(10) + assert result 
== b"" + assert part.at_eof() + + part = await anext(reader) + result = await part.read_chunk(10) + assert result == b"world!" + result = await part.read_chunk(10) + assert result == b"" + assert part.at_eof() + + with pytest.raises(StopAsyncIteration): + await anext(reader) + async def test_release(self) -> None: with Stream( newline.join( From 3a9de0c1457e04bbe81acfefd031ff436c1da98d Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Thu, 8 Aug 2024 19:17:35 -0500 Subject: [PATCH 0301/1511] [PR #8660/14d5295 backport][3.10] Improve performance of WebSockets when there is no timeout (#8663) --- CHANGES/8660.misc.rst | 3 +++ aiohttp/client_ws.py | 11 ++++++++++- aiohttp/web_ws.py | 10 +++++++++- 3 files changed, 22 insertions(+), 2 deletions(-) create mode 100644 CHANGES/8660.misc.rst diff --git a/CHANGES/8660.misc.rst b/CHANGES/8660.misc.rst new file mode 100644 index 00000000000..8710063329e --- /dev/null +++ b/CHANGES/8660.misc.rst @@ -0,0 +1,3 @@ +Improved performance of :py:meth:`~aiohttp.ClientWebSocketResponse.receive` and :py:meth:`~aiohttp.web.WebSocketResponse.receive` when there is no timeout. -- by :user:`bdraco`. + +The timeout context manager is now avoided when there is no timeout as it accounted for up to 50% of the time spent in the :py:meth:`~aiohttp.ClientWebSocketResponse.receive` and :py:meth:`~aiohttp.web.WebSocketResponse.receive` methods. 
diff --git a/aiohttp/client_ws.py b/aiohttp/client_ws.py index 247f62c758e..7fd141248bd 100644 --- a/aiohttp/client_ws.py +++ b/aiohttp/client_ws.py @@ -281,6 +281,8 @@ async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bo return False async def receive(self, timeout: Optional[float] = None) -> WSMessage: + receive_timeout = timeout or self._receive_timeout + while True: if self._waiting: raise RuntimeError("Concurrent call to receive() is not allowed") @@ -294,7 +296,14 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: try: self._waiting = True try: - async with async_timeout.timeout(timeout or self._receive_timeout): + if receive_timeout: + # Entering the context manager and creating + # Timeout() object can take almost 50% of the + # run time in this loop so we avoid it if + # there is no read timeout. + async with async_timeout.timeout(receive_timeout): + msg = await self._reader.read() + else: msg = await self._reader.read() self._reset_heartbeat() finally: diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index ba3332715a6..fe8f537dc76 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -484,6 +484,7 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: loop = self._loop assert loop is not None + receive_timeout = timeout or self._receive_timeout while True: if self._waiting: raise RuntimeError("Concurrent call to receive() is not allowed") @@ -499,7 +500,14 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: try: self._waiting = True try: - async with async_timeout.timeout(timeout or self._receive_timeout): + if receive_timeout: + # Entering the context manager and creating + # Timeout() object can take almost 50% of the + # run time in this loop so we avoid it if + # there is no read timeout. 
+ async with async_timeout.timeout(receive_timeout): + msg = await self._reader.read() + else: msg = await self._reader.read() self._reset_heartbeat() finally: From 52f2e856c43d5f689f5ca910e933882d13df1442 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Thu, 8 Aug 2024 19:17:37 -0500 Subject: [PATCH 0302/1511] [PR #8660/14d5295 backport][3.11] Improve performance of WebSockets when there is no timeout (#8664) --- CHANGES/8660.misc.rst | 3 +++ aiohttp/client_ws.py | 11 ++++++++++- aiohttp/web_ws.py | 10 +++++++++- 3 files changed, 22 insertions(+), 2 deletions(-) create mode 100644 CHANGES/8660.misc.rst diff --git a/CHANGES/8660.misc.rst b/CHANGES/8660.misc.rst new file mode 100644 index 00000000000..8710063329e --- /dev/null +++ b/CHANGES/8660.misc.rst @@ -0,0 +1,3 @@ +Improved performance of :py:meth:`~aiohttp.ClientWebSocketResponse.receive` and :py:meth:`~aiohttp.web.WebSocketResponse.receive` when there is no timeout. -- by :user:`bdraco`. + +The timeout context manager is now avoided when there is no timeout as it accounted for up to 50% of the time spent in the :py:meth:`~aiohttp.ClientWebSocketResponse.receive` and :py:meth:`~aiohttp.web.WebSocketResponse.receive` methods. 
diff --git a/aiohttp/client_ws.py b/aiohttp/client_ws.py index 247f62c758e..7fd141248bd 100644 --- a/aiohttp/client_ws.py +++ b/aiohttp/client_ws.py @@ -281,6 +281,8 @@ async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bo return False async def receive(self, timeout: Optional[float] = None) -> WSMessage: + receive_timeout = timeout or self._receive_timeout + while True: if self._waiting: raise RuntimeError("Concurrent call to receive() is not allowed") @@ -294,7 +296,14 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: try: self._waiting = True try: - async with async_timeout.timeout(timeout or self._receive_timeout): + if receive_timeout: + # Entering the context manager and creating + # Timeout() object can take almost 50% of the + # run time in this loop so we avoid it if + # there is no read timeout. + async with async_timeout.timeout(receive_timeout): + msg = await self._reader.read() + else: msg = await self._reader.read() self._reset_heartbeat() finally: diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index ba3332715a6..fe8f537dc76 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -484,6 +484,7 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: loop = self._loop assert loop is not None + receive_timeout = timeout or self._receive_timeout while True: if self._waiting: raise RuntimeError("Concurrent call to receive() is not allowed") @@ -499,7 +500,14 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: try: self._waiting = True try: - async with async_timeout.timeout(timeout or self._receive_timeout): + if receive_timeout: + # Entering the context manager and creating + # Timeout() object can take almost 50% of the + # run time in this loop so we avoid it if + # there is no read timeout. 
+ async with async_timeout.timeout(receive_timeout): + msg = await self._reader.read() + else: msg = await self._reader.read() self._reset_heartbeat() finally: From b4ad882576666ce2bba6eafe634001d46b850cb2 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Thu, 8 Aug 2024 19:31:27 -0500 Subject: [PATCH 0303/1511] [PR #8661/4d604ea backport][3.10] Improve performance of starting request handlers with Python 3.12+ (#8665) --- CHANGES/8661.misc.rst | 1 + aiohttp/web_protocol.py | 9 ++++++--- 2 files changed, 7 insertions(+), 3 deletions(-) create mode 100644 CHANGES/8661.misc.rst diff --git a/CHANGES/8661.misc.rst b/CHANGES/8661.misc.rst new file mode 100644 index 00000000000..c0a6fdadb37 --- /dev/null +++ b/CHANGES/8661.misc.rst @@ -0,0 +1 @@ +Improved performance of starting request handlers with Python 3.12+ -- by :user:`bdraco`. diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index 9ba05a08e75..f60759d927b 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -1,5 +1,6 @@ import asyncio import asyncio.streams +import sys import traceback import warnings from collections import deque @@ -533,9 +534,11 @@ async def start(self) -> None: request = self._request_factory(message, payload, self, writer, handler) try: # a new task is used for copy context vars (#3406) - task = self._loop.create_task( - self._handle_request(request, start, request_handler) - ) + coro = self._handle_request(request, start, request_handler) + if sys.version_info >= (3, 12): + task = asyncio.Task(coro, loop=loop, eager_start=True) + else: + task = loop.create_task(coro) try: resp, reset = await task except (asyncio.CancelledError, ConnectionError): From 58e91a17d6e797df297709e7612eff3876ad060b Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Thu, 8 Aug 2024 19:32:41 -0500 Subject: [PATCH 0304/1511] [PR #8661/4d604ea backport][3.11] Improve performance of starting request handlers with Python 3.12+ (#8666) --- CHANGES/8661.misc.rst | 1 + aiohttp/web_protocol.py | 9 ++++++--- 2 files changed, 7 insertions(+), 3 deletions(-) create mode 100644 CHANGES/8661.misc.rst diff --git a/CHANGES/8661.misc.rst b/CHANGES/8661.misc.rst new file mode 100644 index 00000000000..c0a6fdadb37 --- /dev/null +++ b/CHANGES/8661.misc.rst @@ -0,0 +1 @@ +Improved performance of starting request handlers with Python 3.12+ -- by :user:`bdraco`. diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index 9ba05a08e75..f60759d927b 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -1,5 +1,6 @@ import asyncio import asyncio.streams +import sys import traceback import warnings from collections import deque @@ -533,9 +534,11 @@ async def start(self) -> None: request = self._request_factory(message, payload, self, writer, handler) try: # a new task is used for copy context vars (#3406) - task = self._loop.create_task( - self._handle_request(request, start, request_handler) - ) + coro = self._handle_request(request, start, request_handler) + if sys.version_info >= (3, 12): + task = asyncio.Task(coro, loop=loop, eager_start=True) + else: + task = loop.create_task(coro) try: resp, reset = await task except (asyncio.CancelledError, ConnectionError): From dbcdb16d6ef82dc310dc03841008c54bd4b61d59 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 9 Aug 2024 09:11:19 -0500 Subject: [PATCH 0305/1511] [PR #8667/406cd2c7 backport][3.10] Improve performance of generating random WebSocket mask (#8668) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/8667.misc.rst | 1 + aiohttp/http_websocket.py | 7 ++++--- 2 files changed, 5 insertions(+), 3 deletions(-) create mode 100644 CHANGES/8667.misc.rst diff --git a/CHANGES/8667.misc.rst b/CHANGES/8667.misc.rst new file mode 100644 index 00000000000..1c43b6e069a --- /dev/null +++ b/CHANGES/8667.misc.rst @@ -0,0 +1 @@ +Improved performance of generating random WebSocket mask -- by :user:`bdraco`. diff --git a/aiohttp/http_websocket.py b/aiohttp/http_websocket.py index 39f2e4a5c15..b513a45ebdc 100644 --- a/aiohttp/http_websocket.py +++ b/aiohttp/http_websocket.py @@ -8,6 +8,7 @@ import sys import zlib from enum import IntEnum +from functools import partial from struct import Struct from typing import ( Any, @@ -103,6 +104,7 @@ class WSMsgType(IntEnum): PACK_LEN2 = Struct("!BBH").pack PACK_LEN3 = Struct("!BBQ").pack PACK_CLOSE_CODE = Struct("!H").pack +PACK_RANDBITS = Struct("!L").pack MSG_SIZE: Final[int] = 2**14 DEFAULT_LIMIT: Final[int] = 2**16 @@ -612,7 +614,7 @@ def __init__( self.protocol = protocol self.transport = transport self.use_mask = use_mask - self.randrange = random.randrange + self.get_random_bits = partial(random.getrandbits, 32) self.compress = compress self.notakeover = notakeover self._closing = False @@ -668,8 +670,7 @@ async def _send_frame( else: header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length) if use_mask: - mask_int = self.randrange(0, 0xFFFFFFFF) - mask = mask_int.to_bytes(4, "big") + mask = PACK_RANDBITS(self.get_random_bits()) message = bytearray(message) _websocket_mask(mask, message) self._write(header + mask + message) From 9fd043ea4c06af921196b44c4b3777c27f42e784 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 9 Aug 2024 09:11:29 -0500 Subject: [PATCH 0306/1511] [PR #8667/406cd2c7 backport][3.11] Improve performance of generating random WebSocket mask (#8669) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/8667.misc.rst | 1 + aiohttp/http_websocket.py | 7 ++++--- 2 files changed, 5 insertions(+), 3 deletions(-) create mode 100644 CHANGES/8667.misc.rst diff --git a/CHANGES/8667.misc.rst b/CHANGES/8667.misc.rst new file mode 100644 index 00000000000..1c43b6e069a --- /dev/null +++ b/CHANGES/8667.misc.rst @@ -0,0 +1 @@ +Improved performance of generating random WebSocket mask -- by :user:`bdraco`. diff --git a/aiohttp/http_websocket.py b/aiohttp/http_websocket.py index 39f2e4a5c15..b513a45ebdc 100644 --- a/aiohttp/http_websocket.py +++ b/aiohttp/http_websocket.py @@ -8,6 +8,7 @@ import sys import zlib from enum import IntEnum +from functools import partial from struct import Struct from typing import ( Any, @@ -103,6 +104,7 @@ class WSMsgType(IntEnum): PACK_LEN2 = Struct("!BBH").pack PACK_LEN3 = Struct("!BBQ").pack PACK_CLOSE_CODE = Struct("!H").pack +PACK_RANDBITS = Struct("!L").pack MSG_SIZE: Final[int] = 2**14 DEFAULT_LIMIT: Final[int] = 2**16 @@ -612,7 +614,7 @@ def __init__( self.protocol = protocol self.transport = transport self.use_mask = use_mask - self.randrange = random.randrange + self.get_random_bits = partial(random.getrandbits, 32) self.compress = compress self.notakeover = notakeover self._closing = False @@ -668,8 +670,7 @@ async def _send_frame( else: header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length) if use_mask: - mask_int = self.randrange(0, 0xFFFFFFFF) - mask = mask_int.to_bytes(4, "big") + mask = PACK_RANDBITS(self.get_random_bits()) message = bytearray(message) _websocket_mask(mask, message) self._write(header + mask + message) From f96182adab30d8609342c6d273c5aad1cd92b29f Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 9 Aug 2024 17:17:22 +0000 Subject: [PATCH 0307/1511] [PR #8662/be23d16f backport][3.10] Improve performance of keepalive rescheduling (#8670) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/8662.misc.rst | 3 +++ aiohttp/web_protocol.py | 33 +++++++++++++++------------------ 2 files changed, 18 insertions(+), 18 deletions(-) create mode 100644 CHANGES/8662.misc.rst diff --git a/CHANGES/8662.misc.rst b/CHANGES/8662.misc.rst new file mode 100644 index 00000000000..efe30a60cb2 --- /dev/null +++ b/CHANGES/8662.misc.rst @@ -0,0 +1,3 @@ +Improved performance of HTTP keep-alive checks -- by :user:`bdraco`. + +Previously, when processing a request for a keep-alive connection, the keep-alive check would happen every second; the check is now rescheduled if it fires too early instead. diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index f60759d927b..635b668ceb0 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -134,8 +134,6 @@ class RequestHandler(BaseProtocol): """ - KEEPALIVE_RESCHEDULE_DELAY = 1 - __slots__ = ( "_request_count", "_keepalive", @@ -143,7 +141,7 @@ class RequestHandler(BaseProtocol): "_request_handler", "_request_factory", "_tcp_keepalive", - "_keepalive_time", + "_next_keepalive_close_time", "_keepalive_handle", "_keepalive_timeout", "_lingering_time", @@ -197,7 +195,7 @@ def __init__( self._tcp_keepalive = tcp_keepalive # placeholder to be replaced on keepalive timeout setup - self._keepalive_time = 0.0 + self._next_keepalive_close_time = 0.0 self._keepalive_handle: Optional[asyncio.Handle] = None self._keepalive_timeout = keepalive_timeout self._lingering_time = float(lingering_time) @@ -429,23 +427,21 @@ def log_exception(self, *args: Any, **kw: Any) -> None: self.logger.exception(*args, **kw) def _process_keepalive(self) -> None: + self._keepalive_handle = None if self._force_close or not self._keepalive: return - next = self._keepalive_time + self._keepalive_timeout + loop = self._loop + now = loop.time() + close_time = self._next_keepalive_close_time + if now <= close_time: + # Keep alive close check fired too early, reschedule + self._keepalive_handle = 
loop.call_at(close_time, self._process_keepalive) + return # handler in idle state if self._waiter: - if self._loop.time() > next: - self.force_close() - return - - # not all request handlers are done, - # reschedule itself to next second - self._keepalive_handle = self._loop.call_later( - self.KEEPALIVE_RESCHEDULE_DELAY, - self._process_keepalive, - ) + self.force_close() async def _handle_request( self, @@ -596,11 +592,12 @@ async def start(self) -> None: if self._keepalive and not self._close: # start keep-alive timer if keepalive_timeout is not None: - now = self._loop.time() - self._keepalive_time = now + now = loop.time() + close_time = now + keepalive_timeout + self._next_keepalive_close_time = close_time if self._keepalive_handle is None: self._keepalive_handle = loop.call_at( - now + keepalive_timeout, self._process_keepalive + close_time, self._process_keepalive ) else: break From 3f452a0963b0419033e7d554fc583b8e7754995e Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 9 Aug 2024 19:26:59 +0200 Subject: [PATCH 0308/1511] [PR #8662/be23d16f backport][3.11] Improve performance of keepalive rescheduling (#8671) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/8662.misc.rst | 3 +++ aiohttp/web_protocol.py | 33 +++++++++++++++------------------ 2 files changed, 18 insertions(+), 18 deletions(-) create mode 100644 CHANGES/8662.misc.rst diff --git a/CHANGES/8662.misc.rst b/CHANGES/8662.misc.rst new file mode 100644 index 00000000000..efe30a60cb2 --- /dev/null +++ b/CHANGES/8662.misc.rst @@ -0,0 +1,3 @@ +Improved performance of HTTP keep-alive checks -- by :user:`bdraco`. + +Previously, when processing a request for a keep-alive connection, the keep-alive check would happen every second; the check is now rescheduled if it fires too early instead. 
diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index f60759d927b..635b668ceb0 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -134,8 +134,6 @@ class RequestHandler(BaseProtocol): """ - KEEPALIVE_RESCHEDULE_DELAY = 1 - __slots__ = ( "_request_count", "_keepalive", @@ -143,7 +141,7 @@ class RequestHandler(BaseProtocol): "_request_handler", "_request_factory", "_tcp_keepalive", - "_keepalive_time", + "_next_keepalive_close_time", "_keepalive_handle", "_keepalive_timeout", "_lingering_time", @@ -197,7 +195,7 @@ def __init__( self._tcp_keepalive = tcp_keepalive # placeholder to be replaced on keepalive timeout setup - self._keepalive_time = 0.0 + self._next_keepalive_close_time = 0.0 self._keepalive_handle: Optional[asyncio.Handle] = None self._keepalive_timeout = keepalive_timeout self._lingering_time = float(lingering_time) @@ -429,23 +427,21 @@ def log_exception(self, *args: Any, **kw: Any) -> None: self.logger.exception(*args, **kw) def _process_keepalive(self) -> None: + self._keepalive_handle = None if self._force_close or not self._keepalive: return - next = self._keepalive_time + self._keepalive_timeout + loop = self._loop + now = loop.time() + close_time = self._next_keepalive_close_time + if now <= close_time: + # Keep alive close check fired too early, reschedule + self._keepalive_handle = loop.call_at(close_time, self._process_keepalive) + return # handler in idle state if self._waiter: - if self._loop.time() > next: - self.force_close() - return - - # not all request handlers are done, - # reschedule itself to next second - self._keepalive_handle = self._loop.call_later( - self.KEEPALIVE_RESCHEDULE_DELAY, - self._process_keepalive, - ) + self.force_close() async def _handle_request( self, @@ -596,11 +592,12 @@ async def start(self) -> None: if self._keepalive and not self._close: # start keep-alive timer if keepalive_timeout is not None: - now = self._loop.time() - self._keepalive_time = now + now = loop.time() + 
close_time = now + keepalive_timeout + self._next_keepalive_close_time = close_time if self._keepalive_handle is None: self._keepalive_handle = loop.call_at( - now + keepalive_timeout, self._process_keepalive + close_time, self._process_keepalive ) else: break From f3fcba467676ba1e86aacdc6b1f2ed4a7e72455c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sat, 10 Aug 2024 10:04:32 -0500 Subject: [PATCH 0309/1511] [PR #8672/c3219bf backport][3.10] Fix TCPConnector doing blocking I/O in the event loop to create the SSLContext (#8673) Co-authored-by: Sam Bull <git@sambull.org> Co-authored-by: pre-commit-ci[bot] --- CHANGES/8672.bugfix.rst | 3 ++ aiohttp/connector.py | 104 ++++++++++++++++++++++++---------------- tests/test_connector.py | 78 +++++++++++++++++++++++------- tests/test_proxy.py | 2 +- 4 files changed, 128 insertions(+), 59 deletions(-) create mode 100644 CHANGES/8672.bugfix.rst diff --git a/CHANGES/8672.bugfix.rst b/CHANGES/8672.bugfix.rst new file mode 100644 index 00000000000..a57ed16d5d2 --- /dev/null +++ b/CHANGES/8672.bugfix.rst @@ -0,0 +1,3 @@ +Fixed :py:class:`aiohttp.TCPConnector` doing blocking I/O in the event loop to create the ``SSLContext`` -- by :user:`bdraco`. + +The blocking I/O would only happen once per verify mode. However, it could cause the event loop to block for a long time if the ``SSLContext`` creation is slow, which is more likely during startup when the disk cache is not yet present. 
diff --git a/aiohttp/connector.py b/aiohttp/connector.py index d4691b10e6e..04115c36a24 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -50,7 +50,14 @@ ) from .client_proto import ResponseHandler from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params -from .helpers import ceil_timeout, is_ip_address, noop, sentinel +from .helpers import ( + ceil_timeout, + is_ip_address, + noop, + sentinel, + set_exception, + set_result, +) from .locks import EventResultOrError from .resolver import DefaultResolver @@ -771,6 +778,7 @@ class TCPConnector(BaseConnector): """ allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"tcp"}) + _made_ssl_context: Dict[bool, "asyncio.Future[SSLContext]"] = {} def __init__( self, @@ -969,29 +977,24 @@ async def _create_connection( return proto @staticmethod - @functools.lru_cache(None) def _make_ssl_context(verified: bool) -> SSLContext: + """Create SSL context. + + This method is not async-friendly and should be called from a thread + because it will load certificates from disk and do other blocking I/O. + """ if verified: return ssl.create_default_context() - else: - sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) - sslcontext.options |= ssl.OP_NO_SSLv2 - sslcontext.options |= ssl.OP_NO_SSLv3 - sslcontext.check_hostname = False - sslcontext.verify_mode = ssl.CERT_NONE - try: - sslcontext.options |= ssl.OP_NO_COMPRESSION - except AttributeError as attr_err: - warnings.warn( - "{!s}: The Python interpreter is compiled " - "against OpenSSL < 1.0.0. 
Ref: " - "https://docs.python.org/3/library/ssl.html" - "#ssl.OP_NO_COMPRESSION".format(attr_err), - ) - sslcontext.set_default_verify_paths() - return sslcontext - - def _get_ssl_context(self, req: ClientRequest) -> Optional[SSLContext]: + sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) + sslcontext.options |= ssl.OP_NO_SSLv2 + sslcontext.options |= ssl.OP_NO_SSLv3 + sslcontext.check_hostname = False + sslcontext.verify_mode = ssl.CERT_NONE + sslcontext.options |= ssl.OP_NO_COMPRESSION + sslcontext.set_default_verify_paths() + return sslcontext + + async def _get_ssl_context(self, req: ClientRequest) -> Optional[SSLContext]: """Logic to get the correct SSL context 0. if req.ssl is false, return None @@ -1005,25 +1008,46 @@ def _get_ssl_context(self, req: ClientRequest) -> Optional[SSLContext]: 3. if verify_ssl is False in req, generate a SSL context that won't verify """ - if req.is_ssl(): - if ssl is None: # pragma: no cover - raise RuntimeError("SSL is not supported.") - sslcontext = req.ssl - if isinstance(sslcontext, ssl.SSLContext): - return sslcontext - if sslcontext is not True: - # not verified or fingerprinted - return self._make_ssl_context(False) - sslcontext = self._ssl - if isinstance(sslcontext, ssl.SSLContext): - return sslcontext - if sslcontext is not True: - # not verified or fingerprinted - return self._make_ssl_context(False) - return self._make_ssl_context(True) - else: + if not req.is_ssl(): return None + if ssl is None: # pragma: no cover + raise RuntimeError("SSL is not supported.") + sslcontext = req.ssl + if isinstance(sslcontext, ssl.SSLContext): + return sslcontext + if sslcontext is not True: + # not verified or fingerprinted + return await self._make_or_get_ssl_context(False) + sslcontext = self._ssl + if isinstance(sslcontext, ssl.SSLContext): + return sslcontext + if sslcontext is not True: + # not verified or fingerprinted + return await self._make_or_get_ssl_context(False) + return await self._make_or_get_ssl_context(True) + 
+ async def _make_or_get_ssl_context(self, verified: bool) -> SSLContext: + """Create or get cached SSL context.""" + try: + return await self._made_ssl_context[verified] + except KeyError: + loop = self._loop + future = loop.create_future() + self._made_ssl_context[verified] = future + try: + result = await loop.run_in_executor( + None, self._make_ssl_context, verified + ) + # BaseException is used since we might get CancelledError + except BaseException as ex: + del self._made_ssl_context[verified] + set_exception(future, ex) + raise + else: + set_result(future, result) + return result + def _get_fingerprint(self, req: ClientRequest) -> Optional["Fingerprint"]: ret = req.ssl if isinstance(ret, Fingerprint): @@ -1180,7 +1204,7 @@ async def _start_tls_connection( # `req.is_ssl()` evaluates to `False` which is never gonna happen # in this code path. Of course, it's rather fragile # maintainability-wise but this is to be solved separately. - sslcontext = cast(ssl.SSLContext, self._get_ssl_context(req)) + sslcontext = cast(ssl.SSLContext, await self._get_ssl_context(req)) try: async with ceil_timeout( @@ -1258,7 +1282,7 @@ async def _create_direct_connection( *, client_error: Type[Exception] = ClientConnectorError, ) -> Tuple[asyncio.Transport, ResponseHandler]: - sslcontext = self._get_ssl_context(req) + sslcontext = await self._get_ssl_context(req) fingerprint = self._get_fingerprint(req) host = req.url.raw_host diff --git a/tests/test_connector.py b/tests/test_connector.py index d146fb4ee51..0d6ca18ef53 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -1540,23 +1540,23 @@ async def test_tcp_connector_clear_dns_cache_bad_args(loop) -> None: conn.clear_dns_cache("localhost") -async def test_dont_recreate_ssl_context(loop) -> None: - conn = aiohttp.TCPConnector(loop=loop) - ctx = conn._make_ssl_context(True) - assert ctx is conn._make_ssl_context(True) +async def test_dont_recreate_ssl_context() -> None: + conn = aiohttp.TCPConnector() + ctx = 
await conn._make_or_get_ssl_context(True) + assert ctx is await conn._make_or_get_ssl_context(True) -async def test_dont_recreate_ssl_context2(loop) -> None: - conn = aiohttp.TCPConnector(loop=loop) - ctx = conn._make_ssl_context(False) - assert ctx is conn._make_ssl_context(False) +async def test_dont_recreate_ssl_context2() -> None: + conn = aiohttp.TCPConnector() + ctx = await conn._make_or_get_ssl_context(False) + assert ctx is await conn._make_or_get_ssl_context(False) -async def test___get_ssl_context1(loop) -> None: - conn = aiohttp.TCPConnector(loop=loop) +async def test___get_ssl_context1() -> None: + conn = aiohttp.TCPConnector() req = mock.Mock() req.is_ssl.return_value = False - assert conn._get_ssl_context(req) is None + assert await conn._get_ssl_context(req) is None async def test___get_ssl_context2(loop) -> None: @@ -1565,7 +1565,7 @@ async def test___get_ssl_context2(loop) -> None: req = mock.Mock() req.is_ssl.return_value = True req.ssl = ctx - assert conn._get_ssl_context(req) is ctx + assert await conn._get_ssl_context(req) is ctx async def test___get_ssl_context3(loop) -> None: @@ -1574,7 +1574,7 @@ async def test___get_ssl_context3(loop) -> None: req = mock.Mock() req.is_ssl.return_value = True req.ssl = True - assert conn._get_ssl_context(req) is ctx + assert await conn._get_ssl_context(req) is ctx async def test___get_ssl_context4(loop) -> None: @@ -1583,7 +1583,9 @@ async def test___get_ssl_context4(loop) -> None: req = mock.Mock() req.is_ssl.return_value = True req.ssl = False - assert conn._get_ssl_context(req) is conn._make_ssl_context(False) + assert await conn._get_ssl_context(req) is await conn._make_or_get_ssl_context( + False + ) async def test___get_ssl_context5(loop) -> None: @@ -1592,15 +1594,55 @@ async def test___get_ssl_context5(loop) -> None: req = mock.Mock() req.is_ssl.return_value = True req.ssl = aiohttp.Fingerprint(hashlib.sha256(b"1").digest()) - assert conn._get_ssl_context(req) is conn._make_ssl_context(False) + 
assert await conn._get_ssl_context(req) is await conn._make_or_get_ssl_context( + False + ) -async def test___get_ssl_context6(loop) -> None: - conn = aiohttp.TCPConnector(loop=loop) +async def test___get_ssl_context6() -> None: + conn = aiohttp.TCPConnector() + req = mock.Mock() + req.is_ssl.return_value = True + req.ssl = True + assert await conn._get_ssl_context(req) is await conn._make_or_get_ssl_context(True) + + +async def test_ssl_context_once() -> None: + """Test the ssl context is created only once and shared between connectors.""" + conn1 = aiohttp.TCPConnector() + conn2 = aiohttp.TCPConnector() + conn3 = aiohttp.TCPConnector() + req = mock.Mock() req.is_ssl.return_value = True req.ssl = True - assert conn._get_ssl_context(req) is conn._make_ssl_context(True) + assert await conn1._get_ssl_context(req) is await conn1._make_or_get_ssl_context( + True + ) + assert await conn2._get_ssl_context(req) is await conn1._make_or_get_ssl_context( + True + ) + assert await conn3._get_ssl_context(req) is await conn1._make_or_get_ssl_context( + True + ) + assert conn1._made_ssl_context is conn2._made_ssl_context is conn3._made_ssl_context + assert True in conn1._made_ssl_context + + +@pytest.mark.parametrize("exception", [OSError, ssl.SSLError, asyncio.CancelledError]) +async def test_ssl_context_creation_raises(exception: BaseException) -> None: + """Test that we try again if SSLContext creation fails the first time.""" + conn = aiohttp.TCPConnector() + conn._made_ssl_context.clear() + + with mock.patch.object( + conn, "_make_ssl_context", side_effect=exception + ), pytest.raises( # type: ignore[call-overload] + exception + ): + await conn._make_or_get_ssl_context(True) + + assert isinstance(await conn._make_or_get_ssl_context(True), ssl.SSLContext) async def test_close_twice(loop) -> None: diff --git a/tests/test_proxy.py b/tests/test_proxy.py index f335e42c254..c5e98deb8a5 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -817,7 +817,7 @@ async def 
make_conn(): self.loop.start_tls.assert_called_with( mock.ANY, mock.ANY, - connector._make_ssl_context(True), + self.loop.run_until_complete(connector._make_or_get_ssl_context(True)), server_hostname="www.python.org", ssl_handshake_timeout=mock.ANY, ) From adf4dea030ce12a356528b5854e668c4556743e3 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sat, 10 Aug 2024 10:10:25 -0500 Subject: [PATCH 0310/1511] [PR #8672/c3219bf backport][3.11] Fix TCPConnector doing blocking I/O in the event loop to create the SSLContext (#8674) Co-authored-by: Sam Bull <git@sambull.org> Co-authored-by: pre-commit-ci[bot] --- CHANGES/8672.bugfix.rst | 3 ++ aiohttp/connector.py | 104 ++++++++++++++++++++++++---------------- tests/test_connector.py | 78 +++++++++++++++++++++++------- tests/test_proxy.py | 2 +- 4 files changed, 128 insertions(+), 59 deletions(-) create mode 100644 CHANGES/8672.bugfix.rst diff --git a/CHANGES/8672.bugfix.rst b/CHANGES/8672.bugfix.rst new file mode 100644 index 00000000000..a57ed16d5d2 --- /dev/null +++ b/CHANGES/8672.bugfix.rst @@ -0,0 +1,3 @@ +Fixed :py:class:`aiohttp.TCPConnector` doing blocking I/O in the event loop to create the ``SSLContext`` -- by :user:`bdraco`. + +The blocking I/O would only happen once per verify mode. However, it could cause the event loop to block for a long time if the ``SSLContext`` creation is slow, which is more likely during startup when the disk cache is not yet present. 
diff --git a/aiohttp/connector.py b/aiohttp/connector.py index d4691b10e6e..04115c36a24 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -50,7 +50,14 @@ ) from .client_proto import ResponseHandler from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params -from .helpers import ceil_timeout, is_ip_address, noop, sentinel +from .helpers import ( + ceil_timeout, + is_ip_address, + noop, + sentinel, + set_exception, + set_result, +) from .locks import EventResultOrError from .resolver import DefaultResolver @@ -771,6 +778,7 @@ class TCPConnector(BaseConnector): """ allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"tcp"}) + _made_ssl_context: Dict[bool, "asyncio.Future[SSLContext]"] = {} def __init__( self, @@ -969,29 +977,24 @@ async def _create_connection( return proto @staticmethod - @functools.lru_cache(None) def _make_ssl_context(verified: bool) -> SSLContext: + """Create SSL context. + + This method is not async-friendly and should be called from a thread + because it will load certificates from disk and do other blocking I/O. + """ if verified: return ssl.create_default_context() - else: - sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) - sslcontext.options |= ssl.OP_NO_SSLv2 - sslcontext.options |= ssl.OP_NO_SSLv3 - sslcontext.check_hostname = False - sslcontext.verify_mode = ssl.CERT_NONE - try: - sslcontext.options |= ssl.OP_NO_COMPRESSION - except AttributeError as attr_err: - warnings.warn( - "{!s}: The Python interpreter is compiled " - "against OpenSSL < 1.0.0. 
Ref: " - "https://docs.python.org/3/library/ssl.html" - "#ssl.OP_NO_COMPRESSION".format(attr_err), - ) - sslcontext.set_default_verify_paths() - return sslcontext - - def _get_ssl_context(self, req: ClientRequest) -> Optional[SSLContext]: + sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) + sslcontext.options |= ssl.OP_NO_SSLv2 + sslcontext.options |= ssl.OP_NO_SSLv3 + sslcontext.check_hostname = False + sslcontext.verify_mode = ssl.CERT_NONE + sslcontext.options |= ssl.OP_NO_COMPRESSION + sslcontext.set_default_verify_paths() + return sslcontext + + async def _get_ssl_context(self, req: ClientRequest) -> Optional[SSLContext]: """Logic to get the correct SSL context 0. if req.ssl is false, return None @@ -1005,25 +1008,46 @@ def _get_ssl_context(self, req: ClientRequest) -> Optional[SSLContext]: 3. if verify_ssl is False in req, generate a SSL context that won't verify """ - if req.is_ssl(): - if ssl is None: # pragma: no cover - raise RuntimeError("SSL is not supported.") - sslcontext = req.ssl - if isinstance(sslcontext, ssl.SSLContext): - return sslcontext - if sslcontext is not True: - # not verified or fingerprinted - return self._make_ssl_context(False) - sslcontext = self._ssl - if isinstance(sslcontext, ssl.SSLContext): - return sslcontext - if sslcontext is not True: - # not verified or fingerprinted - return self._make_ssl_context(False) - return self._make_ssl_context(True) - else: + if not req.is_ssl(): return None + if ssl is None: # pragma: no cover + raise RuntimeError("SSL is not supported.") + sslcontext = req.ssl + if isinstance(sslcontext, ssl.SSLContext): + return sslcontext + if sslcontext is not True: + # not verified or fingerprinted + return await self._make_or_get_ssl_context(False) + sslcontext = self._ssl + if isinstance(sslcontext, ssl.SSLContext): + return sslcontext + if sslcontext is not True: + # not verified or fingerprinted + return await self._make_or_get_ssl_context(False) + return await self._make_or_get_ssl_context(True) + 
+ async def _make_or_get_ssl_context(self, verified: bool) -> SSLContext: + """Create or get cached SSL context.""" + try: + return await self._made_ssl_context[verified] + except KeyError: + loop = self._loop + future = loop.create_future() + self._made_ssl_context[verified] = future + try: + result = await loop.run_in_executor( + None, self._make_ssl_context, verified + ) + # BaseException is used since we might get CancelledError + except BaseException as ex: + del self._made_ssl_context[verified] + set_exception(future, ex) + raise + else: + set_result(future, result) + return result + def _get_fingerprint(self, req: ClientRequest) -> Optional["Fingerprint"]: ret = req.ssl if isinstance(ret, Fingerprint): @@ -1180,7 +1204,7 @@ async def _start_tls_connection( # `req.is_ssl()` evaluates to `False` which is never gonna happen # in this code path. Of course, it's rather fragile # maintainability-wise but this is to be solved separately. - sslcontext = cast(ssl.SSLContext, self._get_ssl_context(req)) + sslcontext = cast(ssl.SSLContext, await self._get_ssl_context(req)) try: async with ceil_timeout( @@ -1258,7 +1282,7 @@ async def _create_direct_connection( *, client_error: Type[Exception] = ClientConnectorError, ) -> Tuple[asyncio.Transport, ResponseHandler]: - sslcontext = self._get_ssl_context(req) + sslcontext = await self._get_ssl_context(req) fingerprint = self._get_fingerprint(req) host = req.url.raw_host diff --git a/tests/test_connector.py b/tests/test_connector.py index d146fb4ee51..0d6ca18ef53 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -1540,23 +1540,23 @@ async def test_tcp_connector_clear_dns_cache_bad_args(loop) -> None: conn.clear_dns_cache("localhost") -async def test_dont_recreate_ssl_context(loop) -> None: - conn = aiohttp.TCPConnector(loop=loop) - ctx = conn._make_ssl_context(True) - assert ctx is conn._make_ssl_context(True) +async def test_dont_recreate_ssl_context() -> None: + conn = aiohttp.TCPConnector() + ctx = 
await conn._make_or_get_ssl_context(True) + assert ctx is await conn._make_or_get_ssl_context(True) -async def test_dont_recreate_ssl_context2(loop) -> None: - conn = aiohttp.TCPConnector(loop=loop) - ctx = conn._make_ssl_context(False) - assert ctx is conn._make_ssl_context(False) +async def test_dont_recreate_ssl_context2() -> None: + conn = aiohttp.TCPConnector() + ctx = await conn._make_or_get_ssl_context(False) + assert ctx is await conn._make_or_get_ssl_context(False) -async def test___get_ssl_context1(loop) -> None: - conn = aiohttp.TCPConnector(loop=loop) +async def test___get_ssl_context1() -> None: + conn = aiohttp.TCPConnector() req = mock.Mock() req.is_ssl.return_value = False - assert conn._get_ssl_context(req) is None + assert await conn._get_ssl_context(req) is None async def test___get_ssl_context2(loop) -> None: @@ -1565,7 +1565,7 @@ async def test___get_ssl_context2(loop) -> None: req = mock.Mock() req.is_ssl.return_value = True req.ssl = ctx - assert conn._get_ssl_context(req) is ctx + assert await conn._get_ssl_context(req) is ctx async def test___get_ssl_context3(loop) -> None: @@ -1574,7 +1574,7 @@ async def test___get_ssl_context3(loop) -> None: req = mock.Mock() req.is_ssl.return_value = True req.ssl = True - assert conn._get_ssl_context(req) is ctx + assert await conn._get_ssl_context(req) is ctx async def test___get_ssl_context4(loop) -> None: @@ -1583,7 +1583,9 @@ async def test___get_ssl_context4(loop) -> None: req = mock.Mock() req.is_ssl.return_value = True req.ssl = False - assert conn._get_ssl_context(req) is conn._make_ssl_context(False) + assert await conn._get_ssl_context(req) is await conn._make_or_get_ssl_context( + False + ) async def test___get_ssl_context5(loop) -> None: @@ -1592,15 +1594,55 @@ async def test___get_ssl_context5(loop) -> None: req = mock.Mock() req.is_ssl.return_value = True req.ssl = aiohttp.Fingerprint(hashlib.sha256(b"1").digest()) - assert conn._get_ssl_context(req) is conn._make_ssl_context(False) + 
assert await conn._get_ssl_context(req) is await conn._make_or_get_ssl_context( + False + ) -async def test___get_ssl_context6(loop) -> None: - conn = aiohttp.TCPConnector(loop=loop) +async def test___get_ssl_context6() -> None: + conn = aiohttp.TCPConnector() + req = mock.Mock() + req.is_ssl.return_value = True + req.ssl = True + assert await conn._get_ssl_context(req) is await conn._make_or_get_ssl_context(True) + + +async def test_ssl_context_once() -> None: + """Test the ssl context is created only once and shared between connectors.""" + conn1 = aiohttp.TCPConnector() + conn2 = aiohttp.TCPConnector() + conn3 = aiohttp.TCPConnector() + req = mock.Mock() req.is_ssl.return_value = True req.ssl = True - assert conn._get_ssl_context(req) is conn._make_ssl_context(True) + assert await conn1._get_ssl_context(req) is await conn1._make_or_get_ssl_context( + True + ) + assert await conn2._get_ssl_context(req) is await conn1._make_or_get_ssl_context( + True + ) + assert await conn3._get_ssl_context(req) is await conn1._make_or_get_ssl_context( + True + ) + assert conn1._made_ssl_context is conn2._made_ssl_context is conn3._made_ssl_context + assert True in conn1._made_ssl_context + + +@pytest.mark.parametrize("exception", [OSError, ssl.SSLError, asyncio.CancelledError]) +async def test_ssl_context_creation_raises(exception: BaseException) -> None: + """Test that we try again if SSLContext creation fails the first time.""" + conn = aiohttp.TCPConnector() + conn._made_ssl_context.clear() + + with mock.patch.object( + conn, "_make_ssl_context", side_effect=exception + ), pytest.raises( # type: ignore[call-overload] + exception + ): + await conn._make_or_get_ssl_context(True) + + assert isinstance(await conn._make_or_get_ssl_context(True), ssl.SSLContext) async def test_close_twice(loop) -> None: diff --git a/tests/test_proxy.py b/tests/test_proxy.py index f335e42c254..c5e98deb8a5 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -817,7 +817,7 @@ async def 
make_conn(): self.loop.start_tls.assert_called_with( mock.ANY, mock.ANY, - connector._make_ssl_context(True), + self.loop.run_until_complete(connector._make_or_get_ssl_context(True)), server_hostname="www.python.org", ssl_handshake_timeout=mock.ANY, ) From 73d17d40b38ed71dd2066315313ffa53025912dd Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sat, 10 Aug 2024 11:31:17 -0500 Subject: [PATCH 0311/1511] [PR #8676/2915102 backport][3.10] Fix type ignore in SSLContext creation connector test (#8677) --- tests/test_connector.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/tests/test_connector.py b/tests/test_connector.py index 0d6ca18ef53..8dd7a294b30 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -10,7 +10,7 @@ import uuid from collections import deque from contextlib import closing -from typing import Any, List, Optional +from typing import Any, List, Optional, Type from unittest import mock import pytest @@ -1630,16 +1630,14 @@ async def test_ssl_context_once() -> None: @pytest.mark.parametrize("exception", [OSError, ssl.SSLError, asyncio.CancelledError]) -async def test_ssl_context_creation_raises(exception: BaseException) -> None: +async def test_ssl_context_creation_raises(exception: Type[BaseException]) -> None: """Test that we try again if SSLContext creation fails the first time.""" conn = aiohttp.TCPConnector() conn._made_ssl_context.clear() with mock.patch.object( conn, "_make_ssl_context", side_effect=exception - ), pytest.raises( # type: ignore[call-overload] - exception - ): + ), pytest.raises(exception): await conn._make_or_get_ssl_context(True) assert isinstance(await conn._make_or_get_ssl_context(True), ssl.SSLContext) From 8977bae3b78458e494be6a06514ce4c0a8a66f0b Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sat, 10 Aug 2024 11:36:52 -0500 Subject: [PATCH 0312/1511] [PR #8676/2915102 backport][3.11] Fix type ignore in SSLContext creation connector test (#8678) --- tests/test_connector.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/tests/test_connector.py b/tests/test_connector.py index 0d6ca18ef53..8dd7a294b30 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -10,7 +10,7 @@ import uuid from collections import deque from contextlib import closing -from typing import Any, List, Optional +from typing import Any, List, Optional, Type from unittest import mock import pytest @@ -1630,16 +1630,14 @@ async def test_ssl_context_once() -> None: @pytest.mark.parametrize("exception", [OSError, ssl.SSLError, asyncio.CancelledError]) -async def test_ssl_context_creation_raises(exception: BaseException) -> None: +async def test_ssl_context_creation_raises(exception: Type[BaseException]) -> None: """Test that we try again if SSLContext creation fails the first time.""" conn = aiohttp.TCPConnector() conn._made_ssl_context.clear() with mock.patch.object( conn, "_make_ssl_context", side_effect=exception - ), pytest.raises( # type: ignore[call-overload] - exception - ): + ), pytest.raises(exception): await conn._make_or_get_ssl_context(True) assert isinstance(await conn._make_or_get_ssl_context(True), ssl.SSLContext) From ef20502821a301df5d376d2d93191a13b7f5e895 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sat, 10 Aug 2024 12:08:16 -0500 Subject: [PATCH 0313/1511] Release 3.10.3 (#8675) --- CHANGES.rst | 68 +++++++++++++++++++++++++++++++++++++++++ CHANGES/8653.bugfix.rst | 1 - CHANGES/8660.misc.rst | 3 -- CHANGES/8661.misc.rst | 1 - CHANGES/8662.misc.rst | 3 -- CHANGES/8667.misc.rst | 1 - CHANGES/8672.bugfix.rst | 3 -- aiohttp/__init__.py | 2 +- 8 files changed, 69 insertions(+), 13 deletions(-) delete mode 100644 CHANGES/8653.bugfix.rst delete mode 100644 CHANGES/8660.misc.rst delete mode 100644 CHANGES/8661.misc.rst delete mode 100644 CHANGES/8662.misc.rst delete mode 100644 CHANGES/8667.misc.rst delete mode 100644 CHANGES/8672.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index 0150c95494c..43ca69235e3 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,74 @@ .. towncrier release notes start +3.10.3 (2024-08-10) +======================== + +Bug fixes +--------- + +- Fixed multipart reading when stream buffer splits the boundary over several read() calls -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8653`. + + + +- Fixed :py:class:`aiohttp.TCPConnector` doing blocking I/O in the event loop to create the ``SSLContext`` -- by :user:`bdraco`. + + The blocking I/O would only happen once per verify mode. However, it could cause the event loop to block for a long time if the ``SSLContext`` creation is slow, which is more likely during startup when the disk cache is not yet present. + + + *Related issues and pull requests on GitHub:* + :issue:`8672`. + + + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of :py:meth:`~aiohttp.ClientWebSocketResponse.receive` and :py:meth:`~aiohttp.web.WebSocketResponse.receive` when there is no timeout. -- by :user:`bdraco`. 
+ + The timeout context manager is now avoided when there is no timeout as it accounted for up to 50% of the time spent in the :py:meth:`~aiohttp.ClientWebSocketResponse.receive` and :py:meth:`~aiohttp.web.WebSocketResponse.receive` methods. + + + *Related issues and pull requests on GitHub:* + :issue:`8660`. + + + +- Improved performance of starting request handlers with Python 3.12+ -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8661`. + + + +- Improved performance of HTTP keep-alive checks -- by :user:`bdraco`. + + Previously, when processing a request for a keep-alive connection, the keep-alive check would happen every second; the check is now rescheduled if it fires too early instead. + + + *Related issues and pull requests on GitHub:* + :issue:`8662`. + + + +- Improved performance of generating random WebSocket mask -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8667`. + + + + +---- + + 3.10.2 (2024-08-08) =================== diff --git a/CHANGES/8653.bugfix.rst b/CHANGES/8653.bugfix.rst deleted file mode 100644 index 5c4d66c181f..00000000000 --- a/CHANGES/8653.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed multipart reading when stream buffer splits the boundary over several read() calls -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8660.misc.rst b/CHANGES/8660.misc.rst deleted file mode 100644 index 8710063329e..00000000000 --- a/CHANGES/8660.misc.rst +++ /dev/null @@ -1,3 +0,0 @@ -Improved performance of :py:meth:`~aiohttp.ClientWebSocketResponse.receive` and :py:meth:`~aiohttp.web.WebSocketResponse.receive` when there is no timeout. -- by :user:`bdraco`. - -The timeout context manager is now avoided when there is no timeout as it accounted for up to 50% of the time spent in the :py:meth:`~aiohttp.ClientWebSocketResponse.receive` and :py:meth:`~aiohttp.web.WebSocketResponse.receive` methods. 
diff --git a/CHANGES/8661.misc.rst b/CHANGES/8661.misc.rst deleted file mode 100644 index c0a6fdadb37..00000000000 --- a/CHANGES/8661.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performance of starting request handlers with Python 3.12+ -- by :user:`bdraco`. diff --git a/CHANGES/8662.misc.rst b/CHANGES/8662.misc.rst deleted file mode 100644 index efe30a60cb2..00000000000 --- a/CHANGES/8662.misc.rst +++ /dev/null @@ -1,3 +0,0 @@ -Improved performance of HTTP keep-alive checks -- by :user:`bdraco`. - -Previously, when processing a request for a keep-alive connection, the keep-alive check would happen every second; the check is now rescheduled if it fires too early instead. diff --git a/CHANGES/8667.misc.rst b/CHANGES/8667.misc.rst deleted file mode 100644 index 1c43b6e069a..00000000000 --- a/CHANGES/8667.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performance of generating random WebSocket mask -- by :user:`bdraco`. diff --git a/CHANGES/8672.bugfix.rst b/CHANGES/8672.bugfix.rst deleted file mode 100644 index a57ed16d5d2..00000000000 --- a/CHANGES/8672.bugfix.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fixed :py:class:`aiohttp.TCPConnector` doing blocking I/O in the event loop to create the ``SSLContext`` -- by :user:`bdraco`. - -The blocking I/O would only happen once per verify mode. However, it could cause the event loop to block for a long time if the ``SSLContext`` creation is slow, which is more likely during startup when the disk cache is not yet present. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index f050229f008..de896a56398 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.10.2" +__version__ = "3.10.3" from typing import TYPE_CHECKING, Tuple From 8c686e6a43fc756d6766331c98edda14c3f79369 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sat, 10 Aug 2024 13:28:23 -0500 Subject: [PATCH 0314/1511] Bump version to 3.10.4.dev0 --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index de896a56398..bbda1fe6b57 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.10.3" +__version__ = "3.10.4.dev0" from typing import TYPE_CHECKING, Tuple From 1500a53bc98216d72b6aabd4513a1d402ada0190 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 12 Aug 2024 10:37:41 +0000 Subject: [PATCH 0315/1511] Bump gunicorn from 22.0.0 to 23.0.0 (#8683) Bumps [gunicorn](https://github.com/benoitc/gunicorn) from 22.0.0 to 23.0.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/benoitc/gunicorn/releases">gunicorn's releases</a>.</em></p> <blockquote> <h2>23.0.0</h2> <p>Gunicorn 23.0.0 has been released. This version improve HTTP 1.1. support and which improve safety</p> <p>You're invited to upgrade asap your own installation.</p> <h1>23.0.0 - 2024-08-10</h1> <ul> <li>minor docs fixes (:pr:<code>3217</code>, :pr:<code>3089</code>, :pr:<code>3167</code>)</li> <li>worker_class parameter accepts a class (:pr:<code>3079</code>)</li> <li>fix deadlock if request terminated during chunked parsing (:pr:<code>2688</code>)</li> <li>permit receiving Transfer-Encodings: compress, deflate, gzip (:pr:<code>3261</code>)</li> <li>permit Transfer-Encoding headers specifying multiple encodings. 
note: no parameters, still (:pr:<code>3261</code>)</li> <li>sdist generation now explicitly excludes sphinx build folder (:pr:<code>3257</code>)</li> <li>decode bytes-typed status (as can be passed by gevent) as utf-8 instead of raising <code>TypeError</code> (:pr:<code>2336</code>)</li> <li>raise correct Exception when encounting invalid chunked requests (:pr:<code>3258</code>)</li> <li>the SCRIPT_NAME and PATH_INFO headers, when received from allowed forwarders, are no longer restricted for containing an underscore (:pr:<code>3192</code>)</li> <li>include IPv6 loopback address <code>[::1]</code> in default for :ref:<code>forwarded-allow-ips</code> and :ref:<code>proxy-allow-ips</code> (:pr:<code>3192</code>)</li> </ul> <p>** NOTE **</p> <ul> <li>The SCRIPT_NAME change mitigates a regression that appeared first in the 22.0.0 release</li> <li>Review your :ref:<code>forwarded-allow-ips</code> setting if you are still not seeing the SCRIPT_NAME transmitted</li> <li>Review your :ref:<code>forwarder-headers</code> setting if you are missing headers after upgrading from a version prior to 22.0.0</li> </ul> <p>** Breaking changes **</p> <ul> <li>refuse requests where the uri field is empty (:pr:<code>3255</code>)</li> <li>refuse requests with invalid CR/LR/NUL in heade field values (:pr:<code>3253</code>)</li> <li>remove temporary <code>--tolerate-dangerous-framing</code> switch from 22.0 (:pr:<code>3260</code>)</li> <li>If any of the breaking changes affect you, be aware that now refused requests can post a security problem, especially so in setups involving request pipe-lining and/or proxies.</li> </ul> <p>Fix CVE-2024-1135</p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/benoitc/gunicorn/commit/411986d6191114dd1d1bbb9c72c948dbf0ef0425"><code>411986d</code></a> fix doc</li> <li><a href="https://github.com/benoitc/gunicorn/commit/334392e7795f2017e83f7054d372422512d6f4b6"><code>334392e</code></a> Merge pull request <a 
href="https://redirect.github.com/benoitc/gunicorn/issues/2559">#2559</a> from laggardkernel/bugfix/reexec-env</li> <li><a href="https://github.com/benoitc/gunicorn/commit/e75c3533e32f91a9dceba9e8e1341fea5540ba81"><code>e75c353</code></a> Merge pull request <a href="https://redirect.github.com/benoitc/gunicorn/issues/3189">#3189</a> from pajod/patch-py36</li> <li><a href="https://github.com/benoitc/gunicorn/commit/9357b28dd867950e33ca3864207cb35a1eb8ba6f"><code>9357b28</code></a> keep document user in access_log_format setting</li> <li><a href="https://github.com/benoitc/gunicorn/commit/79fdef0822cbfe7e16b659b07230af9be098d5fc"><code>79fdef0</code></a> bump to 23.0.0</li> <li><a href="https://github.com/benoitc/gunicorn/commit/3acd9fbfd1159ca3cd80a8052ada89a0bf27f806"><code>3acd9fb</code></a> Merge pull request <a href="https://redirect.github.com/benoitc/gunicorn/issues/2620">#2620</a> from talkerbox/improve-access-log-format-docs</li> <li><a href="https://github.com/benoitc/gunicorn/commit/3f56d76548e4ade034bf5e174737902970285d1f"><code>3f56d76</code></a> Merge pull request <a href="https://redirect.github.com/benoitc/gunicorn/issues/3192">#3192</a> from pajod/patch-allowed-script-name</li> <li><a href="https://github.com/benoitc/gunicorn/commit/256d474a7910bd605f2cc8c082b79c1ae55215a9"><code>256d474</code></a> docs: revert duped directive</li> <li><a href="https://github.com/benoitc/gunicorn/commit/ffa48b581dcaa75f17fd2df263515e4266feeef6"><code>ffa48b5</code></a> test: default change was intentional</li> <li><a href="https://github.com/benoitc/gunicorn/commit/52538ca9070b5e7ead5d0fa731e82a622dc6f3ee"><code>52538ca</code></a> docs: recommend SCRIPT_NAME=/subfolder</li> <li>Additional commits viewable in <a href="https://github.com/benoitc/gunicorn/compare/22.0.0...23.0.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=gunicorn&package-manager=pip&previous-version=22.0.0&new-version=23.0.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 797d8fe353f..a3992400794 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -22,7 +22,7 @@ frozenlist==1.4.1 # via # -r requirements/runtime-deps.in # aiosignal -gunicorn==22.0.0 +gunicorn==23.0.0 # via -r requirements/base.in idna==3.4 # via yarl diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 3eefb01e60d..8e01e7dc044 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -86,7 +86,7 @@ funcparserlib==1.0.1 # via blockdiag gidgethub==5.0.1 # via cherry-picker -gunicorn==22.0.0 +gunicorn==23.0.0 # via -r requirements/base.in identify==2.3.5 # via pre-commit diff --git a/requirements/dev.txt b/requirements/dev.txt index 19b6a90251e..2781512a526 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -82,7 +82,7 @@ funcparserlib==1.0.1 # via blockdiag gidgethub==5.3.0 # via cherry-picker -gunicorn==22.0.0 +gunicorn==23.0.0 # 
via -r requirements/base.in identify==2.5.26 # via pre-commit diff --git a/requirements/test.txt b/requirements/test.txt index d5efe1e2218..fbf0a67e484 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -44,7 +44,7 @@ frozenlist==1.4.1 # via # -r requirements/runtime-deps.in # aiosignal -gunicorn==22.0.0 +gunicorn==23.0.0 # via -r requirements/base.in idna==3.4 # via From 52581e8af7ceed473659ae3d88795183c71ee37d Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 12 Aug 2024 09:40:15 -0500 Subject: [PATCH 0316/1511] [PR #8680/4f41d05 backport][3.11] Fix close race that prevented the close code from reaching the client (#8687) --- CHANGES/8680.bugfix.rst | 1 + aiohttp/web_ws.py | 28 ++++++------ tests/test_web_websocket_functional.py | 60 ++++++++++++++++++++++++++ 3 files changed, 73 insertions(+), 16 deletions(-) create mode 100644 CHANGES/8680.bugfix.rst diff --git a/CHANGES/8680.bugfix.rst b/CHANGES/8680.bugfix.rst new file mode 100644 index 00000000000..2149f12aaaf --- /dev/null +++ b/CHANGES/8680.bugfix.rst @@ -0,0 +1 @@ +Fixed a race closing the server-side WebSocket where the close code would not reach the client. -- by :user:`bdraco`. 
diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index fe8f537dc76..98f26cc48c6 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -406,23 +406,10 @@ async def close( if self._writer is None: raise RuntimeError("Call .prepare() first") - self._cancel_heartbeat() - reader = self._reader - assert reader is not None - - # we need to break `receive()` cycle first, - # `close()` may be called from different task - if self._waiting and not self._closed: - if not self._close_wait: - assert self._loop is not None - self._close_wait = self._loop.create_future() - reader.feed_data(WS_CLOSING_MESSAGE, 0) - await self._close_wait - if self._closed: return False - self._set_closed() + try: await self._writer.close(code, message) writer = self._payload_writer @@ -437,12 +424,21 @@ async def close( self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) return True + reader = self._reader + assert reader is not None + # we need to break `receive()` cycle before we can call + # `reader.read()` as `close()` may be called from different task + if self._waiting: + assert self._loop is not None + assert self._close_wait is None + self._close_wait = self._loop.create_future() + reader.feed_data(WS_CLOSING_MESSAGE) + await self._close_wait + if self._closing: self._close_transport() return True - reader = self._reader - assert reader is not None try: async with async_timeout.timeout(self._timeout): msg = await reader.read() diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index 15ef33e3648..6540f134da8 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -3,6 +3,7 @@ import asyncio import contextlib import sys +import weakref from typing import Any, Optional import pytest @@ -10,6 +11,7 @@ import aiohttp from aiohttp import web from aiohttp.http import WSCloseCode, WSMsgType +from aiohttp.pytest_plugin import AiohttpClient async def test_websocket_can_prepare(loop, aiohttp_client) -> 
None: @@ -988,3 +990,61 @@ async def handler(request): await ws.close(code=WSCloseCode.OK, message="exit message") await closed + + +async def test_websocket_shutdown(aiohttp_client: AiohttpClient) -> None: + """Test that the client websocket gets the close message when the server is shutting down.""" + url = "/ws" + app = web.Application() + websockets = web.AppKey("websockets", weakref.WeakSet) + app[websockets] = weakref.WeakSet() + + # need for send signal shutdown server + shutdown_websockets = web.AppKey("shutdown_websockets", weakref.WeakSet) + app[shutdown_websockets] = weakref.WeakSet() + + async def websocket_handler(request: web.Request) -> web.WebSocketResponse: + websocket = web.WebSocketResponse() + await websocket.prepare(request) + request.app[websockets].add(websocket) + request.app[shutdown_websockets].add(websocket) + + try: + async for message in websocket: + await websocket.send_json({"ok": True, "message": message.json()}) + finally: + request.app[websockets].discard(websocket) + + return websocket + + async def on_shutdown(app: web.Application) -> None: + while app[shutdown_websockets]: + websocket = app[shutdown_websockets].pop() + await websocket.close( + code=aiohttp.WSCloseCode.GOING_AWAY, + message="Server shutdown", + ) + + app.router.add_get(url, websocket_handler) + app.on_shutdown.append(on_shutdown) + + client = await aiohttp_client(app) + + websocket = await client.ws_connect(url) + + message = {"message": "hi"} + await websocket.send_json(message) + reply = await websocket.receive_json() + assert reply == {"ok": True, "message": message} + + await app.shutdown() + + assert websocket.closed is False + + reply = await websocket.receive() + + assert reply.type is aiohttp.http.WSMsgType.CLOSE + assert reply.data == aiohttp.WSCloseCode.GOING_AWAY + assert reply.extra == "Server shutdown" + + assert websocket.closed is True From 50efb2a389414cf36012d3f6e86623f8eac3a42e Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Mon, 12 Aug 2024 09:40:34 -0500 Subject: [PATCH 0317/1511] [PR #8680/4f41d05 backport][3.10] Fix close race that prevented the close code from reaching the client (#8686) --- CHANGES/8680.bugfix.rst | 1 + aiohttp/web_ws.py | 28 ++++++------ tests/test_web_websocket_functional.py | 60 ++++++++++++++++++++++++++ 3 files changed, 73 insertions(+), 16 deletions(-) create mode 100644 CHANGES/8680.bugfix.rst diff --git a/CHANGES/8680.bugfix.rst b/CHANGES/8680.bugfix.rst new file mode 100644 index 00000000000..2149f12aaaf --- /dev/null +++ b/CHANGES/8680.bugfix.rst @@ -0,0 +1 @@ +Fixed a race closing the server-side WebSocket where the close code would not reach the client. -- by :user:`bdraco`. diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index fe8f537dc76..98f26cc48c6 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -406,23 +406,10 @@ async def close( if self._writer is None: raise RuntimeError("Call .prepare() first") - self._cancel_heartbeat() - reader = self._reader - assert reader is not None - - # we need to break `receive()` cycle first, - # `close()` may be called from different task - if self._waiting and not self._closed: - if not self._close_wait: - assert self._loop is not None - self._close_wait = self._loop.create_future() - reader.feed_data(WS_CLOSING_MESSAGE, 0) - await self._close_wait - if self._closed: return False - self._set_closed() + try: await self._writer.close(code, message) writer = self._payload_writer @@ -437,12 +424,21 @@ async def close( self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) return True + reader = self._reader + assert reader is not None + # we need to break `receive()` cycle before we can call + # `reader.read()` as `close()` may be called from different task + if self._waiting: + assert self._loop is not None + assert self._close_wait is None + self._close_wait = self._loop.create_future() + reader.feed_data(WS_CLOSING_MESSAGE) + await self._close_wait + if 
self._closing: self._close_transport() return True - reader = self._reader - assert reader is not None try: async with async_timeout.timeout(self._timeout): msg = await reader.read() diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index 15ef33e3648..6540f134da8 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -3,6 +3,7 @@ import asyncio import contextlib import sys +import weakref from typing import Any, Optional import pytest @@ -10,6 +11,7 @@ import aiohttp from aiohttp import web from aiohttp.http import WSCloseCode, WSMsgType +from aiohttp.pytest_plugin import AiohttpClient async def test_websocket_can_prepare(loop, aiohttp_client) -> None: @@ -988,3 +990,61 @@ async def handler(request): await ws.close(code=WSCloseCode.OK, message="exit message") await closed + + +async def test_websocket_shutdown(aiohttp_client: AiohttpClient) -> None: + """Test that the client websocket gets the close message when the server is shutting down.""" + url = "/ws" + app = web.Application() + websockets = web.AppKey("websockets", weakref.WeakSet) + app[websockets] = weakref.WeakSet() + + # need for send signal shutdown server + shutdown_websockets = web.AppKey("shutdown_websockets", weakref.WeakSet) + app[shutdown_websockets] = weakref.WeakSet() + + async def websocket_handler(request: web.Request) -> web.WebSocketResponse: + websocket = web.WebSocketResponse() + await websocket.prepare(request) + request.app[websockets].add(websocket) + request.app[shutdown_websockets].add(websocket) + + try: + async for message in websocket: + await websocket.send_json({"ok": True, "message": message.json()}) + finally: + request.app[websockets].discard(websocket) + + return websocket + + async def on_shutdown(app: web.Application) -> None: + while app[shutdown_websockets]: + websocket = app[shutdown_websockets].pop() + await websocket.close( + code=aiohttp.WSCloseCode.GOING_AWAY, + message="Server 
shutdown", + ) + + app.router.add_get(url, websocket_handler) + app.on_shutdown.append(on_shutdown) + + client = await aiohttp_client(app) + + websocket = await client.ws_connect(url) + + message = {"message": "hi"} + await websocket.send_json(message) + reply = await websocket.receive_json() + assert reply == {"ok": True, "message": message} + + await app.shutdown() + + assert websocket.closed is False + + reply = await websocket.receive() + + assert reply.type is aiohttp.http.WSMsgType.CLOSE + assert reply.data == aiohttp.WSCloseCode.GOING_AWAY + assert reply.extra == "Server shutdown" + + assert websocket.closed is True From 2f7c45ebc0e2adee709815fc26264c7940b732fd Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Mon, 12 Aug 2024 22:43:14 +0100 Subject: [PATCH 0318/1511] Fix decoding base64 chunk for BodyPartReader (#3867) (#8688) (cherry picked from commit aeb01ce3bda7497d7cc6c434ac6806aca291e273) --------- Co-authored-by: Yevhenii Hyzyla <hyzyla@gmail.com> Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- CHANGES/3867.bugfix.rst | 1 + aiohttp/multipart.py | 25 +++++++++++++++++++++++++ tests/test_multipart.py | 11 +++++++++++ 3 files changed, 37 insertions(+) create mode 100644 CHANGES/3867.bugfix.rst diff --git a/CHANGES/3867.bugfix.rst b/CHANGES/3867.bugfix.rst new file mode 100644 index 00000000000..12376bf6ef0 --- /dev/null +++ b/CHANGES/3867.bugfix.rst @@ -0,0 +1 @@ +Fixed decoding base64 chunk in BodyPartReader -- by :user:`hyzyla`. 
diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index 26780e3060c..da016039011 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -323,6 +323,31 @@ async def read_chunk(self, size: int = chunk_size) -> bytes: else: chunk = await self._read_chunk_from_stream(size) + # For the case of base64 data, we must read a fragment of size with a + # remainder of 0 by dividing by 4 for string without symbols \n or \r + encoding = self.headers.get(CONTENT_TRANSFER_ENCODING) + if encoding and encoding.lower() == "base64": + stripped_chunk = b"".join(chunk.split()) + remainder = len(stripped_chunk) % 4 + + while remainder != 0 and not self.at_eof(): + over_chunk_size = 4 - remainder + over_chunk = b"" + + if self._prev_chunk: + over_chunk = self._prev_chunk[:over_chunk_size] + self._prev_chunk = self._prev_chunk[len(over_chunk) :] + + if len(over_chunk) != over_chunk_size: + over_chunk += await self._content.read(4 - len(over_chunk)) + + if not over_chunk: + self._at_eof = True + + stripped_chunk += b"".join(over_chunk.split()) + chunk += over_chunk + remainder = len(stripped_chunk) % 4 + self._read_bytes += len(chunk) if self._read_bytes == self._length: self._at_eof = True diff --git a/tests/test_multipart.py b/tests/test_multipart.py index 6fc9fe573ec..bbbc1c666ca 100644 --- a/tests/test_multipart.py +++ b/tests/test_multipart.py @@ -378,6 +378,17 @@ async def test_read_with_content_transfer_encoding_quoted_printable(self) -> Non ) assert result == expected + async def test_decode_with_content_transfer_encoding_base64(self) -> None: + with Stream(b"VG\r\r\nltZSB0byBSZ\r\nWxheCE=\r\n--:--") as stream: + obj = aiohttp.BodyPartReader( + BOUNDARY, {CONTENT_TRANSFER_ENCODING: "base64"}, stream + ) + result = b"" + while not obj.at_eof(): + chunk = await obj.read_chunk(size=6) + result += obj.decode(chunk) + assert b"Time to Relax!" 
== result + @pytest.mark.parametrize("encoding", ("binary", "8bit", "7bit")) async def test_read_with_content_transfer_encoding_binary( self, encoding: str From 91c9c6fbf60edfbb43c119a15be47e4f43166865 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Mon, 12 Aug 2024 22:55:44 +0100 Subject: [PATCH 0319/1511] Fix decoding base64 chunk for BodyPartReader (#3867) (#8689) (cherry picked from commit aeb01ce3bda7497d7cc6c434ac6806aca291e273) --------- Co-authored-by: Yevhenii Hyzyla <hyzyla@gmail.com> --- CHANGES/3867.bugfix.rst | 1 + aiohttp/multipart.py | 25 +++++++++++++++++++++++++ tests/test_multipart.py | 11 +++++++++++ 3 files changed, 37 insertions(+) create mode 100644 CHANGES/3867.bugfix.rst diff --git a/CHANGES/3867.bugfix.rst b/CHANGES/3867.bugfix.rst new file mode 100644 index 00000000000..12376bf6ef0 --- /dev/null +++ b/CHANGES/3867.bugfix.rst @@ -0,0 +1 @@ +Fixed decoding base64 chunk in BodyPartReader -- by :user:`hyzyla`. diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index 26780e3060c..da016039011 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -323,6 +323,31 @@ async def read_chunk(self, size: int = chunk_size) -> bytes: else: chunk = await self._read_chunk_from_stream(size) + # For the case of base64 data, we must read a fragment of size with a + # remainder of 0 by dividing by 4 for string without symbols \n or \r + encoding = self.headers.get(CONTENT_TRANSFER_ENCODING) + if encoding and encoding.lower() == "base64": + stripped_chunk = b"".join(chunk.split()) + remainder = len(stripped_chunk) % 4 + + while remainder != 0 and not self.at_eof(): + over_chunk_size = 4 - remainder + over_chunk = b"" + + if self._prev_chunk: + over_chunk = self._prev_chunk[:over_chunk_size] + self._prev_chunk = self._prev_chunk[len(over_chunk) :] + + if len(over_chunk) != over_chunk_size: + over_chunk += await self._content.read(4 - len(over_chunk)) + + if not over_chunk: + self._at_eof = True + + stripped_chunk += 
b"".join(over_chunk.split()) + chunk += over_chunk + remainder = len(stripped_chunk) % 4 + self._read_bytes += len(chunk) if self._read_bytes == self._length: self._at_eof = True diff --git a/tests/test_multipart.py b/tests/test_multipart.py index 6fc9fe573ec..bbbc1c666ca 100644 --- a/tests/test_multipart.py +++ b/tests/test_multipart.py @@ -378,6 +378,17 @@ async def test_read_with_content_transfer_encoding_quoted_printable(self) -> Non ) assert result == expected + async def test_decode_with_content_transfer_encoding_base64(self) -> None: + with Stream(b"VG\r\r\nltZSB0byBSZ\r\nWxheCE=\r\n--:--") as stream: + obj = aiohttp.BodyPartReader( + BOUNDARY, {CONTENT_TRANSFER_ENCODING: "base64"}, stream + ) + result = b"" + while not obj.at_eof(): + chunk = await obj.read_chunk(size=6) + result += obj.decode(chunk) + assert b"Time to Relax!" == result + @pytest.mark.parametrize("encoding", ("binary", "8bit", "7bit")) async def test_read_with_content_transfer_encoding_binary( self, encoding: str From d4f124abfc21250b278916df0172ae5f1fb50d4b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 13 Aug 2024 11:19:00 +0000 Subject: [PATCH 0320/1511] Bump proxy-py from 2.4.5 to 2.4.7 (#8692) Bumps [proxy-py](https://github.com/abhinavsingh/proxy.py) from 2.4.5 to 2.4.7. 
<details> <summary>Commits</summary> <ul> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/aa596f9d0cc97115db3c7f352098fcd3f07acc3e"><code>aa596f9</code></a> Option to not verify upstream ssl (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1459">#1459</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/0bfd7d7fdb34b55ad9082ac9770ec2c31bdb8cbc"><code>0bfd7d7</code></a> Fix client for HTTPS endpoints with Python 3.12 (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1454">#1454</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/74c42f6e57dc6d334aa3a0f24a9bf0e5a14c2fa1"><code>74c42f6</code></a> <code>setup.cfg/extras_require</code> configuration (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1452">#1452</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/6602be3da81308ad2fa660f1c6f0287f63c396c7"><code>6602be3</code></a> <code>Prometheus Metrics</code> documentation</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/091ba361ef3506c9ed24ae56f350cf384d9b0cc8"><code>091ba36</code></a> Prometheus Metrics (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1447">#1447</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/71f3c658e8a7d99de97f80b0c84fcbbcf274f2b8"><code>71f3c65</code></a> Use <code>python:3.12-alpine</code> in Docker image</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/579838eea497a2cbfab36e8813f3c5a7c916beda"><code>579838e</code></a> Trigger build</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/50046d3f68d75896a618ba4bb18f7267df00aae9"><code>50046d3</code></a> TLS intercept self-signed servers using <code>--insecure-tls-interception</code> (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1446">#1446</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/39854e1d799e198c393d3721e8b75602f72554b0"><code>39854e1</code></a> 
Use Python 3.12 as default Docker base image (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1445">#1445</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/1e4e87d5154b02d358227ff33c4e2ed5a3791ec1"><code>1e4e87d</code></a> Support for Python 3.12 (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1444">#1444</a>)</li> <li>Additional commits viewable in <a href="https://github.com/abhinavsingh/proxy.py/compare/v2.4.5...v2.4.7">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=proxy-py&package-manager=pip&previous-version=2.4.5&new-version=2.4.7)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 8e01e7dc044..dd41bc8489e 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -144,7 +144,7 @@ pluggy==1.5.0 # via pytest pre-commit==3.5.0 # via -r requirements/lint.in -proxy-py==2.4.5 +proxy-py==2.4.7 # via -r requirements/test.in pycares==4.3.0 # via aiodns diff --git a/requirements/dev.txt b/requirements/dev.txt index 2781512a526..faf675fd5d0 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -139,7 +139,7 @@ pluggy==1.5.0 # via pytest pre-commit==3.5.0 # via -r requirements/lint.in -proxy-py==2.4.5 +proxy-py==2.4.7 # via -r requirements/test.in pycares==4.3.0 # via aiodns diff --git a/requirements/test.txt b/requirements/test.txt index fbf0a67e484..f0998821c08 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -67,7 +67,7 @@ packaging==23.1 # pytest pluggy==1.5.0 # via pytest -proxy-py==2.4.5 +proxy-py==2.4.7 # via -r requirements/test.in pycares==4.3.0 # 
via aiodns From fb1832560bcec1714b43c23cd8dc8a4584ec7f4a Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 14 Aug 2024 13:42:48 +0100 Subject: [PATCH 0321/1511] [PR #8694/d83804db backport][3.11] Add cleanup_ctx example for sharing resources across subapps (#8696) **This is a backport of PR #8694 as merged into master (d83804db6a77221c996dfa3710ba6cf6b58d08b9).** Co-authored-by: Sam Bull <git@sambull.org> --- docs/faq.rst | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/docs/faq.rst b/docs/faq.rst index 2de70f97d7b..30803da3576 100644 --- a/docs/faq.rst +++ b/docs/faq.rst @@ -279,7 +279,18 @@ A subapplication is an isolated unit by design. If you need to share a database object, do it explicitly:: subapp[db_key] = mainapp[db_key] - mainapp.add_subapp('/prefix', subapp) + mainapp.add_subapp("/prefix", subapp) + +This can also be done from a :ref:`cleanup context<aiohttp-web-cleanup-ctx>`:: + + async def db_context(app: web.Application) -> AsyncIterator[None]: + async with create_db() as db: + mainapp[db_key] = mainapp[subapp_key][db_key] = db + yield + + mainapp[subapp_key] = subapp + mainapp.add_subapp("/prefix", subapp) + mainapp.cleanup_ctx.append(db_context) How do I perform operations in a request handler after sending the response? 
From 7d593ec21f27ca7831a3549eb668905440893631 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 14 Aug 2024 13:48:58 +0100 Subject: [PATCH 0322/1511] [PR #8694/d83804db backport][3.10] Add cleanup_ctx example for sharing resources across subapps (#8695) **This is a backport of PR #8694 as merged into master (d83804db6a77221c996dfa3710ba6cf6b58d08b9).** Co-authored-by: Sam Bull <git@sambull.org> --- docs/faq.rst | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/docs/faq.rst b/docs/faq.rst index 2de70f97d7b..30803da3576 100644 --- a/docs/faq.rst +++ b/docs/faq.rst @@ -279,7 +279,18 @@ A subapplication is an isolated unit by design. If you need to share a database object, do it explicitly:: subapp[db_key] = mainapp[db_key] - mainapp.add_subapp('/prefix', subapp) + mainapp.add_subapp("/prefix", subapp) + +This can also be done from a :ref:`cleanup context<aiohttp-web-cleanup-ctx>`:: + + async def db_context(app: web.Application) -> AsyncIterator[None]: + async with create_db() as db: + mainapp[db_key] = mainapp[subapp_key][db_key] = db + yield + + mainapp[subapp_key] = subapp + mainapp.add_subapp("/prefix", subapp) + mainapp.cleanup_ctx.append(db_context) How do I perform operations in a request handler after sending the response? 
From 2cca51ceaffa722b5e65f40da3af026f906616f5 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 15 Aug 2024 19:08:54 +0100 Subject: [PATCH 0323/1511] [PR #8700/92e14f86 backport][3.11] Add aiohttp-apischema to supported libraries (#8703) **This is a backport of PR #8700 as merged into master (92e14f86b59e683b88a7ffe0e1fab75b55e77cd7).** Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8700.doc.rst | 1 + docs/third_party.rst | 4 ++++ 2 files changed, 5 insertions(+) create mode 100644 CHANGES/8700.doc.rst diff --git a/CHANGES/8700.doc.rst b/CHANGES/8700.doc.rst new file mode 100644 index 00000000000..71157969a75 --- /dev/null +++ b/CHANGES/8700.doc.rst @@ -0,0 +1 @@ +Added ``aiohttp-apischema`` to supported libraries -- by :user:`Dreamsorcerer`. diff --git a/docs/third_party.rst b/docs/third_party.rst index 797f9f011ec..e8095c7f09d 100644 --- a/docs/third_party.rst +++ b/docs/third_party.rst @@ -30,6 +30,10 @@ and located on https://github.com/aio-libs aiohttp extensions ^^^^^^^^^^^^^^^^^^ +- `aiohttp-apischema <https://github.com/aio-libs/aiohttp-apischema>`_ + provides automatic API schema generation and validation of user input + for :mod:`aiohttp.web`. + - `aiohttp-session <https://github.com/aio-libs/aiohttp-session>`_ provides sessions for :mod:`aiohttp.web`. 
From 6a09242294dd7452cff7d71d18adbbd3f5e85503 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 15 Aug 2024 19:09:07 +0100 Subject: [PATCH 0324/1511] [PR #8700/92e14f86 backport][3.10] Add aiohttp-apischema to supported libraries (#8702) **This is a backport of PR #8700 as merged into master (92e14f86b59e683b88a7ffe0e1fab75b55e77cd7).** Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8700.doc.rst | 1 + docs/third_party.rst | 4 ++++ 2 files changed, 5 insertions(+) create mode 100644 CHANGES/8700.doc.rst diff --git a/CHANGES/8700.doc.rst b/CHANGES/8700.doc.rst new file mode 100644 index 00000000000..71157969a75 --- /dev/null +++ b/CHANGES/8700.doc.rst @@ -0,0 +1 @@ +Added ``aiohttp-apischema`` to supported libraries -- by :user:`Dreamsorcerer`. diff --git a/docs/third_party.rst b/docs/third_party.rst index 797f9f011ec..e8095c7f09d 100644 --- a/docs/third_party.rst +++ b/docs/third_party.rst @@ -30,6 +30,10 @@ and located on https://github.com/aio-libs aiohttp extensions ^^^^^^^^^^^^^^^^^^ +- `aiohttp-apischema <https://github.com/aio-libs/aiohttp-apischema>`_ + provides automatic API schema generation and validation of user input + for :mod:`aiohttp.web`. + - `aiohttp-session <https://github.com/aio-libs/aiohttp-session>`_ provides sessions for :mod:`aiohttp.web`. 
From 58e859763017daaac67c66c02b0b782e23e09798 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 15 Aug 2024 22:01:19 +0000 Subject: [PATCH 0325/1511] [PR #8693/7ca244ef backport][3.10] Fix missing logic in quickstart example (#8705) **This is a backport of PR #8693 as merged into master (7ca244efb622773be36f6c8ff9b82200cbc665ff).** Co-authored-by: NewUserHa <32261870+NewUserHa@users.noreply.github.com> --- docs/web_quickstart.rst | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/docs/web_quickstart.rst b/docs/web_quickstart.rst index c36a995dc09..fcd2b686d06 100644 --- a/docs/web_quickstart.rst +++ b/docs/web_quickstart.rst @@ -441,8 +441,11 @@ third-party library, :mod:`aiohttp_session`, that adds *session* support:: async def handler(request): session = await get_session(request) - last_visit = session['last_visit'] if 'last_visit' in session else None - text = 'Last visited: {}'.format(last_visit) + + last_visit = session.get("last_visit") + session["last_visit"] = time.time() + text = "Last visited: {}".format(last_visit) + return web.Response(text=text) async def make_app(): From 4f27a274594a4238301df23e4026f431d6fcf4a3 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 15 Aug 2024 22:06:52 +0000 Subject: [PATCH 0326/1511] [PR #8693/7ca244ef backport][3.11] Fix missing logic in quickstart example (#8706) **This is a backport of PR #8693 as merged into master (7ca244efb622773be36f6c8ff9b82200cbc665ff).** Co-authored-by: NewUserHa <32261870+NewUserHa@users.noreply.github.com> --- docs/web_quickstart.rst | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/docs/web_quickstart.rst b/docs/web_quickstart.rst index c36a995dc09..fcd2b686d06 100644 --- a/docs/web_quickstart.rst +++ b/docs/web_quickstart.rst @@ -441,8 +441,11 @@ third-party library, :mod:`aiohttp_session`, that adds *session* support:: async 
def handler(request): session = await get_session(request) - last_visit = session['last_visit'] if 'last_visit' in session else None - text = 'Last visited: {}'.format(last_visit) + + last_visit = session.get("last_visit") + session["last_visit"] = time.time() + text = "Last visited: {}".format(last_visit) + return web.Response(text=text) async def make_app(): From 53a00c01e505bb03d8222feed2c80b957c8cdd1d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 16 Aug 2024 11:20:13 +0000 Subject: [PATCH 0327/1511] Bump uvloop from 0.19.0 to 0.20.0 (#8712) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [uvloop](https://github.com/MagicStack/uvloop) from 0.19.0 to 0.20.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/MagicStack/uvloop/releases">uvloop's releases</a>.</em></p> <blockquote> <h2>v0.20.0</h2> <h1>Changes</h1> <ul> <li>Upgrade libuv to v1.48.0 (<a href="https://redirect.github.com/MagicStack/uvloop/issues/600">#600</a>) (by <a href="https://github.com/niklasr22"><code>@​niklasr22</code></a> <a href="https://github.com/fantix"><code>@​fantix</code></a> in 77778525 for <a href="https://redirect.github.com/MagicStack/uvloop/issues/596">#596</a> <a href="https://redirect.github.com/MagicStack/uvloop/issues/615">#615</a>)</li> </ul> <h1>Fixes</h1> <ul> <li> <p>Fix test_create_server_4 with Python 3.12.5 (<a href="https://redirect.github.com/MagicStack/uvloop/issues/614">#614</a>) (by <a href="https://github.com/shadchin"><code>@​shadchin</code></a> in 62f92393)</p> </li> <li> <p>Use len(os.sched_getaffinity(0)) instead of os.cpu_count() (<a href="https://redirect.github.com/MagicStack/uvloop/issues/591">#591</a>) (by <a href="https://github.com/avkarenow"><code>@​avkarenow</code></a> in c8531c24 for <a href="https://redirect.github.com/MagicStack/uvloop/issues/591">#591</a>)</p> </li> <li> <p>Inline 
_Py_RestoreSignals() from CPython (<a href="https://redirect.github.com/MagicStack/uvloop/issues/604">#604</a>) (by <a href="https://github.com/befeleme"><code>@​befeleme</code></a> in 8511ba1f for <a href="https://redirect.github.com/MagicStack/uvloop/issues/603">#603</a>)</p> </li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/MagicStack/uvloop/commit/2d35f106d3ccc51d648b4ab90c964c0748b5873f"><code>2d35f10</code></a> uvloop 0.20.0</li> <li><a href="https://github.com/MagicStack/uvloop/commit/8511ba1fc2131ee0ceaecfd5ce57973012e94ef9"><code>8511ba1</code></a> Inline _Py_RestoreSignals() from CPython (<a href="https://redirect.github.com/MagicStack/uvloop/issues/604">#604</a>)</li> <li><a href="https://github.com/MagicStack/uvloop/commit/deb2cf9d653305b55d0d97a85b4067e33db0f1c0"><code>deb2cf9</code></a> Fix Python version in README.rst (<a href="https://redirect.github.com/MagicStack/uvloop/issues/599">#599</a>)</li> <li><a href="https://github.com/MagicStack/uvloop/commit/c8531c2431a45beeb3c86ec04dcd9a778dd67066"><code>c8531c2</code></a> Use len(os.sched_getaffinity(0)) instead of os.cpu_count() (<a href="https://redirect.github.com/MagicStack/uvloop/issues/591">#591</a>)</li> <li><a href="https://github.com/MagicStack/uvloop/commit/777785257cc6740e38f03b059bf43b944135a782"><code>7777852</code></a> Upgrade libuv to v1.48.0 (<a href="https://redirect.github.com/MagicStack/uvloop/issues/600">#600</a>)</li> <li><a href="https://github.com/MagicStack/uvloop/commit/62f92393b057fc94cf1fc259cf1e6ca8a8c4a1e7"><code>62f9239</code></a> Fix test_create_server_4 with Python 3.12.5 (<a href="https://redirect.github.com/MagicStack/uvloop/issues/614">#614</a>)</li> <li>See full diff in <a href="https://github.com/MagicStack/uvloop/compare/v0.19.0...v0.20.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=uvloop&package-manager=pip&previous-version=0.19.0&new-version=0.20.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 20 ++++++++++++++++---- requirements/test.txt | 2 +- 5 files changed, 20 insertions(+), 8 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index a3992400794..0e388a28d47 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -36,7 +36,7 @@ pycares==4.3.0 # via aiodns pycparser==2.21 # via cffi -uvloop==0.19.0 ; platform_system != "Windows" and implementation_name == "cpython" +uvloop==0.20.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in yarl==1.9.4 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index dd41bc8489e..95a421e46d3 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -268,7 +268,7 @@ uritemplate==4.1.1 # via gidgethub urllib3==1.26.7 # via requests -uvloop==0.19.0 ; platform_system != "Windows" +uvloop==0.20.0 ; platform_system != "Windows" # via # -r requirements/base.in # -r requirements/lint.in diff --git a/requirements/dev.txt 
b/requirements/dev.txt index faf675fd5d0..c8bcc46e282 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -255,7 +255,7 @@ uritemplate==4.1.1 # via gidgethub urllib3==2.0.4 # via requests -uvloop==0.19.0 ; platform_system != "Windows" and implementation_name == "cpython" +uvloop==0.20.0 ; platform_system != "Windows" and implementation_name == "cpython" # via # -r requirements/base.in # -r requirements/lint.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 97809fe3dde..ce24882252f 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -15,7 +15,9 @@ async-timeout==4.0.3 certifi==2024.2.2 # via requests cffi==1.16.0 - # via pycares + # via + # cryptography + # pycares cfgv==3.3.1 # via pre-commit charset-normalizer==3.3.2 @@ -24,6 +26,8 @@ click==8.1.6 # via # slotscheck # typer +cryptography==43.0.0 + # via trustme distlib==0.3.7 # via virtualenv exceptiongroup==1.1.2 @@ -35,7 +39,9 @@ freezegun==1.5.1 identify==2.5.26 # via pre-commit idna==3.7 - # via requests + # via + # requests + # trustme iniconfig==2.0.0 # via pytest markdown-it-py==3.0.0 @@ -67,9 +73,13 @@ pydantic-core==2.18.2 pygments==2.17.2 # via rich pytest==8.3.2 - # via -r requirements/lint.in + # via + # -r requirements/lint.in + # pytest-mock pytest-mock==3.14.0 # via -r requirements/lint.in +python-dateutil==2.9.0.post0 + # via freezegun python-on-whales==0.72.0 # via -r requirements/lint.in pyyaml==6.0.1 @@ -80,6 +90,8 @@ rich==13.7.1 # via typer shellingham==1.5.4 # via typer +six==1.16.0 + # via python-dateutil slotscheck==0.19.0 # via -r requirements/lint.in tomli==2.0.1 @@ -105,7 +117,7 @@ typing-extensions==4.11.0 # typer urllib3==2.2.1 # via requests -uvloop==0.19.0 ; platform_system != "Windows" +uvloop==0.20.0 ; platform_system != "Windows" # via -r requirements/lint.in virtualenv==20.24.2 # via pre-commit diff --git a/requirements/test.txt b/requirements/test.txt index f0998821c08..502017d7149 100644 --- a/requirements/test.txt +++ 
b/requirements/test.txt @@ -121,7 +121,7 @@ typing-extensions==4.11.0 # typer urllib3==2.0.4 # via requests -uvloop==0.19.0 ; platform_system != "Windows" and implementation_name == "cpython" +uvloop==0.20.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in wait-for-it==2.2.2 # via -r requirements/test.in From 1f69dfe92d115059155b2b524a04d2d784a49595 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 16 Aug 2024 11:27:54 +0000 Subject: [PATCH 0328/1511] Bump aiohappyeyeballs from 2.3.5 to 2.3.6 (#8713) Bumps [aiohappyeyeballs](https://github.com/aio-libs/aiohappyeyeballs) from 2.3.5 to 2.3.6. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/aiohappyeyeballs/releases">aiohappyeyeballs's releases</a>.</em></p> <blockquote> <h1>v2.3.6 (2024-08-16)</h1> <h2>Fix</h2> <ul> <li>fix: adjust license to Python-2.0.1 (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/82">#82</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/30a2dc57c49d1000ebdafa8c81ecf4f79e35c9f3"><code>30a2dc5</code></a>)</li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/aiohappyeyeballs/blob/main/CHANGELOG.md">aiohappyeyeballs's changelog</a>.</em></p> <blockquote> <h2>v2.3.6 (2024-08-16)</h2> <h3>Fix</h3> <ul> <li>Adjust license to python-2.0.1 (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/82">#82</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/30a2dc57c49d1000ebdafa8c81ecf4f79e35c9f3"><code>30a2dc5</code></a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/97db870ea3e74060e5ef39692ff86c0fda565564"><code>97db870</code></a> 2.3.6</li> <li><a 
href="https://github.com/aio-libs/aiohappyeyeballs/commit/30a2dc57c49d1000ebdafa8c81ecf4f79e35c9f3"><code>30a2dc5</code></a> fix: adjust license to Python-2.0.1 (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/82">#82</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/42d34857f4b7e92d9681ef567f20832005cafb38"><code>42d3485</code></a> chore(pre-commit.ci): pre-commit autoupdate (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/80">#80</a>)</li> <li>See full diff in <a href="https://github.com/aio-libs/aiohappyeyeballs/compare/v2.3.5...v2.3.6">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=aiohappyeyeballs&package-manager=pip&previous-version=2.3.5&new-version=2.3.6)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 0e388a28d47..9aa29b61f03 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.5 +aiohappyeyeballs==2.3.6 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 95a421e46d3..3ec4e6fe313 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via # -r requirements/lint.in # -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.5 +aiohappyeyeballs==2.3.6 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.6 # via -r requirements/doc.in diff --git a/requirements/dev.txt 
b/requirements/dev.txt index c8bcc46e282..0c7fa1280d4 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via # -r requirements/lint.in # -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.5 +aiohappyeyeballs==2.3.6 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.6 # via -r requirements/doc.in diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index a96f1981f81..5c42f7a712c 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.5 +aiohappyeyeballs==2.3.6 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 502017d7149..1b647f78edd 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.5 +aiohappyeyeballs==2.3.6 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in From 7cb7189efc3bbd15414c55e809a41ec2caf9d5e5 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 16 Aug 2024 16:29:44 +0100 Subject: [PATCH 0329/1511] [PR #8714/8690b0fb backport][3.11] Minor improvements to testing docs (#8716) **This is a backport of PR #8714 as merged into master (8690b0fb305d1a9cd8dab0ccad35142205b0d625).** --------- Co-authored-by: Sam Bull <git@sambull.org> --- docs/testing.rst | 23 +++++++---------------- 1 file changed, 7 insertions(+), 16 deletions(-) diff --git a/docs/testing.rst b/docs/testing.rst index c2937b82282..828b5072b4d 100644 --- a/docs/testing.rst +++ b/docs/testing.rst @@ -32,20 +32,6 @@ insert ``pytest_plugins = 
'aiohttp.pytest_plugin'`` line into -Provisional Status -~~~~~~~~~~~~~~~~~~ - -The module is a **provisional**. - -*aiohttp* has a year and half period for removing deprecated API -(:ref:`aiohttp-backward-compatibility-policy`). - -But for :mod:`aiohttp.test_tools` the deprecation period could be reduced. - -Moreover we may break *backward compatibility* without *deprecation -period* for some very strong reason. - - The Test Client and Servers ~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -76,14 +62,19 @@ Pytest The :data:`aiohttp_client` fixture available from pytest-aiohttp_ plugin allows you to create a client to make requests to test your app. -A simple would be:: +To run these examples, you need to use `--asyncio-mode=auto` or add to your +pytest config file:: + + asyncio_mode = auto + +A simple test would be:: from aiohttp import web async def hello(request): return web.Response(text='Hello, world') - async def test_hello(aiohttp_client, loop): + async def test_hello(aiohttp_client): app = web.Application() app.router.add_get('/', hello) client = await aiohttp_client(app) From 677f6af46c375a0eb2b43bc254ae69967815e52b Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 16 Aug 2024 16:43:12 +0100 Subject: [PATCH 0330/1511] [PR #8714/8690b0fb backport][3.10] Minor improvements to testing docs (#8715) **This is a backport of PR #8714 as merged into master (8690b0fb305d1a9cd8dab0ccad35142205b0d625).** --------- Co-authored-by: Sam Bull <git@sambull.org> --- docs/testing.rst | 23 +++++++---------------- 1 file changed, 7 insertions(+), 16 deletions(-) diff --git a/docs/testing.rst b/docs/testing.rst index c2937b82282..828b5072b4d 100644 --- a/docs/testing.rst +++ b/docs/testing.rst @@ -32,20 +32,6 @@ insert ``pytest_plugins = 'aiohttp.pytest_plugin'`` line into -Provisional Status -~~~~~~~~~~~~~~~~~~ - -The module is a **provisional**. 
- -*aiohttp* has a year and half period for removing deprecated API -(:ref:`aiohttp-backward-compatibility-policy`). - -But for :mod:`aiohttp.test_tools` the deprecation period could be reduced. - -Moreover we may break *backward compatibility* without *deprecation -period* for some very strong reason. - - The Test Client and Servers ~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -76,14 +62,19 @@ Pytest The :data:`aiohttp_client` fixture available from pytest-aiohttp_ plugin allows you to create a client to make requests to test your app. -A simple would be:: +To run these examples, you need to use `--asyncio-mode=auto` or add to your +pytest config file:: + + asyncio_mode = auto + +A simple test would be:: from aiohttp import web async def hello(request): return web.Response(text='Hello, world') - async def test_hello(aiohttp_client, loop): + async def test_hello(aiohttp_client): app = web.Application() app.router.add_get('/', hello) client = await aiohttp_client(app) From e0a7c0cb7131ce45265157be5caa683ed57d24a7 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Fri, 16 Aug 2024 17:26:08 +0100 Subject: [PATCH 0331/1511] Minor tweaks backported from #8701 (#8717) --- aiohttp/helpers.py | 2 +- aiohttp/multipart.py | 21 +++++++++++++-------- 2 files changed, 14 insertions(+), 9 deletions(-) diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index 437c871e8f7..ccfa9d5e2fe 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -629,7 +629,7 @@ def register( def close(self) -> None: self._callbacks.clear() - def start(self) -> Optional[asyncio.Handle]: + def start(self) -> Optional[asyncio.TimerHandle]: timeout = self._timeout if timeout is not None and timeout > 0: when = self._loop.time() + timeout diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index da016039011..e3680a7b2a1 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -2,6 +2,7 @@ import binascii import json import re +import sys import uuid import warnings import zlib @@ -10,7 +11,6 
@@ from typing import ( TYPE_CHECKING, Any, - AsyncIterator, Deque, Dict, Iterator, @@ -48,6 +48,13 @@ ) from .streams import StreamReader +if sys.version_info >= (3, 11): + from typing import Self +else: + from typing import TypeVar + + Self = TypeVar("Self", bound="BodyPartReader") + __all__ = ( "MultipartReader", "MultipartWriter", @@ -280,8 +287,8 @@ def __init__( self._content_eof = 0 self._cache: Dict[str, Any] = {} - def __aiter__(self) -> AsyncIterator["BodyPartReader"]: - return self # type: ignore[return-value] + def __aiter__(self: Self) -> Self: + return self async def __anext__(self) -> bytes: part = await self.next() @@ -581,7 +588,7 @@ class MultipartReader: response_wrapper_cls = MultipartResponseWrapper #: Multipart reader class, used to handle multipart/* body parts. #: None points to type(self) - multipart_reader_cls = None + multipart_reader_cls: Optional[Type["MultipartReader"]] = None #: Body part reader class for non multipart/* content types. part_reader_cls = BodyPartReader @@ -602,10 +609,8 @@ def __init__(self, headers: Mapping[str, str], content: StreamReader) -> None: self._at_bof = True self._unread: List[bytes] = [] - def __aiter__( - self, - ) -> AsyncIterator["BodyPartReader"]: - return self # type: ignore[return-value] + def __aiter__(self: Self) -> Self: + return self async def __anext__( self, From 5ef8cbaf3b83ffcb3b61cad7b20e702fc35b85eb Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 16 Aug 2024 18:01:58 +0100 Subject: [PATCH 0332/1511] [PR #8717/e0a7c0cb backport][3.10] Minor tweaks backported from #8701 (#8719) **This is a backport of PR #8717 as merged into 3.11 (e0a7c0cb7131ce45265157be5caa683ed57d24a7).** Co-authored-by: Sam Bull <git@sambull.org> --- aiohttp/helpers.py | 2 +- aiohttp/multipart.py | 21 +++++++++++++-------- 2 files changed, 14 insertions(+), 9 deletions(-) diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index 437c871e8f7..ccfa9d5e2fe 100644 
--- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -629,7 +629,7 @@ def register( def close(self) -> None: self._callbacks.clear() - def start(self) -> Optional[asyncio.Handle]: + def start(self) -> Optional[asyncio.TimerHandle]: timeout = self._timeout if timeout is not None and timeout > 0: when = self._loop.time() + timeout diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index da016039011..e3680a7b2a1 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -2,6 +2,7 @@ import binascii import json import re +import sys import uuid import warnings import zlib @@ -10,7 +11,6 @@ from typing import ( TYPE_CHECKING, Any, - AsyncIterator, Deque, Dict, Iterator, @@ -48,6 +48,13 @@ ) from .streams import StreamReader +if sys.version_info >= (3, 11): + from typing import Self +else: + from typing import TypeVar + + Self = TypeVar("Self", bound="BodyPartReader") + __all__ = ( "MultipartReader", "MultipartWriter", @@ -280,8 +287,8 @@ def __init__( self._content_eof = 0 self._cache: Dict[str, Any] = {} - def __aiter__(self) -> AsyncIterator["BodyPartReader"]: - return self # type: ignore[return-value] + def __aiter__(self: Self) -> Self: + return self async def __anext__(self) -> bytes: part = await self.next() @@ -581,7 +588,7 @@ class MultipartReader: response_wrapper_cls = MultipartResponseWrapper #: Multipart reader class, used to handle multipart/* body parts. #: None points to type(self) - multipart_reader_cls = None + multipart_reader_cls: Optional[Type["MultipartReader"]] = None #: Body part reader class for non multipart/* content types. 
part_reader_cls = BodyPartReader @@ -602,10 +609,8 @@ def __init__(self, headers: Mapping[str, str], content: StreamReader) -> None: self._at_bof = True self._unread: List[bytes] = [] - def __aiter__( - self, - ) -> AsyncIterator["BodyPartReader"]: - return self # type: ignore[return-value] + def __aiter__(self: Self) -> Self: + return self async def __anext__( self, From 78d45e733dfeebc0914a409ea2af646fcb6f1c61 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Fri, 16 Aug 2024 19:40:24 +0100 Subject: [PATCH 0333/1511] Fix Python parser when chunk separators align (#8720) (#8722) (cherry picked from commit 6d3d1fcf2583eb7b8330b194c00356ce169b2ebd) --- CHANGES/8720.bugfix.rst | 1 + aiohttp/http_parser.py | 4 ++-- tests/test_http_parser.py | 23 +++++++++++++++++++++++ 3 files changed, 26 insertions(+), 2 deletions(-) create mode 100644 CHANGES/8720.bugfix.rst diff --git a/CHANGES/8720.bugfix.rst b/CHANGES/8720.bugfix.rst new file mode 100644 index 00000000000..9941be27530 --- /dev/null +++ b/CHANGES/8720.bugfix.rst @@ -0,0 +1 @@ +Fixed an edge case in the Python parser when chunk separators happen to align with network chunks -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index 751a7e1bb73..b992955a011 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -870,13 +870,13 @@ def feed_data( self._chunk_size = 0 self.payload.feed_data(chunk[:required], required) chunk = chunk[required:] - if self._lax and chunk.startswith(b"\r"): - chunk = chunk[1:] self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF self.payload.end_http_chunk_receiving() # toss the CRLF at the end of the chunk if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF: + if self._lax and chunk.startswith(b"\r"): + chunk = chunk[1:] if chunk[: len(SEP)] == SEP: chunk = chunk[len(SEP) :] self._chunk = ChunkState.PARSE_CHUNKED_SIZE diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index 0e9aff68dc2..74700df4253 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -1410,6 +1410,29 @@ def test_parse_chunked_payload_empty_body_than_another_chunked( assert b"second" == b"".join(d for d in payload._buffer) +async def test_parse_chunked_payload_split_chunks(response: Any) -> None: + network_chunks = ( + b"HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked\r\n\r\n", + b"5\r\nfi", + b"rst", + # This simulates a bug in lax mode caused when the \r\n separator, before the + # next HTTP chunk, appears at the start of the next network chunk. 
+ b"\r\n", + b"6", + b"\r", + b"\n", + b"second\r", + b"\n0\r\n\r\n", + ) + reader = response.feed_data(network_chunks[0])[0][0][1] + for c in network_chunks[1:]: + response.feed_data(c) + + assert response.feed_eof() is None + assert reader.is_eof() + assert await reader.read() == b"firstsecond" + + def test_partial_url(parser: Any) -> None: messages, upgrade, tail = parser.feed_data(b"GET /te") assert len(messages) == 0 From c6c36a664750d1f61e159dcc1a8d73ae2998854b Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Fri, 16 Aug 2024 19:56:00 +0100 Subject: [PATCH 0334/1511] Fix Python parser when chunk separators align (#8720) (#8721) (cherry picked from commit 6d3d1fcf2583eb7b8330b194c00356ce169b2ebd) --- CHANGES/8720.bugfix.rst | 1 + aiohttp/http_parser.py | 4 ++-- tests/test_http_parser.py | 23 +++++++++++++++++++++++ 3 files changed, 26 insertions(+), 2 deletions(-) create mode 100644 CHANGES/8720.bugfix.rst diff --git a/CHANGES/8720.bugfix.rst b/CHANGES/8720.bugfix.rst new file mode 100644 index 00000000000..9941be27530 --- /dev/null +++ b/CHANGES/8720.bugfix.rst @@ -0,0 +1 @@ +Fixed an edge case in the Python parser when chunk separators happen to align with network chunks -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index 751a7e1bb73..b992955a011 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -870,13 +870,13 @@ def feed_data( self._chunk_size = 0 self.payload.feed_data(chunk[:required], required) chunk = chunk[required:] - if self._lax and chunk.startswith(b"\r"): - chunk = chunk[1:] self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF self.payload.end_http_chunk_receiving() # toss the CRLF at the end of the chunk if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF: + if self._lax and chunk.startswith(b"\r"): + chunk = chunk[1:] if chunk[: len(SEP)] == SEP: chunk = chunk[len(SEP) :] self._chunk = ChunkState.PARSE_CHUNKED_SIZE diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index 0e9aff68dc2..74700df4253 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -1410,6 +1410,29 @@ def test_parse_chunked_payload_empty_body_than_another_chunked( assert b"second" == b"".join(d for d in payload._buffer) +async def test_parse_chunked_payload_split_chunks(response: Any) -> None: + network_chunks = ( + b"HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked\r\n\r\n", + b"5\r\nfi", + b"rst", + # This simulates a bug in lax mode caused when the \r\n separator, before the + # next HTTP chunk, appears at the start of the next network chunk. 
+ b"\r\n", + b"6", + b"\r", + b"\n", + b"second\r", + b"\n0\r\n\r\n", + ) + reader = response.feed_data(network_chunks[0])[0][0][1] + for c in network_chunks[1:]: + response.feed_data(c) + + assert response.feed_eof() is None + assert reader.is_eof() + assert await reader.read() == b"firstsecond" + + def test_partial_url(parser: Any) -> None: messages, upgrade, tail = parser.feed_data(b"GET /te") assert len(messages) == 0 From ebec945ffaf04e5caf3576b809237124aa6b417a Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 17 Aug 2024 14:20:00 +0000 Subject: [PATCH 0335/1511] [PR #8681/30a3d0ef backport][3.10] Improve performance of starting request handlers with Python 3.12+ (#8725) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/8681.misc.rst | 3 +++ aiohttp/web_protocol.py | 10 ++++++++-- 2 files changed, 11 insertions(+), 2 deletions(-) create mode 100644 CHANGES/8681.misc.rst diff --git a/CHANGES/8681.misc.rst b/CHANGES/8681.misc.rst new file mode 100644 index 00000000000..222787d36da --- /dev/null +++ b/CHANGES/8681.misc.rst @@ -0,0 +1,3 @@ +Improved performance of starting request handlers with Python 3.12+ -- by :user:`bdraco`. + +This change is a followup to :issue:`8661` to make the same optimization for Python 3.12+ where the request is connected. 
diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index 635b668ceb0..39e1c8be50e 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -292,10 +292,16 @@ def connection_made(self, transport: asyncio.BaseTransport) -> None: if self._tcp_keepalive: tcp_keepalive(real_transport) - self._task_handler = self._loop.create_task(self.start()) assert self._manager is not None self._manager.connection_made(self, real_transport) + loop = self._loop + if sys.version_info >= (3, 12): + task = asyncio.Task(self.start(), loop=loop, eager_start=True) + else: + task = loop.create_task(self.start()) + self._task_handler = task + def connection_lost(self, exc: Optional[BaseException]) -> None: if self._manager is None: return @@ -494,7 +500,7 @@ async def start(self) -> None: keep_alive(True) specified. """ loop = self._loop - handler = self._task_handler + handler = asyncio.current_task(loop) assert handler is not None manager = self._manager assert manager is not None From 9370c6a31ba3978e75887671fc91c6d3f32670b1 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 17 Aug 2024 14:20:02 +0000 Subject: [PATCH 0336/1511] [PR #8681/30a3d0ef backport][3.11] Improve performance of starting request handlers with Python 3.12+ (#8726) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/8681.misc.rst | 3 +++ aiohttp/web_protocol.py | 10 ++++++++-- 2 files changed, 11 insertions(+), 2 deletions(-) create mode 100644 CHANGES/8681.misc.rst diff --git a/CHANGES/8681.misc.rst b/CHANGES/8681.misc.rst new file mode 100644 index 00000000000..222787d36da --- /dev/null +++ b/CHANGES/8681.misc.rst @@ -0,0 +1,3 @@ +Improved performance of starting request handlers with Python 3.12+ -- by :user:`bdraco`. + +This change is a followup to :issue:`8661` to make the same optimization for Python 3.12+ where the request is connected. 
diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index 635b668ceb0..39e1c8be50e 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -292,10 +292,16 @@ def connection_made(self, transport: asyncio.BaseTransport) -> None: if self._tcp_keepalive: tcp_keepalive(real_transport) - self._task_handler = self._loop.create_task(self.start()) assert self._manager is not None self._manager.connection_made(self, real_transport) + loop = self._loop + if sys.version_info >= (3, 12): + task = asyncio.Task(self.start(), loop=loop, eager_start=True) + else: + task = loop.create_task(self.start()) + self._task_handler = task + def connection_lost(self, exc: Optional[BaseException]) -> None: if self._manager is None: return @@ -494,7 +500,7 @@ async def start(self) -> None: keep_alive(True) specified. """ loop = self._loop - handler = self._task_handler + handler = asyncio.current_task(loop) assert handler is not None manager = self._manager assert manager is not None From 635ae62731d16191947143f9b16947e0a936c59a Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 17 Aug 2024 14:23:35 +0000 Subject: [PATCH 0337/1511] [PR #8682/490fca61 backport][3.10] Reduce WebSocket frame parser complexity (#8727) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/http_websocket.py | 200 ++++++++++++++++++-------------------- 1 file changed, 93 insertions(+), 107 deletions(-) diff --git a/aiohttp/http_websocket.py b/aiohttp/http_websocket.py index b513a45ebdc..9b2c5128804 100644 --- a/aiohttp/http_websocket.py +++ b/aiohttp/http_websocket.py @@ -296,7 +296,7 @@ def __init__( self._frame_opcode: Optional[int] = None self._frame_payload = bytearray() - self._tail = b"" + self._tail: bytes = b"" self._has_mask = False self._frame_mask: Optional[bytes] = None self._payload_length = 0 @@ -447,126 +447,113 @@ def parse_frame( self, buf: bytes ) -> List[Tuple[bool, Optional[int], bytearray, Optional[bool]]]: """Return the next frame from the socket.""" - frames = [] + frames: List[Tuple[bool, Optional[int], bytearray, Optional[bool]]] = [] if self._tail: buf, self._tail = self._tail + buf, b"" - start_pos = 0 + start_pos: int = 0 buf_length = len(buf) while True: # read header - if self._state == WSParserState.READ_HEADER: - if buf_length - start_pos >= 2: - data = buf[start_pos : start_pos + 2] - start_pos += 2 - first_byte, second_byte = data - - fin = (first_byte >> 7) & 1 - rsv1 = (first_byte >> 6) & 1 - rsv2 = (first_byte >> 5) & 1 - rsv3 = (first_byte >> 4) & 1 - opcode = first_byte & 0xF - - # frame-fin = %x0 ; more frames of this message follow - # / %x1 ; final frame of this message - # frame-rsv1 = %x0 ; - # 1 bit, MUST be 0 unless negotiated otherwise - # frame-rsv2 = %x0 ; - # 1 bit, MUST be 0 unless negotiated otherwise - # frame-rsv3 = %x0 ; - # 1 bit, MUST be 0 unless negotiated otherwise - # - # Remove rsv1 from this test for deflate development - if rsv2 or rsv3 or (rsv1 and not self._compress): - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Received frame with non-zero reserved bits", - ) + if self._state is WSParserState.READ_HEADER: + if buf_length - start_pos < 2: + break + data = buf[start_pos : start_pos + 2] + start_pos += 2 + first_byte, second_byte = data 
+ + fin = (first_byte >> 7) & 1 + rsv1 = (first_byte >> 6) & 1 + rsv2 = (first_byte >> 5) & 1 + rsv3 = (first_byte >> 4) & 1 + opcode = first_byte & 0xF + + # frame-fin = %x0 ; more frames of this message follow + # / %x1 ; final frame of this message + # frame-rsv1 = %x0 ; + # 1 bit, MUST be 0 unless negotiated otherwise + # frame-rsv2 = %x0 ; + # 1 bit, MUST be 0 unless negotiated otherwise + # frame-rsv3 = %x0 ; + # 1 bit, MUST be 0 unless negotiated otherwise + # + # Remove rsv1 from this test for deflate development + if rsv2 or rsv3 or (rsv1 and not self._compress): + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Received frame with non-zero reserved bits", + ) - if opcode > 0x7 and fin == 0: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Received fragmented control frame", - ) + if opcode > 0x7 and fin == 0: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Received fragmented control frame", + ) - has_mask = (second_byte >> 7) & 1 - length = second_byte & 0x7F + has_mask = (second_byte >> 7) & 1 + length = second_byte & 0x7F - # Control frames MUST have a payload - # length of 125 bytes or less - if opcode > 0x7 and length > 125: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Control frame payload cannot be " "larger than 125 bytes", - ) + # Control frames MUST have a payload + # length of 125 bytes or less + if opcode > 0x7 and length > 125: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Control frame payload cannot be " "larger than 125 bytes", + ) - # Set compress status if last package is FIN - # OR set compress status if this is first fragment - # Raise error if not first fragment with rsv1 = 0x1 - if self._frame_fin or self._compressed is None: - self._compressed = True if rsv1 else False - elif rsv1: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Received frame with non-zero reserved bits", - ) + # Set compress status if last package is FIN + # OR set compress status if this is first fragment + 
# Raise error if not first fragment with rsv1 = 0x1 + if self._frame_fin or self._compressed is None: + self._compressed = True if rsv1 else False + elif rsv1: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Received frame with non-zero reserved bits", + ) - self._frame_fin = bool(fin) - self._frame_opcode = opcode - self._has_mask = bool(has_mask) - self._payload_length_flag = length - self._state = WSParserState.READ_PAYLOAD_LENGTH - else: - break + self._frame_fin = bool(fin) + self._frame_opcode = opcode + self._has_mask = bool(has_mask) + self._payload_length_flag = length + self._state = WSParserState.READ_PAYLOAD_LENGTH # read payload length - if self._state == WSParserState.READ_PAYLOAD_LENGTH: + if self._state is WSParserState.READ_PAYLOAD_LENGTH: length = self._payload_length_flag if length == 126: - if buf_length - start_pos >= 2: - data = buf[start_pos : start_pos + 2] - start_pos += 2 - length = UNPACK_LEN2(data)[0] - self._payload_length = length - self._state = ( - WSParserState.READ_PAYLOAD_MASK - if self._has_mask - else WSParserState.READ_PAYLOAD - ) - else: + if buf_length - start_pos < 2: break + data = buf[start_pos : start_pos + 2] + start_pos += 2 + length = UNPACK_LEN2(data)[0] + self._payload_length = length elif length > 126: - if buf_length - start_pos >= 8: - data = buf[start_pos : start_pos + 8] - start_pos += 8 - length = UNPACK_LEN3(data)[0] - self._payload_length = length - self._state = ( - WSParserState.READ_PAYLOAD_MASK - if self._has_mask - else WSParserState.READ_PAYLOAD - ) - else: + if buf_length - start_pos < 8: break + data = buf[start_pos : start_pos + 8] + start_pos += 8 + length = UNPACK_LEN3(data)[0] + self._payload_length = length else: self._payload_length = length - self._state = ( - WSParserState.READ_PAYLOAD_MASK - if self._has_mask - else WSParserState.READ_PAYLOAD - ) + + self._state = ( + WSParserState.READ_PAYLOAD_MASK + if self._has_mask + else WSParserState.READ_PAYLOAD + ) # read payload mask - if 
self._state == WSParserState.READ_PAYLOAD_MASK: - if buf_length - start_pos >= 4: - self._frame_mask = buf[start_pos : start_pos + 4] - start_pos += 4 - self._state = WSParserState.READ_PAYLOAD - else: + if self._state is WSParserState.READ_PAYLOAD_MASK: + if buf_length - start_pos < 4: break + self._frame_mask = buf[start_pos : start_pos + 4] + start_pos += 4 + self._state = WSParserState.READ_PAYLOAD - if self._state == WSParserState.READ_PAYLOAD: + if self._state is WSParserState.READ_PAYLOAD: length = self._payload_length payload = self._frame_payload @@ -580,19 +567,18 @@ def parse_frame( payload.extend(buf[start_pos : start_pos + length]) start_pos = start_pos + length - if self._payload_length == 0: - if self._has_mask: - assert self._frame_mask is not None - _websocket_mask(self._frame_mask, payload) + if self._payload_length != 0: + break - frames.append( - (self._frame_fin, self._frame_opcode, payload, self._compressed) - ) + if self._has_mask: + assert self._frame_mask is not None + _websocket_mask(self._frame_mask, payload) - self._frame_payload = bytearray() - self._state = WSParserState.READ_HEADER - else: - break + frames.append( + (self._frame_fin, self._frame_opcode, payload, self._compressed) + ) + self._frame_payload = bytearray() + self._state = WSParserState.READ_HEADER self._tail = buf[start_pos:] From 4467da30203b48ebdf49589c50f23d73245c735b Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 17 Aug 2024 14:43:27 +0000 Subject: [PATCH 0338/1511] [PR #8682/490fca61 backport][3.11] Reduce WebSocket frame parser complexity (#8728) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/http_websocket.py | 200 ++++++++++++++++++-------------------- 1 file changed, 93 insertions(+), 107 deletions(-) diff --git a/aiohttp/http_websocket.py b/aiohttp/http_websocket.py index b513a45ebdc..9b2c5128804 100644 --- a/aiohttp/http_websocket.py +++ b/aiohttp/http_websocket.py @@ -296,7 +296,7 @@ def __init__( self._frame_opcode: Optional[int] = None self._frame_payload = bytearray() - self._tail = b"" + self._tail: bytes = b"" self._has_mask = False self._frame_mask: Optional[bytes] = None self._payload_length = 0 @@ -447,126 +447,113 @@ def parse_frame( self, buf: bytes ) -> List[Tuple[bool, Optional[int], bytearray, Optional[bool]]]: """Return the next frame from the socket.""" - frames = [] + frames: List[Tuple[bool, Optional[int], bytearray, Optional[bool]]] = [] if self._tail: buf, self._tail = self._tail + buf, b"" - start_pos = 0 + start_pos: int = 0 buf_length = len(buf) while True: # read header - if self._state == WSParserState.READ_HEADER: - if buf_length - start_pos >= 2: - data = buf[start_pos : start_pos + 2] - start_pos += 2 - first_byte, second_byte = data - - fin = (first_byte >> 7) & 1 - rsv1 = (first_byte >> 6) & 1 - rsv2 = (first_byte >> 5) & 1 - rsv3 = (first_byte >> 4) & 1 - opcode = first_byte & 0xF - - # frame-fin = %x0 ; more frames of this message follow - # / %x1 ; final frame of this message - # frame-rsv1 = %x0 ; - # 1 bit, MUST be 0 unless negotiated otherwise - # frame-rsv2 = %x0 ; - # 1 bit, MUST be 0 unless negotiated otherwise - # frame-rsv3 = %x0 ; - # 1 bit, MUST be 0 unless negotiated otherwise - # - # Remove rsv1 from this test for deflate development - if rsv2 or rsv3 or (rsv1 and not self._compress): - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Received frame with non-zero reserved bits", - ) + if self._state is WSParserState.READ_HEADER: + if buf_length - start_pos < 2: + break + data = buf[start_pos : start_pos + 2] + start_pos += 2 + first_byte, second_byte = data 
+ + fin = (first_byte >> 7) & 1 + rsv1 = (first_byte >> 6) & 1 + rsv2 = (first_byte >> 5) & 1 + rsv3 = (first_byte >> 4) & 1 + opcode = first_byte & 0xF + + # frame-fin = %x0 ; more frames of this message follow + # / %x1 ; final frame of this message + # frame-rsv1 = %x0 ; + # 1 bit, MUST be 0 unless negotiated otherwise + # frame-rsv2 = %x0 ; + # 1 bit, MUST be 0 unless negotiated otherwise + # frame-rsv3 = %x0 ; + # 1 bit, MUST be 0 unless negotiated otherwise + # + # Remove rsv1 from this test for deflate development + if rsv2 or rsv3 or (rsv1 and not self._compress): + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Received frame with non-zero reserved bits", + ) - if opcode > 0x7 and fin == 0: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Received fragmented control frame", - ) + if opcode > 0x7 and fin == 0: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Received fragmented control frame", + ) - has_mask = (second_byte >> 7) & 1 - length = second_byte & 0x7F + has_mask = (second_byte >> 7) & 1 + length = second_byte & 0x7F - # Control frames MUST have a payload - # length of 125 bytes or less - if opcode > 0x7 and length > 125: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Control frame payload cannot be " "larger than 125 bytes", - ) + # Control frames MUST have a payload + # length of 125 bytes or less + if opcode > 0x7 and length > 125: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Control frame payload cannot be " "larger than 125 bytes", + ) - # Set compress status if last package is FIN - # OR set compress status if this is first fragment - # Raise error if not first fragment with rsv1 = 0x1 - if self._frame_fin or self._compressed is None: - self._compressed = True if rsv1 else False - elif rsv1: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Received frame with non-zero reserved bits", - ) + # Set compress status if last package is FIN + # OR set compress status if this is first fragment + 
# Raise error if not first fragment with rsv1 = 0x1 + if self._frame_fin or self._compressed is None: + self._compressed = True if rsv1 else False + elif rsv1: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Received frame with non-zero reserved bits", + ) - self._frame_fin = bool(fin) - self._frame_opcode = opcode - self._has_mask = bool(has_mask) - self._payload_length_flag = length - self._state = WSParserState.READ_PAYLOAD_LENGTH - else: - break + self._frame_fin = bool(fin) + self._frame_opcode = opcode + self._has_mask = bool(has_mask) + self._payload_length_flag = length + self._state = WSParserState.READ_PAYLOAD_LENGTH # read payload length - if self._state == WSParserState.READ_PAYLOAD_LENGTH: + if self._state is WSParserState.READ_PAYLOAD_LENGTH: length = self._payload_length_flag if length == 126: - if buf_length - start_pos >= 2: - data = buf[start_pos : start_pos + 2] - start_pos += 2 - length = UNPACK_LEN2(data)[0] - self._payload_length = length - self._state = ( - WSParserState.READ_PAYLOAD_MASK - if self._has_mask - else WSParserState.READ_PAYLOAD - ) - else: + if buf_length - start_pos < 2: break + data = buf[start_pos : start_pos + 2] + start_pos += 2 + length = UNPACK_LEN2(data)[0] + self._payload_length = length elif length > 126: - if buf_length - start_pos >= 8: - data = buf[start_pos : start_pos + 8] - start_pos += 8 - length = UNPACK_LEN3(data)[0] - self._payload_length = length - self._state = ( - WSParserState.READ_PAYLOAD_MASK - if self._has_mask - else WSParserState.READ_PAYLOAD - ) - else: + if buf_length - start_pos < 8: break + data = buf[start_pos : start_pos + 8] + start_pos += 8 + length = UNPACK_LEN3(data)[0] + self._payload_length = length else: self._payload_length = length - self._state = ( - WSParserState.READ_PAYLOAD_MASK - if self._has_mask - else WSParserState.READ_PAYLOAD - ) + + self._state = ( + WSParserState.READ_PAYLOAD_MASK + if self._has_mask + else WSParserState.READ_PAYLOAD + ) # read payload mask - if 
self._state == WSParserState.READ_PAYLOAD_MASK: - if buf_length - start_pos >= 4: - self._frame_mask = buf[start_pos : start_pos + 4] - start_pos += 4 - self._state = WSParserState.READ_PAYLOAD - else: + if self._state is WSParserState.READ_PAYLOAD_MASK: + if buf_length - start_pos < 4: break + self._frame_mask = buf[start_pos : start_pos + 4] + start_pos += 4 + self._state = WSParserState.READ_PAYLOAD - if self._state == WSParserState.READ_PAYLOAD: + if self._state is WSParserState.READ_PAYLOAD: length = self._payload_length payload = self._frame_payload @@ -580,19 +567,18 @@ def parse_frame( payload.extend(buf[start_pos : start_pos + length]) start_pos = start_pos + length - if self._payload_length == 0: - if self._has_mask: - assert self._frame_mask is not None - _websocket_mask(self._frame_mask, payload) + if self._payload_length != 0: + break - frames.append( - (self._frame_fin, self._frame_opcode, payload, self._compressed) - ) + if self._has_mask: + assert self._frame_mask is not None + _websocket_mask(self._frame_mask, payload) - self._frame_payload = bytearray() - self._state = WSParserState.READ_HEADER - else: - break + frames.append( + (self._frame_fin, self._frame_opcode, payload, self._compressed) + ) + self._frame_payload = bytearray() + self._state = WSParserState.READ_HEADER self._tail = buf[start_pos:] From 0f1a00817d71f8e75fa823c19a6273cf0cf6b3f2 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 17 Aug 2024 16:07:14 +0000 Subject: [PATCH 0339/1511] [PR #8685/e7c02ca4 backport][3.10] Fix exceptions from WebSocket ping task not being consumed (#8729) Co-authored-by: J. 
Nick Koston <nick@koston.org> closes #7238 fixes #5182 fixes #4153 fixes #2309 --- CHANGES/8685.bugfix.rst | 3 ++ aiohttp/client_ws.py | 25 +++++---- aiohttp/web_ws.py | 18 +++++-- tests/test_client_ws_functional.py | 30 +++++++++++ tests/test_web_websocket_functional.py | 73 +++++++++++++++++++++++++- 5 files changed, 136 insertions(+), 13 deletions(-) create mode 100644 CHANGES/8685.bugfix.rst diff --git a/CHANGES/8685.bugfix.rst b/CHANGES/8685.bugfix.rst new file mode 100644 index 00000000000..8bd20196ee3 --- /dev/null +++ b/CHANGES/8685.bugfix.rst @@ -0,0 +1,3 @@ +Fixed unconsumed exceptions raised by the WebSocket heartbeat -- by :user:`bdraco`. + +If the heartbeat ping raised an exception, it would not be consumed and would be logged as an warning. diff --git a/aiohttp/client_ws.py b/aiohttp/client_ws.py index 7fd141248bd..7b3a5bf952d 100644 --- a/aiohttp/client_ws.py +++ b/aiohttp/client_ws.py @@ -141,21 +141,28 @@ def _send_heartbeat(self) -> None: if not ping_task.done(): self._ping_task = ping_task ping_task.add_done_callback(self._ping_task_done) + else: + self._ping_task_done(ping_task) def _ping_task_done(self, task: "asyncio.Task[None]") -> None: """Callback for when the ping task completes.""" + if not task.cancelled() and (exc := task.exception()): + self._handle_ping_pong_exception(exc) self._ping_task = None def _pong_not_received(self) -> None: - if not self._closed: - self._set_closed() - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - self._exception = ServerTimeoutError() - self._response.close() - if self._waiting and not self._closing: - self._reader.feed_data( - WSMessage(WSMsgType.ERROR, self._exception, None) - ) + self._handle_ping_pong_exception(ServerTimeoutError()) + + def _handle_ping_pong_exception(self, exc: BaseException) -> None: + """Handle exceptions raised during ping/pong processing.""" + if self._closed: + return + self._set_closed() + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._exception = exc + 
self._response.close() + if self._waiting and not self._closing: + self._reader.feed_data(WSMessage(WSMsgType.ERROR, exc, None)) def _set_closed(self) -> None: """Set the connection to closed. diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index 98f26cc48c6..382223097ea 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -164,16 +164,28 @@ def _send_heartbeat(self) -> None: if not ping_task.done(): self._ping_task = ping_task ping_task.add_done_callback(self._ping_task_done) + else: + self._ping_task_done(ping_task) def _ping_task_done(self, task: "asyncio.Task[None]") -> None: """Callback for when the ping task completes.""" + if not task.cancelled() and (exc := task.exception()): + self._handle_ping_pong_exception(exc) self._ping_task = None def _pong_not_received(self) -> None: if self._req is not None and self._req.transport is not None: - self._set_closed() - self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) - self._exception = asyncio.TimeoutError() + self._handle_ping_pong_exception(asyncio.TimeoutError()) + + def _handle_ping_pong_exception(self, exc: BaseException) -> None: + """Handle exceptions raised during ping/pong processing.""" + if self._closed: + return + self._set_closed() + self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) + self._exception = exc + if self._waiting and not self._closing and self._reader is not None: + self._reader.feed_data(WSMessage(WSMsgType.ERROR, exc, None)) def _set_closed(self) -> None: """Set the connection to closed. 
diff --git a/tests/test_client_ws_functional.py b/tests/test_client_ws_functional.py index 907ae232e9a..274092a189a 100644 --- a/tests/test_client_ws_functional.py +++ b/tests/test_client_ws_functional.py @@ -600,6 +600,36 @@ async def handler(request): assert ping_received +async def test_heartbeat_connection_closed(aiohttp_client: AiohttpClient) -> None: + """Test that the connection is closed while ping is in progress.""" + + async def handler(request: web.Request) -> NoReturn: + ws = web.WebSocketResponse(autoping=False) + await ws.prepare(request) + await ws.receive() + assert False + + app = web.Application() + app.router.add_route("GET", "/", handler) + + client = await aiohttp_client(app) + resp = await client.ws_connect("/", heartbeat=0.1) + ping_count = 0 + # We patch write here to simulate a connection reset error + # since if we closed the connection normally, the client would + # would cancel the heartbeat task and we wouldn't get a ping + assert resp._conn is not None + with mock.patch.object( + resp._conn.transport, "write", side_effect=ConnectionResetError + ), mock.patch.object(resp._writer, "ping", wraps=resp._writer.ping) as ping: + await resp.receive() + ping_count = ping.call_count + # Connection should be closed roughly after 1.5x heartbeat. 
+ await asyncio.sleep(0.2) + assert ping_count == 1 + assert resp.close_code is WSCloseCode.ABNORMAL_CLOSURE + + async def test_heartbeat_no_pong(aiohttp_client: AiohttpClient) -> None: """Test that the connection is closed if no pong is received without sending messages.""" ping_received = False diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index 6540f134da8..2be54486ee9 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -4,7 +4,8 @@ import contextlib import sys import weakref -from typing import Any, Optional +from typing import Any, NoReturn, Optional +from unittest import mock import pytest @@ -724,6 +725,76 @@ async def handler(request): await ws.close() +async def test_heartbeat_connection_closed( + loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient +) -> None: + """Test that the connection is closed while ping is in progress.""" + ping_count = 0 + + async def handler(request: web.Request) -> NoReturn: + nonlocal ping_count + ws_server = web.WebSocketResponse(heartbeat=0.05) + await ws_server.prepare(request) + # We patch write here to simulate a connection reset error + # since if we closed the connection normally, the server would + # would cancel the heartbeat task and we wouldn't get a ping + with mock.patch.object( + ws_server._req.transport, "write", side_effect=ConnectionResetError + ), mock.patch.object( + ws_server._writer, "ping", wraps=ws_server._writer.ping + ) as ping: + try: + await ws_server.receive() + finally: + ping_count = ping.call_count + assert False + + app = web.Application() + app.router.add_get("/", handler) + + client = await aiohttp_client(app) + ws = await client.ws_connect("/", autoping=False) + msg = await ws.receive() + assert msg.type is aiohttp.WSMsgType.CLOSED + assert msg.extra is None + assert ws.close_code == WSCloseCode.ABNORMAL_CLOSURE + assert ping_count == 1 + await ws.close() + + +async def 
test_heartbeat_failure_ends_receive( + loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient +) -> None: + """Test that no heartbeat response to the server ends the receive call.""" + ws_server_close_code = None + ws_server_exception = None + + async def handler(request: web.Request) -> NoReturn: + nonlocal ws_server_close_code, ws_server_exception + ws_server = web.WebSocketResponse(heartbeat=0.05) + await ws_server.prepare(request) + try: + await ws_server.receive() + finally: + ws_server_close_code = ws_server.close_code + ws_server_exception = ws_server.exception() + assert False + + app = web.Application() + app.router.add_get("/", handler) + + client = await aiohttp_client(app) + ws = await client.ws_connect("/", autoping=False) + msg = await ws.receive() + assert msg.type is aiohttp.WSMsgType.PING + msg = await ws.receive() + assert msg.type is aiohttp.WSMsgType.CLOSED + assert ws.close_code == WSCloseCode.ABNORMAL_CLOSURE + assert ws_server_close_code == WSCloseCode.ABNORMAL_CLOSURE + assert isinstance(ws_server_exception, asyncio.TimeoutError) + await ws.close() + + async def test_heartbeat_no_pong_send_many_messages( loop: Any, aiohttp_client: Any ) -> None: From 4c6dc7fe3e5df9a08e2325230b2e17afad2a161c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 17 Aug 2024 16:16:48 +0000 Subject: [PATCH 0340/1511] [PR #8685/e7c02ca4 backport][3.11] Fix exceptions from WebSocket ping task not being consumed (#8730) Co-authored-by: J. 
Nick Koston <nick@koston.org> closes #7238 fixes #5182 fixes #4153 fixes #2309 --- CHANGES/8685.bugfix.rst | 3 ++ aiohttp/client_ws.py | 25 +++++---- aiohttp/web_ws.py | 18 +++++-- tests/test_client_ws_functional.py | 30 +++++++++++ tests/test_web_websocket_functional.py | 73 +++++++++++++++++++++++++- 5 files changed, 136 insertions(+), 13 deletions(-) create mode 100644 CHANGES/8685.bugfix.rst diff --git a/CHANGES/8685.bugfix.rst b/CHANGES/8685.bugfix.rst new file mode 100644 index 00000000000..8bd20196ee3 --- /dev/null +++ b/CHANGES/8685.bugfix.rst @@ -0,0 +1,3 @@ +Fixed unconsumed exceptions raised by the WebSocket heartbeat -- by :user:`bdraco`. + +If the heartbeat ping raised an exception, it would not be consumed and would be logged as an warning. diff --git a/aiohttp/client_ws.py b/aiohttp/client_ws.py index 7fd141248bd..7b3a5bf952d 100644 --- a/aiohttp/client_ws.py +++ b/aiohttp/client_ws.py @@ -141,21 +141,28 @@ def _send_heartbeat(self) -> None: if not ping_task.done(): self._ping_task = ping_task ping_task.add_done_callback(self._ping_task_done) + else: + self._ping_task_done(ping_task) def _ping_task_done(self, task: "asyncio.Task[None]") -> None: """Callback for when the ping task completes.""" + if not task.cancelled() and (exc := task.exception()): + self._handle_ping_pong_exception(exc) self._ping_task = None def _pong_not_received(self) -> None: - if not self._closed: - self._set_closed() - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - self._exception = ServerTimeoutError() - self._response.close() - if self._waiting and not self._closing: - self._reader.feed_data( - WSMessage(WSMsgType.ERROR, self._exception, None) - ) + self._handle_ping_pong_exception(ServerTimeoutError()) + + def _handle_ping_pong_exception(self, exc: BaseException) -> None: + """Handle exceptions raised during ping/pong processing.""" + if self._closed: + return + self._set_closed() + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._exception = exc + 
self._response.close() + if self._waiting and not self._closing: + self._reader.feed_data(WSMessage(WSMsgType.ERROR, exc, None)) def _set_closed(self) -> None: """Set the connection to closed. diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index 98f26cc48c6..382223097ea 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -164,16 +164,28 @@ def _send_heartbeat(self) -> None: if not ping_task.done(): self._ping_task = ping_task ping_task.add_done_callback(self._ping_task_done) + else: + self._ping_task_done(ping_task) def _ping_task_done(self, task: "asyncio.Task[None]") -> None: """Callback for when the ping task completes.""" + if not task.cancelled() and (exc := task.exception()): + self._handle_ping_pong_exception(exc) self._ping_task = None def _pong_not_received(self) -> None: if self._req is not None and self._req.transport is not None: - self._set_closed() - self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) - self._exception = asyncio.TimeoutError() + self._handle_ping_pong_exception(asyncio.TimeoutError()) + + def _handle_ping_pong_exception(self, exc: BaseException) -> None: + """Handle exceptions raised during ping/pong processing.""" + if self._closed: + return + self._set_closed() + self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) + self._exception = exc + if self._waiting and not self._closing and self._reader is not None: + self._reader.feed_data(WSMessage(WSMsgType.ERROR, exc, None)) def _set_closed(self) -> None: """Set the connection to closed. 
diff --git a/tests/test_client_ws_functional.py b/tests/test_client_ws_functional.py index 907ae232e9a..274092a189a 100644 --- a/tests/test_client_ws_functional.py +++ b/tests/test_client_ws_functional.py @@ -600,6 +600,36 @@ async def handler(request): assert ping_received +async def test_heartbeat_connection_closed(aiohttp_client: AiohttpClient) -> None: + """Test that the connection is closed while ping is in progress.""" + + async def handler(request: web.Request) -> NoReturn: + ws = web.WebSocketResponse(autoping=False) + await ws.prepare(request) + await ws.receive() + assert False + + app = web.Application() + app.router.add_route("GET", "/", handler) + + client = await aiohttp_client(app) + resp = await client.ws_connect("/", heartbeat=0.1) + ping_count = 0 + # We patch write here to simulate a connection reset error + # since if we closed the connection normally, the client would + # would cancel the heartbeat task and we wouldn't get a ping + assert resp._conn is not None + with mock.patch.object( + resp._conn.transport, "write", side_effect=ConnectionResetError + ), mock.patch.object(resp._writer, "ping", wraps=resp._writer.ping) as ping: + await resp.receive() + ping_count = ping.call_count + # Connection should be closed roughly after 1.5x heartbeat. 
+ await asyncio.sleep(0.2) + assert ping_count == 1 + assert resp.close_code is WSCloseCode.ABNORMAL_CLOSURE + + async def test_heartbeat_no_pong(aiohttp_client: AiohttpClient) -> None: """Test that the connection is closed if no pong is received without sending messages.""" ping_received = False diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index 6540f134da8..2be54486ee9 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -4,7 +4,8 @@ import contextlib import sys import weakref -from typing import Any, Optional +from typing import Any, NoReturn, Optional +from unittest import mock import pytest @@ -724,6 +725,76 @@ async def handler(request): await ws.close() +async def test_heartbeat_connection_closed( + loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient +) -> None: + """Test that the connection is closed while ping is in progress.""" + ping_count = 0 + + async def handler(request: web.Request) -> NoReturn: + nonlocal ping_count + ws_server = web.WebSocketResponse(heartbeat=0.05) + await ws_server.prepare(request) + # We patch write here to simulate a connection reset error + # since if we closed the connection normally, the server would + # would cancel the heartbeat task and we wouldn't get a ping + with mock.patch.object( + ws_server._req.transport, "write", side_effect=ConnectionResetError + ), mock.patch.object( + ws_server._writer, "ping", wraps=ws_server._writer.ping + ) as ping: + try: + await ws_server.receive() + finally: + ping_count = ping.call_count + assert False + + app = web.Application() + app.router.add_get("/", handler) + + client = await aiohttp_client(app) + ws = await client.ws_connect("/", autoping=False) + msg = await ws.receive() + assert msg.type is aiohttp.WSMsgType.CLOSED + assert msg.extra is None + assert ws.close_code == WSCloseCode.ABNORMAL_CLOSURE + assert ping_count == 1 + await ws.close() + + +async def 
test_heartbeat_failure_ends_receive( + loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient +) -> None: + """Test that no heartbeat response to the server ends the receive call.""" + ws_server_close_code = None + ws_server_exception = None + + async def handler(request: web.Request) -> NoReturn: + nonlocal ws_server_close_code, ws_server_exception + ws_server = web.WebSocketResponse(heartbeat=0.05) + await ws_server.prepare(request) + try: + await ws_server.receive() + finally: + ws_server_close_code = ws_server.close_code + ws_server_exception = ws_server.exception() + assert False + + app = web.Application() + app.router.add_get("/", handler) + + client = await aiohttp_client(app) + ws = await client.ws_connect("/", autoping=False) + msg = await ws.receive() + assert msg.type is aiohttp.WSMsgType.PING + msg = await ws.receive() + assert msg.type is aiohttp.WSMsgType.CLOSED + assert ws.close_code == WSCloseCode.ABNORMAL_CLOSURE + assert ws_server_close_code == WSCloseCode.ABNORMAL_CLOSURE + assert isinstance(ws_server_exception, asyncio.TimeoutError) + await ws.close() + + async def test_heartbeat_no_pong_send_many_messages( loop: Any, aiohttp_client: Any ) -> None: From f4d4f2a646950f7a6c5cfe99de67e03703ec16fa Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sat, 17 Aug 2024 11:40:09 -0500 Subject: [PATCH 0341/1511] Release 3.10.4 (#8731) --- CHANGES.rst | 70 +++++++++++++++++++++++++++++++++++++++++ CHANGES/3867.bugfix.rst | 1 - CHANGES/8680.bugfix.rst | 1 - CHANGES/8681.misc.rst | 3 -- CHANGES/8685.bugfix.rst | 3 -- CHANGES/8700.doc.rst | 1 - CHANGES/8720.bugfix.rst | 1 - 7 files changed, 70 insertions(+), 10 deletions(-) delete mode 100644 CHANGES/3867.bugfix.rst delete mode 100644 CHANGES/8680.bugfix.rst delete mode 100644 CHANGES/8681.misc.rst delete mode 100644 CHANGES/8685.bugfix.rst delete mode 100644 CHANGES/8700.doc.rst delete mode 100644 CHANGES/8720.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index 43ca69235e3..a90142e8505 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,76 @@ .. towncrier release notes start +3.10.4 (2024-08-17) +=================== + +Bug fixes +--------- + +- Fixed decoding base64 chunk in BodyPartReader -- by :user:`hyzyla`. + + + *Related issues and pull requests on GitHub:* + :issue:`3867`. + + + +- Fixed a race closing the server-side WebSocket where the close code would not reach the client -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8680`. + + + +- Fixed unconsumed exceptions raised by the WebSocket heartbeat -- by :user:`bdraco`. + + If the heartbeat ping raised an exception, it would not be consumed and would be logged as an warning. + + + *Related issues and pull requests on GitHub:* + :issue:`8685`. + + + +- Fixed an edge case in the Python parser when chunk separators happen to align with network chunks -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8720`. + + + + +Improved documentation +---------------------- + +- Added ``aiohttp-apischema`` to supported libraries -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8700`. 
+ + + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of starting request handlers with Python 3.12+ -- by :user:`bdraco`. + + This change is a followup to :issue:`8661` to make the same optimization for Python 3.12+ where the request is connected. + + + *Related issues and pull requests on GitHub:* + :issue:`8681`. + + + + +---- + + 3.10.3 (2024-08-10) ======================== diff --git a/CHANGES/3867.bugfix.rst b/CHANGES/3867.bugfix.rst deleted file mode 100644 index 12376bf6ef0..00000000000 --- a/CHANGES/3867.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed decoding base64 chunk in BodyPartReader -- by :user:`hyzyla`. diff --git a/CHANGES/8680.bugfix.rst b/CHANGES/8680.bugfix.rst deleted file mode 100644 index 2149f12aaaf..00000000000 --- a/CHANGES/8680.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed a race closing the server-side WebSocket where the close code would not reach the client. -- by :user:`bdraco`. diff --git a/CHANGES/8681.misc.rst b/CHANGES/8681.misc.rst deleted file mode 100644 index 222787d36da..00000000000 --- a/CHANGES/8681.misc.rst +++ /dev/null @@ -1,3 +0,0 @@ -Improved performance of starting request handlers with Python 3.12+ -- by :user:`bdraco`. - -This change is a followup to :issue:`8661` to make the same optimization for Python 3.12+ where the request is connected. diff --git a/CHANGES/8685.bugfix.rst b/CHANGES/8685.bugfix.rst deleted file mode 100644 index 8bd20196ee3..00000000000 --- a/CHANGES/8685.bugfix.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fixed unconsumed exceptions raised by the WebSocket heartbeat -- by :user:`bdraco`. - -If the heartbeat ping raised an exception, it would not be consumed and would be logged as an warning. diff --git a/CHANGES/8700.doc.rst b/CHANGES/8700.doc.rst deleted file mode 100644 index 71157969a75..00000000000 --- a/CHANGES/8700.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Added ``aiohttp-apischema`` to supported libraries -- by :user:`Dreamsorcerer`. 
diff --git a/CHANGES/8720.bugfix.rst b/CHANGES/8720.bugfix.rst deleted file mode 100644 index 9941be27530..00000000000 --- a/CHANGES/8720.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed an edge case in the Python parser when chunk separators happen to align with network chunks -- by :user:`Dreamsorcerer`. From 8dcd3a419f0266d5941920d0161422a5a7697b98 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sat, 17 Aug 2024 11:44:55 -0500 Subject: [PATCH 0342/1511] Release 3.10.4 (Attempt 2) (#8732) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index bbda1fe6b57..39a98892c22 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.10.4.dev0" +__version__ = "3.10.4" from typing import TYPE_CHECKING, Tuple From 92dc91bea6e144d85206fc0a7f5d4907d5bd08ed Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sat, 17 Aug 2024 15:27:15 -0500 Subject: [PATCH 0343/1511] Bump version to 3.10.5.dev0 --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 39a98892c22..d54dfedce2c 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.10.4" +__version__ = "3.10.5.dev0" from typing import TYPE_CHECKING, Tuple From 9a03467cc44f3d39ce5175cdc377b3c203d48e5b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 11:03:55 +0000 Subject: [PATCH 0344/1511] Bump aiohappyeyeballs from 2.3.6 to 2.3.7 (#8739) Bumps [aiohappyeyeballs](https://github.com/aio-libs/aiohappyeyeballs) from 2.3.6 to 2.3.7. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/aiohappyeyeballs/releases">aiohappyeyeballs's releases</a>.</em></p> <blockquote> <h1>v2.3.7 (2024-08-17)</h1> <h2>Fix</h2> <ul> <li>fix: correct classifier for license Python-2.0.1 (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/83">#83</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/186be056ea441bb3fa7620864f46c6f8431f3a34"><code>186be05</code></a>)</li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/aiohappyeyeballs/blob/main/CHANGELOG.md">aiohappyeyeballs's changelog</a>.</em></p> <blockquote> <h2>v2.3.7 (2024-08-17)</h2> <h3>Fix</h3> <ul> <li>Correct classifier for license python-2.0.1 (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/83">#83</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/186be056ea441bb3fa7620864f46c6f8431f3a34"><code>186be05</code></a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/08315175bcac65db429fcc4119a0087e69d67fa7"><code>0831517</code></a> 2.3.7</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/186be056ea441bb3fa7620864f46c6f8431f3a34"><code>186be05</code></a> fix: correct classifier for license Python-2.0.1 (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/83">#83</a>)</li> <li>See full diff in <a href="https://github.com/aio-libs/aiohappyeyeballs/compare/v2.3.6...v2.3.7">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=aiohappyeyeballs&package-manager=pip&previous-version=2.3.6&new-version=2.3.7)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) 
Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt 
b/requirements/base.txt index 9aa29b61f03..1d9a5bcc2a7 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.6 +aiohappyeyeballs==2.3.7 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 3ec4e6fe313..2bcef5994e7 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via # -r requirements/lint.in # -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.6 +aiohappyeyeballs==2.3.7 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.6 # via -r requirements/doc.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 0c7fa1280d4..525f2cc42cf 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via # -r requirements/lint.in # -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.6 +aiohappyeyeballs==2.3.7 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.6 # via -r requirements/doc.in diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 5c42f7a712c..fa715abe96f 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.6 +aiohappyeyeballs==2.3.7 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 1b647f78edd..0eb9c032877 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r 
requirements/runtime-deps.in -aiohappyeyeballs==2.3.6 +aiohappyeyeballs==2.3.7 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in From dba260545e74612fb9968c681a0ef1bff1f03cd4 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 19 Aug 2024 10:48:27 -0500 Subject: [PATCH 0345/1511] [3.10] Bump pypa/cibuildwheel to v2.20.0 (#8740) We want to start building wheels for 3.13 to unblock uvloop --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 93d4575da2d..a6a58cef9c2 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -351,7 +351,7 @@ jobs: run: | make cythonize - name: Build wheels - uses: pypa/cibuildwheel@v2.19.2 + uses: pypa/cibuildwheel@v2.20.0 env: CIBW_ARCHS_MACOS: x86_64 arm64 universal2 - uses: actions/upload-artifact@v3 From fbfedd6c32deb75a5564fe584e5c8c127beb3a29 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 19 Aug 2024 12:12:36 -0500 Subject: [PATCH 0346/1511] [PR #8736/1b88af2 backport][3.10] Improve performance of WebSocketReader (#8743) --- CHANGES/8736.misc.rst | 1 + aiohttp/http_websocket.py | 200 ++++++++++++++++++++------------------ 2 files changed, 105 insertions(+), 96 deletions(-) create mode 100644 CHANGES/8736.misc.rst diff --git a/CHANGES/8736.misc.rst b/CHANGES/8736.misc.rst new file mode 100644 index 00000000000..34ed19aebba --- /dev/null +++ b/CHANGES/8736.misc.rst @@ -0,0 +1 @@ +Improved performance of the WebSocket reader -- by :user:`bdraco`. 
diff --git a/aiohttp/http_websocket.py b/aiohttp/http_websocket.py index 9b2c5128804..ab1af44da7b 100644 --- a/aiohttp/http_websocket.py +++ b/aiohttp/http_websocket.py @@ -94,6 +94,12 @@ class WSMsgType(IntEnum): error = ERROR +MESSAGE_TYPES_WITH_CONTENT: Final = ( + WSMsgType.BINARY, + WSMsgType.TEXT, + WSMsgType.CONTINUATION, +) + WS_KEY: Final[bytes] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11" @@ -313,17 +319,101 @@ def feed_data(self, data: bytes) -> Tuple[bool, bytes]: return True, data try: - return self._feed_data(data) + self._feed_data(data) except Exception as exc: self._exc = exc set_exception(self.queue, exc) return True, b"" - def _feed_data(self, data: bytes) -> Tuple[bool, bytes]: + return False, b"" + + def _feed_data(self, data: bytes) -> None: for fin, opcode, payload, compressed in self.parse_frame(data): - if compressed and not self._decompressobj: - self._decompressobj = ZLibDecompressor(suppress_deflate_header=True) - if opcode == WSMsgType.CLOSE: + if opcode in MESSAGE_TYPES_WITH_CONTENT: + # load text/binary + is_continuation = opcode == WSMsgType.CONTINUATION + if not fin: + # got partial frame payload + if not is_continuation: + self._opcode = opcode + self._partial += payload + if self._max_msg_size and len(self._partial) >= self._max_msg_size: + raise WebSocketError( + WSCloseCode.MESSAGE_TOO_BIG, + "Message size {} exceeds limit {}".format( + len(self._partial), self._max_msg_size + ), + ) + continue + + has_partial = bool(self._partial) + if is_continuation: + if self._opcode is None: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Continuation frame for non started message", + ) + opcode = self._opcode + self._opcode = None + # previous frame was non finished + # we should get continuation opcode + elif has_partial: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "The opcode in non-fin frame is expected " + "to be zero, got {!r}".format(opcode), + ) + + if has_partial: + assembled_payload = self._partial + payload + 
self._partial.clear() + else: + assembled_payload = payload + + if self._max_msg_size and len(assembled_payload) >= self._max_msg_size: + raise WebSocketError( + WSCloseCode.MESSAGE_TOO_BIG, + "Message size {} exceeds limit {}".format( + len(assembled_payload), self._max_msg_size + ), + ) + + # Decompress process must to be done after all packets + # received. + if compressed: + if not self._decompressobj: + self._decompressobj = ZLibDecompressor( + suppress_deflate_header=True + ) + payload_merged = self._decompressobj.decompress_sync( + assembled_payload + _WS_DEFLATE_TRAILING, self._max_msg_size + ) + if self._decompressobj.unconsumed_tail: + left = len(self._decompressobj.unconsumed_tail) + raise WebSocketError( + WSCloseCode.MESSAGE_TOO_BIG, + "Decompressed message size {} exceeds limit {}".format( + self._max_msg_size + left, self._max_msg_size + ), + ) + else: + payload_merged = bytes(assembled_payload) + + if opcode == WSMsgType.TEXT: + try: + text = payload_merged.decode("utf-8") + except UnicodeDecodeError as exc: + raise WebSocketError( + WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" + ) from exc + + self.queue.feed_data(WSMessage(WSMsgType.TEXT, text, ""), len(text)) + continue + + self.queue.feed_data( + WSMessage(WSMsgType.BINARY, payload_merged, ""), len(payload_merged) + ) + elif opcode == WSMsgType.CLOSE: if len(payload) >= 2: close_code = UNPACK_CLOSE_CODE(payload[:2])[0] if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES: @@ -358,90 +448,10 @@ def _feed_data(self, data: bytes) -> Tuple[bool, bytes]: WSMessage(WSMsgType.PONG, payload, ""), len(payload) ) - elif ( - opcode not in (WSMsgType.TEXT, WSMsgType.BINARY) - and self._opcode is None - ): + else: raise WebSocketError( WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}" ) - else: - # load text/binary - if not fin: - # got partial frame payload - if opcode != WSMsgType.CONTINUATION: - self._opcode = opcode - self._partial.extend(payload) - if self._max_msg_size and 
len(self._partial) >= self._max_msg_size: - raise WebSocketError( - WSCloseCode.MESSAGE_TOO_BIG, - "Message size {} exceeds limit {}".format( - len(self._partial), self._max_msg_size - ), - ) - else: - # previous frame was non finished - # we should get continuation opcode - if self._partial: - if opcode != WSMsgType.CONTINUATION: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "The opcode in non-fin frame is expected " - "to be zero, got {!r}".format(opcode), - ) - - if opcode == WSMsgType.CONTINUATION: - assert self._opcode is not None - opcode = self._opcode - self._opcode = None - - self._partial.extend(payload) - if self._max_msg_size and len(self._partial) >= self._max_msg_size: - raise WebSocketError( - WSCloseCode.MESSAGE_TOO_BIG, - "Message size {} exceeds limit {}".format( - len(self._partial), self._max_msg_size - ), - ) - - # Decompress process must to be done after all packets - # received. - if compressed: - assert self._decompressobj is not None - self._partial.extend(_WS_DEFLATE_TRAILING) - payload_merged = self._decompressobj.decompress_sync( - self._partial, self._max_msg_size - ) - if self._decompressobj.unconsumed_tail: - left = len(self._decompressobj.unconsumed_tail) - raise WebSocketError( - WSCloseCode.MESSAGE_TOO_BIG, - "Decompressed message size {} exceeds limit {}".format( - self._max_msg_size + left, self._max_msg_size - ), - ) - else: - payload_merged = bytes(self._partial) - - self._partial.clear() - - if opcode == WSMsgType.TEXT: - try: - text = payload_merged.decode("utf-8") - self.queue.feed_data( - WSMessage(WSMsgType.TEXT, text, ""), len(text) - ) - except UnicodeDecodeError as exc: - raise WebSocketError( - WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" - ) from exc - else: - self.queue.feed_data( - WSMessage(WSMsgType.BINARY, payload_merged, ""), - len(payload_merged), - ) - - return False, b"" def parse_frame( self, buf: bytes @@ -521,23 +531,21 @@ def parse_frame( # read payload length if self._state is 
WSParserState.READ_PAYLOAD_LENGTH: - length = self._payload_length_flag - if length == 126: + length_flag = self._payload_length_flag + if length_flag == 126: if buf_length - start_pos < 2: break data = buf[start_pos : start_pos + 2] start_pos += 2 - length = UNPACK_LEN2(data)[0] - self._payload_length = length - elif length > 126: + self._payload_length = UNPACK_LEN2(data)[0] + elif length_flag > 126: if buf_length - start_pos < 8: break data = buf[start_pos : start_pos + 8] start_pos += 8 - length = UNPACK_LEN3(data)[0] - self._payload_length = length + self._payload_length = UNPACK_LEN3(data)[0] else: - self._payload_length = length + self._payload_length = length_flag self._state = ( WSParserState.READ_PAYLOAD_MASK @@ -560,11 +568,11 @@ def parse_frame( chunk_len = buf_length - start_pos if length >= chunk_len: self._payload_length = length - chunk_len - payload.extend(buf[start_pos:]) + payload += buf[start_pos:] start_pos = buf_length else: self._payload_length = 0 - payload.extend(buf[start_pos : start_pos + length]) + payload += buf[start_pos : start_pos + length] start_pos = start_pos + length if self._payload_length != 0: From 8c19defa654020edb53f8ebf1d802616eb3e4294 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 19 Aug 2024 12:12:40 -0500 Subject: [PATCH 0347/1511] [PR #8736/1b88af2 backport][3.11] Improve performance of WebSocketReader (#8744) --- CHANGES/8736.misc.rst | 1 + aiohttp/http_websocket.py | 200 ++++++++++++++++++++------------------ 2 files changed, 105 insertions(+), 96 deletions(-) create mode 100644 CHANGES/8736.misc.rst diff --git a/CHANGES/8736.misc.rst b/CHANGES/8736.misc.rst new file mode 100644 index 00000000000..34ed19aebba --- /dev/null +++ b/CHANGES/8736.misc.rst @@ -0,0 +1 @@ +Improved performance of the WebSocket reader -- by :user:`bdraco`. 
diff --git a/aiohttp/http_websocket.py b/aiohttp/http_websocket.py index 9b2c5128804..ab1af44da7b 100644 --- a/aiohttp/http_websocket.py +++ b/aiohttp/http_websocket.py @@ -94,6 +94,12 @@ class WSMsgType(IntEnum): error = ERROR +MESSAGE_TYPES_WITH_CONTENT: Final = ( + WSMsgType.BINARY, + WSMsgType.TEXT, + WSMsgType.CONTINUATION, +) + WS_KEY: Final[bytes] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11" @@ -313,17 +319,101 @@ def feed_data(self, data: bytes) -> Tuple[bool, bytes]: return True, data try: - return self._feed_data(data) + self._feed_data(data) except Exception as exc: self._exc = exc set_exception(self.queue, exc) return True, b"" - def _feed_data(self, data: bytes) -> Tuple[bool, bytes]: + return False, b"" + + def _feed_data(self, data: bytes) -> None: for fin, opcode, payload, compressed in self.parse_frame(data): - if compressed and not self._decompressobj: - self._decompressobj = ZLibDecompressor(suppress_deflate_header=True) - if opcode == WSMsgType.CLOSE: + if opcode in MESSAGE_TYPES_WITH_CONTENT: + # load text/binary + is_continuation = opcode == WSMsgType.CONTINUATION + if not fin: + # got partial frame payload + if not is_continuation: + self._opcode = opcode + self._partial += payload + if self._max_msg_size and len(self._partial) >= self._max_msg_size: + raise WebSocketError( + WSCloseCode.MESSAGE_TOO_BIG, + "Message size {} exceeds limit {}".format( + len(self._partial), self._max_msg_size + ), + ) + continue + + has_partial = bool(self._partial) + if is_continuation: + if self._opcode is None: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Continuation frame for non started message", + ) + opcode = self._opcode + self._opcode = None + # previous frame was non finished + # we should get continuation opcode + elif has_partial: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "The opcode in non-fin frame is expected " + "to be zero, got {!r}".format(opcode), + ) + + if has_partial: + assembled_payload = self._partial + payload + 
self._partial.clear() + else: + assembled_payload = payload + + if self._max_msg_size and len(assembled_payload) >= self._max_msg_size: + raise WebSocketError( + WSCloseCode.MESSAGE_TOO_BIG, + "Message size {} exceeds limit {}".format( + len(assembled_payload), self._max_msg_size + ), + ) + + # Decompress process must to be done after all packets + # received. + if compressed: + if not self._decompressobj: + self._decompressobj = ZLibDecompressor( + suppress_deflate_header=True + ) + payload_merged = self._decompressobj.decompress_sync( + assembled_payload + _WS_DEFLATE_TRAILING, self._max_msg_size + ) + if self._decompressobj.unconsumed_tail: + left = len(self._decompressobj.unconsumed_tail) + raise WebSocketError( + WSCloseCode.MESSAGE_TOO_BIG, + "Decompressed message size {} exceeds limit {}".format( + self._max_msg_size + left, self._max_msg_size + ), + ) + else: + payload_merged = bytes(assembled_payload) + + if opcode == WSMsgType.TEXT: + try: + text = payload_merged.decode("utf-8") + except UnicodeDecodeError as exc: + raise WebSocketError( + WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" + ) from exc + + self.queue.feed_data(WSMessage(WSMsgType.TEXT, text, ""), len(text)) + continue + + self.queue.feed_data( + WSMessage(WSMsgType.BINARY, payload_merged, ""), len(payload_merged) + ) + elif opcode == WSMsgType.CLOSE: if len(payload) >= 2: close_code = UNPACK_CLOSE_CODE(payload[:2])[0] if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES: @@ -358,90 +448,10 @@ def _feed_data(self, data: bytes) -> Tuple[bool, bytes]: WSMessage(WSMsgType.PONG, payload, ""), len(payload) ) - elif ( - opcode not in (WSMsgType.TEXT, WSMsgType.BINARY) - and self._opcode is None - ): + else: raise WebSocketError( WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}" ) - else: - # load text/binary - if not fin: - # got partial frame payload - if opcode != WSMsgType.CONTINUATION: - self._opcode = opcode - self._partial.extend(payload) - if self._max_msg_size and 
len(self._partial) >= self._max_msg_size: - raise WebSocketError( - WSCloseCode.MESSAGE_TOO_BIG, - "Message size {} exceeds limit {}".format( - len(self._partial), self._max_msg_size - ), - ) - else: - # previous frame was non finished - # we should get continuation opcode - if self._partial: - if opcode != WSMsgType.CONTINUATION: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "The opcode in non-fin frame is expected " - "to be zero, got {!r}".format(opcode), - ) - - if opcode == WSMsgType.CONTINUATION: - assert self._opcode is not None - opcode = self._opcode - self._opcode = None - - self._partial.extend(payload) - if self._max_msg_size and len(self._partial) >= self._max_msg_size: - raise WebSocketError( - WSCloseCode.MESSAGE_TOO_BIG, - "Message size {} exceeds limit {}".format( - len(self._partial), self._max_msg_size - ), - ) - - # Decompress process must to be done after all packets - # received. - if compressed: - assert self._decompressobj is not None - self._partial.extend(_WS_DEFLATE_TRAILING) - payload_merged = self._decompressobj.decompress_sync( - self._partial, self._max_msg_size - ) - if self._decompressobj.unconsumed_tail: - left = len(self._decompressobj.unconsumed_tail) - raise WebSocketError( - WSCloseCode.MESSAGE_TOO_BIG, - "Decompressed message size {} exceeds limit {}".format( - self._max_msg_size + left, self._max_msg_size - ), - ) - else: - payload_merged = bytes(self._partial) - - self._partial.clear() - - if opcode == WSMsgType.TEXT: - try: - text = payload_merged.decode("utf-8") - self.queue.feed_data( - WSMessage(WSMsgType.TEXT, text, ""), len(text) - ) - except UnicodeDecodeError as exc: - raise WebSocketError( - WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" - ) from exc - else: - self.queue.feed_data( - WSMessage(WSMsgType.BINARY, payload_merged, ""), - len(payload_merged), - ) - - return False, b"" def parse_frame( self, buf: bytes @@ -521,23 +531,21 @@ def parse_frame( # read payload length if self._state is 
WSParserState.READ_PAYLOAD_LENGTH: - length = self._payload_length_flag - if length == 126: + length_flag = self._payload_length_flag + if length_flag == 126: if buf_length - start_pos < 2: break data = buf[start_pos : start_pos + 2] start_pos += 2 - length = UNPACK_LEN2(data)[0] - self._payload_length = length - elif length > 126: + self._payload_length = UNPACK_LEN2(data)[0] + elif length_flag > 126: if buf_length - start_pos < 8: break data = buf[start_pos : start_pos + 8] start_pos += 8 - length = UNPACK_LEN3(data)[0] - self._payload_length = length + self._payload_length = UNPACK_LEN3(data)[0] else: - self._payload_length = length + self._payload_length = length_flag self._state = ( WSParserState.READ_PAYLOAD_MASK @@ -560,11 +568,11 @@ def parse_frame( chunk_len = buf_length - start_pos if length >= chunk_len: self._payload_length = length - chunk_len - payload.extend(buf[start_pos:]) + payload += buf[start_pos:] start_pos = buf_length else: self._payload_length = 0 - payload.extend(buf[start_pos : start_pos + length]) + payload += buf[start_pos : start_pos + length] start_pos = start_pos + length if self._payload_length != 0: From c7ff47b3916aaa4bb94074660e6969dbf1b77305 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 17:35:50 +0000 Subject: [PATCH 0348/1511] [PR #8742/b4f9cb35 backport][3.10] Fix status not being set when ClientResponse.json raises ContentTypeError (#8745) Co-authored-by: J. 
Nick Koston <nick@koston.org> fixes #8741 --- CHANGES/8742.bugfix.rst | 1 + aiohttp/client_reqrep.py | 1 + tests/test_client_response.py | 2 ++ 3 files changed, 4 insertions(+) create mode 100644 CHANGES/8742.bugfix.rst diff --git a/CHANGES/8742.bugfix.rst b/CHANGES/8742.bugfix.rst new file mode 100644 index 00000000000..850f0390399 --- /dev/null +++ b/CHANGES/8742.bugfix.rst @@ -0,0 +1 @@ +Fixed :meth:`aiohttp.ClientResponse.json()` not setting ``status`` when :exc:`aiohttp.ContentTypeError` is raised -- by :user:`bdraco`. diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 2c10da4ff81..bea76d84c39 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -1199,6 +1199,7 @@ async def json( raise ContentTypeError( self.request_info, self.history, + status=self.status, message=( "Attempt to decode JSON with " "unexpected mimetype: %s" % ctype ), diff --git a/tests/test_client_response.py b/tests/test_client_response.py index 166089cc84a..628e3d71b92 100644 --- a/tests/test_client_response.py +++ b/tests/test_client_response.py @@ -659,11 +659,13 @@ async def test_json_invalid_content_type(loop, session) -> None: ) response._headers = {"Content-Type": "data/octet-stream"} response._body = b"" + response.status = 500 with pytest.raises(aiohttp.ContentTypeError) as info: await response.json() assert info.value.request_info == response.request_info + assert info.value.status == 500 async def test_json_no_content(loop, session) -> None: From 096bc9d5e6627074edfebb4d39090b06c082fc2c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 17:36:43 +0000 Subject: [PATCH 0349/1511] [PR #8742/b4f9cb35 backport][3.11] Fix status not being set when ClientResponse.json raises ContentTypeError (#8746) Co-authored-by: J. 
Nick Koston <nick@koston.org> fixes #8741 --- CHANGES/8742.bugfix.rst | 1 + aiohttp/client_reqrep.py | 1 + tests/test_client_response.py | 2 ++ 3 files changed, 4 insertions(+) create mode 100644 CHANGES/8742.bugfix.rst diff --git a/CHANGES/8742.bugfix.rst b/CHANGES/8742.bugfix.rst new file mode 100644 index 00000000000..850f0390399 --- /dev/null +++ b/CHANGES/8742.bugfix.rst @@ -0,0 +1 @@ +Fixed :meth:`aiohttp.ClientResponse.json()` not setting ``status`` when :exc:`aiohttp.ContentTypeError` is raised -- by :user:`bdraco`. diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 2c10da4ff81..bea76d84c39 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -1199,6 +1199,7 @@ async def json( raise ContentTypeError( self.request_info, self.history, + status=self.status, message=( "Attempt to decode JSON with " "unexpected mimetype: %s" % ctype ), diff --git a/tests/test_client_response.py b/tests/test_client_response.py index 166089cc84a..628e3d71b92 100644 --- a/tests/test_client_response.py +++ b/tests/test_client_response.py @@ -659,11 +659,13 @@ async def test_json_invalid_content_type(loop, session) -> None: ) response._headers = {"Content-Type": "data/octet-stream"} response._body = b"" + response.status = 500 with pytest.raises(aiohttp.ContentTypeError) as info: await response.json() assert info.value.request_info == response.request_info + assert info.value.status == 500 async def test_json_no_content(loop, session) -> None: From 83bd23970e5ccd8f40be7325d56cbf140e252367 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 18:17:16 +0000 Subject: [PATCH 0350/1511] [PR #8747/e97b17a6 backport][3.10] Make MESSAGE_TYPES_WITH_CONTENT a frozenset for the WebSocketReader (#8750) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/8747.misc.rst | 1 + aiohttp/http_websocket.py | 10 ++++++---- 2 files changed, 7 insertions(+), 4 deletions(-) create mode 120000 CHANGES/8747.misc.rst diff --git a/CHANGES/8747.misc.rst b/CHANGES/8747.misc.rst new file mode 120000 index 00000000000..9eb648d0e2b --- /dev/null +++ b/CHANGES/8747.misc.rst @@ -0,0 +1 @@ +8736.misc.rst \ No newline at end of file diff --git a/aiohttp/http_websocket.py b/aiohttp/http_websocket.py index ab1af44da7b..db0cb429d83 100644 --- a/aiohttp/http_websocket.py +++ b/aiohttp/http_websocket.py @@ -94,10 +94,12 @@ class WSMsgType(IntEnum): error = ERROR -MESSAGE_TYPES_WITH_CONTENT: Final = ( - WSMsgType.BINARY, - WSMsgType.TEXT, - WSMsgType.CONTINUATION, +MESSAGE_TYPES_WITH_CONTENT: Final = frozenset( + { + WSMsgType.BINARY, + WSMsgType.TEXT, + WSMsgType.CONTINUATION, + } ) WS_KEY: Final[bytes] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11" From 6086a2acc9d1dc211780557dda22db52127ea7a5 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 18:30:44 +0000 Subject: [PATCH 0351/1511] [PR #8747/e97b17a6 backport][3.11] Make MESSAGE_TYPES_WITH_CONTENT a frozenset for the WebSocketReader (#8751) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/8747.misc.rst | 1 + aiohttp/http_websocket.py | 10 ++++++---- 2 files changed, 7 insertions(+), 4 deletions(-) create mode 120000 CHANGES/8747.misc.rst diff --git a/CHANGES/8747.misc.rst b/CHANGES/8747.misc.rst new file mode 120000 index 00000000000..9eb648d0e2b --- /dev/null +++ b/CHANGES/8747.misc.rst @@ -0,0 +1 @@ +8736.misc.rst \ No newline at end of file diff --git a/aiohttp/http_websocket.py b/aiohttp/http_websocket.py index ab1af44da7b..db0cb429d83 100644 --- a/aiohttp/http_websocket.py +++ b/aiohttp/http_websocket.py @@ -94,10 +94,12 @@ class WSMsgType(IntEnum): error = ERROR -MESSAGE_TYPES_WITH_CONTENT: Final = ( - WSMsgType.BINARY, - WSMsgType.TEXT, - WSMsgType.CONTINUATION, +MESSAGE_TYPES_WITH_CONTENT: Final = frozenset( + { + WSMsgType.BINARY, + WSMsgType.TEXT, + WSMsgType.CONTINUATION, + } ) WS_KEY: Final[bytes] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11" From 5953df16d2329a05dc861607d54e9b7425e91119 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Mon, 19 Aug 2024 13:49:00 -0500 Subject: [PATCH 0352/1511] [PR #8749/5e30b49 backport][3.10] Bump cffi to 1.17.0 (#8752) --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 6 ++++-- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 9 insertions(+), 7 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 888f9a77899..fbdc31772a4 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -16,7 +16,7 @@ attrs==23.2.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in -cffi==1.15.1 +cffi==1.17.0 # via pycares frozenlist==1.4.1 # via diff --git a/requirements/constraints.txt b/requirements/constraints.txt index b40b4440ae0..d638e1c2a46 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -38,7 +38,7 @@ build==1.0.3 # via pip-tools certifi==2023.7.22 # via requests -cffi==1.15.0 +cffi==1.17.0 # via # cryptography # pycares diff --git a/requirements/dev.txt b/requirements/dev.txt index 3ad4f54b209..a4e341be912 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -36,7 +36,7 @@ build==1.0.3 # via pip-tools certifi==2023.7.22 # via requests -cffi==1.15.1 +cffi==1.17.0 # via # cryptography # pycares diff --git a/requirements/lint.txt b/requirements/lint.txt index 97809fe3dde..7ca49ba88d7 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -14,8 +14,10 @@ async-timeout==4.0.3 # via aioredis certifi==2024.2.2 # via requests -cffi==1.16.0 - # via pycares +cffi==1.17.0 + # via + # cryptography + # pycares cfgv==3.3.1 # via pre-commit charset-normalizer==3.3.2 diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 5f98dceaf9c..4c48153d142 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -16,7 +16,7 @@ attrs==23.2.0 # via 
-r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in -cffi==1.15.1 +cffi==1.17.0 # via pycares frozenlist==1.4.1 # via diff --git a/requirements/test.txt b/requirements/test.txt index 803705f6da0..c25b3b8ea44 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -20,7 +20,7 @@ brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in certifi==2023.7.22 # via requests -cffi==1.15.1 +cffi==1.17.0 # via # cryptography # pycares From bc996dfffe2ae8ae965ec6b7fd1d13a43cc544b5 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 19 Aug 2024 13:54:46 -0500 Subject: [PATCH 0353/1511] [PR #8749/5e30b49 backport][3.11] Bump cffi to 1.17.0 (#8753) --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 1d9a5bcc2a7..d90626583b0 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -16,7 +16,7 @@ attrs==24.2.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in -cffi==1.15.1 +cffi==1.17.0 # via pycares frozenlist==1.4.1 # via diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 2bcef5994e7..93e94b158b0 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -38,7 +38,7 @@ build==1.0.3 # via pip-tools certifi==2023.7.22 # via requests -cffi==1.15.0 +cffi==1.17.0 # via # cryptography # pycares diff --git a/requirements/dev.txt b/requirements/dev.txt index 525f2cc42cf..15cf7d21685 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -36,7 +36,7 @@ build==1.0.3 # via pip-tools certifi==2023.7.22 # via requests -cffi==1.15.1 
+cffi==1.17.0 # via # cryptography # pycares diff --git a/requirements/lint.txt b/requirements/lint.txt index ce24882252f..d5a6435c0b4 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -14,7 +14,7 @@ async-timeout==4.0.3 # via aioredis certifi==2024.2.2 # via requests -cffi==1.16.0 +cffi==1.17.0 # via # cryptography # pycares diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index fa715abe96f..977f25cb3be 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -16,7 +16,7 @@ attrs==24.2.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in -cffi==1.15.1 +cffi==1.17.0 # via pycares frozenlist==1.4.1 # via diff --git a/requirements/test.txt b/requirements/test.txt index 0eb9c032877..d663e411bdb 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -20,7 +20,7 @@ brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in certifi==2023.7.22 # via requests -cffi==1.15.1 +cffi==1.17.0 # via # cryptography # pycares From ec453ba6d1cf8f5bb502f2e2a861c463e755f387 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 19 Aug 2024 13:56:06 -0500 Subject: [PATCH 0354/1511] Release 3.10.5 (#8754) --- CHANGES.rst | 30 ++++++++++++++++++++++++++++++ CHANGES/8736.misc.rst | 1 - CHANGES/8742.bugfix.rst | 1 - CHANGES/8747.misc.rst | 1 - aiohttp/__init__.py | 2 +- 5 files changed, 31 insertions(+), 4 deletions(-) delete mode 100644 CHANGES/8736.misc.rst delete mode 100644 CHANGES/8742.bugfix.rst delete mode 120000 CHANGES/8747.misc.rst diff --git a/CHANGES.rst b/CHANGES.rst index a90142e8505..2bd19de71d6 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,36 @@ .. 
towncrier release notes start +3.10.5 (2024-08-19) +========================= + +Bug fixes +--------- + +- Fixed :meth:`aiohttp.ClientResponse.json()` not setting ``status`` when :exc:`aiohttp.ContentTypeError` is raised -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8742`. + + + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of the WebSocket reader -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8736`, :issue:`8747`. + + + + +---- + + 3.10.4 (2024-08-17) =================== diff --git a/CHANGES/8736.misc.rst b/CHANGES/8736.misc.rst deleted file mode 100644 index 34ed19aebba..00000000000 --- a/CHANGES/8736.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performance of the WebSocket reader -- by :user:`bdraco`. diff --git a/CHANGES/8742.bugfix.rst b/CHANGES/8742.bugfix.rst deleted file mode 100644 index 850f0390399..00000000000 --- a/CHANGES/8742.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed :meth:`aiohttp.ClientResponse.json()` not setting ``status`` when :exc:`aiohttp.ContentTypeError` is raised -- by :user:`bdraco`. diff --git a/CHANGES/8747.misc.rst b/CHANGES/8747.misc.rst deleted file mode 120000 index 9eb648d0e2b..00000000000 --- a/CHANGES/8747.misc.rst +++ /dev/null @@ -1 +0,0 @@ -8736.misc.rst \ No newline at end of file diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index d54dfedce2c..bd65f92f3c7 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.10.5.dev0" +__version__ = "3.10.5" from typing import TYPE_CHECKING, Tuple From 8f15f072a1e73d9f2a577d86fe6483bebd755447 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Mon, 19 Aug 2024 15:31:10 -0500 Subject: [PATCH 0355/1511] Bump version to 3.10.6.dev0 --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index bd65f92f3c7..bcc73f51ccd 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.10.5" +__version__ = "3.10.6.dev0" from typing import TYPE_CHECKING, Tuple From b6ebab27b38b109627484bd13c29a6cabb4b97ef Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 20:53:30 +0000 Subject: [PATCH 0356/1511] [PR #8755/d50f2759 backport][3.10] Update indirect dependencies with Dependabot (#8756) **This is a backport of PR #8755 as merged into master (d50f275992484d9a1d47c0cf09aba0bf5ff79cb1).** Co-authored-by: Sam Bull <git@sambull.org> --- .github/dependabot.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index d1898c69e6e..a67d0133738 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -12,6 +12,8 @@ updates: # Maintain dependencies for Python - package-ecosystem: "pip" directory: "/" + allow: + - dependency-type: "all" labels: - dependencies schedule: @@ -31,6 +33,8 @@ updates: # Maintain dependencies for Python aiohttp backport - package-ecosystem: "pip" directory: "/" + allow: + - dependency-type: "all" labels: - dependencies target-branch: "3.10" From 947b9a0624c1f13cfda4b48f896658e262b83494 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 22:04:12 +0000 Subject: [PATCH 0357/1511] [PR #8755/d50f2759 backport][3.11] Update indirect dependencies with Dependabot (#8757) **This is a backport of PR #8755 as merged into master (d50f275992484d9a1d47c0cf09aba0bf5ff79cb1).** Co-authored-by: Sam Bull <git@sambull.org> --- .github/dependabot.yml | 4 ++++ 1 file changed, 4 
insertions(+) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index deb81163faf..9cf1501e811 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -12,6 +12,8 @@ updates: # Maintain dependencies for Python - package-ecosystem: "pip" directory: "/" + allow: + - dependency-type: "all" labels: - dependencies schedule: @@ -31,6 +33,8 @@ updates: # Maintain dependencies for Python aiohttp backport - package-ecosystem: "pip" directory: "/" + allow: + - dependency-type: "all" labels: - dependencies target-branch: "3.11" From c1b110bae997c5d5b31ad6d489396e7bb6546484 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 22:06:34 +0000 Subject: [PATCH 0358/1511] Bump sphinxcontrib-applehelp from 1.0.2 to 1.0.4 (#8759) Bumps [sphinxcontrib-applehelp](https://github.com/sphinx-doc/sphinxcontrib-applehelp) from 1.0.2 to 1.0.4. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/sphinx-doc/sphinxcontrib-applehelp/blob/master/CHANGES.rst">sphinxcontrib-applehelp's changelog</a>.</em></p> <blockquote> <h1>Release 1.0.4 (2023-01-21)</h1> <ul> <li>Fix package name</li> </ul> <h1>Release 1.0.3 (2023-01-08)</h1> <ul> <li>Drop Python 3.7 and lower</li> <li>Fix deprecation warnings from Sphinx 6.1</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/sphinx-doc/sphinxcontrib-applehelp/commit/0af552585aed2193185577c4726c370fb0e11892"><code>0af5525</code></a> Bump 1.0.4 final</li> <li><a href="https://github.com/sphinx-doc/sphinxcontrib-applehelp/commit/11b0c9e58949fcf18e8c2273133a01d901a596e7"><code>11b0c9e</code></a> Switch back to <code>setuptools</code></li> <li><a href="https://github.com/sphinx-doc/sphinxcontrib-applehelp/commit/5657fa041f6bae5e10260adcfa278f8236736bdf"><code>5657fa0</code></a> Bump version</li> <li><a 
href="https://github.com/sphinx-doc/sphinxcontrib-applehelp/commit/dcf26a5b9914d05e2a27eed341b44fae7aa1bf31"><code>dcf26a5</code></a> Bump 1.0.3 final</li> <li><a href="https://github.com/sphinx-doc/sphinxcontrib-applehelp/commit/5556fda893a9cea5b65d1aa7551db832f9e04ad6"><code>5556fda</code></a> Add newer Python versions to trove classifiers</li> <li><a href="https://github.com/sphinx-doc/sphinxcontrib-applehelp/commit/a129a3065d31ac0dd02636a11d734cc2163312a6"><code>a129a30</code></a> Satisfy MyPy</li> <li><a href="https://github.com/sphinx-doc/sphinxcontrib-applehelp/commit/1fb2a0ddaebcb6d5eb31846e7a17687903d10e3f"><code>1fb2a0d</code></a> Update Tox commands</li> <li><a href="https://github.com/sphinx-doc/sphinxcontrib-applehelp/commit/5e6876ea36f389e3a929799f40643dc6a7f42c83"><code>5e6876e</code></a> Update Tox environment list</li> <li><a href="https://github.com/sphinx-doc/sphinxcontrib-applehelp/commit/8ead5e941c6fae3e6a740578bf91d777b244b331"><code>8ead5e9</code></a> Merge branch 'pyproject'</li> <li><a href="https://github.com/sphinx-doc/sphinxcontrib-applehelp/commit/a5f90db8409fc20dd0b24830ba51827ddd5a4b37"><code>a5f90db</code></a> Move tool configuration to individual files</li> <li>Additional commits viewable in <a href="https://github.com/sphinx-doc/sphinxcontrib-applehelp/compare/1.0.2...1.0.4">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=sphinxcontrib-applehelp&package-manager=pip&previous-version=1.0.2&new-version=1.0.4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/doc-spelling.txt | 9 +++++---- requirements/doc.txt | 9 +++++---- 3 files changed, 11 insertions(+), 9 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 93e94b158b0..ae739e9a47e 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -215,7 +215,7 @@ sphinx==7.1.2 # sphinxcontrib-blockdiag # 
sphinxcontrib-spelling # sphinxcontrib-towncrier -sphinxcontrib-applehelp==1.0.2 +sphinxcontrib-applehelp==1.0.4 # via sphinx sphinxcontrib-blockdiag==3.0.0 # via -r requirements/doc.in diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 9ee15189662..858c43782fc 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -54,11 +54,10 @@ requests==2.31.0 # via sphinx snowballstemmer==2.2.0 # via sphinx -sphinx==7.2.6 +sphinx==7.1.2 # via # -r requirements/doc.in # sphinxcontrib-blockdiag - # sphinxcontrib-serializinghtml # sphinxcontrib-spelling # sphinxcontrib-towncrier sphinxcontrib-applehelp==1.0.4 @@ -73,7 +72,7 @@ sphinxcontrib-jsmath==1.0.1 # via sphinx sphinxcontrib-qthelp==1.0.3 # via sphinx -sphinxcontrib-serializinghtml==1.1.9 +sphinxcontrib-serializinghtml==1.1.5 # via sphinx sphinxcontrib-spelling==8.0.0 ; platform_system != "Windows" # via -r requirements/doc-spelling.in @@ -96,4 +95,6 @@ zipp==3.17.0 # The following packages are considered to be unsafe in a requirements file: setuptools==68.0.0 - # via blockdiag + # via + # blockdiag + # sphinx diff --git a/requirements/doc.txt b/requirements/doc.txt index d9e7fb0ad7f..89a38bd8b5b 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -52,11 +52,10 @@ requests==2.31.0 # via sphinx snowballstemmer==2.2.0 # via sphinx -sphinx==7.2.6 +sphinx==7.1.2 # via # -r requirements/doc.in # sphinxcontrib-blockdiag - # sphinxcontrib-serializinghtml # sphinxcontrib-towncrier sphinxcontrib-applehelp==1.0.4 # via sphinx @@ -70,7 +69,7 @@ sphinxcontrib-jsmath==1.0.1 # via sphinx sphinxcontrib-qthelp==1.0.3 # via sphinx -sphinxcontrib-serializinghtml==1.1.9 +sphinxcontrib-serializinghtml==1.1.5 # via sphinx sphinxcontrib-towncrier==0.4.0a0 # via -r requirements/doc.in @@ -91,4 +90,6 @@ zipp==3.17.0 # The following packages are considered to be unsafe in a requirements file: setuptools==68.0.0 - # via blockdiag + # via + # blockdiag + # sphinx From 
787699787028366aa5956da65edccc5f430550b4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 22:13:58 +0000 Subject: [PATCH 0359/1511] Bump cfgv from 3.3.1 to 3.4.0 (#8764) Bumps [cfgv](https://github.com/asottile/cfgv) from 3.3.1 to 3.4.0. <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/asottile/cfgv/commit/44cc7353ad4238cee1fc2eb2f0aa07b78f0cbe27"><code>44cc735</code></a> v3.4.0</li> <li><a href="https://github.com/asottile/cfgv/commit/53641d074ab8294d36000509c2ffbe761f41abfa"><code>53641d0</code></a> Merge pull request <a href="https://redirect.github.com/asottile/cfgv/issues/122">#122</a> from asottile/custom-display-name</li> <li><a href="https://github.com/asottile/cfgv/commit/a9dbeca6fac21e1c38c03a1f67b01873a72de7f8"><code>a9dbeca</code></a> add a custom display name for loading from a file</li> <li><a href="https://github.com/asottile/cfgv/commit/a05f8f2d1842072775bea0d6d92a509102b01ad9"><code>a05f8f2</code></a> Merge pull request <a href="https://redirect.github.com/asottile/cfgv/issues/121">#121</a> from asottile/pre-commit-ci-update-config</li> <li><a href="https://github.com/asottile/cfgv/commit/38a74b3ca766a1721f7e049c05a807f48cbec51b"><code>38a74b3</code></a> [pre-commit.ci] pre-commit autoupdate</li> <li><a href="https://github.com/asottile/cfgv/commit/9a0589fa8d22eeafac10a0aa2ecfc97e0f3ae460"><code>9a0589f</code></a> Merge pull request <a href="https://redirect.github.com/asottile/cfgv/issues/120">#120</a> from asottile/pre-commit-ci-update-config</li> <li><a href="https://github.com/asottile/cfgv/commit/1cd03ccb9365f2f77db54a8320d5bceb40a82d3b"><code>1cd03cc</code></a> [pre-commit.ci] pre-commit autoupdate</li> <li><a href="https://github.com/asottile/cfgv/commit/285c4bcaab8ea312a8d74abed90c83457dadaecf"><code>285c4bc</code></a> Merge pull request <a href="https://redirect.github.com/asottile/cfgv/issues/119">#119</a> from 
asottile/pre-commit-ci-update-config</li> <li><a href="https://github.com/asottile/cfgv/commit/4cc61b20060c53db520f51f5079823a25e70c7ff"><code>4cc61b2</code></a> [pre-commit.ci] pre-commit autoupdate</li> <li><a href="https://github.com/asottile/cfgv/commit/26255c643f625e380d49dd8d24a1039a0cb4a264"><code>26255c6</code></a> Merge pull request <a href="https://redirect.github.com/asottile/cfgv/issues/118">#118</a> from asottile/all-repos_autofix_py38-plus</li> <li>Additional commits viewable in <a href="https://github.com/asottile/cfgv/compare/v3.3.1...v3.4.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=cfgv&package-manager=pip&previous-version=3.3.1&new-version=3.4.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index ae739e9a47e..d9efad1c215 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -42,7 +42,7 @@ cffi==1.17.0 # via # cryptography # pycares -cfgv==3.3.1 +cfgv==3.4.0 # via pre-commit charset-normalizer==3.2.0 # via requests diff --git a/requirements/dev.txt b/requirements/dev.txt index 15cf7d21685..bb14919009d 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -40,7 +40,7 @@ cffi==1.17.0 # via # cryptography # pycares -cfgv==3.3.1 +cfgv==3.4.0 # via pre-commit charset-normalizer==3.2.0 # via requests diff --git a/requirements/lint.txt b/requirements/lint.txt index d5a6435c0b4..23878d681f7 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -18,7 +18,7 @@ cffi==1.17.0 # via # cryptography # pycares -cfgv==3.3.1 +cfgv==3.4.0 # via pre-commit charset-normalizer==3.3.2 # via requests From b4b80393ef9f0d7ba76b4bb0e591db1b4e071f09 Mon Sep 17 00:00:00 2001 From: 
"dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 22:14:06 +0000 Subject: [PATCH 0360/1511] Bump pygments from 2.15.1 to 2.18.0 (#8766) Bumps [pygments](https://github.com/pygments/pygments) from 2.15.1 to 2.18.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pygments/pygments/releases">pygments's releases</a>.</em></p> <blockquote> <h2>2.18.0</h2> <ul> <li> <p>New lexers:</p> <ul> <li>Janet (<a href="https://redirect.github.com/pygments/pygments/issues/2557">#2557</a>)</li> <li>Lean 4 (<a href="https://redirect.github.com/pygments/pygments/issues/2618">#2618</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2626">#2626</a>)</li> <li>Luau (<a href="https://redirect.github.com/pygments/pygments/issues/2605">#2605</a>)</li> <li>Mojo (<a href="https://redirect.github.com/pygments/pygments/issues/2691">#2691</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2515">#2515</a>)</li> <li>org-mode (<a href="https://redirect.github.com/pygments/pygments/issues/2628">#2628</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2636">#2636</a>)</li> <li>Promela (<a href="https://redirect.github.com/pygments/pygments/issues/2620">#2620</a>)</li> <li>Soong / <code>Android.bp</code> (<a href="https://redirect.github.com/pygments/pygments/issues/2659">#2659</a>)</li> <li>Tact (<a href="https://redirect.github.com/pygments/pygments/issues/2571">#2571</a>)</li> <li>Typst (<a href="https://redirect.github.com/pygments/pygments/issues/2596">#2596</a>)</li> </ul> </li> <li> <p>Updated lexers:</p> <ul> <li>Awk: recognize ternary operator (<a href="https://redirect.github.com/pygments/pygments/issues/2687">#2687</a>)</li> <li>Bash: add <code>openrc</code> alias (<a href="https://redirect.github.com/pygments/pygments/issues/2599">#2599</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2371">#2371</a>)</li> <li>Coq: add keywords, lex more 
vernacular command arguments, produce fewer tokens on heading comments (<a href="https://redirect.github.com/pygments/pygments/issues/2678">#2678</a>)</li> <li>DNS zone files: Fix comment parsing (<a href="https://redirect.github.com/pygments/pygments/issues/2595">#2595</a>)</li> <li>Hy: Support unicode literals (<a href="https://redirect.github.com/pygments/pygments/issues/1126">#1126</a>)</li> <li>Inform6: Update to Inform 6.42 (<a href="https://redirect.github.com/pygments/pygments/issues/2644">#2644</a>)</li> <li>lean: Fix name handling (<a href="https://redirect.github.com/pygments/pygments/issues/2614">#2614</a>)</li> <li>Logtalk: add <code>uninstantiation</code> keyword and recognize escape sequences (<a href="https://redirect.github.com/pygments/pygments/issues/2619">#2619</a>)</li> <li>Macaulay2: Update to 1.23 (<a href="https://redirect.github.com/pygments/pygments/issues/2655">#2655</a>)</li> <li>Python: fix highlighting of soft keywords before <code>None</code>/<code>True</code>/<code>False</code></li> <li>reStructuredText: use <code>Token.Comment</code> for comments instead of <code>Comment.Preproc</code> (<a href="https://redirect.github.com/pygments/pygments/issues/2598">#2598</a>)</li> <li>Rust: highlight <code>:</code>, <code>::</code> and <code>-></code> as <code>Punctuation</code> and whitespace as <code>Whitespace</code>, instead of <code>Text</code> in both cases (<a href="https://redirect.github.com/pygments/pygments/issues/2631">#2631</a>)</li> <li>Spice: Add keywords (<a href="https://redirect.github.com/pygments/pygments/issues/2621">#2621</a>)</li> <li>SQL Explain: allow negative numbers (<a href="https://redirect.github.com/pygments/pygments/issues/2610">#2610</a>)</li> <li>Swift: Support multiline strings (<a href="https://redirect.github.com/pygments/pygments/issues/2681">#2681</a>)</li> <li>ThingsDB: add constants and new functions; support template strings (<a 
href="https://redirect.github.com/pygments/pygments/issues/2624">#2624</a>)</li> <li>UL4: support nested <code><?doc?></code> and <code><?note?></code> tags (<a href="https://redirect.github.com/pygments/pygments/issues/2597">#2597</a>)</li> <li>VHDL: support multi-line comments of VHDL-2008 (<a href="https://redirect.github.com/pygments/pygments/issues/2622">#2622</a>)</li> <li>Wikitext: Remove <code>kk-*</code> in <code>variant_langs</code> (<a href="https://redirect.github.com/pygments/pygments/issues/2647">#2647</a>)</li> <li>Xtend: Add <code>val</code> and <code>var</code> (<a href="https://redirect.github.com/pygments/pygments/issues/2602">#2602</a>)</li> </ul> </li> <li> <p>New styles:</p> <ul> <li>Coffee (<a href="https://redirect.github.com/pygments/pygments/issues/2609">#2609</a>)</li> </ul> </li> <li> <p>Make background colors in the image formatter work with Pillow 10.0 (<a href="https://redirect.github.com/pygments/pygments/issues/2623">#2623</a>)</p> </li> <li> <p>Require Python 3.8. As a result, the <code>importlib-metadata</code> package is no longer needed for fast plugin discovery on Python 3.7. The <code>plugins</code> extra (used as, e.g., <code>pip install pygments[plugins]</code>)</p> </li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pygments/pygments/blob/master/CHANGES">pygments's changelog</a>.</em></p> <blockquote> <h2>Version 2.18.0</h2> <p>(released May 4th, 2024)</p> <ul> <li> <p>New lexers:</p> <ul> <li>Janet (<a href="https://redirect.github.com/pygments/pygments/issues/2557">#2557</a>)</li> <li>Lean 4 (<a href="https://redirect.github.com/pygments/pygments/issues/2618">#2618</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2626">#2626</a>)</li> <li>Luau (<a href="https://redirect.github.com/pygments/pygments/issues/2605">#2605</a>)</li> <li>Mojo (<a href="https://redirect.github.com/pygments/pygments/issues/2691">#2691</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2515">#2515</a>)</li> <li>org-mode (<a href="https://redirect.github.com/pygments/pygments/issues/2628">#2628</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2636">#2636</a>)</li> <li>Promela (<a href="https://redirect.github.com/pygments/pygments/issues/2620">#2620</a>)</li> <li>Soong / <code>Android.bp</code> (<a href="https://redirect.github.com/pygments/pygments/issues/2659">#2659</a>)</li> <li>Tact (<a href="https://redirect.github.com/pygments/pygments/issues/2571">#2571</a>)</li> <li>Typst (<a href="https://redirect.github.com/pygments/pygments/issues/2596">#2596</a>)</li> </ul> </li> <li> <p>Updated lexers:</p> <ul> <li>Awk: recognize ternary operator (<a href="https://redirect.github.com/pygments/pygments/issues/2687">#2687</a>)</li> <li>Bash: add <code>openrc</code> alias (<a href="https://redirect.github.com/pygments/pygments/issues/2599">#2599</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2371">#2371</a>)</li> <li>Coq: add keywords, lex more vernacular command arguments, produce fewer tokens on heading comments (<a href="https://redirect.github.com/pygments/pygments/issues/2678">#2678</a>)</li> <li>DNS zone files: Fix 
comment parsing (<a href="https://redirect.github.com/pygments/pygments/issues/2595">#2595</a>)</li> <li>Hy: Support unicode literals (<a href="https://redirect.github.com/pygments/pygments/issues/1126">#1126</a>)</li> <li>Inform6: Update to Inform 6.42 (<a href="https://redirect.github.com/pygments/pygments/issues/2644">#2644</a>)</li> <li>lean: Fix name handling (<a href="https://redirect.github.com/pygments/pygments/issues/2614">#2614</a>)</li> <li>Logtalk: add <code>uninstantiation</code> keyword and recognize escape sequences (<a href="https://redirect.github.com/pygments/pygments/issues/2619">#2619</a>)</li> <li>Macaulay2: Update to 1.23 (<a href="https://redirect.github.com/pygments/pygments/issues/2655">#2655</a>)</li> <li>Python: fix highlighting of soft keywords before <code>None</code>/<code>True</code>/<code>False</code></li> <li>reStructuredText: use <code>Token.Comment</code> for comments instead of <code>Comment.Preproc</code> (<a href="https://redirect.github.com/pygments/pygments/issues/2598">#2598</a>)</li> <li>Rust: highlight <code>:</code>, <code>::</code> and <code>-></code> as <code>Punctuation</code> and whitespace as <code>Whitespace</code>, instead of <code>Text</code> in both cases (<a href="https://redirect.github.com/pygments/pygments/issues/2631">#2631</a>)</li> <li>Spice: Add keywords (<a href="https://redirect.github.com/pygments/pygments/issues/2621">#2621</a>)</li> <li>SQL Explain: allow negative numbers (<a href="https://redirect.github.com/pygments/pygments/issues/2610">#2610</a>)</li> <li>Swift: Support multiline strings (<a href="https://redirect.github.com/pygments/pygments/issues/2681">#2681</a>)</li> <li>ThingsDB: add constants and new functions; support template strings (<a href="https://redirect.github.com/pygments/pygments/issues/2624">#2624</a>)</li> <li>UL4: support nested <code><?doc?></code> and <code><?note?></code> tags (<a href="https://redirect.github.com/pygments/pygments/issues/2597">#2597</a>)</li> <li>VHDL: 
support multi-line comments of VHDL-2008 (<a href="https://redirect.github.com/pygments/pygments/issues/2622">#2622</a>)</li> <li>Wikitext: Remove <code>kk-*</code> in <code>variant_langs</code> (<a href="https://redirect.github.com/pygments/pygments/issues/2647">#2647</a>)</li> <li>Xtend: Add <code>val</code> and <code>var</code> (<a href="https://redirect.github.com/pygments/pygments/issues/2602">#2602</a>)</li> </ul> </li> <li> <p>New styles:</p> <ul> <li>Coffee (<a href="https://redirect.github.com/pygments/pygments/issues/2609">#2609</a>)</li> </ul> </li> <li> <p>Make background colors in the image formatter work with Pillow 10.0 (<a href="https://redirect.github.com/pygments/pygments/issues/2623">#2623</a>)</p> </li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pygments/pygments/commit/d7d11f6e6d3aa97805215c1cc833ea5f0ef1fcbb"><code>d7d11f6</code></a> Last steps for 2.18 release.</li> <li><a href="https://github.com/pygments/pygments/commit/ec7bfd2cc91a1bb2a7200b27c2c553309d689839"><code>ec7bfd2</code></a> Fix Janet version_added.</li> <li><a href="https://github.com/pygments/pygments/commit/ea9c8232b4edfdc7193f25f1253040e77342f878"><code>ea9c823</code></a> Update CHANGES.</li> <li><a href="https://github.com/pygments/pygments/commit/338d36665371cd9e4193b59a267d4f576d8eb05d"><code>338d366</code></a> Merge pull request <a href="https://redirect.github.com/pygments/pygments/issues/2670">#2670</a> from Kodiologist/hylex</li> <li><a href="https://github.com/pygments/pygments/commit/4d1371b30af2de7c6a74af6ef64673b657dfe3ea"><code>4d1371b</code></a> Lock down the pytest version.</li> <li><a href="https://github.com/pygments/pygments/commit/8dd97e04d47437581ca2fcb19a94aeb5cbd1dba2"><code>8dd97e0</code></a> Improve docs.</li> <li><a href="https://github.com/pygments/pygments/commit/26179d66122f2afacdc115071ce344af1984a55c"><code>26179d6</code></a> Fix 
deprecated variable usage in tests.</li> <li><a href="https://github.com/pygments/pygments/commit/ad125ca614097b5b02c4603bdbe63ec79b791473"><code>ad125ca</code></a> Prepare 2.18 release.</li> <li><a href="https://github.com/pygments/pygments/commit/24deeb9cae597db4d22496b0c2cc9e82d3e8a689"><code>24deeb9</code></a> Lock the ruff version in tox.ini.</li> <li><a href="https://github.com/pygments/pygments/commit/c9165cf7fb18f01066222ec7d063e5e5975f2a69"><code>c9165cf</code></a> Fix format string usage.</li> <li>Additional commits viewable in <a href="https://github.com/pygments/pygments/compare/2.15.1...2.18.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pygments&package-manager=pip&previous-version=2.15.1&new-version=2.18.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- requirements/lint.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index d9efad1c215..0d715df8139 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -156,7 +156,7 @@ pydantic-core==2.6.0 # via pydantic pyenchant==3.2.2 # via sphinxcontrib-spelling -pygments==2.15.1 +pygments==2.18.0 # via sphinx pyjwt==2.3.0 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index bb14919009d..d068b291288 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -149,7 +149,7 @@ pydantic==2.2.0 # via python-on-whales pydantic-core==2.6.0 # via pydantic -pygments==2.15.1 +pygments==2.18.0 # via sphinx pyjwt==2.8.0 # via diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 858c43782fc..76d8f40bb13 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -46,7 +46,7 @@ pillow==9.5.0 # blockdiag pyenchant==3.2.2 # via sphinxcontrib-spelling 
-pygments==2.15.1 +pygments==2.18.0 # via sphinx pytz==2023.3.post1 # via babel diff --git a/requirements/doc.txt b/requirements/doc.txt index 89a38bd8b5b..fac8fda5c47 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -44,7 +44,7 @@ pillow==9.5.0 # via # -c requirements/broken-projects.in # blockdiag -pygments==2.15.1 +pygments==2.18.0 # via sphinx pytz==2023.3.post1 # via babel diff --git a/requirements/lint.txt b/requirements/lint.txt index 23878d681f7..65e0726ae3b 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -70,7 +70,7 @@ pydantic==2.7.1 # via python-on-whales pydantic-core==2.18.2 # via pydantic -pygments==2.17.2 +pygments==2.18.0 # via rich pytest==8.3.2 # via From ff8800f46f4a51f8d30da0012be13a2a06b994ec Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 22:14:10 +0000 Subject: [PATCH 0361/1511] Bump backports-entry-points-selectable from 1.1.1 to 1.3.0 (#8769) Bumps [backports-entry-points-selectable](https://github.com/jaraco/backports.entry_points_selectable) from 1.1.1 to 1.3.0. 
<details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/jaraco/backports.entry_points_selectable/blob/main/NEWS.rst">backports-entry-points-selectable's changelog</a>.</em></p> <blockquote> <h1>v1.3.0</h1> <h2>Features</h2> <ul> <li>Require Python 3.8 or later.</li> </ul> <h1>v1.2.0</h1> <p>Refreshed packaging.</p> <p>Require Python 3.7 or later.</p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/jaraco/backports.entry_points_selectable/commit/27a788feb0b51d89dd405d2d80cf3e0dac19bbf0"><code>27a788f</code></a> Finalize</li> <li><a href="https://github.com/jaraco/backports.entry_points_selectable/commit/b15df87577cab060a02667767bcc96b2dfd5b91a"><code>b15df87</code></a> Prefer imperative voice</li> <li><a href="https://github.com/jaraco/backports.entry_points_selectable/commit/dfddd1dd797e5bc82a1eeef2f452cdc7e21f34c1"><code>dfddd1d</code></a> Merge <a href="https://github.com/jaraco/skeleton">https://github.com/jaraco/skeleton</a></li> <li><a href="https://github.com/jaraco/backports.entry_points_selectable/commit/5732ebeeaa9480f8cd80c96a3183d7b247f27214"><code>5732ebe</code></a><code>jaraco/skeleton#95</code></li> <li><a href="https://github.com/jaraco/backports.entry_points_selectable/commit/75d9cc1b7cb6f84e7a16a83ec3abb9a478fdb130"><code>75d9cc1</code></a><code>jaraco/skeleton#94</code></li> <li><a href="https://github.com/jaraco/backports.entry_points_selectable/commit/03f03e7802b0842b41f70b2b1c17ab26551a7533"><code>03f03e7</code></a> Limit sphinxlint jobs to 1. 
Workaround for <a href="https://redirect.github.com/sphinx-contrib/sphinx-lint/issues/83">sphinx-contrib/sphinx-lint#83</a>.</li> <li><a href="https://github.com/jaraco/backports.entry_points_selectable/commit/d8e796e4e3517b678281964b23a74ffb92be4489"><code>d8e796e</code></a> Merge <a href="https://github.com/jaraco/skeleton">https://github.com/jaraco/skeleton</a></li> <li><a href="https://github.com/jaraco/backports.entry_points_selectable/commit/ca1831c2148fe5ddbffd001de76ff5f6005f812c"><code>ca1831c</code></a> Prefer <code>pass_env</code> in tox config. Preferred failure mode for <a href="https://redirect.github.com/tox-dev/tox/issues/312">tox-dev/tox#312</a>...</li> <li><a href="https://github.com/jaraco/backports.entry_points_selectable/commit/928e9a86d61d3a660948bcba7689f90216cc8243"><code>928e9a8</code></a> Add FORCE_COLOR to the TOX_OVERRIDE for GHA. Requires tox 4.11.1. Closes jara...</li> <li><a href="https://github.com/jaraco/backports.entry_points_selectable/commit/a6256e2935468b72a61aa7fda1e036faef3bfb3d"><code>a6256e2</code></a> Add descriptions to the tox environments. Closes <a href="https://redirect.github.com/jaraco/skeleton/issues/91">jaraco/skeleton#91</a>.</li> <li>Additional commits viewable in <a href="https://github.com/jaraco/backports.entry_points_selectable/compare/v1.1.1...v1.3.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=backports-entry-points-selectable&package-manager=pip&previous-version=1.1.1&new-version=1.3.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 0d715df8139..f5553b21832 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -28,7 +28,7 @@ attrs==24.2.0 # via -r requirements/runtime-deps.in babel==2.9.1 # via sphinx -backports-entry-points-selectable==1.1.1 
+backports-entry-points-selectable==1.3.0 # via virtualenv blockdiag==2.0.1 # via sphinxcontrib-blockdiag From 170c78d5ea711602304b4d133c04fda2d7f59cc3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 22:37:44 +0000 Subject: [PATCH 0362/1511] Bump tqdm from 4.62.3 to 4.66.5 (#8771) Bumps [tqdm](https://github.com/tqdm/tqdm) from 4.62.3 to 4.66.5. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/tqdm/tqdm/releases">tqdm's releases</a>.</em></p> <blockquote> <h2>tqdm v4.66.5 stable</h2> <ul> <li>support <code>ncols</code> auto-detection on FreeBSD (<a href="https://redirect.github.com/tqdm/tqdm/issues/1602">#1602</a> <- <a href="https://redirect.github.com/casperdcl/git-fame/issues/98">casperdcl/git-fame#98</a>)</li> <li>fix Python 3.13 CLI (<a href="https://redirect.github.com/tqdm/tqdm/issues/1594">#1594</a> <- <a href="https://redirect.github.com/tqdm/tqdm/issues/1585">#1585</a>)</li> <li>fix Python 3.13 tests (<a href="https://redirect.github.com/tqdm/tqdm/issues/1595">#1595</a> <- <a href="https://redirect.github.com/python/cpython/issues/117536#issuecomment-2036883124">python/cpython#117536</a>)</li> <li>misc framework updates (<a href="https://redirect.github.com/tqdm/tqdm/issues/1602">#1602</a>) <ul> <li>add official Python 3.12 support</li> <li>bump deps (<a href="https://redirect.github.com/NiklasRosenstein/pydoc-markdown/issues/329">NiklasRosenstein/pydoc-markdown#329</a>, <a href="https://redirect.github.com/tikitu/jsmin/pull/44">tikitu/jsmin#44</a>)</li> </ul> </li> </ul> <h2>tqdm v4.66.4 stable</h2> <ul> <li><code>rich</code>: fix completion (<a href="https://redirect.github.com/tqdm/tqdm/issues/1395">#1395</a> <- <a href="https://redirect.github.com/tqdm/tqdm/issues/1306">#1306</a>)</li> <li>minor framework updates & code tidy (<a href="https://redirect.github.com/tqdm/tqdm/issues/1578">#1578</a>)</li> </ul> <h2>tqdm v4.66.3 stable</h2> 
<ul> <li><code>cli</code>: <code>eval</code> safety (fixes CVE-2024-34062, GHSA-g7vv-2v7x-gj9p)</li> </ul> <h2>tqdm v4.66.2 stable</h2> <ul> <li><code>pandas</code>: add <code>DataFrame.progress_map</code> (<a href="https://redirect.github.com/tqdm/tqdm/issues/1549">#1549</a>)</li> <li><code>notebook</code>: fix HTML padding (<a href="https://redirect.github.com/tqdm/tqdm/issues/1506">#1506</a>)</li> <li><code>keras</code>: fix resuming training when <code>verbose>=2</code> (<a href="https://redirect.github.com/tqdm/tqdm/issues/1508">#1508</a>)</li> <li>fix <code>format_num</code> negative fractions missing leading zero (<a href="https://redirect.github.com/tqdm/tqdm/issues/1548">#1548</a>)</li> <li>fix Python 3.12 <code>DeprecationWarning</code> on <code>import</code> (<a href="https://redirect.github.com/tqdm/tqdm/issues/1519">#1519</a>)</li> <li>linting: use f-strings (<a href="https://redirect.github.com/tqdm/tqdm/issues/1549">#1549</a>)</li> <li>update tests (<a href="https://redirect.github.com/tqdm/tqdm/issues/1549">#1549</a>) <ul> <li>fix <code>pandas</code> warnings</li> <li>fix <code>asv</code> (<a href="https://redirect.github.com/airspeed-velocity/asv/issues/1323">airspeed-velocity/asv#1323</a>)</li> <li>fix macos <code>notebook</code> docstring indentation</li> </ul> </li> <li>CI: bump actions (<a href="https://redirect.github.com/tqdm/tqdm/issues/1549">#1549</a>)</li> </ul> <h2>tqdm v4.66.1 stable</h2> <ul> <li>fix <code>utils.envwrap</code> types (<a href="https://redirect.github.com/tqdm/tqdm/issues/1493">#1493</a> <- <a href="https://redirect.github.com/tqdm/tqdm/issues/1491">#1491</a>, <a href="https://redirect.github.com/tqdm/tqdm/issues/1320">#1320</a> <- <a href="https://redirect.github.com/tqdm/tqdm/issues/966">#966</a>, <a href="https://redirect.github.com/tqdm/tqdm/issues/1319">#1319</a>) <ul> <li>e.g. 
cloudwatch & kubernetes workaround: <code>export TQDM_POSITION=-1</code></li> </ul> </li> <li>drop mentions of unsupported Python versions</li> </ul> <h2>tqdm v4.66.0 stable</h2> <ul> <li>environment variables to override defaults (<code>TQDM_*</code>) (<a href="https://redirect.github.com/tqdm/tqdm/issues/1491">#1491</a> <- <a href="https://redirect.github.com/tqdm/tqdm/issues/1061">#1061</a>, <a href="https://redirect.github.com/tqdm/tqdm/issues/950">#950</a> <- <a href="https://redirect.github.com/tqdm/tqdm/issues/614">#614</a>, <a href="https://redirect.github.com/tqdm/tqdm/issues/1318">#1318</a>, <a href="https://redirect.github.com/tqdm/tqdm/issues/619">#619</a>, <a href="https://redirect.github.com/tqdm/tqdm/issues/612">#612</a>, <a href="https://redirect.github.com/tqdm/tqdm/issues/370">#370</a>) <ul> <li>e.g. in CI jobs, <code>export TQDM_MININTERVAL=5</code> to avoid log spam</li> <li>add tests & docs for <code>tqdm.utils.envwrap</code></li> </ul> </li> <li>fix & update CLI completion</li> <li>fix & update API docs</li> <li>minor code tidy: replace <code>os.path</code> => <code>pathlib.Path</code></li> <li>fix docs image hosting</li> <li>release with CI bot account again (<a href="https://redirect.github.com/cli/cli/issues/6680">cli/cli#6680</a>)</li> </ul> <h2>tqdm v4.65.2 stable</h2> <ul> <li>exclude <code>examples</code> from distributed wheel (<a href="https://redirect.github.com/tqdm/tqdm/issues/1492">#1492</a>)</li> </ul> <h2>tqdm v4.65.1 stable</h2> <ul> <li>migrate <code>setup.{cfg,py}</code> => <code>pyproject.toml</code> (<a href="https://redirect.github.com/tqdm/tqdm/issues/1490">#1490</a>) <ul> <li>fix <code>asv</code> benchmarks</li> </ul> </li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/tqdm/tqdm/commit/951a2ba8d8754b7385e6e8c08dae9045f73b1438"><code>951a2ba</code></a> Merge pull request <a href="https://redirect.github.com/tqdm/tqdm/issues/1595">#1595</a> from hroncok/py3.13-await-aclose</li> <li><a href="https://github.com/tqdm/tqdm/commit/2fbad6ad511e551efe868e70f61c876e0c467fd0"><code>2fbad6a</code></a> Avoid Python 3.13+ RuntimeWarning: coroutine method 'aclose' of 'acount' was ...</li> <li><a href="https://github.com/tqdm/tqdm/commit/025434544eeda158e340d330391af9bc7278d5d9"><code>0254345</code></a> Merge pull request <a href="https://redirect.github.com/tqdm/tqdm/issues/1594">#1594</a> from mgorny/py313-docstring</li> <li><a href="https://github.com/tqdm/tqdm/commit/43230f6095a1ab5068481d543dc7ec3a60a3c08b"><code>43230f6</code></a> slight lint</li> <li><a href="https://github.com/tqdm/tqdm/commit/5ba65950bdca7e7a7520869df87f398cffbbe585"><code>5ba6595</code></a> cli: Fix docstring processing with Python 3.13+</li> <li><a href="https://github.com/tqdm/tqdm/commit/448946ae03ddafcbbb0f622bbad8f58dd12b2b58"><code>448946a</code></a> Merge pull request <a href="https://redirect.github.com/tqdm/tqdm/issues/1602">#1602</a> from tqdm/devel</li> <li><a href="https://github.com/tqdm/tqdm/commit/46cd3958045370f56f68faf6e12877c540419ec8"><code>46cd395</code></a> add py3.12 support</li> <li><a href="https://github.com/tqdm/tqdm/commit/d8ac65641ddfa87c3c6b1f729b3e89bb002fa600"><code>d8ac656</code></a> ncols: support FreeBSD</li> <li><a href="https://github.com/tqdm/tqdm/commit/4f662763e6b97c3288497e653d394681427694ac"><code>4f66276</code></a> bump deps & linters</li> <li><a href="https://github.com/tqdm/tqdm/commit/54796cc1519dceb9bd20edaf3e1664d4cdf125a7"><code>54796cc</code></a> docs: bump versions</li> <li>Additional commits viewable in <a href="https://github.com/tqdm/tqdm/compare/v4.62.3...v4.66.5">compare view</a></li> </ul> </details> <br /> 
[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=tqdm&package-manager=pip&previous-version=4.62.3&new-version=4.66.5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index f5553b21832..2512d5e5bf9 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -248,7 +248,7 @@ towncrier==23.11.0 # via # -r requirements/doc.in # sphinxcontrib-towncrier -tqdm==4.62.3 +tqdm==4.66.5 # via python-on-whales trustme==1.1.0 ; platform_machine != "i686" # via diff --git a/requirements/dev.txt b/requirements/dev.txt index d068b291288..1a449e6abf1 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -234,7 +234,7 @@ towncrier==23.11.0 # via # -r requirements/doc.in # sphinxcontrib-towncrier -tqdm==4.65.0 +tqdm==4.66.5 # via python-on-whales trustme==1.1.0 ; platform_machine != "i686" # via diff --git a/requirements/lint.txt b/requirements/lint.txt index 65e0726ae3b..066e73c6ad4 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -99,7 +99,7 @@ tomli==2.0.1 # mypy # pytest # slotscheck -tqdm==4.66.2 +tqdm==4.66.5 # via 
python-on-whales trustme==1.1.0 # via -r requirements/lint.in diff --git a/requirements/test.txt b/requirements/test.txt index d663e411bdb..2d07a0e787c 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -105,7 +105,7 @@ tomli==2.0.1 # coverage # mypy # pytest -tqdm==4.65.0 +tqdm==4.66.5 # via python-on-whales trustme==1.1.0 ; platform_machine != "i686" # via -r requirements/test.in From 0c2626958756c8fb24a7bdd05e4b2009e367f6c9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 23:02:29 +0000 Subject: [PATCH 0363/1511] Bump build from 1.0.3 to 1.2.1 (#8775) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [build](https://github.com/pypa/build) from 1.0.3 to 1.2.1. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pypa/build/releases">build's releases</a>.</em></p> <blockquote> <h2>Version 1.2.1</h2> <h2>What's Changed</h2> <ul> <li>Avoid error when terminal width is undetectable on Python < 3.11 (PR <a href="https://redirect.github.com/pypa/build/issues/761">#761</a>)</li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pypa/build/compare/1.2.0...1.2.1">https://github.com/pypa/build/compare/1.2.0...1.2.1</a></p> <h2>Version 1.2.0</h2> <h2>What's Changed</h2> <ul> <li>Add <code>--installer</code> option, supporting <code>pip</code> and <code>uv</code>. Added <code>uv</code> extra. 
(PR <a href="https://redirect.github.com/pypa/build/issues/751">#751</a>)</li> <li>Improve console output and provide <code>-v</code> for dependency installation (PR <a href="https://redirect.github.com/pypa/build/issues/749">#749</a>)</li> <li>Avoid compiling unused bytecode when using <code>pip</code> (PR <a href="https://redirect.github.com/pypa/build/issues/752">#752</a>)</li> <li>Dropped support for Python 3.7 (PR <a href="https://redirect.github.com/pypa/build/issues/743">#743</a>)</li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pypa/build/compare/1.1.1...1.2.0">https://github.com/pypa/build/compare/1.1.1...1.2.0</a></p> <h2>Version 1.1.1</h2> <h2>What's Changed</h2> <ul> <li>Fixed invoking outer pip from user site packages (PR <a href="https://redirect.github.com/pypa/build/issues/746">#746</a>, fixes issue <a href="https://redirect.github.com/pypa/build/issues/745">#745</a>)</li> <li>Corrected the minimum pip version required to use an outer pip (PR <a href="https://redirect.github.com/pypa/build/issues/746">#746</a>, fixes issue <a href="https://redirect.github.com/pypa/build/issues/745">#745</a>)</li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pypa/build/compare/v1.1.0...1.1.1">https://github.com/pypa/build/compare/v1.1.0...1.1.1</a></p> <h2>Version 1.1.0</h2> <h2>What's Changed</h2> <ul> <li>Use external pip if available instead of installing, speeds up environment setup with virtualenv slightly and venv significantly. (PR <a href="https://redirect.github.com/pypa/build/issues/736">#736</a>)</li> <li>Stopped injecting <code>wheel</code> as a build dependency automatically, in the case of missing <code>pyproject.toml</code> -- by <a href="https://github.com/webknjaz"><code>@​webknjaz</code></a>. 
(PR <a href="https://redirect.github.com/pypa/build/issues/716">#716</a>)</li> <li>Use <code>importlib_metadata</code> on Python <3.10.2 for bugfixes not present in those CPython standard libraries (not required when bootstrapping) -- by <a href="https://github.com/GianlucaFicarelli"><code>@​GianlucaFicarelli</code></a>. (PR <a href="https://redirect.github.com/pypa/build/issues/693">#693</a>, fixes issue <a href="https://redirect.github.com/pypa/build/issues/692">#692</a>)</li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/MichaReiser"><code>@​MichaReiser</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/build/pull/697">pypa/build#697</a></li> <li><a href="https://github.com/GianlucaFicarelli"><code>@​GianlucaFicarelli</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/build/pull/693">pypa/build#693</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pypa/build/compare/1.0.3...v1.1.0">https://github.com/pypa/build/compare/1.0.3...v1.1.0</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/build/blob/main/CHANGELOG.rst">build's changelog</a>.</em></p> <blockquote> <h1>1.2.1 (2024-03-28)</h1> <ul> <li>Avoid error when terminal width is undetectable on Python < 3.11 (PR :pr:<code>761</code>)</li> </ul> <h1>1.2.0 (2024-03-27)</h1> <ul> <li>Add <code>--installer</code> option, supporting <code>pip</code> and <code>uv</code>. Added <code>uv</code> extra. 
(PR :pr:<code>751</code>)</li> <li>Improve console output and provide <code>-v</code> for dependency installation (PR :pr:<code>749</code>)</li> <li>Avoid compiling unused bytecode when using <code>pip</code> (PR :pr:<code>752</code>)</li> <li>Dropped support for Python 3.7 (PR :pr:<code>743</code>)</li> </ul> <h1>1.1.1 (2024-02-29)</h1> <ul> <li>Fixed invoking outer pip from user site packages (PR :pr:<code>746</code>, fixes issue :issue:<code>745</code>)</li> <li>Corrected the minimum pip version required to use an outer pip (PR :pr:<code>746</code>, fixes issue :issue:<code>745</code>)</li> </ul> <h1>1.1.0 (2024-02-29)</h1> <ul> <li>Use external pip if available instead of installing, speeds up environment setup with virtualenv slightly and venv significantly. (PR :pr:<code>736</code>)</li> <li>Stopped injecting <code>wheel</code> as a build dependency automatically, in the case of missing <code>pyproject.toml</code> -- by :user:<code>webknjaz</code>. (PR :pr:<code>716</code>)</li> <li>Use <code>importlib_metadata</code> on Python <3.10.2 for bugfixes not present in those CPython standard libraries (not required when bootstrapping) -- by :user:<code>GianlucaFicarelli</code>. 
(PR :pr:<code>693</code>, fixes issue :issue:<code>692</code>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/build/commit/1e67c062e9d1b1a6d5ffed621f4b29902bb764e5"><code>1e67c06</code></a> chore: bump version number to 1.2.1</li> <li><a href="https://github.com/pypa/build/commit/e5072e3de2eb1baa4034247d345916f54f9e8e91"><code>e5072e3</code></a> fix: support min width not detectable (<a href="https://redirect.github.com/pypa/build/issues/761">#761</a>)</li> <li><a href="https://github.com/pypa/build/commit/d5fb6fbecce164e01065b62b52ac4f270d09183d"><code>d5fb6fb</code></a> chore: prepare for 1.2.0 (<a href="https://redirect.github.com/pypa/build/issues/758">#758</a>)</li> <li><a href="https://github.com/pypa/build/commit/1ae6eb177dc8b5282976e9c6796652616c20ff9d"><code>1ae6eb1</code></a> pre-commit: bump repositories (<a href="https://redirect.github.com/pypa/build/issues/757">#757</a>)</li> <li><a href="https://github.com/pypa/build/commit/a1f005d840d51116e707b6f62c4abc25b57258de"><code>a1f005d</code></a> pre-commit: bump repositories (<a href="https://redirect.github.com/pypa/build/issues/756">#756</a>)</li> <li><a href="https://github.com/pypa/build/commit/5076a56d90f34e227eb368d5dded1e3122e15115"><code>5076a56</code></a> uv: support double verbosity flag</li> <li><a href="https://github.com/pypa/build/commit/566266918357bc032837ca273729a8ddfe2ac4a8"><code>5662669</code></a> chore: bump mypy</li> <li><a href="https://github.com/pypa/build/commit/24c513d4c9402cdac1a1c46cc01597f011ba6645"><code>24c513d</code></a> chore: reformat using Black 2024 style</li> <li><a href="https://github.com/pypa/build/commit/08cdb76c17b2599ebfc85dbfe33b33271a48f221"><code>08cdb76</code></a> ruff: bump version and update config</li> <li><a href="https://github.com/pypa/build/commit/97ea57bb79e41f17d76023a4384cb7199f743060"><code>97ea57b</code></a> perf: avoid compiling unused bytecode (<a 
href="https://redirect.github.com/pypa/build/issues/752">#752</a>)</li> <li>Additional commits viewable in <a href="https://github.com/pypa/build/compare/1.0.3...1.2.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=build&package-manager=pip&previous-version=1.0.3&new-version=1.2.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 2512d5e5bf9..51e32642ad4 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -34,7 +34,7 @@ blockdiag==2.0.1 # via sphinxcontrib-blockdiag brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in -build==1.0.3 +build==1.2.1 # via pip-tools certifi==2023.7.22 # via requests diff --git a/requirements/dev.txt b/requirements/dev.txt index 1a449e6abf1..01255801616 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -32,7 +32,7 @@ blockdiag==3.0.0 # via sphinxcontrib-blockdiag brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in -build==1.0.3 +build==1.2.1 # via pip-tools certifi==2023.7.22 # via requests From eadc376b4e891ece42aeae71c7bab688d750ba41 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 23:49:22 +0000 Subject: [PATCH 0364/1511] Bump pycares 
from 4.3.0 to 4.4.0 (#8774) Bumps [pycares](https://github.com/saghul/pycares) from 4.3.0 to 4.4.0. <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/saghul/pycares/commit/3d0f9cf2a015fb78617a0d5112d1c7828847d825"><code>3d0f9cf</code></a> Bump version to 4.4.0</li> <li><a href="https://github.com/saghul/pycares/commit/bc7630f4c0f214c8b80de98ca5f9305e38af0bdc"><code>bc7630f</code></a> Add support for 3.12, drop EOL 3.7</li> <li><a href="https://github.com/saghul/pycares/commit/86baf7598e4cf599ce3f88601bea155054c2282c"><code>86baf75</code></a> Bump versions of used GitHub Actions</li> <li><a href="https://github.com/saghul/pycares/commit/d62a60c6abb20d8b16e1de287863659bc0163a3d"><code>d62a60c</code></a> Bump GitHub Actions versions and fix warnings in the process</li> <li>See full diff in <a href="https://github.com/saghul/pycares/compare/pycares-4.3.0...v4.4.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pycares&package-manager=pip&previous-version=4.3.0&new-version=4.4.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index d90626583b0..1327703b7ba 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -32,7 +32,7 @@ multidict==6.0.5 # yarl packaging==23.1 # 
via gunicorn -pycares==4.3.0 +pycares==4.4.0 # via aiodns pycparser==2.21 # via cffi diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 51e32642ad4..392c41eb376 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -146,7 +146,7 @@ pre-commit==3.5.0 # via -r requirements/lint.in proxy-py==2.4.7 # via -r requirements/test.in -pycares==4.3.0 +pycares==4.4.0 # via aiodns pycparser==2.21 # via cffi diff --git a/requirements/dev.txt b/requirements/dev.txt index 01255801616..df5552b908a 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -141,7 +141,7 @@ pre-commit==3.5.0 # via -r requirements/lint.in proxy-py==2.4.7 # via -r requirements/test.in -pycares==4.3.0 +pycares==4.4.0 # via aiodns pycparser==2.21 # via cffi diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 977f25cb3be..f8bfd4b6b21 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -28,7 +28,7 @@ multidict==6.0.5 # via # -r requirements/runtime-deps.in # yarl -pycares==4.3.0 +pycares==4.4.0 # via aiodns pycparser==2.21 # via cffi diff --git a/requirements/test.txt b/requirements/test.txt index 2d07a0e787c..7a8bf4ca058 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -69,7 +69,7 @@ pluggy==1.5.0 # via pytest proxy-py==2.4.7 # via -r requirements/test.in -pycares==4.3.0 +pycares==4.4.0 # via aiodns pycparser==2.21 # via cffi From a5b66e3bcd3d917b304fef13858ce136a339f65c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 20 Aug 2024 00:05:39 +0000 Subject: [PATCH 0365/1511] Bump blockdiag from 2.0.1 to 3.0.0 (#8761) Bumps [blockdiag](https://github.com/blockdiag/blockdiag) from 2.0.1 to 3.0.0. 
<details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/blockdiag/blockdiag/blob/master/CHANGES.rst">blockdiag's changelog</a>.</em></p> <blockquote> <h2>3.0.0 (2021-12-06)</h2> <ul> <li> <p>Drop python3.6 support</p> </li> <li> <p>Use funcparserlib-1.0.0a0 or newer to support new python versions</p> </li> <li> <p>Allow to write multiline string via triple quotes (""" ... """)</p> </li> <li> <p>Fix a bug</p> <ul> <li>Fix <a href="https://redirect.github.com/blockdiag/blockdiag/issues/147">#147</a>: file existence disclosure using svg renderer</li> </ul> </li> </ul> <h2>2.0.0 (2020-02-01)</h2> <ul> <li> <p>Fix a bug</p> <ul> <li>Fix <a href="https://redirect.github.com/blockdiag/blockdiag/issues/126">#126</a>: '_io.BufferedRandom' object has no attribute 'buffer'</li> </ul> </li> </ul> <h2>2.0.0 (2020-01-26)</h2> <ul> <li> <p>Drop python2 and python3.4 support</p> </li> <li> <p>Fix a bug</p> <ul> <li>Fix <a href="https://redirect.github.com/blockdiag/blockdiag/issues/109">#109</a> blockdiag does not work with recent pillow</li> </ul> </li> </ul> <h2>1.5.4 (2018-07-22)</h2> <ul> <li> <p>Fix bug</p> <ul> <li>Fix <a href="https://redirect.github.com/blockdiag/blockdiag/issues/94">#94</a> Python 3.7 compatibility</li> </ul> </li> </ul> <h2>1.5.3 (2015-07-30)</h2> <ul> <li> <p>Fix bug</p> <ul> <li>Fix <a href="https://redirect.github.com/blockdiag/blockdiag/issues/67">#67</a> Group overlaps with nodes having href</li> </ul> </li> </ul> <h2>1.5.2 (2015-05-17)</h2> <ul> <li> <p>Fix dependency; webcolors-1.5 does not support py32</p> </li> <li> <p>Fix bug</p> <ul> <li>Fix images.open() failed with PIL</li> </ul> </li> </ul> <h2>1.5.1 (2015-02-21)</h2> <ul> <li> <p>Fix bug</p> <ul> <li>Fix labels are overwrapped on antialias mode</li> </ul> </li> </ul> <h2>1.5.0 (2015-01-01)</h2> <ul> <li>Refactor cleanup procedures</li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/blockdiag/blockdiag/commit/ce37f0c18dc7f88caab3d3b24aac4f3f066b793c"><code>ce37f0c</code></a> Bump version</li> <li><a href="https://github.com/blockdiag/blockdiag/commit/030da560763a4d0bec313552ecb2ba8cbd98dfc5"><code>030da56</code></a> Merge pull request <a href="https://redirect.github.com/blockdiag/blockdiag/issues/158">#158</a> from Mogztter/patch-1</li> <li><a href="https://github.com/blockdiag/blockdiag/commit/8d48c48d365d0c2a45ab6671c9307b8cbcae8af8"><code>8d48c48</code></a> Update badges</li> <li><a href="https://github.com/blockdiag/blockdiag/commit/52b3023a212de232d30d2685e24fe570b2677c31"><code>52b3023</code></a> Merge pull request <a href="https://redirect.github.com/blockdiag/blockdiag/issues/157">#157</a> from blockdiag/link_action</li> <li><a href="https://github.com/blockdiag/blockdiag/commit/5e6a686209bfd1373d4c0061f7737bf7ee8418a0"><code>5e6a686</code></a> test: Add flake8 target to tox.ini</li> <li><a href="https://github.com/blockdiag/blockdiag/commit/fdfde71eeb913bce207d6f17d41675e3d4c5db90"><code>fdfde71</code></a> test: Do linting on custom action</li> <li><a href="https://github.com/blockdiag/blockdiag/commit/93b542090acf9cde1aa3726fdf1824a9e412f568"><code>93b5420</code></a> Merge pull request <a href="https://redirect.github.com/blockdiag/blockdiag/issues/156">#156</a> from blockdiag/adjust_gha</li> <li><a href="https://github.com/blockdiag/blockdiag/commit/4e9a67f3f39d05f50b88344d8f8203524bcb7c90"><code>4e9a67f</code></a> Adjust GitHub Actions</li> <li><a href="https://github.com/blockdiag/blockdiag/commit/7392df8a45b34bc7766e2b470bc26673c924f96c"><code>7392df8</code></a> Merge pull request <a href="https://redirect.github.com/blockdiag/blockdiag/issues/155">#155</a> from blockdiag/fix_readme</li> <li><a href="https://github.com/blockdiag/blockdiag/commit/c23489868beda05e420f687b6cbe4b401aa30c9e"><code>c234898</code></a> Fix README; python 
3.7 or later</li> <li>Additional commits viewable in <a href="https://github.com/blockdiag/blockdiag/compare/2.0.1...3.0.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=blockdiag&package-manager=pip&previous-version=2.0.1&new-version=3.0.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/doc-spelling.txt | 4 +--- requirements/doc.txt | 4 +--- 3 files changed, 3 insertions(+), 7 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 392c41eb376..ddd870760ad 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -30,7 +30,7 @@ babel==2.9.1 # via sphinx backports-entry-points-selectable==1.3.0 # via virtualenv -blockdiag==2.0.1 +blockdiag==3.0.0 # via sphinxcontrib-blockdiag brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 76d8f40bb13..925512c194c 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -95,6 +95,4 @@ zipp==3.17.0 # The following packages are considered to be unsafe in a requirements file: setuptools==68.0.0 - # via - # blockdiag - # sphinx + # via blockdiag diff --git a/requirements/doc.txt b/requirements/doc.txt index fac8fda5c47..a139b99145b 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -90,6 +90,4 @@ zipp==3.17.0 # The 
following packages are considered to be unsafe in a requirements file: setuptools==68.0.0 - # via - # blockdiag - # sphinx + # via blockdiag From ac614c5b45e44c0f7313fa8f8bbe332b0976b102 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 20 Aug 2024 11:48:21 +0000 Subject: [PATCH 0366/1511] Bump imagesize from 1.3.0 to 1.4.1 (#8783) Bumps [imagesize](https://github.com/shibukawa/imagesize_py) from 1.3.0 to 1.4.1. <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/shibukawa/imagesize_py/commit/8d88ec6b646d6184b5633604551d6fc154783073"><code>8d88ec6</code></a> add <strong>init</strong>.py</li> <li><a href="https://github.com/shibukawa/imagesize_py/commit/e9b1be1998304d05401df18a5366e5d4664571ad"><code>e9b1be1</code></a> 1.4.1: add <strong>version</strong> attribute</li> <li><a href="https://github.com/shibukawa/imagesize_py/commit/27427545c9a61b0fea55cf0d323d08c20b231150"><code>2742754</code></a> bump version to 1.4.0</li> <li><a href="https://github.com/shibukawa/imagesize_py/commit/d0b4497dc168952850a70ca8529f95e7de3f36cd"><code>d0b4497</code></a> Merge pull request <a href="https://redirect.github.com/shibukawa/imagesize_py/issues/52">#52</a> from ExtReMLapin/patch-1</li> <li><a href="https://github.com/shibukawa/imagesize_py/commit/e7c81aa290e6832754dc8eaa911d831a381135c7"><code>e7c81aa</code></a> Update imagesize.py</li> <li><a href="https://github.com/shibukawa/imagesize_py/commit/013a0dd3d3be5d47befd1bddfc8f35bac77835c5"><code>013a0dd</code></a> fixed support for VP8X</li> <li><a href="https://github.com/shibukawa/imagesize_py/commit/b214d05c3899816d7f1f908145461453a6719ad2"><code>b214d05</code></a> added support for webp files</li> <li>See full diff in <a href="https://github.com/shibukawa/imagesize_py/compare/1.3.0...1.4.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=imagesize&package-manager=pip&previous-version=1.3.0&new-version=1.4.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index ddd870760ad..cb48c7d75c9 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -95,7 +95,7 @@ idna==3.3 # requests # trustme # yarl -imagesize==1.3.0 +imagesize==1.4.1 # via sphinx importlib-metadata==7.0.0 # via From f417d96147a79959a40d1a85ca256d0b6102dec7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 20 Aug 2024 12:05:33 +0000 Subject: [PATCH 0367/1511] Bump urllib3 from 1.26.7 to 2.2.2 (#8786) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [urllib3](https://github.com/urllib3/urllib3) from 1.26.7 to 2.2.2. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/urllib3/urllib3/releases">urllib3's releases</a>.</em></p> <blockquote> <h2>2.2.2</h2> <h2>🚀 urllib3 is fundraising for HTTP/2 support</h2> <p><a href="https://sethmlarson.dev/urllib3-is-fundraising-for-http2-support">urllib3 is raising ~$40,000 USD</a> to release HTTP/2 support and ensure long-term sustainable maintenance of the project after a sharp decline in financial support for 2023. If your company or organization uses Python and would benefit from HTTP/2 support in Requests, pip, cloud SDKs, and thousands of other projects <a href="https://opencollective.com/urllib3">please consider contributing financially</a> to ensure HTTP/2 support is developed sustainably and maintained for the long-haul.</p> <p>Thank you for your support.</p> <h2>Changes</h2> <ul> <li>Added the <code>Proxy-Authorization</code> header to the list of headers to strip from requests when redirecting to a different host. As before, different headers can be set via <code>Retry.remove_headers_on_redirect</code>.</li> <li>Allowed passing negative integers as <code>amt</code> to read methods of <code>http.client.HTTPResponse</code> as an alternative to <code>None</code>. (<a href="https://redirect.github.com/urllib3/urllib3/issues/3122">#3122</a>)</li> <li>Fixed return types representing copying actions to use <code>typing.Self</code>. (<a href="https://redirect.github.com/urllib3/urllib3/issues/3363">#3363</a>)</li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/urllib3/urllib3/compare/2.2.1...2.2.2">https://github.com/urllib3/urllib3/compare/2.2.1...2.2.2</a></p> <h2>2.2.1</h2> <h2>🚀 urllib3 is fundraising for HTTP/2 support</h2> <p><a href="https://sethmlarson.dev/urllib3-is-fundraising-for-http2-support">urllib3 is raising ~$40,000 USD</a> to release HTTP/2 support and ensure long-term sustainable maintenance of the project after a sharp decline in financial support for 2023. 
If your company or organization uses Python and would benefit from HTTP/2 support in Requests, pip, cloud SDKs, and thousands of other projects <a href="https://opencollective.com/urllib3">please consider contributing financially</a> to ensure HTTP/2 support is developed sustainably and maintained for the long-haul.</p> <p>Thank you for your support.</p> <h2>Changes</h2> <ul> <li>Fixed issue where <code>InsecureRequestWarning</code> was emitted for HTTPS connections when using Emscripten. (<a href="https://redirect.github.com/urllib3/urllib3/issues/3331">#3331</a>)</li> <li>Fixed <code>HTTPConnectionPool.urlopen</code> to stop automatically casting non-proxy headers to <code>HTTPHeaderDict</code>. This change was premature as it did not apply to proxy headers and <code>HTTPHeaderDict</code> does not handle byte header values correctly yet. (<a href="https://redirect.github.com/urllib3/urllib3/issues/3343">#3343</a>)</li> <li>Changed <code>ProtocolError</code> to <code>InvalidChunkLength</code> when response terminates before the chunk length is sent. (<a href="https://redirect.github.com/urllib3/urllib3/issues/2860">#2860</a>)</li> <li>Changed <code>ProtocolError</code> to be more verbose on incomplete reads with excess content. (<a href="https://redirect.github.com/urllib3/urllib3/issues/3261">#3261</a>)</li> </ul> <h2>2.2.0</h2> <h2>🖥️ urllib3 now works in the browser</h2> <p>:tada: <strong>This release adds experimental support for <a href="https://urllib3.readthedocs.io/en/stable/reference/contrib/emscripten.html">using urllib3 in the browser with Pyodide</a>!</strong> :tada:</p> <p>Thanks to Joe Marshall (<a href="https://github.com/joemarshall"><code>@​joemarshall</code></a>) for contributing this feature. This change was possible thanks to work done in urllib3 v2.0 to detach our API from <code>http.client</code>. 
Please report all bugs to the <a href="https://github.com/urllib3/urllib3/issues">urllib3 issue tracker</a>.</p> <h2>🚀 urllib3 is fundraising for HTTP/2 support</h2> <p><a href="https://sethmlarson.dev/urllib3-is-fundraising-for-http2-support">urllib3 is raising ~$40,000 USD</a> to release HTTP/2 support and ensure long-term sustainable maintenance of the project after a sharp decline in financial support for 2023. If your company or organization uses Python and would benefit from HTTP/2 support in Requests, pip, cloud SDKs, and thousands of other projects <a href="https://opencollective.com/urllib3">please consider contributing financially</a> to ensure HTTP/2 support is developed sustainably and maintained for the long-haul.</p> <p>Thank you for your support.</p> <h2>Changes</h2> <ul> <li>Added support for <a href="https://urllib3.readthedocs.io/en/latest/reference/contrib/emscripten.html">Emscripten and Pyodide</a>, including streaming support in cross-origin isolated browser environments where threading is enabled. (<a href="https://redirect.github.com/urllib3/urllib3/issues/2951">#2951</a>)</li> <li>Added support for <code>HTTPResponse.read1()</code> method. (<a href="https://redirect.github.com/urllib3/urllib3/issues/3186">#3186</a>)</li> <li>Added rudimentary support for HTTP/2. (<a href="https://redirect.github.com/urllib3/urllib3/issues/3284">#3284</a>)</li> <li>Fixed issue where requests against urls with trailing dots were failing due to SSL errors when using proxy. (<a href="https://redirect.github.com/urllib3/urllib3/issues/2244">#2244</a>)</li> <li>Fixed <code>HTTPConnection.proxy_is_verified</code> and <code>HTTPSConnection.proxy_is_verified</code> to be always set to a boolean after connecting to a proxy. It could be <code>None</code> in some cases previously. (<a href="https://redirect.github.com/urllib3/urllib3/issues/3130">#3130</a>)</li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/urllib3/urllib3/blob/main/CHANGES.rst">urllib3's changelog</a>.</em></p> <blockquote> <h1>2.2.2 (2024-06-17)</h1> <ul> <li>Added the <code>Proxy-Authorization</code> header to the list of headers to strip from requests when redirecting to a different host. As before, different headers can be set via <code>Retry.remove_headers_on_redirect</code>.</li> <li>Allowed passing negative integers as <code>amt</code> to read methods of <code>http.client.HTTPResponse</code> as an alternative to <code>None</code>. (<code>[#3122](https://github.com/urllib3/urllib3/issues/3122) <https://github.com/urllib3/urllib3/issues/3122></code>__)</li> <li>Fixed return types representing copying actions to use <code>typing.Self</code>. (<code>[#3363](https://github.com/urllib3/urllib3/issues/3363) <https://github.com/urllib3/urllib3/issues/3363></code>__)</li> </ul> <h1>2.2.1 (2024-02-16)</h1> <ul> <li>Fixed issue where <code>InsecureRequestWarning</code> was emitted for HTTPS connections when using Emscripten. (<code>[#3331](https://github.com/urllib3/urllib3/issues/3331) <https://github.com/urllib3/urllib3/issues/3331></code>__)</li> <li>Fixed <code>HTTPConnectionPool.urlopen</code> to stop automatically casting non-proxy headers to <code>HTTPHeaderDict</code>. This change was premature as it did not apply to proxy headers and <code>HTTPHeaderDict</code> does not handle byte header values correctly yet. (<code>[#3343](https://github.com/urllib3/urllib3/issues/3343) <https://github.com/urllib3/urllib3/issues/3343></code>__)</li> <li>Changed <code>InvalidChunkLength</code> to <code>ProtocolError</code> when response terminates before the chunk length is sent. 
(<code>[#2860](https://github.com/urllib3/urllib3/issues/2860) <https://github.com/urllib3/urllib3/issues/2860></code>__)</li> <li>Changed <code>ProtocolError</code> to be more verbose on incomplete reads with excess content. (<code>[#3261](https://github.com/urllib3/urllib3/issues/3261) <https://github.com/urllib3/urllib3/issues/3261></code>__)</li> </ul> <h1>2.2.0 (2024-01-30)</h1> <ul> <li>Added support for <code>Emscripten and Pyodide <https://urllib3.readthedocs.io/en/latest/reference/contrib/emscripten.html></code><strong>, including streaming support in cross-origin isolated browser environments where threading is enabled. (<code>[#2951](https://github.com/urllib3/urllib3/issues/2951) <https://github.com/urllib3/urllib3/issues/2951></code></strong>)</li> <li>Added support for <code>HTTPResponse.read1()</code> method. (<code>[#3186](https://github.com/urllib3/urllib3/issues/3186) <https://github.com/urllib3/urllib3/issues/3186></code>__)</li> <li>Added rudimentary support for HTTP/2. (<code>[#3284](https://github.com/urllib3/urllib3/issues/3284) <https://github.com/urllib3/urllib3/issues/3284></code>__)</li> <li>Fixed issue where requests against urls with trailing dots were failing due to SSL errors when using proxy. (<code>[#2244](https://github.com/urllib3/urllib3/issues/2244) <https://github.com/urllib3/urllib3/issues/2244></code>__)</li> <li>Fixed <code>HTTPConnection.proxy_is_verified</code> and <code>HTTPSConnection.proxy_is_verified</code> to be always set to a boolean after connecting to a proxy. It could be <code>None</code> in some cases previously. 
(<code>[#3130](https://github.com/urllib3/urllib3/issues/3130) <https://github.com/urllib3/urllib3/issues/3130></code>__)</li> <li>Fixed an issue where <code>headers</code> passed in a request with <code>json=</code> would be mutated (<code>[#3203](https://github.com/urllib3/urllib3/issues/3203) <https://github.com/urllib3/urllib3/issues/3203></code>__)</li> <li>Fixed <code>HTTPSConnection.is_verified</code> to be set to <code>False</code> when connecting from a HTTPS proxy to an HTTP target. It was set to <code>True</code> previously. (<code>[#3267](https://github.com/urllib3/urllib3/issues/3267) <https://github.com/urllib3/urllib3/issues/3267></code>__)</li> <li>Fixed handling of new error message from OpenSSL 3.2.0 when configuring an HTTP proxy as HTTPS (<code>[#3268](https://github.com/urllib3/urllib3/issues/3268) <https://github.com/urllib3/urllib3/issues/3268></code>__)</li> <li>Fixed TLS 1.3 post-handshake auth when the server certificate validation is disabled (<code>[#3325](https://github.com/urllib3/urllib3/issues/3325) <https://github.com/urllib3/urllib3/issues/3325></code>__)</li> <li>Note for downstream distributors: To run integration tests, you now need to run the tests a second time with the <code>--integration</code> pytest flag. (<code>[#3181](https://github.com/urllib3/urllib3/issues/3181) <https://github.com/urllib3/urllib3/issues/3181></code>__)</li> </ul> <h1>2.1.0 (2023-11-13)</h1> <ul> <li>Removed support for the deprecated urllib3[secure] extra. (<code>[#2680](https://github.com/urllib3/urllib3/issues/2680) <https://github.com/urllib3/urllib3/issues/2680></code>__)</li> <li>Removed support for the deprecated SecureTransport TLS implementation. (<code>[#2681](https://github.com/urllib3/urllib3/issues/2681) <https://github.com/urllib3/urllib3/issues/2681></code>__)</li> <li>Removed support for the end-of-life Python 3.7. 
(<code>[#3143](https://github.com/urllib3/urllib3/issues/3143) <https://github.com/urllib3/urllib3/issues/3143></code>__)</li> <li>Allowed loading CA certificates from memory for proxies. (<code>[#3065](https://github.com/urllib3/urllib3/issues/3065) <https://github.com/urllib3/urllib3/issues/3065></code>__)</li> <li>Fixed decoding Gzip-encoded responses which specified <code>x-gzip</code> content-encoding. (<code>[#3174](https://github.com/urllib3/urllib3/issues/3174) <https://github.com/urllib3/urllib3/issues/3174></code>__)</li> </ul> <h1>2.0.7 (2023-10-17)</h1> <ul> <li>Made body stripped from HTTP requests changing the request method to GET after HTTP 303 "See Other" redirect responses.</li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/urllib3/urllib3/commit/27e2a5c5a7ab6a517252cc8dcef3ffa6ffb8f61a"><code>27e2a5c</code></a> Release 2.2.2 (<a href="https://redirect.github.com/urllib3/urllib3/issues/3406">#3406</a>)</li> <li><a href="https://github.com/urllib3/urllib3/commit/accff72ecc2f6cf5a76d9570198a93ac7c90270e"><code>accff72</code></a> Merge pull request from GHSA-34jh-p97f-mpxf</li> <li><a href="https://github.com/urllib3/urllib3/commit/34be4a57e59eb7365bcc37d52e9f8271b5b8d0d3"><code>34be4a5</code></a> Pin CFFI to a new release candidate instead of a Git commit (<a href="https://redirect.github.com/urllib3/urllib3/issues/3398">#3398</a>)</li> <li><a href="https://github.com/urllib3/urllib3/commit/da410581b6b3df73da976b5ce5eb20a4bd030437"><code>da41058</code></a> Bump browser-actions/setup-chrome from 1.6.0 to 1.7.1 (<a href="https://redirect.github.com/urllib3/urllib3/issues/3399">#3399</a>)</li> <li><a href="https://github.com/urllib3/urllib3/commit/b07a669bd970d69847801148286b726f0570b625"><code>b07a669</code></a> Bump github/codeql-action from 2.13.4 to 3.25.6 (<a href="https://redirect.github.com/urllib3/urllib3/issues/3396">#3396</a>)</li> 
<li><a href="https://github.com/urllib3/urllib3/commit/b8589ec9f8c4da91511e601b632ac06af7e7c10e"><code>b8589ec</code></a> Measure coverage with v4 of artifact actions (<a href="https://redirect.github.com/urllib3/urllib3/issues/3394">#3394</a>)</li> <li><a href="https://github.com/urllib3/urllib3/commit/f3bdc5585111429e22c81b5fb26c3ec164d98b81"><code>f3bdc55</code></a> Allow triggering CI manually (<a href="https://redirect.github.com/urllib3/urllib3/issues/3391">#3391</a>)</li> <li><a href="https://github.com/urllib3/urllib3/commit/52392654b30183129cf3ec06010306f517d9c146"><code>5239265</code></a> Fix HTTP version in debug log (<a href="https://redirect.github.com/urllib3/urllib3/issues/3316">#3316</a>)</li> <li><a href="https://github.com/urllib3/urllib3/commit/b34619f94ece0c40e691a5aaf1304953d88089de"><code>b34619f</code></a> Bump actions/checkout to 4.1.4 (<a href="https://redirect.github.com/urllib3/urllib3/issues/3387">#3387</a>)</li> <li><a href="https://github.com/urllib3/urllib3/commit/9961d14de7c920091d42d42ed76d5d479b80064d"><code>9961d14</code></a> Bump browser-actions/setup-chrome from 1.5.0 to 1.6.0 (<a href="https://redirect.github.com/urllib3/urllib3/issues/3386">#3386</a>)</li> <li>Additional commits viewable in <a href="https://github.com/urllib3/urllib3/compare/1.26.7...2.2.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=urllib3&package-manager=pip&previous-version=1.26.7&new-version=2.2.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index cb48c7d75c9..11b8a66ae4f 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ 
-266,7 +266,7 @@ typing-extensions==4.11.0 # python-on-whales uritemplate==4.1.1 # via gidgethub -urllib3==1.26.7 +urllib3==2.2.2 # via requests uvloop==0.20.0 ; platform_system != "Windows" # via diff --git a/requirements/dev.txt b/requirements/dev.txt index df5552b908a..a906793ddd1 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -253,7 +253,7 @@ typing-extensions==4.11.0 # typer uritemplate==4.1.1 # via gidgethub -urllib3==2.0.4 +urllib3==2.2.2 # via requests uvloop==0.20.0 ; platform_system != "Windows" and implementation_name == "cpython" # via diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 925512c194c..843c3415dfa 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -84,7 +84,7 @@ towncrier==23.11.0 # via # -r requirements/doc.in # sphinxcontrib-towncrier -urllib3==2.0.4 +urllib3==2.2.2 # via requests webcolors==1.13 # via blockdiag diff --git a/requirements/doc.txt b/requirements/doc.txt index a139b99145b..172cf824284 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -79,7 +79,7 @@ towncrier==23.11.0 # via # -r requirements/doc.in # sphinxcontrib-towncrier -urllib3==2.0.4 +urllib3==2.2.2 # via requests webcolors==1.13 # via blockdiag diff --git a/requirements/lint.txt b/requirements/lint.txt index 066e73c6ad4..491f273a010 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -115,7 +115,7 @@ typing-extensions==4.11.0 # python-on-whales # rich # typer -urllib3==2.2.1 +urllib3==2.2.2 # via requests uvloop==0.20.0 ; platform_system != "Windows" # via -r requirements/lint.in diff --git a/requirements/test.txt b/requirements/test.txt index 7a8bf4ca058..b4233b93fcc 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -119,7 +119,7 @@ typing-extensions==4.11.0 # pydantic-core # python-on-whales # typer -urllib3==2.0.4 +urllib3==2.2.2 # via requests uvloop==0.20.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r 
requirements/base.in From 9bd27697c98e20a17ba5a34b58d92f9a23cff437 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 20 Aug 2024 12:13:51 +0000 Subject: [PATCH 0368/1511] Bump snowballstemmer from 2.1.0 to 2.2.0 (#8787) Bumps [snowballstemmer](https://github.com/snowballstem/snowball) from 2.1.0 to 2.2.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/snowballstem/snowball/blob/master/NEWS">snowballstemmer's changelog</a>.</em></p> <blockquote> <h1>Snowball 2.2.0 (2021-11-10)</h1> <h2>New Code Generators</h2> <ul> <li>Add Ada generator from Stephane Carrez (<a href="https://redirect.github.com/snowballstem/snowball/issues/135">#135</a>).</li> </ul> <h2>Javascript</h2> <ul> <li> <p>Fix generated code to use integer division rather than floating point division.</p> <p>Noted by David Corbett.</p> </li> </ul> <h2>Pascal</h2> <ul> <li> <p>Fix code generated for division. Previously real division was used and the generated code would fail to compile with an "Incompatible types" error.</p> <p>Noted by David Corbett.</p> </li> <li> <p>Fix code generated for Snowball's <code>minint</code> and <code>maxint</code> constant.</p> </li> </ul> <h2>Python</h2> <ul> <li> <p>Python 2 is no longer actively supported, as proposed on the mailing list: <a href="https://lists.tartarus.org/pipermail/snowball-discuss/2021-August/001721.html">https://lists.tartarus.org/pipermail/snowball-discuss/2021-August/001721.html</a></p> </li> <li> <p>Fix code generated for division. Previously the Python code we generated used integer division but rounded negative fractions towards negative infinity rather than zero under Python 2, and under Python 3 used floating point division.</p> <p>Noted by David Corbett.</p> </li> </ul> <h2>Code Quality Improvements</h2> <ul> <li>C#: An <code>among</code> without functions is now generated as <code>static</code> and groupings are now generated as constant. 
Patches from James Turner in <a href="https://redirect.github.com/snowballstem/snowball/issues/146">#146</a> and <a href="https://redirect.github.com/snowballstem/snowball/issues/147">#147</a>.</li> </ul> <h2>Code generation improvements</h2> <ul> <li>General:</li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/snowballstem/snowball/commit/48a67a2831005f49c48ec29a5837640e23e54e6b"><code>48a67a2</code></a> Update for 2.2.0</li> <li><a href="https://github.com/snowballstem/snowball/commit/ec00981590405827782d4655fcb4e68e9dcb9d43"><code>ec00981</code></a> Fix handling of len and lenof as names</li> <li><a href="https://github.com/snowballstem/snowball/commit/5559db7016d232c0a9f6d9db52ecafce83496740"><code>5559db7</code></a> Report clearer error if = is used instead of ==</li> <li><a href="https://github.com/snowballstem/snowball/commit/4ff359881a651fc921e9966ef39674918f8e426a"><code>4ff3598</code></a> Optimise constant numeric expressions</li> <li><a href="https://github.com/snowballstem/snowball/commit/40b164100328367c426324cdd29ab69114d4ba8e"><code>40b1641</code></a> NEWS: Update</li> <li><a href="https://github.com/snowballstem/snowball/commit/f89c3b9d639b9543222f69b0e24742be1dcfe392"><code>f89c3b9</code></a> Fix $(EXE_EXT) to $(EXEEXT)</li> <li><a href="https://github.com/snowballstem/snowball/commit/55fd44b016303d7103f93f8b1fab9a634f0678ac"><code>55fd44b</code></a> Ada: Fix code generated for <code>minint</code> and <code>maxint</code></li> <li><a href="https://github.com/snowballstem/snowball/commit/e6d8b6f1d7df1a05fa8fbc11f479069e17b50788"><code>e6d8b6f</code></a> Pascal: Fix code generated for <code>minint</code></li> <li><a href="https://github.com/snowballstem/snowball/commit/ed32a1da901351c225ad1ddcbd5e3bd6f53c3a1f"><code>ed32a1d</code></a> Ada: Fix "parentheses required for unary minus" errors</li> <li><a 
href="https://github.com/snowballstem/snowball/commit/47773b9a06679972d69f6a2e14f11d5c09f30adf"><code>47773b9</code></a> NEWS: Update</li> <li>Additional commits viewable in <a href="https://github.com/snowballstem/snowball/compare/v2.1.0...v2.2.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=snowballstemmer&package-manager=pip&previous-version=2.1.0&new-version=2.2.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 11b8a66ae4f..5791f34f3f6 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -207,7 +207,7 @@ six==1.16.0 # virtualenv slotscheck==0.19.0 # via -r requirements/lint.in -snowballstemmer==2.1.0 +snowballstemmer==2.2.0 # via sphinx sphinx==7.1.2 # via From 47b31d6729f34576ed37dfa706b3a035773ce177 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 20 Aug 2024 12:39:53 +0000 Subject: [PATCH 0369/1511] Bump jinja2 from 3.0.3 to 3.1.4 (#8788) Bumps [jinja2](https://github.com/pallets/jinja) from 3.0.3 to 3.1.4. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pallets/jinja/releases">jinja2's releases</a>.</em></p> <blockquote> <h2>3.1.4</h2> <p>This is the Jinja 3.1.4 security release, which fixes security issues and bugs but does not otherwise change behavior and should not result in breaking changes.</p> <p>PyPI: <a href="https://pypi.org/project/Jinja2/3.1.4/">https://pypi.org/project/Jinja2/3.1.4/</a> Changes: <a href="https://jinja.palletsprojects.com/en/3.1.x/changes/#version-3-1-4">https://jinja.palletsprojects.com/en/3.1.x/changes/#version-3-1-4</a></p> <ul> <li>The <code>xmlattr</code> filter does not allow keys with <code>/</code> solidus, <code>></code> greater-than sign, or <code>=</code> equals sign, in addition to disallowing spaces. Regardless of any validation done by Jinja, user input should never be used as keys to this filter, or must be separately validated first. GHSA-h75v-3vvj-5mfj</li> </ul> <h2>3.1.3</h2> <p>This is a fix release for the 3.1.x feature branch.</p> <ul> <li>Fix for <a href="https://github.com/pallets/jinja/security/advisories/GHSA-h5c8-rqwp-cp95">GHSA-h5c8-rqwp-cp95</a>. 
You are affected if you are using <code>xmlattr</code> and passing user input as attribute keys.</li> <li>Changes: <a href="https://jinja.palletsprojects.com/en/3.1.x/changes/#version-3-1-3">https://jinja.palletsprojects.com/en/3.1.x/changes/#version-3-1-3</a></li> <li>Milestone: <a href="https://github.com/pallets/jinja/milestone/15?closed=1">https://github.com/pallets/jinja/milestone/15?closed=1</a></li> </ul> <h2>3.1.2</h2> <p>This is a fix release for the <a href="https://github.com/pallets/jinja/releases/tag/3.1.0">3.1.0</a> feature release.</p> <ul> <li>Changes: <a href="https://jinja.palletsprojects.com/en/3.1.x/changes/#version-3-1-2">https://jinja.palletsprojects.com/en/3.1.x/changes/#version-3-1-2</a></li> <li>Milestone: <a href="https://github.com/pallets/jinja/milestone/13?closed=1">https://github.com/pallets/jinja/milestone/13?closed=1</a></li> </ul> <h2>3.1.1</h2> <ul> <li>Changes: <a href="https://jinja.palletsprojects.com/en/3.1.x/changes/#version-3-1-1">https://jinja.palletsprojects.com/en/3.1.x/changes/#version-3-1-1</a></li> <li>Milestone: <a href="https://github.com/pallets/jinja/milestone/12?closed=1">https://github.com/pallets/jinja/milestone/12?closed=1</a></li> </ul> <h2>3.1.0</h2> <p>This is a feature release, which includes new features and removes previously deprecated features. The 3.1.x branch is now the supported bugfix branch, the 3.0.x branch has become a tag marking the end of support for that branch. We encourage everyone to upgrade, and to use a tool such as <a href="https://pypi.org/project/pip-tools/">pip-tools</a> to pin all dependencies and control upgrades. 
We also encourage upgrading to MarkupSafe 2.1.1, the latest version at this time.</p> <ul> <li>Changes: <a href="https://jinja.palletsprojects.com/en/3.1.x/changes/#version-3-1-0">https://jinja.palletsprojects.com/en/3.1.x/changes/#version-3-1-0</a></li> <li>Milestone: <a href="https://github.com/pallets/jinja/milestone/8?closed=1">https://github.com/pallets/jinja/milestone/8?closed=1</a></li> <li>MarkupSafe changes: <a href="https://markupsafe.palletsprojects.com/en/2.1.x/changes/#version-2-1-1">https://markupsafe.palletsprojects.com/en/2.1.x/changes/#version-2-1-1</a></li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pallets/jinja/blob/main/CHANGES.rst">jinja2's changelog</a>.</em></p> <blockquote> <h2>Version 3.1.4</h2> <p>Released 2024-05-05</p> <ul> <li>The <code>xmlattr</code> filter does not allow keys with <code>/</code> solidus, <code>></code> greater-than sign, or <code>=</code> equals sign, in addition to disallowing spaces. Regardless of any validation done by Jinja, user input should never be used as keys to this filter, or must be separately validated first. :ghsa:<code>h75v-3vvj-5mfj</code></li> </ul> <h2>Version 3.1.3</h2> <p>Released 2024-01-10</p> <ul> <li>Fix compiler error when checking if required blocks in parent templates are empty. :pr:<code>1858</code></li> <li><code>xmlattr</code> filter does not allow keys with spaces. :ghsa:<code>h5c8-rqwp-cp95</code></li> <li>Make error messages stemming from invalid nesting of <code>{% trans %}</code> blocks more helpful. :pr:<code>1918</code></li> </ul> <h2>Version 3.1.2</h2> <p>Released 2022-04-28</p> <ul> <li>Add parameters to <code>Environment.overlay</code> to match <code>__init__</code>. :issue:<code>1645</code></li> <li>Handle race condition in <code>FileSystemBytecodeCache</code>. 
:issue:<code>1654</code></li> </ul> <h2>Version 3.1.1</h2> <p>Released 2022-03-25</p> <ul> <li>The template filename on Windows uses the primary path separator. :issue:<code>1637</code></li> </ul> <h2>Version 3.1.0</h2> <p>Released 2022-03-24</p> <ul> <li>Drop support for Python 3.6. :pr:<code>1534</code></li> <li>Remove previously deprecated code. :pr:<code>1544</code></li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pallets/jinja/commit/dd4a8b5466d8790540c181590b14db4d4d889d57"><code>dd4a8b5</code></a> release version 3.1.4</li> <li><a href="https://github.com/pallets/jinja/commit/0668239dc6b44ef38e7a6c9f91f312fd4ca581cb"><code>0668239</code></a> Merge pull request from GHSA-h75v-3vvj-5mfj</li> <li><a href="https://github.com/pallets/jinja/commit/d655030770081e2dfe46f90e27620472a502289d"><code>d655030</code></a> disallow invalid characters in keys to xmlattr filter</li> <li><a href="https://github.com/pallets/jinja/commit/a7863ba9d3521f1450f821119c50d19d7ecea329"><code>a7863ba</code></a> add ghsa links</li> <li><a href="https://github.com/pallets/jinja/commit/b5c98e78c2ee7d2bf0aa06d29ed9bf7082de9cf4"><code>b5c98e7</code></a> start version 3.1.4</li> <li><a href="https://github.com/pallets/jinja/commit/da3a9f0b804199845fcb76f2e08748bdaeba93ee"><code>da3a9f0</code></a> update project files (<a href="https://redirect.github.com/pallets/jinja/issues/1968">#1968</a>)</li> <li><a href="https://github.com/pallets/jinja/commit/0ee5eb41d1a2d7d9a05a02dc26dd70e63aaaeeb1"><code>0ee5eb4</code></a> satisfy formatter, linter, and strict mypy</li> <li><a href="https://github.com/pallets/jinja/commit/20477c63575175196bfc8103f223cc9f5642595d"><code>20477c6</code></a> update project files (<a href="https://redirect.github.com/pallets/jinja/issues/5457">#5457</a>)</li> <li><a 
href="https://github.com/pallets/jinja/commit/e491223739dedbb1f4fc6a71340c1484e149d947"><code>e491223</code></a> update pyyaml dev dependency</li> <li><a href="https://github.com/pallets/jinja/commit/36f98854c721f98ba103f97f65a8a098da5af0d7"><code>36f9885</code></a> fix pr link</li> <li>Additional commits viewable in <a href="https://github.com/pallets/jinja/compare/3.0.3...3.1.4">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=jinja2&package-manager=pip&previous-version=3.0.3&new-version=3.1.4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 5791f34f3f6..5bda339d438 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -107,7 +107,7 @@ incremental==22.10.0 # via towncrier iniconfig==1.1.1 # via pytest -jinja2==3.0.3 +jinja2==3.1.4 # via # sphinx # towncrier diff --git a/requirements/dev.txt b/requirements/dev.txt index a906793ddd1..32f44ed27e3 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -103,7 +103,7 @@ incremental==22.10.0 # via towncrier iniconfig==2.0.0 # via pytest -jinja2==3.1.2 +jinja2==3.1.4 # via # sphinx # towncrier diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 843c3415dfa..b76afe98d57 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -32,7 +32,7 @@ importlib-resources==6.1.1 # via towncrier incremental==22.10.0 # via towncrier -jinja2==3.1.2 +jinja2==3.1.4 # via # sphinx # towncrier diff --git 
a/requirements/doc.txt b/requirements/doc.txt index 172cf824284..0e5f7d49e0c 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -32,7 +32,7 @@ importlib-resources==6.1.1 # via towncrier incremental==22.10.0 # via towncrier -jinja2==3.1.2 +jinja2==3.1.4 # via # sphinx # towncrier From 91cd7646322492de0aa5b7068906650a9f7c8abf Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 20 Aug 2024 12:40:35 +0000 Subject: [PATCH 0370/1511] Bump python-dateutil from 2.8.2 to 2.9.0.post0 (#8790) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [python-dateutil](https://github.com/dateutil/dateutil) from 2.8.2 to 2.9.0.post0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/dateutil/dateutil/releases">python-dateutil's releases</a>.</em></p> <blockquote> <h2>2.9.0.post0</h2> <h1>Version 2.9.0.post0 (2024-03-01)</h1> <h2>Bugfixes</h2> <ul> <li>Pinned <code>setuptools_scm</code> to <code><8</code>, which should make the generated <code>_version.py</code> file compatible with all supported versions of Python.</li> </ul> <h2>2.9.0</h2> <h1>Version 2.9.0 (2024-02-29)</h1> <h2>Data updates</h2> <ul> <li>Updated tzdata version to 2024a. (gh pr <a href="https://redirect.github.com/dateutil/dateutil/issues/1342">#1342</a>)</li> </ul> <h2>Features</h2> <ul> <li>Made all <code>dateutil</code> submodules lazily imported using <a href="https://www.python.org/dev/peps/pep-0562/">PEP 562</a>. On Python 3.7+, things like <code>import dateutil; dateutil.tz.gettz("America/New_York")</code> will now work without explicitly importing <code>dateutil.tz</code>, with the import occurring behind the scenes on first use. The old behavior remains on Python 3.6 and earlier. Fixed by Orson Adams. 
(gh issue <a href="https://redirect.github.com/dateutil/dateutil/issues/771">#771</a>, gh pr <a href="https://redirect.github.com/dateutil/dateutil/issues/1007">#1007</a>)</li> </ul> <h2>Bugfixes</h2> <ul> <li>Removed a call to <code>datetime.utcfromtimestamp</code>, which is deprecated as of Python 3.12. Reported by Hugo van Kemenade (gh pr <a href="https://redirect.github.com/dateutil/dateutil/issues/1284">#1284</a>), fixed by Thomas Grainger (gh pr <a href="https://redirect.github.com/dateutil/dateutil/issues/1285">#1285</a>).</li> </ul> <h2>Documentation changes</h2> <ul> <li>Added note into docs and tests where relativedelta would return last day of the month only if the same day on a different month resolves to a date that doesn't exist. Reported by <a href="https://github.com/hawkEye-01"><code>@​hawkEye-01</code></a> (gh issue <a href="https://redirect.github.com/dateutil/dateutil/issues/1167">#1167</a>). Fixed by <a href="https://github.com/Mifrill"><code>@​Mifrill</code></a> (gh pr <a href="https://redirect.github.com/dateutil/dateutil/issues/1168">#1168</a>)</li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/dateutil/dateutil/blob/master/NEWS">python-dateutil's changelog</a>.</em></p> <blockquote> <h1>Version 2.9.0.post0 (2024-03-01)</h1> <h2>Bugfixes</h2> <ul> <li>Pinned <code>setuptools_scm</code> to <code><8</code>, which should make the generated <code>_version.py</code> file compatible with all supported versions of Python.</li> </ul> <h1>Version 2.9.0 (2024-02-29)</h1> <h2>Data updates</h2> <ul> <li>Updated tzdata version to 2024a. (gh pr <a href="https://redirect.github.com/dateutil/dateutil/issues/1342">#1342</a>)</li> </ul> <h2>Features</h2> <ul> <li>Made all <code>dateutil</code> submodules lazily imported using <code>PEP 562 <https://www.python.org/dev/peps/pep-0562/></code>_. 
On Python 3.7+, things like <code>import dateutil; dateutil.tz.gettz("America/New_York")</code> will now work without explicitly importing <code>dateutil.tz</code>, with the import occurring behind the scenes on first use. The old behavior remains on Python 3.6 and earlier. Fixed by Orson Adams. (gh issue <a href="https://redirect.github.com/dateutil/dateutil/issues/771">#771</a>, gh pr <a href="https://redirect.github.com/dateutil/dateutil/issues/1007">#1007</a>)</li> </ul> <h2>Bugfixes</h2> <ul> <li>Removed a call to <code>datetime.utcfromtimestamp</code>, which is deprecated as of Python 3.12. Reported by Hugo van Kemenade (gh pr <a href="https://redirect.github.com/dateutil/dateutil/issues/1284">#1284</a>), fixed by Thomas Grainger (gh pr <a href="https://redirect.github.com/dateutil/dateutil/issues/1285">#1285</a>).</li> </ul> <h2>Documentation changes</h2> <ul> <li>Added note into docs and tests where relativedelta would return last day of the month only if the same day on a different month resolves to a date that doesn't exist. Reported by <a href="https://github.com/hawkEye-01"><code>@​hawkEye-01</code></a> (gh issue <a href="https://redirect.github.com/dateutil/dateutil/issues/1167">#1167</a>). 
Fixed by <a href="https://github.com/Mifrill"><code>@​Mifrill</code></a> (gh pr <a href="https://redirect.github.com/dateutil/dateutil/issues/1168">#1168</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/dateutil/dateutil/commit/1ae807774053c071acc9e7d3d27778fba0a7773e"><code>1ae8077</code></a> Merge pull request <a href="https://redirect.github.com/dateutil/dateutil/issues/1346">#1346</a> from pganssle/release_2.9.0.post0</li> <li><a href="https://github.com/dateutil/dateutil/commit/ee6de9deab99e1697837f9b78f145a91c57d600d"><code>ee6de9d</code></a> Update news to prepare for release</li> <li><a href="https://github.com/dateutil/dateutil/commit/9780d32aea9ab681769671c4e3540b449d62cdd0"><code>9780d32</code></a> Pin <code>setuptools_scm</code> to <8</li> <li><a href="https://github.com/dateutil/dateutil/commit/db9d018944c41ddc740015cf5f64717c2ba64a5c"><code>db9d018</code></a> Merge pull request <a href="https://redirect.github.com/dateutil/dateutil/issues/1343">#1343</a> from pganssle/release_2.9.0</li> <li><a href="https://github.com/dateutil/dateutil/commit/423ca2f02faffa5d0543612b9462ace420ed7925"><code>423ca2f</code></a> Run updatezinfo before build</li> <li><a href="https://github.com/dateutil/dateutil/commit/edd3fd4565616f7c92567c1daa957ee52df221a4"><code>edd3fd4</code></a> Update NEWS file</li> <li><a href="https://github.com/dateutil/dateutil/commit/fe02d0218c00c907c6d8546dc3dce7fe7bb0588c"><code>fe02d02</code></a> Run towncrier with Python 3.11</li> <li><a href="https://github.com/dateutil/dateutil/commit/9c7524a92600282488fbb85c7f1d6af10ce4ad15"><code>9c7524a</code></a> Fix MANIFEST.in pattern</li> <li><a href="https://github.com/dateutil/dateutil/commit/6de58f572257088d4248a6a3a1d2a426df534a02"><code>6de58f5</code></a> Update classifiers to include Python 3.12</li> <li><a href="https://github.com/dateutil/dateutil/commit/8fe0cab3b52fb714da3140c04aafe9c1f72f1211"><code>8fe0cab</code></a> Merge 
pull request <a href="https://redirect.github.com/dateutil/dateutil/issues/1342">#1342</a> from pganssle/update_zoneinfo</li> <li>Additional commits viewable in <a href="https://github.com/dateutil/dateutil/compare/2.8.2...2.9.0.post0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=python-dateutil&package-manager=pip&previous-version=2.8.2&new-version=2.9.0.post0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 5bda339d438..07ff53cb693 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -180,7 +180,7 @@ pytest-mock==3.14.0 # via # -r requirements/lint.in # -r requirements/test.in -python-dateutil==2.8.2 +python-dateutil==2.9.0.post0 # via freezegun python-on-whales==0.72.0 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 32f44ed27e3..454ec2718df 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -171,7 +171,7 @@ pytest-mock==3.14.0 # via # -r requirements/lint.in # -r requirements/test.in -python-dateutil==2.8.2 +python-dateutil==2.9.0.post0 # via freezegun python-on-whales==0.72.0 # via diff --git a/requirements/test.txt b/requirements/test.txt index b4233b93fcc..1ec683bca8c 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -86,7 +86,7 @@ pytest-cov==5.0.0 # via -r requirements/test.in pytest-mock==3.14.0 # via -r requirements/test.in 
-python-dateutil==2.8.2 +python-dateutil==2.9.0.post0 # via freezegun python-on-whales==0.72.0 # via -r requirements/test.in From d76e8be0bbbad2c1ca10f2430157e0d312902f19 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 20 Aug 2024 12:41:14 +0000 Subject: [PATCH 0371/1511] Bump aiohappyeyeballs from 2.3.7 to 2.4.0 (#8791) Bumps [aiohappyeyeballs](https://github.com/aio-libs/aiohappyeyeballs) from 2.3.7 to 2.4.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/aiohappyeyeballs/releases">aiohappyeyeballs's releases</a>.</em></p> <blockquote> <h1>v2.4.0 (2024-08-19)</h1> <h2>Documentation</h2> <ul> <li>docs: fix a trivial typo in README.md (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/84">#84</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/f5ae7d4bce04ee0645257ac828745a3b989ef149"><code>f5ae7d4</code></a>)</li> </ul> <h2>Feature</h2> <ul> <li>feat: add support for python 3.13 (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/86">#86</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/4f2152fbb6b1d915c2fd68219339d998c47a71f9"><code>4f2152f</code></a>)</li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/aiohappyeyeballs/blob/main/CHANGELOG.md">aiohappyeyeballs's changelog</a>.</em></p> <blockquote> <h2>v2.4.0 (2024-08-19)</h2> <h3>Feature</h3> <ul> <li>Add support for python 3.13 (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/86">#86</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/4f2152fbb6b1d915c2fd68219339d998c47a71f9"><code>4f2152f</code></a>)</li> </ul> <h3>Documentation</h3> <ul> <li>Fix a trivial typo in readme.md (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/84">#84</a>) (<a 
href="https://github.com/aio-libs/aiohappyeyeballs/commit/f5ae7d4bce04ee0645257ac828745a3b989ef149"><code>f5ae7d4</code></a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/c31b127a69bdcd7895d1a521985d918061955348"><code>c31b127</code></a> 2.4.0</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/4f2152fbb6b1d915c2fd68219339d998c47a71f9"><code>4f2152f</code></a> feat: add support for python 3.13 (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/86">#86</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/546f9b8dd53da275147aa75ccb70a00db7faf916"><code>546f9b8</code></a> chore(pre-commit.ci): pre-commit autoupdate (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/85">#85</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/f5ae7d4bce04ee0645257ac828745a3b989ef149"><code>f5ae7d4</code></a> docs: fix a trivial typo in README.md (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/84">#84</a>)</li> <li>See full diff in <a href="https://github.com/aio-libs/aiohappyeyeballs/compare/v2.3.7...v2.4.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=aiohappyeyeballs&package-manager=pip&previous-version=2.3.7&new-version=2.4.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 1327703b7ba..3a616ad4d14 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or 
sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.7 +aiohappyeyeballs==2.4.0 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 07ff53cb693..4ec843a6a8a 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via # -r requirements/lint.in # -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.7 +aiohappyeyeballs==2.4.0 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.6 # via -r requirements/doc.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 454ec2718df..724b300db16 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via # -r requirements/lint.in # -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.7 +aiohappyeyeballs==2.4.0 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.6 # via -r requirements/doc.in diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index f8bfd4b6b21..4d1981d5e3b 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.7 +aiohappyeyeballs==2.4.0 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 1ec683bca8c..aa27682ca5a 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.7 +aiohappyeyeballs==2.4.0 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in From 
b65d5736690e525f68edab6410a1fb31c2a8db01 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 20 Aug 2024 13:09:17 +0000 Subject: [PATCH 0372/1511] Bump certifi from 2023.7.22 to 2024.7.4 (#8793) Bumps [certifi](https://github.com/certifi/python-certifi) from 2023.7.22 to 2024.7.4. <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/certifi/python-certifi/commit/bd8153872e9c6fc98f4023df9c2deaffea2fa463"><code>bd81538</code></a> 2024.07.04 (<a href="https://redirect.github.com/certifi/python-certifi/issues/295">#295</a>)</li> <li><a href="https://github.com/certifi/python-certifi/commit/06a2cbf21f345563dde6c28b60e29d57e9b210b3"><code>06a2cbf</code></a> Bump peter-evans/create-pull-request from 6.0.5 to 6.1.0 (<a href="https://redirect.github.com/certifi/python-certifi/issues/294">#294</a>)</li> <li><a href="https://github.com/certifi/python-certifi/commit/13bba02b72bac97c432c277158bc04b4d2a6bc23"><code>13bba02</code></a> Bump actions/checkout from 4.1.6 to 4.1.7 (<a href="https://redirect.github.com/certifi/python-certifi/issues/293">#293</a>)</li> <li><a href="https://github.com/certifi/python-certifi/commit/e8abcd0e62b334c164b95d49fcabdc9ecbca0554"><code>e8abcd0</code></a> Bump pypa/gh-action-pypi-publish from 1.8.14 to 1.9.0 (<a href="https://redirect.github.com/certifi/python-certifi/issues/292">#292</a>)</li> <li><a href="https://github.com/certifi/python-certifi/commit/124f4adf171e15cd9a91a8b6e0325ecc97be8fe1"><code>124f4ad</code></a> 2024.06.02 (<a href="https://redirect.github.com/certifi/python-certifi/issues/291">#291</a>)</li> <li><a href="https://github.com/certifi/python-certifi/commit/c2196ce5d6ee675b27755a19948480a7823e2c6a"><code>c2196ce</code></a> --- (<a href="https://redirect.github.com/certifi/python-certifi/issues/290">#290</a>)</li> <li><a href="https://github.com/certifi/python-certifi/commit/fefdeec7588ff1c05214b85a552afcad5fdb51b2"><code>fefdeec</code></a> Bump 
actions/checkout from 4.1.4 to 4.1.5 (<a href="https://redirect.github.com/certifi/python-certifi/issues/289">#289</a>)</li> <li><a href="https://github.com/certifi/python-certifi/commit/3c5fb1560b826a7f83f1f9750173ff766492c9cf"><code>3c5fb15</code></a> Bump actions/download-artifact from 4.1.6 to 4.1.7 (<a href="https://redirect.github.com/certifi/python-certifi/issues/286">#286</a>)</li> <li><a href="https://github.com/certifi/python-certifi/commit/4a9569a3eb58db8548536fc16c5c5c7af946a5b1"><code>4a9569a</code></a> Bump actions/checkout from 4.1.2 to 4.1.4 (<a href="https://redirect.github.com/certifi/python-certifi/issues/287">#287</a>)</li> <li><a href="https://github.com/certifi/python-certifi/commit/1fc808626a895a916b1e4c2b63abae6c5eafdbe3"><code>1fc8086</code></a> Bump peter-evans/create-pull-request from 6.0.4 to 6.0.5 (<a href="https://redirect.github.com/certifi/python-certifi/issues/288">#288</a>)</li> <li>Additional commits viewable in <a href="https://github.com/certifi/python-certifi/compare/2023.07.22...2024.07.04">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=certifi&package-manager=pip&previous-version=2023.7.22&new-version=2024.7.4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 4ec843a6a8a..a8481672c85 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ 
-36,7 +36,7 @@ brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in build==1.2.1 # via pip-tools -certifi==2023.7.22 +certifi==2024.7.4 # via requests cffi==1.17.0 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 724b300db16..4602d3dafae 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -34,7 +34,7 @@ brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in build==1.2.1 # via pip-tools -certifi==2023.7.22 +certifi==2024.7.4 # via requests cffi==1.17.0 # via diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index b76afe98d57..e06bb87946e 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -12,7 +12,7 @@ babel==2.12.1 # via sphinx blockdiag==3.0.0 # via sphinxcontrib-blockdiag -certifi==2023.7.22 +certifi==2024.7.4 # via requests charset-normalizer==3.3.1 # via requests diff --git a/requirements/doc.txt b/requirements/doc.txt index 0e5f7d49e0c..d77a13bb4ae 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -12,7 +12,7 @@ babel==2.12.1 # via sphinx blockdiag==3.0.0 # via sphinxcontrib-blockdiag -certifi==2023.7.22 +certifi==2024.7.4 # via requests charset-normalizer==3.3.1 # via requests diff --git a/requirements/lint.txt b/requirements/lint.txt index 491f273a010..f829c7a6a32 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -12,7 +12,7 @@ annotated-types==0.6.0 # via pydantic async-timeout==4.0.3 # via aioredis -certifi==2024.2.2 +certifi==2024.7.4 # via requests cffi==1.17.0 # via diff --git a/requirements/test.txt b/requirements/test.txt index aa27682ca5a..be6e210a80a 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -18,7 +18,7 @@ attrs==24.2.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in -certifi==2023.7.22 +certifi==2024.7.4 # via requests cffi==1.17.0 # 
via From 7e73d963cf04323eb5e0a5c890820709a9d3dca7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 20 Aug 2024 13:09:55 +0000 Subject: [PATCH 0373/1511] Bump zipp from 3.17.0 to 3.20.0 (#8795) Bumps [zipp](https://github.com/jaraco/zipp) from 3.17.0 to 3.20.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/jaraco/zipp/blob/main/NEWS.rst">zipp's changelog</a>.</em></p> <blockquote> <h1>v3.20.0</h1> <h2>Features</h2> <ul> <li>Made the zipfile compatibility overlay available as zipp.compat.overlay.</li> </ul> <h1>v3.19.3</h1> <h2>Bugfixes</h2> <ul> <li>Also match directories in Path.glob. (<a href="https://redirect.github.com/jaraco/zipp/issues/121">#121</a>)</li> </ul> <h1>v3.19.2</h1> <p>No significant changes.</p> <h1>v3.19.1</h1> <h2>Bugfixes</h2> <ul> <li>Improved handling of malformed zip files. (<a href="https://redirect.github.com/jaraco/zipp/issues/119">#119</a>)</li> </ul> <h1>v3.19.0</h1> <h2>Features</h2> <ul> <li>Implement is_symlink. (<a href="https://redirect.github.com/jaraco/zipp/issues/117">#117</a>)</li> </ul> <h1>v3.18.2</h1> <p>No significant changes.</p> <h1>v3.18.1</h1> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/jaraco/zipp/commit/c5a33b2fae38dab057445011fdf33d26d0ba7cdf"><code>c5a33b2</code></a> Finalize</li> <li><a href="https://github.com/jaraco/zipp/commit/f7f1cb32cd12eb8b413930068e55e8d76914488d"><code>f7f1cb3</code></a> Made the zipfile compatibility overlay available as zipp.compat.overlay.</li> <li><a href="https://github.com/jaraco/zipp/commit/9be5e1217a3aa1f96ba0aee57d348e8244ad9145"><code>9be5e12</code></a> Finalize</li> <li><a href="https://github.com/jaraco/zipp/commit/11841113ebd25cd2520f801981bb1e16b01690d7"><code>1184111</code></a> Narrow the versions under which Traversable is imported from importlib.abc. 
F...</li> <li><a href="https://github.com/jaraco/zipp/commit/579be51bb31881a8e04040ff7a7c134053540326"><code>579be51</code></a> Merge pull request <a href="https://redirect.github.com/jaraco/zipp/issues/122">#122</a> from jaraco/bugfix/121-glob-dirs</li> <li><a href="https://github.com/jaraco/zipp/commit/5d89a1cf540894ef28c0b6485daf01c860bd59d0"><code>5d89a1c</code></a> Also match directories in Path.glob.</li> <li><a href="https://github.com/jaraco/zipp/commit/6f900eda31288c29c4e0af58c4504334704a9650"><code>6f900ed</code></a> Add failing test capturing missed expectation.</li> <li><a href="https://github.com/jaraco/zipp/commit/21c6bfd6ed451640ec3a69e1239fab053e1d7d6f"><code>21c6bfd</code></a> Merge <a href="https://github.com/jaraco/skeleton">https://github.com/jaraco/skeleton</a></li> <li><a href="https://github.com/jaraco/zipp/commit/ab34814ca3ffe511ad63bb9589da06fd76758db8"><code>ab34814</code></a> Re-enable preview, this time not for one specific feature, but for all featur...</li> <li><a href="https://github.com/jaraco/zipp/commit/05d8b1482e5d32fadd96322bf86e158a17c4919d"><code>05d8b14</code></a> Merge <a href="https://github.com/jaraco/skeleton">https://github.com/jaraco/skeleton</a></li> <li>Additional commits viewable in <a href="https://github.com/jaraco/zipp/compare/v3.17.0...v3.20.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=zipp&package-manager=pip&previous-version=3.17.0&new-version=3.20.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index a8481672c85..e3b0848a2de 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -282,7 +282,7 @@ wheel==0.37.0 # via pip-tools yarl==1.9.4 
# via -r requirements/runtime-deps.in -zipp==3.17.0 +zipp==3.20.0 # via # importlib-metadata # importlib-resources diff --git a/requirements/dev.txt b/requirements/dev.txt index 4602d3dafae..16ed6eec20f 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -269,7 +269,7 @@ wheel==0.41.0 # via pip-tools yarl==1.9.4 # via -r requirements/runtime-deps.in -zipp==3.17.0 +zipp==3.20.0 # via # importlib-metadata # importlib-resources diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index e06bb87946e..3bb07da4364 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -88,7 +88,7 @@ urllib3==2.2.2 # via requests webcolors==1.13 # via blockdiag -zipp==3.17.0 +zipp==3.20.0 # via # importlib-metadata # importlib-resources diff --git a/requirements/doc.txt b/requirements/doc.txt index d77a13bb4ae..e6b913198f0 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -83,7 +83,7 @@ urllib3==2.2.2 # via requests webcolors==1.13 # via blockdiag -zipp==3.17.0 +zipp==3.20.0 # via # importlib-metadata # importlib-resources From 351e07bf134ded05b0fb3f90f6ca0abc0e1646a6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 20 Aug 2024 13:29:53 +0000 Subject: [PATCH 0374/1511] Bump virtualenv from 20.10.0 to 20.26.3 (#8794) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [virtualenv](https://github.com/pypa/virtualenv) from 20.10.0 to 20.26.3. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pypa/virtualenv/releases">virtualenv's releases</a>.</em></p> <blockquote> <h2>20.26.3</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>release 20.26.2 by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2724">pypa/virtualenv#2724</a></li> <li>Bump embeded wheels by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2741">pypa/virtualenv#2741</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pypa/virtualenv/compare/20.26.2...20.26.3">https://github.com/pypa/virtualenv/compare/20.26.2...20.26.3</a></p> <h2>20.26.2</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>Release 20.26.1 by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2713">pypa/virtualenv#2713</a></li> <li>Update activate_this.py documentation to use runpy instead of exec by <a href="https://github.com/FredStober"><code>@​FredStober</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2716">pypa/virtualenv#2716</a></li> <li>Apply ruff/bugbear new rules by <a href="https://github.com/DimitriPapadopoulos"><code>@​DimitriPapadopoulos</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2718">pypa/virtualenv#2718</a></li> <li>Fix the CI by <a href="https://github.com/HandSonic"><code>@​HandSonic</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2721">pypa/virtualenv#2721</a></li> <li>Fix <a href="https://redirect.github.com/pypa/virtualenv/issues/1949">#1949</a>: zipapp virtual environment creation fails if zipapp path is symlinked by <a href="https://github.com/HandSonic"><code>@​HandSonic</code></a> in <a 
href="https://redirect.github.com/pypa/virtualenv/pull/2722">pypa/virtualenv#2722</a></li> <li>Fix bad return code in bash activation if hashing is disabled by <a href="https://github.com/fenkes-ibm"><code>@​fenkes-ibm</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2717">pypa/virtualenv#2717</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/FredStober"><code>@​FredStober</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/virtualenv/pull/2716">pypa/virtualenv#2716</a></li> <li><a href="https://github.com/HandSonic"><code>@​HandSonic</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/virtualenv/pull/2721">pypa/virtualenv#2721</a></li> <li><a href="https://github.com/fenkes-ibm"><code>@​fenkes-ibm</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/virtualenv/pull/2717">pypa/virtualenv#2717</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pypa/virtualenv/compare/20.26.1...20.26.2">https://github.com/pypa/virtualenv/compare/20.26.1...20.26.2</a></p> <h2>20.26.1</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>release 20.26.0 by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2710">pypa/virtualenv#2710</a></li> <li>Fix PATH-based Python discovery on Windows by <a href="https://github.com/ofek"><code>@​ofek</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2712">pypa/virtualenv#2712</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pypa/virtualenv/compare/20.26.0...20.26.1">https://github.com/pypa/virtualenv/compare/20.26.0...20.26.1</a></p> <h2>20.26.0</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>release 20.25.3 by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a 
href="https://redirect.github.com/pypa/virtualenv/pull/2704">pypa/virtualenv#2704</a></li> <li>Fixed a case when template variable is WindowsPath by <a href="https://github.com/NtWriteCode"><code>@​NtWriteCode</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2707">pypa/virtualenv#2707</a></li> <li>Allow builtin interpreter discovery to find specific Python versions given a general spec by <a href="https://github.com/flying-sheep"><code>@​flying-sheep</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2709">pypa/virtualenv#2709</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/NtWriteCode"><code>@​NtWriteCode</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/virtualenv/pull/2707">pypa/virtualenv#2707</a></li> <li><a href="https://github.com/flying-sheep"><code>@​flying-sheep</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/virtualenv/pull/2709">pypa/virtualenv#2709</a></li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/virtualenv/blob/main/docs/changelog.rst">virtualenv's changelog</a>.</em></p> <blockquote> <h2>v20.26.3 (2024-06-21)</h2> <p>Bugfixes - 20.26.3</p> <pre><code>- Upgrade embedded wheels: <ul> <li>setuptools to <code>70.1.0</code> from <code>69.5.1</code></li> <li>pip to <code>24.1</code> from <code>24.0</code> (:issue:<code>2741</code>)</li> </ul> <h2>v20.26.2 (2024-05-13)</h2> <p>Bugfixes - 20.26.2 </code></pre></p> <ul> <li><code>virtualenv.pyz</code> no longer fails when zipapp path contains a symlink - by :user:<code>HandSonic</code> and :user:<code>petamas</code>. (:issue:<code>1949</code>)</li> <li>Fix bad return code from activate.sh if hashing is disabled - by :user:'fenkes-ibm'. 
(:issue:<code>2717</code>)</li> </ul> <h2>v20.26.1 (2024-04-29)</h2> <p>Bugfixes - 20.26.1</p> <pre><code>- fix PATH-based Python discovery on Windows - by :user:`ofek`. (:issue:`2712`) <h2>v20.26.0 (2024-04-23)</h2> <p>Bugfixes - 20.26.0 </code></pre></p> <ul> <li>allow builtin discovery to discover specific interpreters (e.g. <code>python3.12</code>) given an unspecific spec (e.g. <code>python3</code>) - by :user:<code>flying-sheep</code>. (:issue:<code>2709</code>)</li> </ul> <h2>v20.25.3 (2024-04-17)</h2> <p>Bugfixes - 20.25.3</p> <pre><code>- Python 3.13.0a6 renamed pathmod to parser. (:issue:`2702`) <h2>v20.25.2 (2024-04-16)</h2> <p>Bugfixes - 20.25.2 </code></pre></p> <ul> <li> <p>Upgrade embedded wheels:</p> <ul> <li>setuptools of <code>69.1.0</code> to <code>69.5.1</code></li> <li>wheel of <code>0.42.0</code> to <code>0.43.0</code> (:issue:<code>2699</code>)</li> </ul> </li> </ul> <p>v20.25.1 (2024-02-21)</p> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/virtualenv/commit/3185e1f8a5eaae8ff055a8481a542cc70a9cbce4"><code>3185e1f</code></a> release 20.26.3</li> <li><a href="https://github.com/pypa/virtualenv/commit/2a149ec196eefceceb3b017a0a8df7788f52522a"><code>2a149ec</code></a> Bump embeded wheels (<a href="https://redirect.github.com/pypa/virtualenv/issues/2741">#2741</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/bd91d48f01da390c8b3aa19b7b0dc04f34ec1e5d"><code>bd91d48</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/pypa/virtualenv/issues/2739">#2739</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/82202884cd0961b2a09064571d7b9ed663da8542"><code>8220288</code></a> Bump pypa/gh-action-pypi-publish from 1.8.14 to 1.9.0 (<a href="https://redirect.github.com/pypa/virtualenv/issues/2738">#2738</a>)</li> <li><a 
href="https://github.com/pypa/virtualenv/commit/429d6a2319c1bf917452ca418bf041fe0daa130d"><code>429d6a2</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/pypa/virtualenv/issues/2734">#2734</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/c723579b81ee92aaaab7e3c3a51d66209fa2abd9"><code>c723579</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/pypa/virtualenv/issues/2730">#2730</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/e35ece8dd863562024ede29837964da782d2800e"><code>e35ece8</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/pypa/virtualenv/issues/2727">#2727</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/0646a0544eedfe9c7666690ffe805a6d17cf6016"><code>0646a05</code></a> Merge pull request <a href="https://redirect.github.com/pypa/virtualenv/issues/2724">#2724</a> from pypa/release-20.26.2</li> <li><a href="https://github.com/pypa/virtualenv/commit/9b14661dc4fb6866533b556c470ee47263c7249f"><code>9b14661</code></a> release 20.26.2</li> <li><a href="https://github.com/pypa/virtualenv/commit/7cbed79924b6b6d953ca9ecf40de23a7d3486f57"><code>7cbed79</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/pypa/virtualenv/issues/2723">#2723</a>)</li> <li>Additional commits viewable in <a href="https://github.com/pypa/virtualenv/compare/20.10.0...20.26.3">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=virtualenv&package-manager=pip&previous-version=20.10.0&new-version=20.26.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 14 +++++--------- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 7 insertions(+), 11 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index e3b0848a2de..e00fcec36c0 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -28,8 +28,6 @@ attrs==24.2.0 # via -r requirements/runtime-deps.in babel==2.9.1 # 
via sphinx -backports-entry-points-selectable==1.3.0 - # via virtualenv blockdiag==3.0.0 # via sphinxcontrib-blockdiag brotli==1.1.0 ; platform_python_implementation == "CPython" @@ -66,13 +64,13 @@ cryptography==41.0.2 # trustme cython==3.0.11 # via -r requirements/cython.in -distlib==0.3.3 +distlib==0.3.8 # via virtualenv docutils==0.20.1 # via sphinx exceptiongroup==1.1.2 # via pytest -filelock==3.3.2 +filelock==3.15.4 # via virtualenv freezegun==1.5.1 # via @@ -138,7 +136,7 @@ pillow==9.5.0 # blockdiag pip-tools==7.4.1 # via -r requirements/dev.in -platformdirs==2.4.0 +platformdirs==4.2.2 # via virtualenv pluggy==1.5.0 # via pytest @@ -202,9 +200,7 @@ requests==2.31.0 setuptools-git==1.2 # via -r requirements/test.in six==1.16.0 - # via - # python-dateutil - # virtualenv + # via python-dateutil slotscheck==0.19.0 # via -r requirements/lint.in snowballstemmer==2.2.0 @@ -272,7 +268,7 @@ uvloop==0.20.0 ; platform_system != "Windows" # via # -r requirements/base.in # -r requirements/lint.in -virtualenv==20.10.0 +virtualenv==20.26.3 # via pre-commit wait-for-it==2.2.2 # via -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 16ed6eec20f..38ab17a7cc4 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -259,7 +259,7 @@ uvloop==0.20.0 ; platform_system != "Windows" and implementation_name == "cpytho # via # -r requirements/base.in # -r requirements/lint.in -virtualenv==20.24.2 +virtualenv==20.26.3 # via pre-commit wait-for-it==2.2.2 # via -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index f829c7a6a32..3ad484485de 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -119,7 +119,7 @@ urllib3==2.2.2 # via requests uvloop==0.20.0 ; platform_system != "Windows" # via -r requirements/lint.in -virtualenv==20.24.2 +virtualenv==20.26.3 # via pre-commit # The following packages are considered to be unsafe in a requirements file: From 5ee29e728fff604249c4b37f3af7eef3e862928e Mon Sep 
17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 20 Aug 2024 16:47:29 +0100 Subject: [PATCH 0375/1511] [PR #8776/11171b8d backport][3.11] Use more precise headers type (#8778) **This is a backport of PR #8776 as merged into master (11171b8d4dc269cc05c59befc105b6744f2640e3).** --------- Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8768.bugfix.rst | 1 + aiohttp/client_exceptions.py | 6 ++++-- tests/test_client_exceptions.py | 12 +++++++----- 3 files changed, 12 insertions(+), 7 deletions(-) create mode 100644 CHANGES/8768.bugfix.rst diff --git a/CHANGES/8768.bugfix.rst b/CHANGES/8768.bugfix.rst new file mode 100644 index 00000000000..18512163572 --- /dev/null +++ b/CHANGES/8768.bugfix.rst @@ -0,0 +1 @@ +Used more precise type for ``ClientResponseError.headers``, fixing some type errors when using them -- by :user:`Dreamorcerer`. diff --git a/aiohttp/client_exceptions.py b/aiohttp/client_exceptions.py index ff29b3d3ca9..36bb6d1c0d8 100644 --- a/aiohttp/client_exceptions.py +++ b/aiohttp/client_exceptions.py @@ -4,8 +4,10 @@ import warnings from typing import TYPE_CHECKING, Optional, Tuple, Union +from multidict import MultiMapping + from .http_parser import RawResponseMessage -from .typedefs import LooseHeaders, StrOrURL +from .typedefs import StrOrURL try: import ssl @@ -71,7 +73,7 @@ def __init__( code: Optional[int] = None, status: Optional[int] = None, message: str = "", - headers: Optional[LooseHeaders] = None, + headers: Optional[MultiMapping[str]] = None, ) -> None: self.request_info = request_info if code is not None: diff --git a/tests/test_client_exceptions.py b/tests/test_client_exceptions.py index d863d6674a3..85e71a3508b 100644 --- a/tests/test_client_exceptions.py +++ b/tests/test_client_exceptions.py @@ -5,6 +5,7 @@ from unittest import mock import pytest +from multidict import CIMultiDict from yarl import URL from aiohttp import client, client_reqrep @@ -44,7 +45,7 @@ def test_pickle(self) 
-> None: history=(), status=400, message="Something wrong", - headers={}, + headers=CIMultiDict(foo="bar"), ) err.foo = "bar" for proto in range(pickle.HIGHEST_PROTOCOL + 1): @@ -54,7 +55,8 @@ def test_pickle(self) -> None: assert err2.history == () assert err2.status == 400 assert err2.message == "Something wrong" - assert err2.headers == {} + # Use headers.get() to verify static type is correct. + assert err2.headers.get("foo") == "bar" assert err2.foo == "bar" def test_repr(self) -> None: @@ -66,11 +68,11 @@ def test_repr(self) -> None: history=(), status=400, message="Something wrong", - headers={}, + headers=CIMultiDict(), ) assert repr(err) == ( "ClientResponseError(%r, (), status=400, " - "message='Something wrong', headers={})" % (self.request_info,) + "message='Something wrong', headers=<CIMultiDict()>)" % (self.request_info,) ) def test_str(self) -> None: @@ -79,7 +81,7 @@ def test_str(self) -> None: history=(), status=400, message="Something wrong", - headers={}, + headers=CIMultiDict(), ) assert str(err) == ( "400, message='Something wrong', " "url='http://example.com'" From 014db7e853198d9124d5d6915416231d3acee7c9 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 20 Aug 2024 16:47:43 +0100 Subject: [PATCH 0376/1511] [PR #8776/11171b8d backport][3.10] Use more precise headers type (#8777) **This is a backport of PR #8776 as merged into master (11171b8d4dc269cc05c59befc105b6744f2640e3).** --------- Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8768.bugfix.rst | 1 + aiohttp/client_exceptions.py | 6 ++++-- tests/test_client_exceptions.py | 12 +++++++----- 3 files changed, 12 insertions(+), 7 deletions(-) create mode 100644 CHANGES/8768.bugfix.rst diff --git a/CHANGES/8768.bugfix.rst b/CHANGES/8768.bugfix.rst new file mode 100644 index 00000000000..18512163572 --- /dev/null +++ b/CHANGES/8768.bugfix.rst @@ -0,0 +1 @@ +Used more precise type for ``ClientResponseError.headers``, fixing some 
type errors when using them -- by :user:`Dreamorcerer`. diff --git a/aiohttp/client_exceptions.py b/aiohttp/client_exceptions.py index ff29b3d3ca9..36bb6d1c0d8 100644 --- a/aiohttp/client_exceptions.py +++ b/aiohttp/client_exceptions.py @@ -4,8 +4,10 @@ import warnings from typing import TYPE_CHECKING, Optional, Tuple, Union +from multidict import MultiMapping + from .http_parser import RawResponseMessage -from .typedefs import LooseHeaders, StrOrURL +from .typedefs import StrOrURL try: import ssl @@ -71,7 +73,7 @@ def __init__( code: Optional[int] = None, status: Optional[int] = None, message: str = "", - headers: Optional[LooseHeaders] = None, + headers: Optional[MultiMapping[str]] = None, ) -> None: self.request_info = request_info if code is not None: diff --git a/tests/test_client_exceptions.py b/tests/test_client_exceptions.py index d863d6674a3..85e71a3508b 100644 --- a/tests/test_client_exceptions.py +++ b/tests/test_client_exceptions.py @@ -5,6 +5,7 @@ from unittest import mock import pytest +from multidict import CIMultiDict from yarl import URL from aiohttp import client, client_reqrep @@ -44,7 +45,7 @@ def test_pickle(self) -> None: history=(), status=400, message="Something wrong", - headers={}, + headers=CIMultiDict(foo="bar"), ) err.foo = "bar" for proto in range(pickle.HIGHEST_PROTOCOL + 1): @@ -54,7 +55,8 @@ def test_pickle(self) -> None: assert err2.history == () assert err2.status == 400 assert err2.message == "Something wrong" - assert err2.headers == {} + # Use headers.get() to verify static type is correct. 
+ assert err2.headers.get("foo") == "bar" assert err2.foo == "bar" def test_repr(self) -> None: @@ -66,11 +68,11 @@ def test_repr(self) -> None: history=(), status=400, message="Something wrong", - headers={}, + headers=CIMultiDict(), ) assert repr(err) == ( "ClientResponseError(%r, (), status=400, " - "message='Something wrong', headers={})" % (self.request_info,) + "message='Something wrong', headers=<CIMultiDict()>)" % (self.request_info,) ) def test_str(self) -> None: @@ -79,7 +81,7 @@ def test_str(self) -> None: history=(), status=400, message="Something wrong", - headers={}, + headers=CIMultiDict(), ) assert str(err) == ( "400, message='Something wrong', " "url='http://example.com'" From ae15bac3503cc22a85412054f1abf4363cc74a9c Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Tue, 20 Aug 2024 22:06:03 +0100 Subject: [PATCH 0377/1511] Drop Python 3.8 (#8797) (#8799) (cherry picked from commit 5be5af3c900ef9ead3387a1193fc4ff4ad1e5594) --- .codecov.yml | 2 +- .github/workflows/ci-cd.yml | 4 +--- CHANGES/8797.breaking.rst | 1 + Makefile | 6 +----- aiohttp/cookiejar.py | 2 +- aiohttp/resolver.py | 6 ++---- aiohttp/web_fileresponse.py | 4 ---- setup.cfg | 3 +-- setup.py | 4 ++-- tests/test_resolver.py | 7 ------- 10 files changed, 10 insertions(+), 29 deletions(-) create mode 100644 CHANGES/8797.breaking.rst diff --git a/.codecov.yml b/.codecov.yml index 30809053e16..e21d45ac7b2 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -1,5 +1,5 @@ codecov: - branch: 3.9 + branch: master notify: after_n_builds: 13 diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index a6a58cef9c2..d422a269f02 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -125,15 +125,13 @@ jobs: needs: gen_llhttp strategy: matrix: - pyver: [3.8, 3.9, '3.10', '3.11', '3.12'] + pyver: [3.9, '3.10', '3.11', '3.12'] no-extensions: ['', 'Y'] os: [ubuntu, macos, windows] experimental: [false] exclude: - os: macos no-extensions: 'Y' - - os: macos 
- pyver: 3.8 - os: windows no-extensions: 'Y' include: diff --git a/CHANGES/8797.breaking.rst b/CHANGES/8797.breaking.rst new file mode 100644 index 00000000000..c219ea3d264 --- /dev/null +++ b/CHANGES/8797.breaking.rst @@ -0,0 +1 @@ +Dropped support for Python 3.8 -- by :user:`Dreamsorcerer`. diff --git a/Makefile b/Makefile index bb2d437a134..2a40be049ee 100644 --- a/Makefile +++ b/Makefile @@ -112,11 +112,7 @@ define run_tests_in_docker docker run --rm -ti -v `pwd`:/src -w /src "aiohttp-test-$(1)-$(2)" $(TEST_SPEC) endef -.PHONY: test-3.8-no-extensions test-3.8 test-3.9-no-extensions test -test-3.8-no-extensions: - $(call run_tests_in_docker,3.8,y) -test-3.8: - $(call run_tests_in_docker,3.8,n) +.PHONY: test-3.9-no-extensions test test-3.9-no-extensions: $(call run_tests_in_docker,3.9,y) test-3.9: diff --git a/aiohttp/cookiejar.py b/aiohttp/cookiejar.py index e9997ce2935..e3eefc9c656 100644 --- a/aiohttp/cookiejar.py +++ b/aiohttp/cookiejar.py @@ -70,7 +70,7 @@ class CookieJar(AbstractCookieJar): except (OSError, ValueError): # Hit the maximum representable time on Windows # https://learn.microsoft.com/en-us/cpp/c-runtime-library/reference/localtime-localtime32-localtime64 - # Throws ValueError on PyPy 3.8 and 3.9, OSError elsewhere + # Throws ValueError on PyPy 3.9, OSError elsewhere MAX_TIME = calendar.timegm((3000, 12, 31, 23, 59, 59, -1, -1, -1)) except OverflowError: # #4515: datetime.max may not be representable on 32-bit platforms diff --git a/aiohttp/resolver.py b/aiohttp/resolver.py index 10e36266abe..c8fce5b5706 100644 --- a/aiohttp/resolver.py +++ b/aiohttp/resolver.py @@ -1,6 +1,5 @@ import asyncio import socket -import sys from typing import Any, Dict, List, Optional, Tuple, Type, Union from .abc import AbstractResolver, ResolveResult @@ -18,7 +17,6 @@ _NUMERIC_SOCKET_FLAGS = socket.AI_NUMERICHOST | socket.AI_NUMERICSERV -_SUPPORTS_SCOPE_ID = sys.version_info >= (3, 9, 0) class ThreadedResolver(AbstractResolver): @@ -49,7 +47,7 @@ async def resolve( 
# IPv6 is not supported by Python build, # or IPv6 is not enabled in the host continue - if address[3] and _SUPPORTS_SCOPE_ID: + if address[3]: # This is essential for link-local IPv6 addresses. # LL IPv6 is a VERY rare case. Strictly speaking, we should use # getnameinfo() unconditionally, but performance makes sense. @@ -116,7 +114,7 @@ async def resolve( address: Union[Tuple[bytes, int], Tuple[bytes, int, int, int]] = node.addr family = node.family if family == socket.AF_INET6: - if len(address) > 3 and address[3] and _SUPPORTS_SCOPE_ID: + if len(address) > 3 and address[3]: # This is essential for link-local IPv6 addresses. # LL IPv6 is a VERY rare case. Strictly speaking, we should use # getnameinfo() unconditionally, but performance makes sense. diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index 0c23e375d25..2c253e03b0a 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -1,7 +1,6 @@ import asyncio import os import pathlib -import sys from contextlib import suppress from mimetypes import MimeTypes from stat import S_ISREG @@ -48,9 +47,6 @@ CONTENT_TYPES: Final[MimeTypes] = MimeTypes() -if sys.version_info < (3, 9): - CONTENT_TYPES.encodings_map[".br"] = "br" - # File extension to IANA encodings map that will be checked in the order defined. 
ENCODING_EXTENSIONS = MappingProxyType( {ext: CONTENT_TYPES.encodings_map[ext] for ext in (".br", ".gz")} diff --git a/setup.cfg b/setup.cfg index cfd1be5610f..03fb594ecbe 100644 --- a/setup.cfg +++ b/setup.cfg @@ -33,7 +33,6 @@ classifiers = Programming Language :: Python Programming Language :: Python :: 3 - Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 Programming Language :: Python :: 3.11 @@ -42,7 +41,7 @@ classifiers = Topic :: Internet :: WWW/HTTP [options] -python_requires = >=3.8 +python_requires = >=3.9 packages = aiohttp # https://setuptools.readthedocs.io/en/latest/setuptools.html#setting-the-zip-safe-flag zip_safe = False diff --git a/setup.py b/setup.py index 23ac86f9b4a..808f539d259 100644 --- a/setup.py +++ b/setup.py @@ -4,8 +4,8 @@ from setuptools import Extension, setup -if sys.version_info < (3, 8): - raise RuntimeError("aiohttp 3.x requires Python 3.8+") +if sys.version_info < (3, 9): + raise RuntimeError("aiohttp 3.x requires Python 3.9+") NO_EXTENSIONS: bool = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS")) diff --git a/tests/test_resolver.py b/tests/test_resolver.py index f51506a6999..825db81e41b 100644 --- a/tests/test_resolver.py +++ b/tests/test_resolver.py @@ -9,7 +9,6 @@ from aiohttp.resolver import ( _NUMERIC_SOCKET_FLAGS, - _SUPPORTS_SCOPE_ID, AsyncResolver, DefaultResolver, ThreadedResolver, @@ -136,9 +135,6 @@ async def test_async_resolver_positive_ipv4_lookup(loop: Any) -> None: @pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") -@pytest.mark.skipif( - not _SUPPORTS_SCOPE_ID, reason="python version does not support scope id" -) async def test_async_resolver_positive_link_local_ipv6_lookup(loop: Any) -> None: with patch("aiodns.DNSResolver") as mock: mock().getaddrinfo.return_value = fake_aiodns_getaddrinfo_ipv6_result( @@ -211,9 +207,6 @@ async def test_threaded_resolver_positive_lookup() -> None: ipaddress.ip_address(real[0]["host"]) 
-@pytest.mark.skipif( - not _SUPPORTS_SCOPE_ID, reason="python version does not support scope id" -) async def test_threaded_resolver_positive_ipv6_link_local_lookup() -> None: loop = Mock() loop.getaddrinfo = fake_ipv6_addrinfo(["fe80::1"]) From cace1d15df77102b838196b3c53b589a957b335d Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 21 Aug 2024 00:48:09 +0100 Subject: [PATCH 0378/1511] [PR #8800/17bf9127 backport][3.11] Upload junit to codecov (#8802) **This is a backport of PR #8800 as merged into master (17bf912743708b18cc4737182a2c2e286c312f4d).** Co-authored-by: Sam Bull <git@sambull.org> --- .github/workflows/ci-cd.yml | 8 +++++++- setup.cfg | 1 - 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index d422a269f02..bb4df9b7760 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -196,7 +196,7 @@ jobs: PIP_USER: 1 run: >- PATH="${HOME}/Library/Python/3.11/bin:${HOME}/.local/bin:${PATH}" - pytest + pytest --junitxml=junit.xml shell: bash - name: Re-run the failing tests with maximum verbosity if: failure() @@ -232,6 +232,12 @@ jobs: steps.python-install.outputs.python-version }} token: ${{ secrets.CODECOV_TOKEN }} + - name: Upload test results to Codecov + if: ${{ !cancelled() }} + uses: codecov/test-results-action@v1 + with: + fail_ci_if_error: true + token: ${{ secrets.CODECOV_TOKEN }} check: # This job does nothing and is only used for the branch protection if: always() diff --git a/setup.cfg b/setup.cfg index 03fb594ecbe..4000b5a40a7 100644 --- a/setup.cfg +++ b/setup.cfg @@ -167,7 +167,6 @@ junit_suite_name = aiohttp_test_suite norecursedirs = dist docs build .tox .eggs minversion = 3.8.2 testpaths = tests/ -junit_family=xunit2 xfail_strict = true markers = dev_mode: mark test to run in dev mode. 
From 0ae4fa3d2f480c53fec7147c497a0b598d70a012 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 21 Aug 2024 00:48:33 +0100 Subject: [PATCH 0379/1511] [PR #8800/17bf9127 backport][3.10] Upload junit to codecov (#8801) **This is a backport of PR #8800 as merged into master (17bf912743708b18cc4737182a2c2e286c312f4d).** Co-authored-by: Sam Bull <git@sambull.org> --- .github/workflows/ci-cd.yml | 8 +++++++- setup.cfg | 1 - 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index a6a58cef9c2..8e56acb497d 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -198,7 +198,7 @@ jobs: PIP_USER: 1 run: >- PATH="${HOME}/Library/Python/3.11/bin:${HOME}/.local/bin:${PATH}" - pytest + pytest --junitxml=junit.xml shell: bash - name: Re-run the failing tests with maximum verbosity if: failure() @@ -234,6 +234,12 @@ jobs: steps.python-install.outputs.python-version }} token: ${{ secrets.CODECOV_TOKEN }} + - name: Upload test results to Codecov + if: ${{ !cancelled() }} + uses: codecov/test-results-action@v1 + with: + fail_ci_if_error: true + token: ${{ secrets.CODECOV_TOKEN }} check: # This job does nothing and is only used for the branch protection if: always() diff --git a/setup.cfg b/setup.cfg index cfd1be5610f..71ed6b98e0e 100644 --- a/setup.cfg +++ b/setup.cfg @@ -168,7 +168,6 @@ junit_suite_name = aiohttp_test_suite norecursedirs = dist docs build .tox .eggs minversion = 3.8.2 testpaths = tests/ -junit_family=xunit2 xfail_strict = true markers = dev_mode: mark test to run in dev mode. 
From 35b293b68295c6ce0289746f64835f6bae9e6205 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Aug 2024 12:03:26 +0000 Subject: [PATCH 0380/1511] Bump annotated-types from 0.5.0 to 0.7.0 (#8814) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [annotated-types](https://github.com/annotated-types/annotated-types) from 0.5.0 to 0.7.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/annotated-types/annotated-types/releases">annotated-types's releases</a>.</em></p> <blockquote> <h2>v0.7.0</h2> <h2>What's Changed</h2> <ul> <li>Allow <code>tzinfo</code> objects for <code>Timezone</code> by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/annotated-types/annotated-types/pull/56">annotated-types/annotated-types#56</a></li> <li>add URLs to <code>pyproject.toml</code> by <a href="https://github.com/samuelcolvin"><code>@​samuelcolvin</code></a> in <a href="https://redirect.github.com/annotated-types/annotated-types/pull/58">annotated-types/annotated-types#58</a></li> <li>suggested fix on typo by <a href="https://github.com/PelicanQ"><code>@​PelicanQ</code></a> in <a href="https://redirect.github.com/annotated-types/annotated-types/pull/60">annotated-types/annotated-types#60</a></li> <li>Correct misstatement in README by <a href="https://github.com/Zac-HD"><code>@​Zac-HD</code></a> in <a href="https://redirect.github.com/annotated-types/annotated-types/pull/62">annotated-types/annotated-types#62</a></li> <li>Fix IsDigit -> IsDigits by <a href="https://github.com/toriningen"><code>@​toriningen</code></a> in <a href="https://redirect.github.com/annotated-types/annotated-types/pull/63">annotated-types/annotated-types#63</a></li> <li>Add <code>Unit</code> type by <a href="https://github.com/tlambert03"><code>@​tlambert03</code></a> in <a 
href="https://redirect.github.com/annotated-types/annotated-types/pull/65">annotated-types/annotated-types#65</a></li> <li>Improve handling of GroupedMetadata (First try) by <a href="https://github.com/cksleigen"><code>@​cksleigen</code></a> in <a href="https://redirect.github.com/annotated-types/annotated-types/pull/69">annotated-types/annotated-types#69</a></li> <li>Prepare for 0.7.0 release by <a href="https://github.com/adriangb"><code>@​adriangb</code></a> in <a href="https://redirect.github.com/annotated-types/annotated-types/pull/70">annotated-types/annotated-types#70</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/Viicos"><code>@​Viicos</code></a> made their first contribution in <a href="https://redirect.github.com/annotated-types/annotated-types/pull/56">annotated-types/annotated-types#56</a></li> <li><a href="https://github.com/PelicanQ"><code>@​PelicanQ</code></a> made their first contribution in <a href="https://redirect.github.com/annotated-types/annotated-types/pull/60">annotated-types/annotated-types#60</a></li> <li><a href="https://github.com/toriningen"><code>@​toriningen</code></a> made their first contribution in <a href="https://redirect.github.com/annotated-types/annotated-types/pull/63">annotated-types/annotated-types#63</a></li> <li><a href="https://github.com/tlambert03"><code>@​tlambert03</code></a> made their first contribution in <a href="https://redirect.github.com/annotated-types/annotated-types/pull/65">annotated-types/annotated-types#65</a></li> <li><a href="https://github.com/cksleigen"><code>@​cksleigen</code></a> made their first contribution in <a href="https://redirect.github.com/annotated-types/annotated-types/pull/69">annotated-types/annotated-types#69</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/annotated-types/annotated-types/compare/v0.6.0...v0.7.0">https://github.com/annotated-types/annotated-types/compare/v0.6.0...v0.7.0</a></p> <h2>v0.6.0</h2> <h2>What's 
Changed</h2> <ul> <li>Drop Python 3.7, test on 3.12-dev and update deps by <a href="https://github.com/adriangb"><code>@​adriangb</code></a> in <a href="https://redirect.github.com/annotated-types/annotated-types/pull/44">annotated-types/annotated-types#44</a></li> <li>Add IsFinite type by <a href="https://github.com/adriangb"><code>@​adriangb</code></a> in <a href="https://redirect.github.com/annotated-types/annotated-types/pull/43">annotated-types/annotated-types#43</a></li> <li>Add Not to allow predicate negation by <a href="https://github.com/adriangb"><code>@​adriangb</code></a> in <a href="https://redirect.github.com/annotated-types/annotated-types/pull/45">annotated-types/annotated-types#45</a></li> <li>update deps to fix python 3.12 CI by <a href="https://github.com/adriangb"><code>@​adriangb</code></a> in <a href="https://redirect.github.com/annotated-types/annotated-types/pull/50">annotated-types/annotated-types#50</a></li> <li>✨ Add support for <code>doc()</code> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a> in <a href="https://redirect.github.com/annotated-types/annotated-types/pull/49">annotated-types/annotated-types#49</a></li> <li>Fix README typo by <a href="https://github.com/JelleZijlstra"><code>@​JelleZijlstra</code></a> in <a href="https://redirect.github.com/annotated-types/annotated-types/pull/52">annotated-types/annotated-types#52</a></li> <li>Remove unused pytest-mock test dependency by <a href="https://github.com/gotmax23"><code>@​gotmax23</code></a> in <a href="https://redirect.github.com/annotated-types/annotated-types/pull/54">annotated-types/annotated-types#54</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/tiangolo"><code>@​tiangolo</code></a> made their first contribution in <a href="https://redirect.github.com/annotated-types/annotated-types/pull/49">annotated-types/annotated-types#49</a></li> <li><a href="https://github.com/JelleZijlstra"><code>@​JelleZijlstra</code></a> made their 
first contribution in <a href="https://redirect.github.com/annotated-types/annotated-types/pull/52">annotated-types/annotated-types#52</a></li> <li><a href="https://github.com/gotmax23"><code>@​gotmax23</code></a> made their first contribution in <a href="https://redirect.github.com/annotated-types/annotated-types/pull/54">annotated-types/annotated-types#54</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/annotated-types/annotated-types/compare/v0.5.0...v0.6.0">https://github.com/annotated-types/annotated-types/compare/v0.5.0...v0.6.0</a></p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/annotated-types/annotated-types/commit/0735cd3d4c272b88405b6b04009716b691115210"><code>0735cd3</code></a> Prepare for 0.7.0 release (<a href="https://redirect.github.com/annotated-types/annotated-types/issues/70">#70</a>)</li> <li><a href="https://github.com/annotated-types/annotated-types/commit/0757d4126a55681f577a0d9800b15b8d418bc7ad"><code>0757d41</code></a> Improve handling of GroupedMetadata (First try) (<a href="https://redirect.github.com/annotated-types/annotated-types/issues/69">#69</a>)</li> <li><a href="https://github.com/annotated-types/annotated-types/commit/99dbac8103ec938ffc1abd779f2f4316bf5e73e7"><code>99dbac8</code></a> Add <code>Unit</code> type (<a href="https://redirect.github.com/annotated-types/annotated-types/issues/65">#65</a>)</li> <li><a href="https://github.com/annotated-types/annotated-types/commit/89e3d2ec0c828b2cd8a1bd08132daf355bc34fe2"><code>89e3d2e</code></a> Fix IsDigit -> IsDigits (<a href="https://redirect.github.com/annotated-types/annotated-types/issues/63">#63</a>)</li> <li><a href="https://github.com/annotated-types/annotated-types/commit/59a50d17126f97a6ad383d886c21e03d5de54972"><code>59a50d1</code></a> Correct misstatement in README (<a href="https://redirect.github.com/annotated-types/annotated-types/issues/62">#62</a>)</li> <li><a 
href="https://github.com/annotated-types/annotated-types/commit/66930379318ab566798219368eda267f2031f404"><code>6693037</code></a> suggested fix on typo (<a href="https://redirect.github.com/annotated-types/annotated-types/issues/60">#60</a>)</li> <li><a href="https://github.com/annotated-types/annotated-types/commit/195e3406a276d3599fb94f6f982f5e91837de4a3"><code>195e340</code></a> add URLs to <code>pyproject.toml</code> (<a href="https://redirect.github.com/annotated-types/annotated-types/issues/58">#58</a>)</li> <li><a href="https://github.com/annotated-types/annotated-types/commit/657ded980a131be3609e7df486cdd96b6133de29"><code>657ded9</code></a> Allow <code>tzinfo</code> objects for <code>Timezone</code> (<a href="https://redirect.github.com/annotated-types/annotated-types/issues/56">#56</a>)</li> <li><a href="https://github.com/annotated-types/annotated-types/commit/18584dffd31a845c25b92857b73ea76d10ea3d0d"><code>18584df</code></a> Prepare for 0.6.0 release</li> <li><a href="https://github.com/annotated-types/annotated-types/commit/4ddf47e6b2427634a4ff8ca9618aa295d92e6bad"><code>4ddf47e</code></a> Remove unused pytest-mock test dependency (<a href="https://redirect.github.com/annotated-types/annotated-types/issues/54">#54</a>)</li> <li>Additional commits viewable in <a href="https://github.com/annotated-types/annotated-types/compare/v0.5.0...v0.7.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=annotated-types&package-manager=pip&previous-version=0.5.0&new-version=0.7.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index e00fcec36c0..c9422ac7d86 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -18,7 +18,7 @@ aiosignal==1.3.1 # via -r 
requirements/runtime-deps.in alabaster==0.7.12 # via sphinx -annotated-types==0.5.0 +annotated-types==0.7.0 # via pydantic async-timeout==4.0.3 ; python_version < "3.11" # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 38ab17a7cc4..c52e9d42cd8 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -18,7 +18,7 @@ aiosignal==1.3.1 # via -r requirements/runtime-deps.in alabaster==0.7.13 # via sphinx -annotated-types==0.5.0 +annotated-types==0.7.0 # via pydantic async-timeout==4.0.3 ; python_version < "3.11" # via diff --git a/requirements/lint.txt b/requirements/lint.txt index 3ad484485de..9b5ac722a95 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 # via -r requirements/lint.in aioredis==2.0.1 # via -r requirements/lint.in -annotated-types==0.6.0 +annotated-types==0.7.0 # via pydantic async-timeout==4.0.3 # via aioredis diff --git a/requirements/test.txt b/requirements/test.txt index be6e210a80a..05939335770 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -10,7 +10,7 @@ aiohappyeyeballs==2.4.0 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in -annotated-types==0.5.0 +annotated-types==0.7.0 # via pydantic async-timeout==4.0.3 ; python_version < "3.11" # via -r requirements/runtime-deps.in From 4bab25717c1e9eac1e0181cb88beb5935757dedc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Aug 2024 12:03:31 +0000 Subject: [PATCH 0381/1511] Bump incremental from 22.10.0 to 24.7.2 (#8815) Bumps [incremental](https://github.com/twisted/incremental) from 22.10.0 to 24.7.2. 
<details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/twisted/incremental/blob/trunk/NEWS.rst">incremental's changelog</a>.</em></p> <blockquote> <h1>Incremental 24.7.2 (2024-07-29)</h1> <h2>Bugfixes</h2> <ul> <li> <p>Incremental could mis-identify that a project had opted in to version management.</p> <p>If a <code>pyproject.toml</code> in the current directory contained a <code>[project]</code> table with a <code>name</code> key, but did not contain the opt-in <code>[tool.incremental]</code> table, Incremental would still treat the file as if the opt-in were present and attempt to validate the configuration. This could happen in contexts outside of packaging, such as when creating a virtualenv. When operating as a setuptools plugin Incremental now always ignores invalid configuration, such as configuration that doesn't match the content of the working directory. (<code>[#106](https://github.com/twisted/incremental/issues/106) <https://github.com/twisted/incremental/issues/106></code>__)</p> </li> </ul> <h1>Incremental 24.7.1 (2024-07-27)</h1> <h2>Bugfixes</h2> <ul> <li>Incremental 24.7.0 would produce an error when parsing the <code>pyproject.toml</code> of a project that lacked the <code>use_incremental=True</code> or <code>[tool.incremental]</code> opt-in markers if that file lacked a <code>[project]</code> section containing the package name. This could cause a project that only uses <code>pyproject.toml</code> to configure tools to fail to build if Incremental is installed. Incremental now ignores such projects. 
(<code>[#100](https://github.com/twisted/incremental/issues/100) <https://github.com/twisted/incremental/issues/100></code>__)</li> </ul> <h2>Misc</h2> <ul> <li><code>[#101](https://github.com/twisted/incremental/issues/101) <https://github.com/twisted/incremental/issues/101></code>__</li> </ul> <h1>Incremental 24.7.0 (2024-07-25)</h1> <h2>Features</h2> <ul> <li>Incremental can now be configured using <code>pyproject.toml</code>. (<code>[#90](https://github.com/twisted/incremental/issues/90) <https://github.com/twisted/incremental/issues/90></code>__)</li> <li>Incremental now provides a read-only <code>Hatchling version source plugin <https://hatch.pypa.io/latest/plugins/version-source/reference/></code>_. (<code>[#93](https://github.com/twisted/incremental/issues/93) <https://github.com/twisted/incremental/issues/93></code>__)</li> </ul> <h2>Bugfixes</h2> <ul> <li>Incremental no longer inserts a dot before the rc version component (i.e., <code>1.2.3rc1</code> instead of <code>1.2.3.rc1</code>), resulting in version numbers in the <code>canonical format <https://packaging.python.org/en/latest/specifications/version-specifiers/#public-version-identifiers></code><strong>. (<code>[#81](https://github.com/twisted/incremental/issues/81) <https://github.com/twisted/incremental/issues/81></code></strong>)</li> <li>Incremental's tests are now included in the sdist release artifact. (<code>[#80](https://github.com/twisted/incremental/issues/80) <https://github.com/twisted/incremental/issues/80></code>__)</li> </ul> <h2>Deprecations and Removals</h2> <ul> <li><code>incremental[scripts]</code> no longer depends on Twisted. (<code>[#88](https://github.com/twisted/incremental/issues/88) <https://github.com/twisted/incremental/issues/88></code>__)</li> <li>Support for Python 2.7 has been dropped for lack of test infrastructure. We no longer provide universal wheels. 
(<code>[#86](https://github.com/twisted/incremental/issues/86) <https://github.com/twisted/incremental/issues/86></code>__)</li> <li>Support for Python 3.5, 3.6, and 3.7 has been dropped for lack of test infrastructure. (<code>[#92](https://github.com/twisted/incremental/issues/92) <https://github.com/twisted/incremental/issues/92></code>__)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/twisted/incremental/commit/32be98f929cc45b6a1b588e865dcf8c220b6c7f6"><code>32be98f</code></a> Release Incremental 24.7.2</li> <li><a href="https://github.com/twisted/incremental/commit/380e66970918027bc7fbbb393453de7cd8d7a555"><code>380e669</code></a> Merge pull request <a href="https://redirect.github.com/twisted/incremental/issues/107">#107</a> from twisted/106-never-raise</li> <li><a href="https://github.com/twisted/incremental/commit/a559f5c770ec93c95abaadfc8693d7ae654c6e39"><code>a559f5c</code></a> Allow syntax errors to propagate</li> <li><a href="https://github.com/twisted/incremental/commit/3d2cdb172c7d5c7e27903c0949128f0aa7888741"><code>3d2cdb1</code></a> Cleanups to shorten the diff</li> <li><a href="https://github.com/twisted/incremental/commit/d2fe36fa5bb5bb13bfbeb3461154fadd5d709993"><code>d2fe36f</code></a> Defense in depth</li> <li><a href="https://github.com/twisted/incremental/commit/d659ea0ed0f8518608f40065aa5e6ad6dd2a69be"><code>d659ea0</code></a> There and back again</li> <li><a href="https://github.com/twisted/incremental/commit/11ad4133e2857b37f140bfee835135f8cf1b6ad0"><code>11ad413</code></a> Update the readme</li> <li><a href="https://github.com/twisted/incremental/commit/0f7001c2bf5cc3d24f6d273067b2e88866bff6d1"><code>0f7001c</code></a> Tidy up some comments</li> <li><a href="https://github.com/twisted/incremental/commit/4adf23bb48cfbc35e75d44c4863b2dc73d4e9563"><code>4adf23b</code></a> Add newsfragment</li> <li><a 
href="https://github.com/twisted/incremental/commit/b5ad0545b572ede441d73c35d8ff882f430e64d1"><code>b5ad054</code></a> 100% coverage, why not?</li> <li>Additional commits viewable in <a href="https://github.com/twisted/incremental/compare/incremental-22.10.0...incremental-24.7.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=incremental&package-manager=pip&previous-version=22.10.0&new-version=24.7.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 4 +++- requirements/dev.txt | 4 +++- requirements/doc-spelling.txt | 10 +++++++--- requirements/doc.txt | 10 +++++++--- 4 files changed, 20 insertions(+), 8 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index c9422ac7d86..5e1ac3999f4 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -101,7 +101,7 @@ importlib-metadata==7.0.0 # sphinx importlib-resources==6.1.1 # via towncrier -incremental==22.10.0 +incremental==24.7.2 # via towncrier iniconfig==1.1.1 # via pytest @@ -234,6 +234,7 @@ tomli==2.0.1 # build # cherry-picker # coverage + # incremental # mypy # pip-tools # pyproject-hooks @@ -289,4 +290,5 @@ pip==23.2.1 setuptools==68.0.0 # via # blockdiag + # incremental # pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index c52e9d42cd8..afa5b28559c 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -99,7 +99,7 @@ importlib-metadata==7.0.0 # sphinx importlib-resources==6.1.1 # via towncrier -incremental==22.10.0 +incremental==24.7.2 # via towncrier iniconfig==2.0.0 # via pytest @@ -224,6 +224,7 @@ 
tomli==2.0.1 # build # cherry-picker # coverage + # incremental # mypy # pip-tools # pyproject-hooks @@ -280,5 +281,6 @@ pip==23.2.1 setuptools==68.0.0 # via # blockdiag + # incremental # nodeenv # pip-tools diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 3bb07da4364..a5e70f9a9d2 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -30,7 +30,7 @@ importlib-metadata==6.8.0 # via sphinx importlib-resources==6.1.1 # via towncrier -incremental==22.10.0 +incremental==24.7.2 # via towncrier jinja2==3.1.4 # via @@ -79,7 +79,9 @@ sphinxcontrib-spelling==8.0.0 ; platform_system != "Windows" sphinxcontrib-towncrier==0.4.0a0 # via -r requirements/doc.in tomli==2.0.1 - # via towncrier + # via + # incremental + # towncrier towncrier==23.11.0 # via # -r requirements/doc.in @@ -95,4 +97,6 @@ zipp==3.20.0 # The following packages are considered to be unsafe in a requirements file: setuptools==68.0.0 - # via blockdiag + # via + # blockdiag + # incremental diff --git a/requirements/doc.txt b/requirements/doc.txt index e6b913198f0..1d0cc15cb21 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -30,7 +30,7 @@ importlib-metadata==6.8.0 # via sphinx importlib-resources==6.1.1 # via towncrier -incremental==22.10.0 +incremental==24.7.2 # via towncrier jinja2==3.1.4 # via @@ -74,7 +74,9 @@ sphinxcontrib-serializinghtml==1.1.5 sphinxcontrib-towncrier==0.4.0a0 # via -r requirements/doc.in tomli==2.0.1 - # via towncrier + # via + # incremental + # towncrier towncrier==23.11.0 # via # -r requirements/doc.in @@ -90,4 +92,6 @@ zipp==3.20.0 # The following packages are considered to be unsafe in a requirements file: setuptools==68.0.0 - # via blockdiag + # via + # blockdiag + # incremental From ec5c265c26899196dc0586b67564d60a941cb4a2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Aug 2024 12:03:57 +0000 Subject: [PATCH 0382/1511] Bump packaging from 21.2 
to 24.1 (#8817) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [packaging](https://github.com/pypa/packaging) from 21.2 to 24.1. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pypa/packaging/releases">packaging's releases</a>.</em></p> <blockquote> <h2>24.1</h2> <h2>What's Changed</h2> <ul> <li>pyupgrade/black/isort/flake8 → ruff by <a href="https://github.com/DimitriPapadopoulos"><code>@​DimitriPapadopoulos</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/769">pypa/packaging#769</a></li> <li>Add support for Python 3.13 and drop EOL 3.7 by <a href="https://github.com/hugovk"><code>@​hugovk</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/783">pypa/packaging#783</a></li> <li>Bump the github-actions group with 4 updates by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/782">pypa/packaging#782</a></li> <li>Fix typo in <code>_parser</code> docstring by <a href="https://github.com/pradyunsg"><code>@​pradyunsg</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/784">pypa/packaging#784</a></li> <li>Modernise type annotations using FA rules from ruff by <a href="https://github.com/pradyunsg"><code>@​pradyunsg</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/785">pypa/packaging#785</a></li> <li>Document <code>markers.default_environment()</code> by <a href="https://github.com/edgarrmondragon"><code>@​edgarrmondragon</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/753">pypa/packaging#753</a></li> <li>Bump the github-actions group with 3 updates by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/789">pypa/packaging#789</a></li> <li>Work around platform.python_version() returning non PEP 440 compliant version for 
non-tagged CPython builds by <a href="https://github.com/sbidoul"><code>@​sbidoul</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/802">pypa/packaging#802</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/dependabot"><code>@​dependabot</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/packaging/pull/782">pypa/packaging#782</a></li> <li><a href="https://github.com/edgarrmondragon"><code>@​edgarrmondragon</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/packaging/pull/753">pypa/packaging#753</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pypa/packaging/compare/24.0...24.1">https://github.com/pypa/packaging/compare/24.0...24.1</a></p> <h2>24.0</h2> <h2>What's Changed</h2> <ul> <li>Fix specifier matching when the specifier is long and has an epoch by <a href="https://github.com/SpecLad"><code>@​SpecLad</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/712">pypa/packaging#712</a></li> <li>Clarify version split/join usage by <a href="https://github.com/uranusjr"><code>@​uranusjr</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/725">pypa/packaging#725</a></li> <li>Default optional metadata values to None by <a href="https://github.com/dstufft"><code>@​dstufft</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/734">pypa/packaging#734</a></li> <li>Stop using deprecated/removed keys by <a href="https://github.com/dstufft"><code>@​dstufft</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/739">pypa/packaging#739</a></li> <li>Correctly use the ExceptionGroup shim only when needed by <a href="https://github.com/dstufft"><code>@​dstufft</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/736">pypa/packaging#736</a></li> <li>Update CHANGELOG entry about <code>validate</code> kwarg by <a 
href="https://github.com/pradyunsg"><code>@​pradyunsg</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/731">pypa/packaging#731</a></li> <li>Support --disable-gil builds (PEP 703) in packaging.tags by <a href="https://github.com/colesbury"><code>@​colesbury</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/728">pypa/packaging#728</a></li> <li>Skip <code>test_glibc_version_string_ctypes_raise_oserror</code> if <code>ctypes</code> is unavailable by <a href="https://github.com/kevinchang96"><code>@​kevinchang96</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/741">pypa/packaging#741</a></li> <li>Enable CodeQL by <a href="https://github.com/joycebrum"><code>@​joycebrum</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/743">pypa/packaging#743</a></li> <li>PEP 703: Rename <code>Py_NOGIL</code> to <code>Py_GIL_DISABLED</code> by <a href="https://github.com/hugovk"><code>@​hugovk</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/747">pypa/packaging#747</a></li> <li>Replace PEP references with PUG links by <a href="https://github.com/jeanas"><code>@​jeanas</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/750">pypa/packaging#750</a></li> <li>Remove coverage ignore for non-existent file by <a href="https://github.com/shenanigansd"><code>@​shenanigansd</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/752">pypa/packaging#752</a></li> <li>Update URLs by <a href="https://github.com/DimitriPapadopoulos"><code>@​DimitriPapadopoulos</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/764">pypa/packaging#764</a></li> <li>Configure dependabot by <a href="https://github.com/joycebrum"><code>@​joycebrum</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/757">pypa/packaging#757</a></li> <li>Hash pin github actions by <a href="https://github.com/joycebrum"><code>@​joycebrum</code></a> in <a 
href="https://redirect.github.com/pypa/packaging/pull/758">pypa/packaging#758</a></li> <li>Apply some refurb suggestions by <a href="https://github.com/DimitriPapadopoulos"><code>@​DimitriPapadopoulos</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/763">pypa/packaging#763</a></li> <li>Appply some bugbear suggestions by <a href="https://github.com/DimitriPapadopoulos"><code>@​DimitriPapadopoulos</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/761">pypa/packaging#761</a></li> <li>Apply some ruff suggestions by <a href="https://github.com/DimitriPapadopoulos"><code>@​DimitriPapadopoulos</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/772">pypa/packaging#772</a></li> <li>Add riscv64 as a supported manylinux architecture by <a href="https://github.com/markdryan"><code>@​markdryan</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/751">pypa/packaging#751</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/colesbury"><code>@​colesbury</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/packaging/pull/728">pypa/packaging#728</a></li> <li><a href="https://github.com/kevinchang96"><code>@​kevinchang96</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/packaging/pull/741">pypa/packaging#741</a></li> <li><a href="https://github.com/jeanas"><code>@​jeanas</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/packaging/pull/750">pypa/packaging#750</a></li> <li><a href="https://github.com/shenanigansd"><code>@​shenanigansd</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/packaging/pull/752">pypa/packaging#752</a></li> <li><a href="https://github.com/markdryan"><code>@​markdryan</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/packaging/pull/751">pypa/packaging#751</a></li> </ul> <p><strong>Full 
Changelog</strong>: <a href="https://github.com/pypa/packaging/compare/23.2...24.0">https://github.com/pypa/packaging/compare/23.2...24.0</a></p> <h2>23.2</h2> <h2>What's Changed</h2> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/packaging/blob/main/CHANGELOG.rst">packaging's changelog</a>.</em></p> <blockquote> <p>24.1 - 2024-06-10</p> <pre><code> No unreleased changes. <p>24.0 - 2024-03-10<br /> </code></pre></p> <ul> <li>Do specifier matching correctly when the specifier contains an epoch number and has more components than the version (:issue:<code>683</code>)</li> <li>Support the experimental <code>--disable-gil</code> builds in packaging.tags (:issue:<code>727</code>)</li> <li>BREAKING: Make optional <code>metadata.Metadata</code> attributes default to <code>None</code> (:issue:<code>733</code>)</li> <li>Fix errors when trying to access the <code>description_content_type</code>, <code>keywords</code>, and <code>requires_python</code> attributes on <code>metadata.Metadata</code> when those values have not been provided (:issue:<code>733</code>)</li> <li>Fix a bug preventing the use of the built in <code>ExceptionGroup</code> on versions of Python that support it (:issue:<code>725</code>)</li> </ul> <p>23.2 - 2023-10-01</p> <pre><code> * Document calendar-based versioning scheme (:issue:`716`) * Enforce that the entire marker string is parsed (:issue:`687`) * Requirement parsing no longer automatically validates the URL (:issue:`120`) * Canonicalize names for requirements comparison (:issue:`644`) * Introduce ``metadata.Metadata`` (along with ``metadata.ExceptionGroup`` and ``metadata.InvalidMetadata``; :issue:`570`) * Introduce the ``validate`` keyword parameter to ``utils.normalize_name()`` (:issue:`570`) * Introduce ``utils.is_normalized_name()`` (:issue:`570`) * Make ``utils.parse_sdist_filename()`` and ``utils.parse_wheel_filename()`` 
raise ``InvalidSdistFilename`` and ``InvalidWheelFilename``, respectively, when the version component of the name is invalid * Remove support for Python 3.7 (:issue:`783`) <p>23.1 - 2023-04-12<br /> </code></pre></p> <ul> <li>Parse raw metadata (:issue:<code>671</code>)</li> <li>Import underlying parser functions as an underscored variable (:issue:<code>663</code>)</li> <li>Improve error for local version label with unsupported operators (:issue:<code>675</code>)</li> <li>Add dedicated error for specifiers with incorrect <code>.*</code> suffix</li> <li>Replace spaces in platform names with underscores (:issue:<code>620</code>)</li> <li>Relax typing of <code>_key</code> on <code>_BaseVersion</code> (:issue:<code>669</code>)</li> <li>Handle prefix match with zeros at end of prefix correctly (:issue:<code>674</code>)</li> </ul> <p>23.0 - 2023-01-08</p> <pre><code> * Allow ``"extra"`` to be ``None`` in the marker environment (:issue:`650`) * Refactor ``tags._generic_api`` to use ``EXT_SUFFIX`` (:issue:`607`) </tr></table> </code></pre> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/packaging/commit/85442b8032cb7bae72866dfd7782234a98dd2fb7"><code>85442b8</code></a> Bump for release</li> <li><a href="https://github.com/pypa/packaging/commit/3e67fc775e93166600c84a5183ab6a86afff84b5"><code>3e67fc7</code></a> Work around <code>platform.python_version()</code> returning non PEP 440 compliant versi...</li> <li><a href="https://github.com/pypa/packaging/commit/32deafe8668a2130a3366b98154914d188f3718e"><code>32deafe</code></a> Bump the github-actions group with 3 updates (<a href="https://redirect.github.com/pypa/packaging/issues/789">#789</a>)</li> <li><a href="https://github.com/pypa/packaging/commit/e0dda88874e73cd484b9e8464c5921a903db3cf0"><code>e0dda88</code></a> Document <code>markers.default_environment()</code> (<a href="https://redirect.github.com/pypa/packaging/issues/753">#753</a>)</li> <li><a href="https://github.com/pypa/packaging/commit/cc938f984bbbe43c5734b9656c9837ab3a28191f"><code>cc938f9</code></a> Modernise type annotations using FA rules from ruff (<a href="https://redirect.github.com/pypa/packaging/issues/785">#785</a>)</li> <li><a href="https://github.com/pypa/packaging/commit/757f559404ff6cc1cdef59a2c3628ccdaa505ac4"><code>757f559</code></a> Fix typo in <code>_parser</code> docstring (<a href="https://redirect.github.com/pypa/packaging/issues/784">#784</a>)</li> <li><a href="https://github.com/pypa/packaging/commit/ec9f203a9f1d336d62b71a26e3ad3dfcfbef92dc"><code>ec9f203</code></a> Bump the github-actions group with 4 updates (<a href="https://redirect.github.com/pypa/packaging/issues/782">#782</a>)</li> <li><a href="https://github.com/pypa/packaging/commit/5cbe1e44cc7b0497a11ab441310a86a5ebf1658a"><code>5cbe1e4</code></a> Add support for Python 3.13 and drop EOL 3.7 (<a href="https://redirect.github.com/pypa/packaging/issues/783">#783</a>)</li> <li><a 
href="https://github.com/pypa/packaging/commit/cb8fd38ef4c4189142702951b89dee1f09e4d71f"><code>cb8fd38</code></a> pyupgrade/black/isort/flake8 → ruff (<a href="https://redirect.github.com/pypa/packaging/issues/769">#769</a>)</li> <li><a href="https://github.com/pypa/packaging/commit/e8002b16e79c71a5f256d4f20eee0170f4327dd9"><code>e8002b1</code></a> Bump for development</li> <li>Additional commits viewable in <a href="https://github.com/pypa/packaging/compare/21.2...24.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=packaging&package-manager=pip&previous-version=21.2&new-version=24.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 4 +--- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 7 files changed, 7 insertions(+), 9 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 3a616ad4d14..113be2767ec 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -30,7 +30,7 @@ multidict==6.0.5 # via # -r requirements/runtime-deps.in # yarl -packaging==23.1 +packaging==24.1 # via gunicorn pycares==4.4.0 # via aiodns diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 5e1ac3999f4..5ceac3ecb5e 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -124,7 +124,7 @@ mypy-extensions==1.0.0 # via mypy nodeenv==1.6.0 # via pre-commit -packaging==21.2 +packaging==24.1 # via # build # gunicorn @@ -160,8 +160,6 @@ pyjwt==2.3.0 # via # gidgethub # pyjwt -pyparsing==2.4.7 - # via packaging pyproject-hooks==1.0.0 # via # build diff --git a/requirements/dev.txt b/requirements/dev.txt index afa5b28559c..06897d1f6a8 100644 --- 
a/requirements/dev.txt +++ b/requirements/dev.txt @@ -121,7 +121,7 @@ mypy-extensions==1.0.0 # via mypy nodeenv==1.8.0 # via pre-commit -packaging==23.1 +packaging==24.1 # via # build # gunicorn diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index a5e70f9a9d2..916cee3c0cb 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -38,7 +38,7 @@ jinja2==3.1.4 # towncrier markupsafe==2.1.3 # via jinja2 -packaging==23.1 +packaging==24.1 # via sphinx pillow==9.5.0 # via diff --git a/requirements/doc.txt b/requirements/doc.txt index 1d0cc15cb21..0038be971e1 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -38,7 +38,7 @@ jinja2==3.1.4 # towncrier markupsafe==2.1.3 # via jinja2 -packaging==23.1 +packaging==24.1 # via sphinx pillow==9.5.0 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index 9b5ac722a95..54ad2cc4322 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -54,7 +54,7 @@ mypy-extensions==1.0.0 # via mypy nodeenv==1.8.0 # via pre-commit -packaging==23.1 +packaging==24.1 # via pytest platformdirs==3.10.0 # via virtualenv diff --git a/requirements/test.txt b/requirements/test.txt index 05939335770..344fed3e747 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -61,7 +61,7 @@ mypy==1.11.1 ; implementation_name == "cpython" # via -r requirements/test.in mypy-extensions==1.0.0 # via mypy -packaging==23.1 +packaging==24.1 # via # gunicorn # pytest From de76808f1d29cb799f365b2655d5d3ef46a49a4c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Aug 2024 12:04:00 +0000 Subject: [PATCH 0383/1511] Bump pip from 23.2.1 to 24.2 (#8816) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [//]: # (dependabot-start) ⚠️ **Dependabot is rebasing this PR** ⚠️ Rebasing might not happen immediately, so don't worry if this takes some time. 
Note: if you make any changes to this PR yourself, they will take precedence over the rebase. --- [//]: # (dependabot-end) Bumps [pip](https://github.com/pypa/pip) from 23.2.1 to 24.2. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/pip/blob/main/NEWS.rst">pip's changelog</a>.</em></p> <blockquote> <h1>24.2 (2024-07-28)</h1> <h2>Deprecations and Removals</h2> <ul> <li>Deprecate <code>pip install --editable</code> falling back to <code>setup.py develop</code> when using a setuptools version that does not support :pep:<code>660</code> (setuptools v63 and older). (<code>[#11457](https://github.com/pypa/pip/issues/11457) <https://github.com/pypa/pip/issues/11457></code>_)</li> </ul> <h2>Features</h2> <ul> <li> <p>Check unsupported packages for the current platform. (<code>[#11054](https://github.com/pypa/pip/issues/11054) <https://github.com/pypa/pip/issues/11054></code>_)</p> </li> <li> <p>Use system certificates <em>and</em> certifi certificates to verify HTTPS connections on Python 3.10+. Python 3.9 and earlier only use certifi.</p> <p>To revert to previous behaviour, pass the flag <code>--use-deprecated=legacy-certs</code>. (<code>[#11647](https://github.com/pypa/pip/issues/11647) <https://github.com/pypa/pip/issues/11647></code>_)</p> </li> <li> <p>Improve discovery performance of installed packages when the <code>importlib.metadata</code> backend is used to load distribution metadata (used by default under Python 3.11+). (<code>[#12656](https://github.com/pypa/pip/issues/12656) <https://github.com/pypa/pip/issues/12656></code>_)</p> </li> <li> <p>Improve performance when the same requirement string appears many times during resolution, by consistently caching the parsed requirement string. 
(<code>[#12663](https://github.com/pypa/pip/issues/12663) <https://github.com/pypa/pip/issues/12663></code>_)</p> </li> <li> <p>Minor performance improvement of finding applicable package candidates by not repeatedly calculating their versions (<code>[#12664](https://github.com/pypa/pip/issues/12664) <https://github.com/pypa/pip/issues/12664></code>_)</p> </li> <li> <p>Disable pip's self version check when invoking a pip subprocess to install PEP 517 build requirements. (<code>[#12683](https://github.com/pypa/pip/issues/12683) <https://github.com/pypa/pip/issues/12683></code>_)</p> </li> <li> <p>Improve dependency resolution performance by caching platform compatibility tags during wheel cache lookup. (<code>[#12712](https://github.com/pypa/pip/issues/12712) <https://github.com/pypa/pip/issues/12712></code>_)</p> </li> <li> <p><code>wheel</code> is no longer explicitly listed as a build dependency of <code>pip</code>. <code>setuptools</code> injects this dependency in the <code>get_requires_for_build_wheel()</code> hook and no longer needs it on newer versions. (<code>[#12728](https://github.com/pypa/pip/issues/12728) <https://github.com/pypa/pip/issues/12728></code>_)</p> </li> <li> <p>Ignore <code>--require-virtualenv</code> for <code>pip check</code> and <code>pip freeze</code> (<code>[#12842](https://github.com/pypa/pip/issues/12842) <https://github.com/pypa/pip/issues/12842></code>_)</p> </li> <li> <p>Improve package download and install performance.</p> <p>Increase chunk sizes when downloading (256 kB, up from 10 kB) and reading files (1 MB, up from 8 kB). This reduces the frequency of updates to pip's progress bar. (<code>[#12810](https://github.com/pypa/pip/issues/12810) <https://github.com/pypa/pip/issues/12810></code>_)</p> </li> <li> <p>Improve pip install performance.</p> <p>Files are now extracted in 1MB blocks, or in one block matching the file size for smaller files. 
A decompressor is no longer instantiated when extracting 0 bytes files, it is not necessary because there is no data to decompress. (<code>[#12803](https://github.com/pypa/pip/issues/12803) <https://github.com/pypa/pip/issues/12803></code>_)</p> </li> </ul> <h2>Bug Fixes</h2> <ul> <li>Set <code>no_color</code> to global <code>rich.Console</code> instance. (<code>[#11045](https://github.com/pypa/pip/issues/11045) <https://github.com/pypa/pip/issues/11045></code>_)</li> <li>Fix resolution to respect <code>--python-version</code> when checking <code>Requires-Python</code>. (<code>[#12216](https://github.com/pypa/pip/issues/12216) <https://github.com/pypa/pip/issues/12216></code>_)</li> <li>Perform hash comparisons in a case-insensitive manner. (<code>[#12680](https://github.com/pypa/pip/issues/12680) <https://github.com/pypa/pip/issues/12680></code>_)</li> <li>Avoid <code>dlopen</code> failure for glibc detection in musl builds (<code>[#12716](https://github.com/pypa/pip/issues/12716) <https://github.com/pypa/pip/issues/12716></code>_)</li> <li>Avoid keyring logging crashes when pip is run in verbose mode. (<code>[#12751](https://github.com/pypa/pip/issues/12751) <https://github.com/pypa/pip/issues/12751></code>_)</li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/pip/commit/97146c7f4cd85551f3dc261830a57f304e43c181"><code>97146c7</code></a> Bump for release</li> <li><a href="https://github.com/pypa/pip/commit/ef81b2eafd390fb56f62930dcd74f6e4580093e0"><code>ef81b2e</code></a> Update AUTHORS.txt</li> <li><a href="https://github.com/pypa/pip/commit/350a0570a88b6c0d13c68f81ac08dc64f954cadf"><code>350a057</code></a> Bump the github-actions group with 2 updates (<a href="https://redirect.github.com/pypa/pip/issues/12876">#12876</a>)</li> <li><a href="https://github.com/pypa/pip/commit/184390f4f2cde0316801eb701f49dda4f7a9a6ac"><code>184390f</code></a> Update dependabot.yml to bump group updates (<a href="https://redirect.github.com/pypa/pip/issues/12572">#12572</a>)</li> <li><a href="https://github.com/pypa/pip/commit/48917f1c0375496058d677f652a90de6bee4dc8c"><code>48917f1</code></a> Merge pull request <a href="https://redirect.github.com/pypa/pip/issues/12875">#12875</a> from hellozee/fix-unit-test</li> <li><a href="https://github.com/pypa/pip/commit/dd85c28464dbfc9b3a53c885a41c209e4700ad2d"><code>dd85c28</code></a> Fix invalid origin test to check all the logged messages</li> <li><a href="https://github.com/pypa/pip/commit/203780b5d167c4d01c55df7adc91d5ad1a0563aa"><code>203780b</code></a> Merge pull request <a href="https://redirect.github.com/pypa/pip/issues/12865">#12865</a> from pradyunsg/better-exception-handling-around-sel...</li> <li><a href="https://github.com/pypa/pip/commit/e50314134886d5eb5b650b3ce95abaafcb6dce10"><code>e503141</code></a> Properly mock <code>_self_version_check_logic</code></li> <li><a href="https://github.com/pypa/pip/commit/3518d3293445ad43eedba116b6182185c03abda3"><code>3518d32</code></a> Rework how <code>--debug</code> is handled in <code>main</code></li> <li><a href="https://github.com/pypa/pip/commit/be21d82e4362c00aab451ef1cf212d9a62f8e58e"><code>be21d82</code></a> Move exception suppression 
to cover more of self-version-check logic</li> <li>Additional commits viewable in <a href="https://github.com/pypa/pip/compare/23.2.1...24.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pip&package-manager=pip&previous-version=23.2.1&new-version=24.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 5ceac3ecb5e..f064f4edfdc 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -283,7 +283,7 @@ zipp==3.20.0 # importlib-resources # The following packages are considered to be unsafe in a requirements file: -pip==23.2.1 +pip==24.2 # via pip-tools setuptools==68.0.0 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 06897d1f6a8..62ba69406b4 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -276,7 +276,7 @@ zipp==3.20.0 # importlib-resources # The following packages are considered to be unsafe in a requirements file: -pip==23.2.1 +pip==24.2 # via pip-tools setuptools==68.0.0 # via From d1bc1410173a33333239d47d90729edac2aef576 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Aug 2024 12:22:06 +0000 Subject: [PATCH 0384/1511] Bump identify from 2.3.5 to 2.6.0 (#8818) Bumps [identify](https://github.com/pre-commit/identify) 
from 2.3.5 to 2.6.0. <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pre-commit/identify/commit/577bfe15db2b97328431ec6de934c1a4bacc6edd"><code>577bfe1</code></a> v2.6.0</li> <li><a href="https://github.com/pre-commit/identify/commit/2e9f390c347cc49cace0c6b40f9b9dd8a6cd9d01"><code>2e9f390</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/identify/issues/468">#468</a> from harrymander/fix-index-error-with-env-shebang</li> <li><a href="https://github.com/pre-commit/identify/commit/0853fe343d105b822aacb6563cb73db93aee429b"><code>0853fe3</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/identify/issues/457">#457</a> from wircho/adolfo-add-ejson</li> <li><a href="https://github.com/pre-commit/identify/commit/818c07f7562eed43218079b5d469c168d64f93d1"><code>818c07f</code></a> Add ejson extension</li> <li><a href="https://github.com/pre-commit/identify/commit/909fd829e4bacd942de2a83687347c5a97d73985"><code>909fd82</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/identify/issues/469">#469</a> from pre-commit/pre-commit-ci-update-config</li> <li><a href="https://github.com/pre-commit/identify/commit/a9fd3882458cd34ffb1747e2007b2130a7b5296c"><code>a9fd388</code></a> [pre-commit.ci] pre-commit autoupdate</li> <li><a href="https://github.com/pre-commit/identify/commit/90c7a1e580be5ccc6dde03456e79928d830fad81"><code>90c7a1e</code></a> Fix IndexError when shebang is just '#!/usr/bin/env'</li> <li><a href="https://github.com/pre-commit/identify/commit/437ef92fd9331c0be3ec47eecc81aaa19971896e"><code>437ef92</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/identify/issues/467">#467</a> from pre-commit/pre-commit-ci-update-config</li> <li><a href="https://github.com/pre-commit/identify/commit/525954b02b6a99b6ea1bbe2ff26fa4cf2fd000f1"><code>525954b</code></a> [pre-commit.ci] pre-commit autoupdate</li> <li><a 
href="https://github.com/pre-commit/identify/commit/0651fca53e7ef7edab19cad41f6ee4fe89375b55"><code>0651fca</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/identify/issues/464">#464</a> from pre-commit/pre-commit-ci-update-config</li> <li>Additional commits viewable in <a href="https://github.com/pre-commit/identify/compare/v2.3.5...v2.6.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=identify&package-manager=pip&previous-version=2.3.5&new-version=2.6.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index f064f4edfdc..e0742c1f7bd 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -86,7 +86,7 @@ gidgethub==5.0.1 # via cherry-picker gunicorn==23.0.0 # via -r requirements/base.in -identify==2.3.5 +identify==2.6.0 # via pre-commit idna==3.3 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 62ba69406b4..f16c3db32ae 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -84,7 +84,7 @@ gidgethub==5.3.0 # via cherry-picker gunicorn==23.0.0 # via -r requirements/base.in -identify==2.5.26 +identify==2.6.0 # via pre-commit idna==3.4 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index 54ad2cc4322..8d0587f43d3 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -36,7 +36,7 @@ filelock==3.12.2 # via virtualenv freezegun==1.5.1 # via -r requirements/lint.in -identify==2.5.26 +identify==2.6.0 # via pre-commit idna==3.7 # via From 
b644f7a624836732f8a3a0498d767857f2ef2486 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Aug 2024 12:32:28 +0000 Subject: [PATCH 0385/1511] Bump importlib-metadata from 6.8.0 to 8.4.0 (#8820) Bumps [importlib-metadata](https://github.com/python/importlib_metadata) from 6.8.0 to 8.4.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/python/importlib_metadata/blob/main/NEWS.rst">importlib-metadata's changelog</a>.</em></p> <blockquote> <h1>v8.4.0</h1> <h2>Features</h2> <ul> <li>Deferred import of inspect for import performance. (<a href="https://redirect.github.com/python/importlib_metadata/issues/499">#499</a>)</li> </ul> <h1>v8.3.0</h1> <h2>Features</h2> <ul> <li>Disallow passing of 'dist' to EntryPoints.select.</li> </ul> <h1>v8.2.0</h1> <h2>Features</h2> <ul> <li>Add SimplePath to importlib_metadata.<strong>all</strong>. (<a href="https://redirect.github.com/python/importlib_metadata/issues/494">#494</a>)</li> </ul> <h1>v8.1.0</h1> <h2>Features</h2> <ul> <li>Prioritize valid dists to invalid dists when retrieving by name. (<a href="https://redirect.github.com/python/importlib_metadata/issues/489">#489</a>)</li> </ul> <h1>v8.0.0</h1> <h2>Deprecations and Removals</h2> <ul> <li>Message.<strong>getitem</strong> now raises a KeyError on missing keys. (<a href="https://redirect.github.com/python/importlib_metadata/issues/371">#371</a>)</li> <li>Removed deprecated support for Distribution subclasses not implementing abstract methods.</li> </ul> <h1>v7.2.1</h1> <p>Bugfixes</p> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/python/importlib_metadata/commit/1616cb3a82c33c3603ff984b6ff417e68068aa6e"><code>1616cb3</code></a> Finalize</li> <li><a href="https://github.com/python/importlib_metadata/commit/71b467843258873048eb944545ba1235866523e6"><code>71b4678</code></a> Add news fragment.</li> <li><a href="https://github.com/python/importlib_metadata/commit/ebcdcfdd18d427498f11b74e245b3f8a7ef5df9c"><code>ebcdcfd</code></a> Remove workaround for <a href="https://redirect.github.com/python/typeshed/issues/10328">python/typeshed#10328</a>.</li> <li><a href="https://github.com/python/importlib_metadata/commit/2c43cfe7dba2902095a166c4f6226ac5f7bfb50b"><code>2c43cfe</code></a> Merge pull request <a href="https://redirect.github.com/python/importlib_metadata/issues/499">#499</a> from danielhollas/defer-inspect</li> <li><a href="https://github.com/python/importlib_metadata/commit/a7aaf72702b3a49ea3e33c9cf7f223839067c883"><code>a7aaf72</code></a> Use third-person imperative voice and link to issue in comment.</li> <li><a href="https://github.com/python/importlib_metadata/commit/e99c10510d48e840b0550bd05d1167633dcfaea7"><code>e99c105</code></a> Restore single-expression logic.</li> <li><a href="https://github.com/python/importlib_metadata/commit/debb5165a88b1a4433150b265e155c21b497d154"><code>debb516</code></a> Don't use global var</li> <li><a href="https://github.com/python/importlib_metadata/commit/3c8e1ec4e34c11dcff086be7fbd0d1981bf32480"><code>3c8e1ec</code></a> Finalize</li> <li><a href="https://github.com/python/importlib_metadata/commit/5035755aac64a6ee902add3909f463a2bf54ee1c"><code>5035755</code></a> Merge pull request <a href="https://redirect.github.com/python/importlib_metadata/issues/498">#498</a> from python/feature/entry-points-disallow-dist-match</li> <li><a href="https://github.com/python/importlib_metadata/commit/6d9b766099dbac1c97a220badde7e14304e03291"><code>6d9b766</code></a> 
Remove MetadataPathFinder regardless of its position.</li> <li>Additional commits viewable in <a href="https://github.com/python/importlib_metadata/compare/v6.8.0...v8.4.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=importlib-metadata&package-manager=pip&previous-version=6.8.0&new-version=8.4.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index e0742c1f7bd..69efedc66e3 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -95,7 +95,7 @@ idna==3.3 # yarl imagesize==1.4.1 # via sphinx -importlib-metadata==7.0.0 +importlib-metadata==8.4.0 # via # build # sphinx diff --git a/requirements/dev.txt b/requirements/dev.txt index f16c3db32ae..c2d8f4f2b02 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -93,7 +93,7 @@ idna==3.4 # yarl imagesize==1.4.1 # via sphinx -importlib-metadata==7.0.0 +importlib-metadata==8.4.0 # via # build # sphinx diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 916cee3c0cb..d30d3fbef13 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -26,7 +26,7 @@ idna==3.4 # via requests imagesize==1.4.1 # via sphinx -importlib-metadata==6.8.0 +importlib-metadata==8.4.0 # via sphinx importlib-resources==6.1.1 # via 
towncrier diff --git a/requirements/doc.txt b/requirements/doc.txt index 0038be971e1..4069a152c5d 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -26,7 +26,7 @@ idna==3.4 # via requests imagesize==1.4.1 # via sphinx -importlib-metadata==6.8.0 +importlib-metadata==8.4.0 # via sphinx importlib-resources==6.1.1 # via towncrier From 3a10ee24f21f4bd3eea92143f519e39673d3e66d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Aug 2024 12:32:40 +0000 Subject: [PATCH 0386/1511] Bump iniconfig from 1.1.1 to 2.0.0 (#8821) Bumps [iniconfig](https://github.com/pytest-dev/iniconfig) from 1.1.1 to 2.0.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pytest-dev/iniconfig/blob/main/CHANGELOG">iniconfig's changelog</a>.</em></p> <blockquote> <h1>2.0.0</h1> <ul> <li>add support for Python 3.7-3.11</li> <li>drop support for Python 2.6-3.6</li> <li>add encoding argument defaulting to utf-8</li> <li>inline and clarify type annotations</li> <li>move parsing code from inline to extra file</li> <li>add typing overloads for helper methods</li> </ul> <p>.. 
note::</p> <p>major release due to the major changes in python versions supported + changes in packaging</p> <p>the api is expected to be compatible</p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pytest-dev/iniconfig/commit/93f5930e668c0d1ddf4597e38dd0dea4e2665e7a"><code>93f5930</code></a> Merge pull request <a href="https://redirect.github.com/pytest-dev/iniconfig/issues/51">#51</a> from pytest-dev/add-deploy</li> <li><a href="https://github.com/pytest-dev/iniconfig/commit/942655746cf0a0183f04ba7c425b4d4683e688ea"><code>9426557</code></a> rework deploy pipeline for hatch usage and modern checkout</li> <li><a href="https://github.com/pytest-dev/iniconfig/commit/5f617e30ac0c8512f13a0af44e0f94ddbddd4240"><code>5f617e3</code></a> Merge pull request <a href="https://redirect.github.com/pytest-dev/iniconfig/issues/49">#49</a> from pytest-dev/rework-types</li> <li><a href="https://github.com/pytest-dev/iniconfig/commit/180065cec29f156b1baf9cf684ebbf3f251db073"><code>180065c</code></a> changelog</li> <li><a href="https://github.com/pytest-dev/iniconfig/commit/c7d1d88a398cb64884c30fb03c6581dd2bcd1ae7"><code>c7d1d88</code></a> add sectionwrapper get overload types</li> <li><a href="https://github.com/pytest-dev/iniconfig/commit/90df3d776833fcfba42c9641d73c22956f25bf37"><code>90df3d7</code></a> hatch-vcs</li> <li><a href="https://github.com/pytest-dev/iniconfig/commit/df78c51bfba888c9d145fbd3d66f45b74dac0271"><code>df78c51</code></a> pre-commit pyproject ftm</li> <li><a href="https://github.com/pytest-dev/iniconfig/commit/c113dd6d4c6403ae636e0d69df5b3170c2c32888"><code>c113dd6</code></a> migrate to hatch</li> <li><a href="https://github.com/pytest-dev/iniconfig/commit/0253ff11d9e2ca1967df615185d0f52c983ee642"><code>0253ff1</code></a> implement review comments</li> <li><a href="https://github.com/pytest-dev/iniconfig/commit/10583b86c3de36764f27e24f453eb9fe91c2ab3f"><code>10583b8</code></a> update changelog</li> 
<li>Additional commits viewable in <a href="https://github.com/pytest-dev/iniconfig/compare/v1.1.1...v2.0.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=iniconfig&package-manager=pip&previous-version=1.1.1&new-version=2.0.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 69efedc66e3..a8bab3f0158 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -103,7 +103,7 @@ importlib-resources==6.1.1 # via towncrier incremental==24.7.2 # via towncrier -iniconfig==1.1.1 +iniconfig==2.0.0 # via pytest jinja2==3.1.4 # via From 37f1f6c155f76a4f485ef5f404ee55aea08dfd19 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Aug 2024 12:50:43 +0000 Subject: [PATCH 0387/1511] Bump setuptools from 68.0.0 to 73.0.1 (#8819) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [setuptools](https://github.com/pypa/setuptools) from 68.0.0 to 73.0.1. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/setuptools/blob/main/NEWS.rst">setuptools's changelog</a>.</em></p> <blockquote> <h1>v73.0.1</h1> <h2>Bugfixes</h2> <ul> <li>Remove <code>abc.ABCMeta</code> metaclass from abstract classes. 
<code>pypa/setuptools#4503 <https://github.com/pypa/setuptools/pull/4503></code>_ had an unintended consequence of causing potential <code>TypeError: metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases</code> -- by :user:<code>Avasam</code> (<a href="https://redirect.github.com/pypa/setuptools/issues/4579">#4579</a>)</li> </ul> <h1>v73.0.0</h1> <h2>Features</h2> <ul> <li>Mark abstract base classes and methods with <code>abc.ABC</code> and <code>abc.abstractmethod</code> -- by :user:<code>Avasam</code> (<a href="https://redirect.github.com/pypa/setuptools/issues/4503">#4503</a>)</li> <li>Changed the order of type checks in <code>setuptools.command.easy_install.CommandSpec.from_param</code> to support any <code>collections.abc.Iterable</code> of <code>str</code> param -- by :user:<code>Avasam</code> (<a href="https://redirect.github.com/pypa/setuptools/issues/4505">#4505</a>)</li> </ul> <h2>Bugfixes</h2> <ul> <li>Prevent an error in <code>bdist_wheel</code> if <code>compression</code> is set to a <code>str</code> (even if valid) after finalizing options but before running the command. -- by :user:<code>Avasam</code> (<a href="https://redirect.github.com/pypa/setuptools/issues/4383">#4383</a>)</li> <li>Raises an exception when <code>py_limited_api</code> is used in a build with <code>Py_GIL_DISABLED</code><code>python/cpython#111506</code><a href="https://redirect.github.com/pypa/setuptools/issues/4420">#4420</a>)</li> <li><code>pypa/distutils#284</code></li> </ul> <h2>Deprecations and Removals</h2> <ul> <li><code>setuptools</code> is replacing the usages of :pypi:<code>ordered_set</code> with simple instances of <code>dict[Hashable, None]</code>. This is done to remove the extra dependency and it is possible because since Python 3.7, <code>dict</code> maintain insertion order. 
(<a href="https://redirect.github.com/pypa/setuptools/issues/4574">#4574</a>)</li> </ul> <h2>Misc</h2> <ul> <li><a href="https://redirect.github.com/pypa/setuptools/issues/4534">#4534</a>, <a href="https://redirect.github.com/pypa/setuptools/issues/4546">#4546</a>, <a href="https://redirect.github.com/pypa/setuptools/issues/4554">#4554</a>, <a href="https://redirect.github.com/pypa/setuptools/issues/4559">#4559</a>, <a href="https://redirect.github.com/pypa/setuptools/issues/4565">#4565</a></li> </ul> <h1>v72.2.0</h1> <h2>Features</h2> <ul> <li><code>pypa/distutils#272</code><a href="https://redirect.github.com/pypa/distutils/issues/237">pypa/distutils#237</a><code>pypa/distuils#228</code><a href="https://redirect.github.com/pypa/setuptools/issues/4538">#4538</a>)</li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/setuptools/commit/ebddeb36f72c9d758b5cc0e9f81f8a66aa837d96"><code>ebddeb3</code></a> Bump version: 73.0.0 → 73.0.1</li> <li><a href="https://github.com/pypa/setuptools/commit/18963fb1851d24b89780cc10e213a2779be5f1eb"><code>18963fb</code></a> Merge pull request <a href="https://redirect.github.com/pypa/setuptools/issues/4580">#4580</a> from Avasam/no-ABCMeta</li> <li><a href="https://github.com/pypa/setuptools/commit/b7ee00da2cfa8208c47812fb657392e8b88f620c"><code>b7ee00d</code></a> Remove ABCMeta metaclass, keep abstractmethods</li> <li><a href="https://github.com/pypa/setuptools/commit/477f713450ff57de126153f3034d032542916d03"><code>477f713</code></a> Override distribution attribute type in all distutils-based commands (<a href="https://redirect.github.com/pypa/setuptools/issues/4577">#4577</a>)</li> <li><a href="https://github.com/pypa/setuptools/commit/429ac589e5f290282f91b420350b002a2c519699"><code>429ac58</code></a> Override distribution attribute type in all distutils-based commands</li> <li><a 
href="https://github.com/pypa/setuptools/commit/4147b093d0aea4f57757c699a0b25bbc3aab2580"><code>4147b09</code></a> Bump version: 72.2.0 → 73.0.0</li> <li><a href="https://github.com/pypa/setuptools/commit/2ad8c10d8214340be812769359090c7950a39c35"><code>2ad8c10</code></a> Merge pull request <a href="https://redirect.github.com/pypa/setuptools/issues/4576">#4576</a> from pypa/bugfix/distutils-284</li> <li><a href="https://github.com/pypa/setuptools/commit/8afe0c3e9c4c56f5d7343dc21f743e9cf83c594a"><code>8afe0c3</code></a> Merge pull request <a href="https://redirect.github.com/pypa/setuptools/issues/4574">#4574</a> from abravalheri/ordered_set</li> <li><a href="https://github.com/pypa/setuptools/commit/ad611bcaedfefef3480ac111c4f22e2ca8cc7a1c"><code>ad611bc</code></a> Merge <a href="https://github.com/pypa/distutils">https://github.com/pypa/distutils</a> into bugfix/distutils-284</li> <li><a href="https://github.com/pypa/setuptools/commit/30b7331b07fbc404959cb37ac311afdfb90813be"><code>30b7331</code></a> Ensure a missing target is still indicated as 'sources are newer' even when t...</li> <li>Additional commits viewable in <a href="https://github.com/pypa/setuptools/compare/v68.0.0...v73.0.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=setuptools&package-manager=pip&previous-version=68.0.0&new-version=73.0.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- requirements/lint.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index a8bab3f0158..cc8a478cabd 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -285,7 +285,7 @@ zipp==3.20.0 
# The following packages are considered to be unsafe in a requirements file: pip==24.2 # via pip-tools -setuptools==68.0.0 +setuptools==73.0.1 # via # blockdiag # incremental diff --git a/requirements/dev.txt b/requirements/dev.txt index c2d8f4f2b02..b6b7ae16e81 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -278,7 +278,7 @@ zipp==3.20.0 # The following packages are considered to be unsafe in a requirements file: pip==24.2 # via pip-tools -setuptools==68.0.0 +setuptools==73.0.1 # via # blockdiag # incremental diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index d30d3fbef13..f4ed5c5b611 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -96,7 +96,7 @@ zipp==3.20.0 # importlib-resources # The following packages are considered to be unsafe in a requirements file: -setuptools==68.0.0 +setuptools==73.0.1 # via # blockdiag # incremental diff --git a/requirements/doc.txt b/requirements/doc.txt index 4069a152c5d..c553f228a0d 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -91,7 +91,7 @@ zipp==3.20.0 # importlib-resources # The following packages are considered to be unsafe in a requirements file: -setuptools==68.0.0 +setuptools==73.0.1 # via # blockdiag # incremental diff --git a/requirements/lint.txt b/requirements/lint.txt index 8d0587f43d3..5dd21b48e45 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -123,5 +123,5 @@ virtualenv==20.26.3 # via pre-commit # The following packages are considered to be unsafe in a requirements file: -setuptools==68.0.0 +setuptools==73.0.1 # via nodeenv From 8e4b2aa6ee12bb4596b9f7b03c9399d040617784 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 22 Aug 2024 10:56:09 +0000 Subject: [PATCH 0388/1511] Bump regex from 2021.11.10 to 2024.7.24 (#8826) Bumps [regex](https://github.com/mrabarnett/mrab-regex) from 2021.11.10 to 2024.7.24. 
<details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/mrabarnett/mrab-regex/blob/hg/changelog.txt">regex's changelog</a>.</em></p> <blockquote> <p>Version: 2024.7.24</p> <pre><code>Git issue 539: Bug: Partial matching fails on a simple example </code></pre> <p>Version: 2024.6.22</p> <pre><code>Git issue 535: Regex fails Unicode 15.1 GraphemeBreakTest due to missing new GB9c rule implementation </code></pre> <p>Version: 2024.5.15</p> <pre><code>Git issue 530: hangs with fuzzy and optionals <p>It's not hanging, it'll finish eventually. It's just an example of catastrophic backtracking.</p> <p>The error printed when Ctrl+C is pressed does show a bug, though, which is now fixed.<br /> </code></pre></p> <p>Version: 2024.5.10</p> <pre><code>Updated for Python 3.13. <p><time.h> now needs to be included explicitly because Python.h no longer includes it.<br /> </code></pre></p> <p>Version: 2024.4.28</p> <pre><code>Git issue 527: `VERBOSE`/`X` flag breaks `\N` escapes </code></pre> <p>Version: 2024.4.16</p> <pre><code>Git issue 525: segfault when fuzzy matching empty list </code></pre> <p>Version: 2023.12.25</p> <pre><code>Cannot get release notification action in main.yml to work. Commenting it out for now. </code></pre> <p>Version: 2023.12.24</p> <pre><code>Fixed invalid main.yml. </code></pre> <p>Version: 2023.12.23</p> <pre><code>The escape function no longer escapes \x00. It's not necessary. <p>Inline flags can now be turned off and apply to what follows.</p> <p>Added \R to match line endings.<br /> </code></pre></p> <p>Version: 2023.10.3</p> <pre><code>Updated to Unicode 15.1.0. </code></pre> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/mrabarnett/mrab-regex/commit/e8a8d28aa32a945dfbed6ef41d29f30daf07e08d"><code>e8a8d28</code></a> Git issue 539: Bug: Partial matching fails on a simple example</li> <li><a href="https://github.com/mrabarnett/mrab-regex/commit/6d086ffc5be71220d527495ad384834e4f54f340"><code>6d086ff</code></a> Git issue 535: Regex fails Unicode 15.1 GraphemeBreakTest due to missing new ...</li> <li><a href="https://github.com/mrabarnett/mrab-regex/commit/8eabb4223e4b1d4c7b6a4496328e00eee5e352e5"><code>8eabb42</code></a> Git issue 530: hangs with fuzzy and optionals</li> <li><a href="https://github.com/mrabarnett/mrab-regex/commit/be139ffc3a18a7ead6939ea19015dc41e3682ec4"><code>be139ff</code></a> Updated for Python 3.13.</li> <li><a href="https://github.com/mrabarnett/mrab-regex/commit/2e3272be48e32367cb1110dba5329ace06312017"><code>2e3272b</code></a> Git issue 527: <code>VERBOSE</code>/<code>X</code> flag breaks <code>\N</code> escapes</li> <li><a href="https://github.com/mrabarnett/mrab-regex/commit/9c950f2c25a5b1221a059761c5f90b9ee0f31763"><code>9c950f2</code></a> Updated changelog.</li> <li><a href="https://github.com/mrabarnett/mrab-regex/commit/5d65c8a727eb3e81d5681ec0be84e02af0f9b20b"><code>5d65c8a</code></a> Git issue 525: segfault when fuzzy matching empty list</li> <li><a href="https://github.com/mrabarnett/mrab-regex/commit/4f2ed52b3d73f39541026cf74f7c23106086b0b5"><code>4f2ed52</code></a> Cannot get release notification action in main.yml to work. 
Commenting it out...</li> <li><a href="https://github.com/mrabarnett/mrab-regex/commit/647c006a88c553a694118e8cae109aa9365f188f"><code>647c006</code></a> Further fixes in main.yml.</li> <li><a href="https://github.com/mrabarnett/mrab-regex/commit/d0afd79fbabd03a64ae2250e4b5701af8e997ebb"><code>d0afd79</code></a> Another fix in main.yml.</li> <li>Additional commits viewable in <a href="https://github.com/mrabarnett/mrab-regex/compare/2021.11.10...2024.7.24">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=regex&package-manager=pip&previous-version=2021.11.10&new-version=2024.7.24)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index cc8a478cabd..e7099ba8962 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -188,7 +188,7 @@ pyyaml==6.0.1 # via pre-commit re-assert==1.1.0 # via -r requirements/test.in -regex==2021.11.10 +regex==2024.7.24 # via re-assert requests==2.31.0 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index b6b7ae16e81..78f178012cc 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -183,7 +183,7 @@ pyyaml==6.0.1 # via pre-commit re-assert==1.1.0 # via -r requirements/test.in -regex==2023.6.3 +regex==2024.7.24 # via re-assert requests==2.31.0 # via diff --git a/requirements/test.txt b/requirements/test.txt index 344fed3e747..2840cbd4d4c 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -92,7 +92,7 @@ python-on-whales==0.72.0 # via -r requirements/test.in re-assert==1.1.0 # via -r requirements/test.in -regex==2023.6.3 +regex==2024.7.24 # via re-assert 
requests==2.31.0 # via python-on-whales From f6e3b71026ee633034a5299b1c6410a2e8053826 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 22 Aug 2024 10:56:34 +0000 Subject: [PATCH 0389/1511] Bump nodeenv from 1.6.0 to 1.9.1 (#8828) Bumps [nodeenv](https://github.com/ekalinin/nodeenv) from 1.6.0 to 1.9.1. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/ekalinin/nodeenv/releases">nodeenv's releases</a>.</em></p> <blockquote> <h2>1.9.1: Fix version discovery</h2> <ul> <li><a href="https://redirect.github.com/ekalinin/nodeenv/issues/358">#358</a> (<a href="https://redirect.github.com/ekalinin/nodeenv/issues/356">#356</a>, <a href="https://redirect.github.com/ekalinin/nodeenv/issues/356">#356</a> )</li> </ul> <h2>1.9.0</h2> <ul> <li><a href="https://redirect.github.com/ekalinin/nodeenv/issues/338">#338</a></li> <li><a href="https://redirect.github.com/ekalinin/nodeenv/issues/347">#347</a></li> <li><a href="https://redirect.github.com/ekalinin/nodeenv/issues/348">#348</a></li> <li><a href="https://redirect.github.com/ekalinin/nodeenv/issues/345">#345</a></li> <li><a href="https://redirect.github.com/ekalinin/nodeenv/issues/342">#342</a></li> <li><a href="https://redirect.github.com/ekalinin/nodeenv/issues/346">#346</a></li> </ul> <h2>1.8.0: fix fish; add riscv64; multiple attempt to download node</h2> <p>Changes:</p> <ul> <li><a href="https://redirect.github.com/ekalinin/nodeenv/issues/312">#312</a></li> <li><a href="https://redirect.github.com/ekalinin/nodeenv/issues/313">#313</a></li> <li><a href="https://redirect.github.com/ekalinin/nodeenv/issues/317">#317</a></li> <li><a href="https://redirect.github.com/ekalinin/nodeenv/issues/320">#320</a></li> <li><a href="https://redirect.github.com/ekalinin/nodeenv/issues/327">#327</a></li> <li><a href="https://redirect.github.com/ekalinin/nodeenv/issues/329">#329</a></li> <li><a 
href="https://redirect.github.com/ekalinin/nodeenv/issues/330">#330</a></li> </ul> <h2>1.7.0: drop py34, py35, py36; improved work on m1</h2> <ul> <li><a href="https://redirect.github.com/ekalinin/nodeenv/issues/272">#272</a></li> <li><a href="https://redirect.github.com/ekalinin/nodeenv/issues/289">#289</a></li> <li><a href="https://redirect.github.com/ekalinin/nodeenv/issues/288">#288</a></li> <li><a href="https://redirect.github.com/ekalinin/nodeenv/issues/294">#294</a></li> <li><a href="https://redirect.github.com/ekalinin/nodeenv/issues/296">#296</a></li> <li><a href="https://redirect.github.com/ekalinin/nodeenv/issues/295">#295</a></li> <li><a href="https://redirect.github.com/ekalinin/nodeenv/issues/298">#298</a></li> <li><a href="https://redirect.github.com/ekalinin/nodeenv/issues/299">#299</a></li> <li><a href="https://redirect.github.com/ekalinin/nodeenv/issues/307">#307</a></li> <li><a href="https://redirect.github.com/ekalinin/nodeenv/issues/308">#308</a></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/ekalinin/nodeenv/commit/231431ed1a6239708fb715edb56730474a416e32"><code>231431e</code></a> 1.9.1</li> <li><a href="https://github.com/ekalinin/nodeenv/commit/69e310af9b8de6d8398b69c8c9a3c902663c928c"><code>69e310a</code></a> Fix version discovery; <a href="https://redirect.github.com/ekalinin/nodeenv/issues/356">#356</a>, <a href="https://redirect.github.com/ekalinin/nodeenv/issues/357">#357</a> (<a href="https://redirect.github.com/ekalinin/nodeenv/issues/358">#358</a>)</li> <li><a href="https://github.com/ekalinin/nodeenv/commit/9d74cd8f083ceeb546a33052683f8df083d54b1e"><code>9d74cd8</code></a> update AUTHORS</li> <li><a href="https://github.com/ekalinin/nodeenv/commit/2aa4a494b89981269f368e4c29c168d98cd6bd94"><code>2aa4a49</code></a> 1.9.0</li> <li><a href="https://github.com/ekalinin/nodeenv/commit/5aaed3c89f7f5e3b4d88cfd7183fc168d5cd0b66"><code>5aaed3c</code></a> Makefile: fixed tests 
target (regression tests)</li> <li><a href="https://github.com/ekalinin/nodeenv/commit/1024f4f64ceabd612b4df9a0b9dbe2691b2f5f9d"><code>1024f4f</code></a> Remove usage of non-portable <code>which</code> (<a href="https://redirect.github.com/ekalinin/nodeenv/issues/346">#346</a>)</li> <li><a href="https://github.com/ekalinin/nodeenv/commit/066a02c69f202b84f6aa110bdb3837df34efa8c7"><code>066a02c</code></a> Fix tests after <a href="https://redirect.github.com/ekalinin/nodeenv/issues/342">#342</a> (<a href="https://redirect.github.com/ekalinin/nodeenv/issues/354">#354</a>)</li> <li><a href="https://github.com/ekalinin/nodeenv/commit/c1dffc5c64377cfcda9f2befd357e4791903bf39"><code>c1dffc5</code></a> On Python 3.3+, replace pipes.quote with shlex.quote (<a href="https://redirect.github.com/ekalinin/nodeenv/issues/342">#342</a>)</li> <li><a href="https://github.com/ekalinin/nodeenv/commit/de428ee8e9c0189e8ba9b3dc5736552babbca28c"><code>de428ee</code></a> Support shells with "set -u" (<a href="https://redirect.github.com/ekalinin/nodeenv/issues/345">#345</a>)</li> <li><a href="https://github.com/ekalinin/nodeenv/commit/dc114e19815bb74b3a1b45038c6197c08160d86a"><code>dc114e1</code></a> Drop <code>packaging</code> dependency in favor of a simple version-parsing function (<a href="https://redirect.github.com/ekalinin/nodeenv/issues/352">#352</a>)</li> <li>Additional commits viewable in <a href="https://github.com/ekalinin/nodeenv/compare/1.6.0...1.9.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=nodeenv&package-manager=pip&previous-version=1.6.0&new-version=1.9.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 3 +-- requirements/lint.txt | 6 +----- 3 files changed, 3 insertions(+), 8 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index e7099ba8962..ecf1e6a8150 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -122,7 +122,7 @@ mypy==1.11.1 ; implementation_name == "cpython" # -r requirements/test.in 
mypy-extensions==1.0.0 # via mypy -nodeenv==1.6.0 +nodeenv==1.9.1 # via pre-commit packaging==24.1 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 78f178012cc..80809361618 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -119,7 +119,7 @@ mypy==1.11.1 ; implementation_name == "cpython" # -r requirements/test.in mypy-extensions==1.0.0 # via mypy -nodeenv==1.8.0 +nodeenv==1.9.1 # via pre-commit packaging==24.1 # via @@ -282,5 +282,4 @@ setuptools==73.0.1 # via # blockdiag # incremental - # nodeenv # pip-tools diff --git a/requirements/lint.txt b/requirements/lint.txt index 5dd21b48e45..589e1801dd5 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -52,7 +52,7 @@ mypy==1.11.1 ; implementation_name == "cpython" # via -r requirements/lint.in mypy-extensions==1.0.0 # via mypy -nodeenv==1.8.0 +nodeenv==1.9.1 # via pre-commit packaging==24.1 # via pytest @@ -121,7 +121,3 @@ uvloop==0.20.0 ; platform_system != "Windows" # via -r requirements/lint.in virtualenv==20.26.3 # via pre-commit - -# The following packages are considered to be unsafe in a requirements file: -setuptools==73.0.1 - # via nodeenv From f087b793eb539719eb1258e3b4d9902b9603dd5d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 22 Aug 2024 11:36:43 +0000 Subject: [PATCH 0390/1511] Bump markupsafe from 2.0.1 to 2.1.5 (#8830) Bumps [markupsafe](https://github.com/pallets/markupsafe) from 2.0.1 to 2.1.5. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pallets/markupsafe/releases">markupsafe's releases</a>.</em></p> <blockquote> <h2>2.1.5</h2> <p>This is a fix release for the 2.1.x feature release branch. It fixes bugs but does not otherwise change behavior and should not result in breaking changes.</p> <p>Fixes a regression in <code>striptags</code> behavior from 2.1.4. 
Spaces are now collapsed correctly.</p> <ul> <li>Changes: <a href="https://markupsafe.palletsprojects.com/en/2.1.x/changes/#version-2-1-5">https://markupsafe.palletsprojects.com/en/2.1.x/changes/#version-2-1-5</a></li> <li>Milestone: <a href="https://github.com/pallets/markupsafe/milestone/12?closed=1">https://github.com/pallets/markupsafe/milestone/12?closed=1</a></li> <li>PyPI: <a href="https://pypi.org/project/MarkupSafe/2.1.5/">https://pypi.org/project/MarkupSafe/2.1.5/</a></li> </ul> <h2>2.1.4</h2> <p>This is a fix release for the 2.1.x feature release branch. It fixes bugs but does not otherwise change behavior and should not result in breaking changes.</p> <ul> <li> <p>Improves performance of the <code>Markup.striptags</code> method for large input.</p> </li> <li> <p>Changes: <a href="https://markupsafe.palletsprojects.com/en/2.1.x/changes/#version-2-1-4">https://markupsafe.palletsprojects.com/en/2.1.x/changes/#version-2-1-4</a></p> </li> <li> <p>Milestone: <a href="https://github.com/pallets/markupsafe/milestone/11?closed=1">https://github.com/pallets/markupsafe/milestone/11?closed=1</a></p> </li> <li> <p>PyPI: <a href="https://pypi.org/project/MarkupSafe/2.1.4/">https://pypi.org/project/MarkupSafe/2.1.4/</a></p> </li> </ul> <h2>2.1.3</h2> <p>This is a fix release for the 2.1.x feature branch.</p> <ul> <li>Changes: <a href="https://markupsafe.palletsprojects.com/en/2.1.x/changes/#version-2-1-3">https://markupsafe.palletsprojects.com/en/2.1.x/changes/#version-2-1-3</a></li> <li>Milestone: <a href="https://github.com/pallets/markupsafe/milestone/9?closed=1">https://github.com/pallets/markupsafe/milestone/9?closed=1</a></li> </ul> <h2>2.1.2</h2> <p>This is the first release to provide wheels for Python 3.11. 
An SLSA provenance file is also generated, and is available to download from the GitHub release page.</p> <ul> <li>Changes: <a href="https://markupsafe.palletsprojects.com/en/2.1.x/changes/#version-2-1-2">https://markupsafe.palletsprojects.com/en/2.1.x/changes/#version-2-1-2</a></li> <li>Milestone: <a href="https://github.com/pallets/markupsafe/milestone/8?closed=1">https://github.com/pallets/markupsafe/milestone/8?closed=1</a></li> </ul> <h2>2.1.1</h2> <ul> <li>Changes: <a href="https://markupsafe.palletsprojects.com/en/2.1.x/changes/#version-2-1-1">https://markupsafe.palletsprojects.com/en/2.1.x/changes/#version-2-1-1</a></li> <li>Milestone: <a href="https://github.com/pallets/markupsafe/milestone/7?closed=1">https://github.com/pallets/markupsafe/milestone/7?closed=1</a></li> </ul> <h2>2.1.0</h2> <ul> <li>Changes: <a href="https://markupsafe.palletsprojects.com/en/2.1.x/changes/#version-2-1-0">https://markupsafe.palletsprojects.com/en/2.1.x/changes/#version-2-1-0</a></li> <li>Milestone: <a href="https://github.com/pallets/markupsafe/milestone/5">https://github.com/pallets/markupsafe/milestone/5</a></li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pallets/markupsafe/blob/main/CHANGES.rst">markupsafe's changelog</a>.</em></p> <blockquote> <h2>Version 2.1.5</h2> <p>Released 2024-02-02</p> <ul> <li>Fix <code>striptags</code> not collapsing spaces. :issue:<code>417</code></li> </ul> <h2>Version 2.1.4</h2> <p>Released 2024-01-19</p> <ul> <li>Don't use regular expressions for <code>striptags</code>, avoiding a performance issue. :pr:<code>413</code></li> </ul> <h2>Version 2.1.3</h2> <p>Released 2023-06-02</p> <ul> <li>Implement <code>format_map</code>, <code>casefold</code>, <code>removeprefix</code>, and <code>removesuffix</code> methods. :issue:<code>370</code></li> <li>Fix static typing for basic <code>str</code> methods on <code>Markup</code>. 
:issue:<code>358</code></li> <li>Use <code>Self</code> for annotating return types. :pr:<code>379</code></li> </ul> <h2>Version 2.1.2</h2> <p>Released 2023-01-17</p> <ul> <li>Fix <code>striptags</code> not stripping tags containing newlines. :issue:<code>310</code></li> </ul> <h2>Version 2.1.1</h2> <p>Released 2022-03-14</p> <ul> <li>Avoid ambiguous regex matches in <code>striptags</code>. :pr:<code>293</code></li> </ul> <h2>Version 2.1.0</h2> <p>Released 2022-02-17</p> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pallets/markupsafe/commit/fbba4acd0312826cec9cfe18371c7df07962cb65"><code>fbba4ac</code></a> release version 2.1.5</li> <li><a href="https://github.com/pallets/markupsafe/commit/c5fa23ba96336160204ed1376d60693b0d65e18d"><code>c5fa23b</code></a> update publish actions</li> <li><a href="https://github.com/pallets/markupsafe/commit/60a6512315d0ce05e6788808f80be526f2084b3f"><code>60a6512</code></a> striptags collapses spaces correctly (<a href="https://redirect.github.com/pallets/markupsafe/issues/418">#418</a>)</li> <li><a href="https://github.com/pallets/markupsafe/commit/0b6bee071fbd8d3171fb1ac4fb669baace808438"><code>0b6bee0</code></a> collapse spaces after stripping tags</li> <li><a href="https://github.com/pallets/markupsafe/commit/73e6a4886564a554c4a19983d29c97f9fc95457d"><code>73e6a48</code></a> start version 2.1.5</li> <li><a href="https://github.com/pallets/markupsafe/commit/d704bf45a1f77926a669261b394afef38eda2a70"><code>d704bf4</code></a> use pip-compile, dependabot updates (<a href="https://redirect.github.com/pallets/markupsafe/issues/419">#419</a>)</li> <li><a href="https://github.com/pallets/markupsafe/commit/1f82932e5c5a6e54181308afeb8443df21858ea0"><code>1f82932</code></a> use pip-compile, dependabot updates</li> <li><a href="https://github.com/pallets/markupsafe/commit/25a640f38297bfdc2ec2c82fe68df4c7613d083a"><code>25a640f</code></a> 
release version 2.1.4 (<a href="https://redirect.github.com/pallets/markupsafe/issues/414">#414</a>)</li> <li><a href="https://github.com/pallets/markupsafe/commit/b7cd6523579ea5a08d89799f2a64ec2c2bc45eca"><code>b7cd652</code></a> release version 2.1.4</li> <li><a href="https://github.com/pallets/markupsafe/commit/3bead8eedcfb434097dc61a18dd4721201df262a"><code>3bead8e</code></a> update cibuildwheel for 3.12 wheels</li> <li>Additional commits viewable in <a href="https://github.com/pallets/markupsafe/compare/2.0.1...2.1.5">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=markupsafe&package-manager=pip&previous-version=2.0.1&new-version=2.1.5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index ecf1e6a8150..c823f7a9001 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -109,7 +109,7 @@ jinja2==3.1.4 # via # sphinx # towncrier -markupsafe==2.0.1 +markupsafe==2.1.5 # via jinja2 multidict==6.0.5 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 80809361618..d9187dafba6 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -107,7 +107,7 @@ jinja2==3.1.4 # via # sphinx # towncrier -markupsafe==2.1.3 +markupsafe==2.1.5 # via jinja2 multidict==6.0.5 # via diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index f4ed5c5b611..c2c69a2d14a 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -36,7 +36,7 @@ jinja2==3.1.4 # via # sphinx # towncrier -markupsafe==2.1.3 +markupsafe==2.1.5 # via jinja2 packaging==24.1 # via sphinx diff --git a/requirements/doc.txt 
b/requirements/doc.txt index c553f228a0d..e3692c95f58 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -36,7 +36,7 @@ jinja2==3.1.4 # via # sphinx # towncrier -markupsafe==2.1.3 +markupsafe==2.1.5 # via jinja2 packaging==24.1 # via sphinx From 41cb0bf31196de2181c79cd689b907305d8e17af Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 22 Aug 2024 11:39:34 +0000 Subject: [PATCH 0391/1511] Bump babel from 2.9.1 to 2.16.0 (#8836) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [babel](https://github.com/python-babel/babel) from 2.9.1 to 2.16.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/python-babel/babel/releases">babel's releases</a>.</em></p> <blockquote> <h2>Version 2.16.0</h2> <p>The changelog below is auto-generated by GitHub.</p> <p>Please see <a href="https://github.com/python-babel/babel/blob/cf7d22369cf40a8218cff1d3dc823eefa174aee0/CHANGES.rst">CHANGELOG.rst</a> for additional details.</p> <h2>What's Changed</h2> <ul> <li>Upgrade GitHub Actions versions to avoid deprecation warnings by <a href="https://github.com/akx"><code>@​akx</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1080">python-babel/babel#1080</a></li> <li>Replace deprecated <code>ast.Str</code> with <code>ast.Constant</code> by <a href="https://github.com/tomasr8"><code>@​tomasr8</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1083">python-babel/babel#1083</a></li> <li>Fix <a href="https://redirect.github.com/python-babel/babel/issues/1094">#1094</a> by <a href="https://github.com/john-psina"><code>@​john-psina</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1095">python-babel/babel#1095</a></li> <li>CI fixes by <a href="https://github.com/akx"><code>@​akx</code></a> in <a 
href="https://redirect.github.com/python-babel/babel/pull/1097">python-babel/babel#1097</a></li> <li>Upgrade to CLDR 45 by <a href="https://github.com/tomasr8"><code>@​tomasr8</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1077">python-babel/babel#1077</a></li> <li>Make pgettext search plurals when translation is not found by <a href="https://github.com/tomasr8"><code>@​tomasr8</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1085">python-babel/babel#1085</a></li> <li>Fix for <a href="https://redirect.github.com/python-babel/babel/issues/832">#832</a> by <a href="https://github.com/Edwin18"><code>@​Edwin18</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1052">python-babel/babel#1052</a></li> <li>Support list format fallbacks by <a href="https://github.com/akx"><code>@​akx</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1099">python-babel/babel#1099</a></li> <li>Improve Codecov configuration by <a href="https://github.com/akx"><code>@​akx</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1103">python-babel/babel#1103</a></li> <li>Avoid crashing on importing localtime when TZ is malformed by <a href="https://github.com/akx"><code>@​akx</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1100">python-babel/babel#1100</a></li> <li>Allow parsing .po files that have an extant but empty Language header by <a href="https://github.com/akx"><code>@​akx</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1101">python-babel/babel#1101</a></li> <li>Add a mention to the docs that <code>format_skeleton(..., fuzzy=True)</code> may raise by <a href="https://github.com/tomasr8"><code>@​tomasr8</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1106">python-babel/babel#1106</a></li> <li>Allow falling back to modifier-less locale data by <a 
href="https://github.com/akx"><code>@​akx</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1104">python-babel/babel#1104</a></li> <li>Allow use of importlib.metadata for finding entrypoints by <a href="https://github.com/akx"><code>@​akx</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1102">python-babel/babel#1102</a></li> <li>Test on Python 3.13 beta releases by <a href="https://github.com/akx"><code>@​akx</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1107">python-babel/babel#1107</a></li> <li>Normalize package name to lower-case in setup.py by <a href="https://github.com/akx"><code>@​akx</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1110">python-babel/babel#1110</a></li> <li>Do not allow substituting alternates or drafts in derived locales by <a href="https://github.com/akx"><code>@​akx</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1113">python-babel/babel#1113</a></li> <li>Two hyperlinks (to CLDR) and some typos by <a href="https://github.com/buhtz"><code>@​buhtz</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1115">python-babel/babel#1115</a></li> <li>Initial support for reading mapping configuration as TOML by <a href="https://github.com/akx"><code>@​akx</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1108">python-babel/babel#1108</a></li> <li>Prepare for 2.16.0 release by <a href="https://github.com/akx"><code>@​akx</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1116">python-babel/babel#1116</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/tomasr8"><code>@​tomasr8</code></a> made their first contribution in <a href="https://redirect.github.com/python-babel/babel/pull/1083">python-babel/babel#1083</a></li> <li><a href="https://github.com/john-psina"><code>@​john-psina</code></a> made their first contribution in <a 
href="https://redirect.github.com/python-babel/babel/pull/1095">python-babel/babel#1095</a></li> <li><a href="https://github.com/Edwin18"><code>@​Edwin18</code></a> made their first contribution in <a href="https://redirect.github.com/python-babel/babel/pull/1052">python-babel/babel#1052</a></li> <li><a href="https://github.com/buhtz"><code>@​buhtz</code></a> made their first contribution in <a href="https://redirect.github.com/python-babel/babel/pull/1115">python-babel/babel#1115</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/python-babel/babel/compare/v2.15.0...v2.16.0">https://github.com/python-babel/babel/compare/v2.15.0...v2.16.0</a></p> <h2>v2.15.0</h2> <p>The changelog below is auto-generated by GitHub.</p> <p>The binary artifacts attached to this GitHub release <a href="https://github.com/python-babel/babel/actions/runs/8958743141#artifacts">were generated by the GitHub Actions workflow</a>.</p> <p>Please see <a href="https://github.com/python-babel/babel/blob/40b194f4777366e95cc2dfb680fd696b86ef1c04/CHANGES.rst">CHANGELOG.rst</a> for additional details.</p> <hr /> <h2>What's Changed</h2> <ul> <li>Drop support for Python 3.7 (EOL since June 2023) by <a href="https://github.com/akx"><code>@​akx</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1048">python-babel/babel#1048</a></li> <li>Upgrade GitHub Actions by <a href="https://github.com/cclauss"><code>@​cclauss</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1054">python-babel/babel#1054</a></li> <li>Improve .po IO by <a href="https://github.com/akx"><code>@​akx</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1068">python-babel/babel#1068</a></li> <li>Use CLDR 44 by <a href="https://github.com/akx"><code>@​akx</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1071">python-babel/babel#1071</a></li> <li>Allow alternative space characters as group separator when parsing 
numbers by <a href="https://github.com/ronnix"><code>@​ronnix</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1007">python-babel/babel#1007</a></li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/python-babel/babel/blob/master/CHANGES.rst">babel's changelog</a>.</em></p> <blockquote> <h2>Version 2.16.0</h2> <p>Features</p> <pre><code> * CLDR: Upgrade to CLDR 45 by @tomasr8 in :gh:`1077` * Lists: Support list format fallbacks by @akx in :gh:`1099` * Messages: Initial support for reading mapping configuration as TOML by @akx in :gh:`1108` <p>Bugfixes<br /> </code></pre></p> <ul> <li>CLDR: Do not allow substituting alternates or drafts in derived locales by <a href="https://github.com/akx"><code>@​akx</code></a> in :gh:<code>1113</code></li> <li>Core: Allow falling back to modifier-less locale data by <a href="https://github.com/akx"><code>@​akx</code></a> in :gh:<code>1104</code></li> <li>Core: Allow use of importlib.metadata for finding entrypoints by <a href="https://github.com/akx"><code>@​akx</code></a> in :gh:<code>1102</code></li> <li>Dates: Avoid crashing on importing localtime when TZ is malformed by <a href="https://github.com/akx"><code>@​akx</code></a> in :gh:<code>1100</code></li> <li>Messages: Allow parsing .po files that have an extant but empty Language header by <a href="https://github.com/akx"><code>@​akx</code></a> in :gh:<code>1101</code></li> <li>Messages: Fix <code>--ignore-dirs</code> being incorrectly read (<a href="https://redirect.github.com/python-babel/babel/issues/1094">#1094</a>) by <a href="https://github.com/john-psina"><code>@​john-psina</code></a> and <a href="https://github.com/Edwin18"><code>@​Edwin18</code></a> in :gh:<code>1052</code> and :gh:<code>1095</code></li> <li>Messages: Make pgettext search plurals when translation is not found by <a 
href="https://github.com/tomasr8"><code>@​tomasr8</code></a> in :gh:<code>1085</code></li> </ul> <p>Infrastructure</p> <pre><code> * Replace deprecated `ast.Str` with `ast.Constant` by @tomasr8 in :gh:`1083` * CI fixes by @akx in :gh:`1080`, :gh:`1097`, :gh:`1103`, :gh:`1107` * Test on Python 3.13 beta releases by @akx in * Normalize package name to lower-case in setup.py by @akx in :gh:`1110` <p>Documentation</p> <pre><code> * Add a mention to the docs that `format_skeleton(..., fuzzy=True)` may raise by @tomasr8 in :gh:`1106` * Two hyperlinks (to CLDR) and some typos by @buhtz in :gh:`1115` Version 2.15.0 -------------- Python version support </code></pre> <ul> <li>Babel 2.15.0 will require Python 3.8 or newer. (:gh:<code>1048</code>)</li> </ul> <p>Features</p> <pre><code> * CLDR: Upgrade to CLDR 44 (:gh:`1071`) (@akx) * Dates: Support for the &quot;fall back to short format&quot; logic for time delta formatting (:gh:`1075`) (@akx) * Message: More versatile .po IO functions (:gh:`1068`) (@akx) &lt;/tr&gt;&lt;/table&gt; </code></pre> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/python-babel/babel/commit/cf7d22369cf40a8218cff1d3dc823eefa174aee0"><code>cf7d223</code></a> Prepare for 2.16.0 release (<a href="https://redirect.github.com/python-babel/babel/issues/1116">#1116</a>)</li> <li><a href="https://github.com/python-babel/babel/commit/d26a669826d6c963dfdc21ae09e8cd5659fc95e2"><code>d26a669</code></a> Initial support for reading mapping configuration as TOML (<a href="https://redirect.github.com/python-babel/babel/issues/1108">#1108</a>)</li> <li><a href="https://github.com/python-babel/babel/commit/34ed517de44cf3c3002f3b69713b1693d329646d"><code>34ed517</code></a> Two hyperlinks (to CLDR) and some typos (<a href="https://redirect.github.com/python-babel/babel/issues/1115">#1115</a>)</li> <li><a href="https://github.com/python-babel/babel/commit/b4ba84382f3ce7bdf0e5a68e7108a21f4e8e7926"><code>b4ba843</code></a> Do not allow substituting alternates or drafts in derived locales (<a href="https://redirect.github.com/python-babel/babel/issues/1113">#1113</a>)</li> <li><a href="https://github.com/python-babel/babel/commit/d3346ee33b13f50af582e31ae5c337aa409dda11"><code>d3346ee</code></a> Normalize package name to lower-case in setup.py (<a href="https://redirect.github.com/python-babel/babel/issues/1110">#1110</a>)</li> <li><a href="https://github.com/python-babel/babel/commit/a32f15ecec23d5d5049100fd8e65606be7ad12a1"><code>a32f15e</code></a> Test on Python 3.13 beta releases (<a href="https://redirect.github.com/python-babel/babel/issues/1107">#1107</a>)</li> <li><a href="https://github.com/python-babel/babel/commit/4d3fd0e0198185cd023f4d3cd1495bda211867f4"><code>4d3fd0e</code></a> Allow use of importlib.metadata for finding entrypoints (<a href="https://redirect.github.com/python-babel/babel/issues/1102">#1102</a>)</li> <li><a 
href="https://github.com/python-babel/babel/commit/42d793cb4408a296e5618e1bce8d176fc588ce57"><code>42d793c</code></a> Allow falling back to modifier-less locale data when modified data is missing...</li> <li><a href="https://github.com/python-babel/babel/commit/32f41c22d589d7f187448be477affe1cfbcbc59d"><code>32f41c2</code></a> Improve docs for <code>format_skeleton</code> (<a href="https://redirect.github.com/python-babel/babel/issues/1106">#1106</a>)</li> <li><a href="https://github.com/python-babel/babel/commit/2ebc47e63211df8b7636ed8e74bb99ac5bd36c25"><code>2ebc47e</code></a> Allow parsing .po files that have an extant but empty Language header (<a href="https://redirect.github.com/python-babel/babel/issues/1101">#1101</a>)</li> <li>Additional commits viewable in <a href="https://github.com/python-babel/babel/compare/v2.9.1...v2.16.0">compare view</a></li> </ul> </details> <br /> </code></pre> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=babel&package-manager=pip&previous-version=2.9.1&new-version=2.16.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index c823f7a9001..0ab791aea87 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -26,7 +26,7 @@ async-timeout==4.0.3 ; python_version < 
"3.11" # aioredis attrs==24.2.0 # via -r requirements/runtime-deps.in -babel==2.9.1 +babel==2.16.0 # via sphinx blockdiag==3.0.0 # via sphinxcontrib-blockdiag diff --git a/requirements/dev.txt b/requirements/dev.txt index d9187dafba6..988a011bbde 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -26,7 +26,7 @@ async-timeout==4.0.3 ; python_version < "3.11" # aioredis attrs==24.2.0 # via -r requirements/runtime-deps.in -babel==2.12.1 +babel==2.16.0 # via sphinx blockdiag==3.0.0 # via sphinxcontrib-blockdiag diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index c2c69a2d14a..5b080130969 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -8,7 +8,7 @@ aiohttp-theme==0.1.6 # via -r requirements/doc.in alabaster==0.7.13 # via sphinx -babel==2.12.1 +babel==2.16.0 # via sphinx blockdiag==3.0.0 # via sphinxcontrib-blockdiag diff --git a/requirements/doc.txt b/requirements/doc.txt index e3692c95f58..44cc009b5c4 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -8,7 +8,7 @@ aiohttp-theme==0.1.6 # via -r requirements/doc.in alabaster==0.7.13 # via sphinx -babel==2.12.1 +babel==2.16.0 # via sphinx blockdiag==3.0.0 # via sphinxcontrib-blockdiag From 9af8919a57c0c83a9f5f71b2f88fa34f8da0a327 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 22 Aug 2024 11:41:28 +0000 Subject: [PATCH 0392/1511] Bump cryptography from 41.0.2 to 43.0.0 (#8837) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [cryptography](https://github.com/pyca/cryptography) from 41.0.2 to 43.0.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst">cryptography's changelog</a>.</em></p> <blockquote> <p>43.0.0 - 2024-07-20</p> <pre><code> * **BACKWARDS INCOMPATIBLE:** Support for OpenSSL less than 1.1.1e has been removed. 
Users on older version of OpenSSL will need to upgrade. * **BACKWARDS INCOMPATIBLE:** Dropped support for LibreSSL < 3.8. * Updated Windows, macOS, and Linux wheels to be compiled with OpenSSL 3.3.1. * Updated the minimum supported Rust version (MSRV) to 1.65.0, from 1.63.0. * :func:`~cryptography.hazmat.primitives.asymmetric.rsa.generate_private_key` now enforces a minimum RSA key size of 1024-bit. Note that 1024-bit is still considered insecure, users should generally use a key size of 2048-bits. * :func:`~cryptography.hazmat.primitives.serialization.pkcs7.serialize_certificates` now emits ASN.1 that more closely follows the recommendations in :rfc:`2315`. * Added new :doc:`/hazmat/decrepit/index` module which contains outdated and insecure cryptographic primitives. :class:`~cryptography.hazmat.primitives.ciphers.algorithms.CAST5`, :class:`~cryptography.hazmat.primitives.ciphers.algorithms.SEED`, :class:`~cryptography.hazmat.primitives.ciphers.algorithms.IDEA`, and :class:`~cryptography.hazmat.primitives.ciphers.algorithms.Blowfish`, which were deprecated in 37.0.0, have been added to this module. They will be removed from the ``cipher`` module in 45.0.0. * Moved :class:`~cryptography.hazmat.primitives.ciphers.algorithms.TripleDES` and :class:`~cryptography.hazmat.primitives.ciphers.algorithms.ARC4` into :doc:`/hazmat/decrepit/index` and deprecated them in the ``cipher`` module. They will be removed from the ``cipher`` module in 48.0.0. * Added support for deterministic :class:`~cryptography.hazmat.primitives.asymmetric.ec.ECDSA` (:rfc:`6979`) * Added support for client certificate verification to the :mod:`X.509 path validation <cryptography.x509.verification>` APIs in the form of :class:`~cryptography.x509.verification.ClientVerifier`, :class:`~cryptography.x509.verification.VerifiedClient`, and ``PolicyBuilder`` :meth:`~cryptography.x509.verification.PolicyBuilder.build_client_verifier`. 
* Added Certificate :attr:`~cryptography.x509.Certificate.public_key_algorithm_oid` and Certificate Signing Request :attr:`~cryptography.x509.CertificateSigningRequest.public_key_algorithm_oid` to determine the :class:`~cryptography.hazmat._oid.PublicKeyAlgorithmOID` Object Identifier of the public key found inside the certificate. * Added :attr:`~cryptography.x509.InvalidityDate.invalidity_date_utc`, a timezone-aware alternative to the naïve ``datetime`` attribute :attr:`~cryptography.x509.InvalidityDate.invalidity_date`. * Added support for parsing empty DN string in :meth:`~cryptography.x509.Name.from_rfc4514_string`. * Added the following properties that return timezone-aware ``datetime`` objects: :meth:`~cryptography.x509.ocsp.OCSPResponse.produced_at_utc`, :meth:`~cryptography.x509.ocsp.OCSPResponse.revocation_time_utc`, :meth:`~cryptography.x509.ocsp.OCSPResponse.this_update_utc`, :meth:`~cryptography.x509.ocsp.OCSPResponse.next_update_utc`, :meth:`~cryptography.x509.ocsp.OCSPSingleResponse.revocation_time_utc`, </tr></table> </code></pre> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pyca/cryptography/commit/ebf14f2edc8536f36797979cb0e075e766d978c5"><code>ebf14f2</code></a> bump for 43.0.0 and update changelog (<a href="https://redirect.github.com/pyca/cryptography/issues/11311">#11311</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/42788a0353e0ca0d922b6b8b9bde77cbb1c65984"><code>42788a0</code></a> Fix exchange with keys that had Q automatically computed (<a href="https://redirect.github.com/pyca/cryptography/issues/11309">#11309</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/2dbdfb8f3913cb9cef08218fcd48a9b4eaa8b57d"><code>2dbdfb8</code></a> don't assign unused name (<a href="https://redirect.github.com/pyca/cryptography/issues/11310">#11310</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/ccc66e6cdf92f4c29012f86f44ad183161eccaad"><code>ccc66e6</code></a> Bump openssl from 0.10.64 to 0.10.65 in /src/rust (<a href="https://redirect.github.com/pyca/cryptography/issues/11308">#11308</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/4310c8727b50fa5f713a0e863ee3defc0c831921"><code>4310c87</code></a> Bump sphinxcontrib-qthelp from 1.0.7 to 1.0.8 (<a href="https://redirect.github.com/pyca/cryptography/issues/11307">#11307</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/f66a9c4b4fe9b87825872fef7a36c319b823f322"><code>f66a9c4</code></a> Bump sphinxcontrib-htmlhelp from 2.0.5 to 2.0.6 (<a href="https://redirect.github.com/pyca/cryptography/issues/11306">#11306</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/a8fcf18ee0bb0570bd4c9041cf387dc7a9c1968a"><code>a8fcf18</code></a> Bump openssl-sys from 0.9.102 to 0.9.103 in /src/rust (<a href="https://redirect.github.com/pyca/cryptography/issues/11305">#11305</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/2fe32b28b05d8918dda6f7a34e6d9d4148dde818"><code>2fe32b2</code></a> Bump mypy 
from 1.10.1 to 1.11.0 (<a href="https://redirect.github.com/pyca/cryptography/issues/11303">#11303</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/ee24e827fc226ad8dc9edacf3dbe1823602d0a8b"><code>ee24e82</code></a> Bump setuptools from 71.0.3 to 71.0.4 in /.github/requirements (<a href="https://redirect.github.com/pyca/cryptography/issues/11304">#11304</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/7249ccd5c658e2965909d970cc9735ae7f049d15"><code>7249ccd</code></a> Bump portable-atomic from 1.6.0 to 1.7.0 in /src/rust (<a href="https://redirect.github.com/pyca/cryptography/issues/11302">#11302</a>)</li> <li>Additional commits viewable in <a href="https://github.com/pyca/cryptography/compare/41.0.2...43.0.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=cryptography&package-manager=pip&previous-version=41.0.2&new-version=43.0.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 0ab791aea87..e454042f7e0 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -58,7 +58,7 @@ coverage==7.6.1 # via # -r requirements/test.in # pytest-cov 
-cryptography==41.0.2 +cryptography==43.0.0 # via # pyjwt # trustme diff --git a/requirements/dev.txt b/requirements/dev.txt index 988a011bbde..078be0edf5e 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -58,7 +58,7 @@ coverage==7.6.1 # via # -r requirements/test.in # pytest-cov -cryptography==41.0.3 +cryptography==43.0.0 # via # pyjwt # trustme diff --git a/requirements/test.txt b/requirements/test.txt index 2840cbd4d4c..7c990b70404 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -34,7 +34,7 @@ coverage==7.6.1 # via # -r requirements/test.in # pytest-cov -cryptography==41.0.2 +cryptography==43.0.0 # via trustme exceptiongroup==1.1.2 # via pytest From 57b958eb7acc603d8b0f742d23f6f7b7a49ef60e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 22 Aug 2024 11:47:55 +0000 Subject: [PATCH 0393/1511] Bump distlib from 0.3.7 to 0.3.8 (#8838) Bumps [distlib](https://github.com/pypa/distlib) from 0.3.7 to 0.3.8. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/distlib/blob/master/CHANGES.rst">distlib's changelog</a>.</em></p> <blockquote> <p>0.3.8</p> <pre><code> Released: 2023-12-12 <ul> <li> <p>markers</p> <ul> <li>Fix <a href="https://redirect.github.com/pypa/distlib/issues/209">#209</a>: use legacy version implementation for Python versions.</li> </ul> </li> <li> <p>tests</p> <ul> <li> <p>Fix <a href="https://redirect.github.com/pypa/distlib/issues/204">#204</a>: use symlinks in venv creation during test.</p> </li> <li> <p>Fix <a href="https://redirect.github.com/pypa/distlib/issues/208">#208</a>: handle deprecation removals in Python 3.13. 
</code></pre></p> </li> </ul> </li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/distlib/commit/ab5f8e797fbc56a0e3488bba68d05e7a602cb63f"><code>ab5f8e7</code></a> Changes for 0.3.8.</li> <li><a href="https://github.com/pypa/distlib/commit/86bb2124476e1700a55fcc08135435261e61164f"><code>86bb212</code></a> Update change log.</li> <li><a href="https://github.com/pypa/distlib/commit/488599f4fdb71d572e99df93254ac0dc23d0fd41"><code>488599f</code></a> Updates based on flake8 checks.</li> <li><a href="https://github.com/pypa/distlib/commit/0e261af798199a652633e1aa38523d3d3a828f61"><code>0e261af</code></a> Use legacy version implementation for Python itself.</li> <li><a href="https://github.com/pypa/distlib/commit/8242f393f2bf525adc8fd4fffcc9dadce97f0f85"><code>8242f39</code></a> Update copyright years.</li> <li><a href="https://github.com/pypa/distlib/commit/e27569b02d9718c19ece71ce6d5124b967277399"><code>e27569b</code></a> Fix <a href="https://redirect.github.com/pypa/distlib/issues/208">#208</a>: Handle deprecation removals in Python 3.13.</li> <li><a href="https://github.com/pypa/distlib/commit/65a014b0b199d23bb6745450973a6f8162d05c1c"><code>65a014b</code></a> Update requirements and CI matrix.</li> <li><a href="https://github.com/pypa/distlib/commit/124108a2d94156b494d8900a7c0cc9c2ed8bc03a"><code>124108a</code></a> Skip test for non-final Python versions.</li> <li><a href="https://github.com/pypa/distlib/commit/ff48e09e9b5f8588145e22f5b7c51b361eb1fcfe"><code>ff48e09</code></a> Fix <a href="https://redirect.github.com/pypa/distlib/issues/206">#206</a>: include tox.ini in sdist.</li> <li><a href="https://github.com/pypa/distlib/commit/eeaa18ddeae5c290308d4402ee1891cb9849bc60"><code>eeaa18d</code></a> Fix <a href="https://redirect.github.com/pypa/distlib/issues/204">#204</a>: Use symlinks in venv creation during test.</li> <li>Additional commits viewable in <a 
href="https://github.com/pypa/distlib/compare/0.3.7...0.3.8">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=distlib&package-manager=pip&previous-version=0.3.7&new-version=0.3.8)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/dev.txt b/requirements/dev.txt index 078be0edf5e..6c6e238997b 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -62,7 +62,7 @@ cryptography==43.0.0 # via # pyjwt # trustme -distlib==0.3.7 +distlib==0.3.8 # via virtualenv docutils==0.20.1 # via sphinx diff --git a/requirements/lint.txt b/requirements/lint.txt index 589e1801dd5..ccc9d3cf43d 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -28,7 +28,7 @@ click==8.1.6 # typer cryptography==43.0.0 # via trustme -distlib==0.3.7 +distlib==0.3.8 # via virtualenv exceptiongroup==1.1.2 # via pytest From f0c89d6f9b0f4183699adb99f69f129e4c5d4d9d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 22 Aug 2024 12:08:24 +0000 Subject: [PATCH 0394/1511] Bump gidgethub from 5.0.1 to 5.3.0 (#8841) Bumps [gidgethub](https://github.com/brettcannon/gidgethub) from 5.0.1 to 5.3.0. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/brettcannon/gidgethub/releases">gidgethub's releases</a>.</em></p> <blockquote> <h2>5.3.0</h2> <ul> <li> <p>Add support passing <code>extra_headers</code> when making requests [PR <a href="https://redirect.github.com/brettcannon/gidgethub/issues/192">#192</a>](<a href="https://redirect.github.com/brettcannon/gidgethub/pull/192">brettcannon/gidgethub#192</a>)</p> </li> <li> <p>Add a getstatus() method for APIs that do not return content. [PR <a href="https://redirect.github.com/brettcannon/gidgethub/issues/194">#194</a>](<a href="https://redirect.github.com/brettcannon/gidgethub/pull/194">brettcannon/gidgethub#194</a>)</p> </li> </ul> <h2>5.2.1</h2> <ul> <li> <p>Fix cgi and importlib_resources deprecations. [PR <a href="https://redirect.github.com/brettcannon/gidgethub/issues/185">#185</a>](<a href="https://redirect.github.com/brettcannon/gidgethub/pull/185">brettcannon/gidgethub#185</a>)</p> </li> <li> <p>Add support for Python 3.11 and drop EOL Python 3.6 [PR <a href="https://redirect.github.com/brettcannon/gidgethub/issues/184">#184</a>](<a href="https://redirect.github.com/brettcannon/gidgethub/pull/184">brettcannon/gidgethub#184</a>)</p> </li> </ul> <h2>5.2.0</h2> <ul> <li>Make the minimum version of PyJWT be v2.4.0.</li> </ul> <h2>5.1.0</h2> <ul> <li> <p>Use <code>X-Hub-Signature-256</code> header for webhook validation when available. ([PR <a href="https://redirect.github.com/brettcannon/gidgethub/issues/160">#160</a>](<a href="https://redirect.github.com/brettcannon/gidgethub/pull/160">brettcannon/gidgethub#160</a>)).</p> </li> <li> <p>The documentation is now built using Sphinx v>= 4.0.0. 
([Issue <a href="https://redirect.github.com/brettcannon/gidgethub/issues/143">#143</a>](<a href="https://redirect.github.com/brettcannon/gidgethub/issues/143">brettcannon/gidgethub#143</a>))</p> </li> <li> <p><code>gidgethub.abc.GitHubAPI.getiter</code> now accepts <code>iterable_key</code> parameter in order to support the Checks API. ([Issue <a href="https://redirect.github.com/brettcannon/gidgethub/issues/164">#164</a>](<a href="https://redirect.github.com/brettcannon/gidgethub/issues/164">brettcannon/gidgethub#164</a>))</p> </li> <li> <p>Accept HTTP 202 ACCEPTED as successful. ([PR <a href="https://redirect.github.com/brettcannon/gidgethub/issues/174">#174</a>](<a href="https://redirect.github.com/brettcannon/gidgethub/pull/174">brettcannon/gidgethub#174</a>))</p> </li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/gidgethub/gidgethub/blob/main/docs/changelog.rst">gidgethub's changelog</a>.</em></p> <blockquote> <h2>5.3.0</h2> <ul> <li> <p>Add support passing <code>extra_headers</code> when making requests (<code>PR [#192](https://github.com/brettcannon/gidgethub/issues/192) <https://github.com/brettcannon/gidgethub/pull/192></code>_)</p> </li> <li> <p>Add a getstatus() method for APIs that do not return content. (<code>PR [#194](https://github.com/brettcannon/gidgethub/issues/194) <https://github.com/brettcannon/gidgethub/pull/194></code>_)</p> </li> </ul> <h2>5.2.1</h2> <ul> <li> <p>Fix cgi and importlib_resources deprecations. 
(<code>PR [#185](https://github.com/brettcannon/gidgethub/issues/185) <https://github.com/brettcannon/gidgethub/pull/185></code>_)</p> </li> <li> <p>Add support for Python 3.11 and drop EOL Python 3.6 (<code>PR [#184](https://github.com/brettcannon/gidgethub/issues/184) <https://github.com/brettcannon/gidgethub/pull/184></code>_)</p> </li> </ul> <h2>5.2.0</h2> <ul> <li>Make the minimum version of PyJWT be v2.4.0.</li> </ul> <h2>5.1.0</h2> <ul> <li> <p>Use <code>X-Hub-Signature-256</code> header for webhook validation when available. (<code>PR [#160](https://github.com/brettcannon/gidgethub/issues/160) <https://github.com/brettcannon/gidgethub/pull/160></code>_).</p> </li> <li> <p>The documentation is now built using Sphinx v>= 4.0.0. (<code>Issue [#143](https://github.com/brettcannon/gidgethub/issues/143) <https://github.com/brettcannon/gidgethub/issues/143></code>_)</p> </li> <li> <p>:meth:<code>gidgethub.abc.GitHubAPI.getiter</code> now accepts <code>iterable_key</code> parameter in order to support the Checks API. (<code>Issue [#164](https://github.com/brettcannon/gidgethub/issues/164) <https://github.com/brettcannon/gidgethub/issues/164></code>_)</p> </li> <li> <p>Accept HTTP 202 ACCEPTED as successful. (<code>PR [#174](https://github.com/brettcannon/gidgethub/issues/174) <https://github.com/brettcannon/gidgethub/pull/174></code>_)</p> </li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/gidgethub/gidgethub/commit/dbcdf4bd32de86288495dbbd8c1682399ba53ad1"><code>dbcdf4b</code></a> Fix the rst markup on the Changelog. 
(<a href="https://redirect.github.com/brettcannon/gidgethub/issues/197">#197</a>)</li> <li><a href="https://github.com/gidgethub/gidgethub/commit/b9fe3c69030e0960d4ba2dcc1efb921e240c7504"><code>b9fe3c6</code></a> Add <code>extra_headers</code> option to <code>get</code> methods (<a href="https://redirect.github.com/brettcannon/gidgethub/issues/192">#192</a>)</li> <li><a href="https://github.com/gidgethub/gidgethub/commit/901f0578c00919cdb8c0b120b6d9d9f35eda620e"><code>901f057</code></a> Add a getstatus() method to gidgethub.abc (<a href="https://redirect.github.com/brettcannon/gidgethub/issues/194">#194</a>)</li> <li><a href="https://github.com/gidgethub/gidgethub/commit/20e861254150b16bfd2fbe26b012cb6736430faf"><code>20e8612</code></a> Update the intersphinx mapping to 1.0-style (<a href="https://redirect.github.com/brettcannon/gidgethub/issues/195">#195</a>)</li> <li><a href="https://github.com/gidgethub/gidgethub/commit/e1f7baed601914139fc5179fcb8faea210bbf543"><code>e1f7bae</code></a> Fix lint issues (<a href="https://redirect.github.com/brettcannon/gidgethub/issues/191">#191</a>)</li> <li><a href="https://github.com/gidgethub/gidgethub/commit/b3ae8d16426355f660a2fc2aadeb4d8e61478add"><code>b3ae8d1</code></a> Replace use of <code>requests</code> in examples with <code>httpx</code> (<a href="https://redirect.github.com/brettcannon/gidgethub/issues/190">#190</a>)</li> <li><a href="https://github.com/gidgethub/gidgethub/commit/4fe3c04d7c550a5b33fbf00e154810042f7d3edb"><code>4fe3c04</code></a> Release 5.2.1 (<a href="https://redirect.github.com/brettcannon/gidgethub/issues/186">#186</a>)</li> <li><a href="https://github.com/gidgethub/gidgethub/commit/89ade8859539212e0663e91f0777ad8a39ecf323"><code>89ade88</code></a> Fix cgi and importlib_resources deprecations (<a href="https://redirect.github.com/brettcannon/gidgethub/issues/185">#185</a>)</li> <li><a 
href="https://github.com/gidgethub/gidgethub/commit/64888cbe83e3f11af3c6f25294adff26dc2f557a"><code>64888cb</code></a> Add support for Python 3.11 and drop EOL Python 3.6 (<a href="https://redirect.github.com/brettcannon/gidgethub/issues/184">#184</a>)</li> <li><a href="https://github.com/gidgethub/gidgethub/commit/8c60e56029b7e10b7be9879e64dfbf97bbeda2b8"><code>8c60e56</code></a> Add variable mapping to fix 'Session tests-3.10-dev skipped: Python interpret...</li> <li>Additional commits viewable in <a href="https://github.com/brettcannon/gidgethub/compare/5.0.1...v5.3.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=gidgethub&package-manager=pip&previous-version=5.0.1&new-version=5.3.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index e454042f7e0..0684b998e32 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -82,7 +82,7 @@ frozenlist==1.4.1 # aiosignal funcparserlib==1.0.1 # via blockdiag -gidgethub==5.0.1 +gidgethub==5.3.0 # via cherry-picker gunicorn==23.0.0 # via -r requirements/base.in @@ -156,7 +156,7 @@ pyenchant==3.2.2 # via sphinxcontrib-spelling pygments==2.18.0 # via sphinx -pyjwt==2.3.0 +pyjwt==2.9.0 # via # gidgethub # pyjwt From 10746c21ddedbaf6e8f8e0ae1e6d6e5f8c2d097f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 22 Aug 2024 12:51:26 +0000 Subject: [PATCH 0395/1511] Bump typer from 0.6.1 to 0.12.4 (#8840) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [typer](https://github.com/fastapi/typer) from 0.6.1 to 0.12.4. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/fastapi/typer/releases">typer's releases</a>.</em></p> <blockquote> <h2>0.12.4</h2> <h3>Features</h3> <ul> <li>✨ Add support for Python 3.12, tests in CI and official marker. PR <a href="https://redirect.github.com/tiangolo/typer/pull/807">#807</a> by <a href="https://github.com/ivantodorovich"><code>@​ivantodorovich</code></a>.</li> </ul> <h3>Fixes</h3> <ul> <li>🐛 Fix support for <code>UnionType</code> (e.g. <code>str | None</code>) with Python 3.11. PR <a href="https://redirect.github.com/fastapi/typer/pull/548">#548</a> by <a href="https://github.com/jonaslb"><code>@​jonaslb</code></a>.</li> <li>🐛 Fix <code>zsh</code> autocompletion installation. PR <a href="https://redirect.github.com/fastapi/typer/pull/237">#237</a> by <a href="https://github.com/alexjurkiewicz"><code>@​alexjurkiewicz</code></a>.</li> <li>🐛 Fix usage of <code>Annotated</code> with future annotations in Python 3.7+. PR <a href="https://redirect.github.com/fastapi/typer/pull/814">#814</a> by <a href="https://github.com/ivantodorovich"><code>@​ivantodorovich</code></a>.</li> <li>🐛 Fix <code>shell_complete</code> not working for Arguments. PR <a href="https://redirect.github.com/fastapi/typer/pull/737">#737</a> by <a href="https://github.com/bckohan"><code>@​bckohan</code></a>.</li> </ul> <h3>Docs</h3> <ul> <li>📝 Update docs links, from tiangolo to new fastapi org. PR <a href="https://redirect.github.com/fastapi/typer/pull/919">#919</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>📝 Add docs for team and repo management. PR <a href="https://redirect.github.com/tiangolo/typer/pull/917">#917</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> </ul> <h3>Internal</h3> <ul> <li>🔧 Add URLs to <code>pyproject.toml</code>, show up in PyPI. 
PR <a href="https://redirect.github.com/fastapi/typer/pull/931">#931</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>👷 Do not sync labels as it overrides manually added labels. PR <a href="https://redirect.github.com/fastapi/typer/pull/930">#930</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>👷 Update labeler GitHub Action to add only one label. PR <a href="https://redirect.github.com/fastapi/typer/pull/927">#927</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>👷 Update labeler GitHub Actions permissions and dependencies. PR <a href="https://redirect.github.com/fastapi/typer/pull/926">#926</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>👷 Add GitHub Action label-checker. PR <a href="https://redirect.github.com/fastapi/typer/pull/925">#925</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>👷 Add GitHub Action labeler. PR <a href="https://redirect.github.com/fastapi/typer/pull/924">#924</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>👷 Add GitHub Action add-to-project. PR <a href="https://redirect.github.com/fastapi/typer/pull/922">#922</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>🔨 Update docs.py script to enable dirty reload conditionally. PR <a href="https://redirect.github.com/tiangolo/typer/pull/918">#918</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>🔧 Update MkDocs previews. PR <a href="https://redirect.github.com/tiangolo/typer/pull/916">#916</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>👷 Upgrade build docs configs. PR <a href="https://redirect.github.com/tiangolo/typer/pull/914">#914</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>🔧 Update MkDocs to have titles in Markdown files instead of config. 
PR <a href="https://redirect.github.com/tiangolo/typer/pull/913">#913</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>👷 Add alls-green for test-redistribute. PR <a href="https://redirect.github.com/tiangolo/typer/pull/911">#911</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>👷 Update docs-previews to handle no docs changes. PR <a href="https://redirect.github.com/tiangolo/typer/pull/912">#912</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>👷🏻 Show docs deployment status and preview URLs in comment. PR <a href="https://redirect.github.com/tiangolo/typer/pull/910">#910</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>🔧 Enable auto dark mode from system. PR <a href="https://redirect.github.com/tiangolo/typer/pull/908">#908</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>💄 Add dark mode logo. PR <a href="https://redirect.github.com/tiangolo/typer/pull/907">#907</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>🔧 Update tabs and admonitions with new syntax and new MkDocs features. PR <a href="https://redirect.github.com/tiangolo/typer/pull/906">#906</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>🔧 Enable MkDocs Material features. PR <a href="https://redirect.github.com/tiangolo/typer/pull/905">#905</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>🔧 Enable dark mode for docs. PR <a href="https://redirect.github.com/tiangolo/typer/pull/904">#904</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>➖ Do not install jieba for MkDocs Material as there are no chinese translations. PR <a href="https://redirect.github.com/tiangolo/typer/pull/903">#903</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>🙈 Add MkDocs Material cache to gitignore. 
PR <a href="https://redirect.github.com/tiangolo/typer/pull/902">#902</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>🔨 Update lint script. PR <a href="https://redirect.github.com/tiangolo/typer/pull/901">#901</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>🔧 Update MkDocs configs and docs build setup. PR <a href="https://redirect.github.com/tiangolo/typer/pull/900">#900</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>⬆ Bump actions/cache from 3 to 4. PR <a href="https://redirect.github.com/tiangolo/typer/pull/839">#839</a> by <a href="https://github.com/apps/dependabot"><code>@​dependabot[bot]</code></a>.</li> <li>🍱 Update Typer icon and logo. PR <a href="https://redirect.github.com/tiangolo/typer/pull/899">#899</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>👷 Update issue-manager.yml GitHub Action permissions. PR <a href="https://redirect.github.com/tiangolo/typer/pull/897">#897</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>👷 Refactor GitHub Action to comment docs deployment URLs and update token, preparing for GitHub org. PR <a href="https://redirect.github.com/tiangolo/typer/pull/896">#896</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>🔨 Update docs Termynal scripts to not include line nums for local dev. PR <a href="https://redirect.github.com/tiangolo/typer/pull/882">#882</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>⬆ Bump black from 23.3.0 to 24.3.0. PR <a href="https://redirect.github.com/tiangolo/typer/pull/837">#837</a> by <a href="https://github.com/apps/dependabot"><code>@​dependabot[bot]</code></a>.</li> <li>⬆ Bump pillow from 10.1.0 to 10.3.0. 
PR <a href="https://redirect.github.com/tiangolo/typer/pull/836">#836</a> by <a href="https://github.com/apps/dependabot"><code>@​dependabot[bot]</code></a>.</li> <li>✅ Add CI configs to run tests on Windows and MacOS. PR <a href="https://redirect.github.com/tiangolo/typer/pull/824">#824</a> by <a href="https://github.com/svlandeg"><code>@​svlandeg</code></a>.</li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/fastapi/typer/blob/master/docs/release-notes.md">typer's changelog</a>.</em></p> <blockquote> <h2>0.12.4</h2> <h3>Features</h3> <ul> <li>✨ Add support for Python 3.12, tests in CI and official marker. PR <a href="https://redirect.github.com/tiangolo/typer/pull/807">#807</a> by <a href="https://github.com/ivantodorovich"><code>@​ivantodorovich</code></a>.</li> </ul> <h3>Fixes</h3> <ul> <li>🐛 Fix support for <code>UnionType</code> (e.g. <code>str | None</code>) with Python 3.11. PR <a href="https://redirect.github.com/fastapi/typer/pull/548">#548</a> by <a href="https://github.com/jonaslb"><code>@​jonaslb</code></a>.</li> <li>🐛 Fix <code>zsh</code> autocompletion installation. PR <a href="https://redirect.github.com/fastapi/typer/pull/237">#237</a> by <a href="https://github.com/alexjurkiewicz"><code>@​alexjurkiewicz</code></a>.</li> <li>🐛 Fix usage of <code>Annotated</code> with future annotations in Python 3.7+. PR <a href="https://redirect.github.com/fastapi/typer/pull/814">#814</a> by <a href="https://github.com/ivantodorovich"><code>@​ivantodorovich</code></a>.</li> <li>🐛 Fix <code>shell_complete</code> not working for Arguments. PR <a href="https://redirect.github.com/fastapi/typer/pull/737">#737</a> by <a href="https://github.com/bckohan"><code>@​bckohan</code></a>.</li> </ul> <h3>Docs</h3> <ul> <li>📝 Update docs links, from tiangolo to new fastapi org. 
PR <a href="https://redirect.github.com/fastapi/typer/pull/919">#919</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>📝 Add docs for team and repo management. PR <a href="https://redirect.github.com/tiangolo/typer/pull/917">#917</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> </ul> <h3>Internal</h3> <ul> <li>🔧 Add URLs to <code>pyproject.toml</code>, show up in PyPI. PR <a href="https://redirect.github.com/fastapi/typer/pull/931">#931</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>👷 Do not sync labels as it overrides manually added labels. PR <a href="https://redirect.github.com/fastapi/typer/pull/930">#930</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>👷 Update labeler GitHub Action to add only one label. PR <a href="https://redirect.github.com/fastapi/typer/pull/927">#927</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>👷 Update labeler GitHub Actions permissions and dependencies. PR <a href="https://redirect.github.com/fastapi/typer/pull/926">#926</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>👷 Add GitHub Action label-checker. PR <a href="https://redirect.github.com/fastapi/typer/pull/925">#925</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>👷 Add GitHub Action labeler. PR <a href="https://redirect.github.com/fastapi/typer/pull/924">#924</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>👷 Add GitHub Action add-to-project. PR <a href="https://redirect.github.com/fastapi/typer/pull/922">#922</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>🔨 Update docs.py script to enable dirty reload conditionally. 
PR <a href="https://redirect.github.com/tiangolo/typer/pull/918">#918</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>🔧 Update MkDocs previews. PR <a href="https://redirect.github.com/tiangolo/typer/pull/916">#916</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>👷 Upgrade build docs configs. PR <a href="https://redirect.github.com/tiangolo/typer/pull/914">#914</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>🔧 Update MkDocs to have titles in Markdown files instead of config. PR <a href="https://redirect.github.com/tiangolo/typer/pull/913">#913</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>👷 Add alls-green for test-redistribute. PR <a href="https://redirect.github.com/tiangolo/typer/pull/911">#911</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>👷 Update docs-previews to handle no docs changes. PR <a href="https://redirect.github.com/tiangolo/typer/pull/912">#912</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>👷🏻 Show docs deployment status and preview URLs in comment. PR <a href="https://redirect.github.com/tiangolo/typer/pull/910">#910</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>🔧 Enable auto dark mode from system. PR <a href="https://redirect.github.com/tiangolo/typer/pull/908">#908</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>💄 Add dark mode logo. PR <a href="https://redirect.github.com/tiangolo/typer/pull/907">#907</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>🔧 Update tabs and admonitions with new syntax and new MkDocs features. PR <a href="https://redirect.github.com/tiangolo/typer/pull/906">#906</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>🔧 Enable MkDocs Material features. 
PR <a href="https://redirect.github.com/tiangolo/typer/pull/905">#905</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>🔧 Enable dark mode for docs. PR <a href="https://redirect.github.com/tiangolo/typer/pull/904">#904</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>➖ Do not install jieba for MkDocs Material as there are no chinese translations. PR <a href="https://redirect.github.com/tiangolo/typer/pull/903">#903</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>🙈 Add MkDocs Material cache to gitignore. PR <a href="https://redirect.github.com/tiangolo/typer/pull/902">#902</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>🔨 Update lint script. PR <a href="https://redirect.github.com/tiangolo/typer/pull/901">#901</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>🔧 Update MkDocs configs and docs build setup. PR <a href="https://redirect.github.com/tiangolo/typer/pull/900">#900</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>⬆ Bump actions/cache from 3 to 4. PR <a href="https://redirect.github.com/tiangolo/typer/pull/839">#839</a> by <a href="https://github.com/apps/dependabot"><code>@​dependabot[bot]</code></a>.</li> <li>🍱 Update Typer icon and logo. PR <a href="https://redirect.github.com/tiangolo/typer/pull/899">#899</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>👷 Update issue-manager.yml GitHub Action permissions. PR <a href="https://redirect.github.com/tiangolo/typer/pull/897">#897</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>👷 Refactor GitHub Action to comment docs deployment URLs and update token, preparing for GitHub org. 
PR <a href="https://redirect.github.com/tiangolo/typer/pull/896">#896</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>🔨 Update docs Termynal scripts to not include line nums for local dev. PR <a href="https://redirect.github.com/tiangolo/typer/pull/882">#882</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>⬆ Bump black from 23.3.0 to 24.3.0. PR <a href="https://redirect.github.com/tiangolo/typer/pull/837">#837</a> by <a href="https://github.com/apps/dependabot"><code>@​dependabot[bot]</code></a>.</li> <li>⬆ Bump pillow from 10.1.0 to 10.3.0. PR <a href="https://redirect.github.com/tiangolo/typer/pull/836">#836</a> by <a href="https://github.com/apps/dependabot"><code>@​dependabot[bot]</code></a>.</li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/fastapi/typer/commit/ae94d5753c60badda5afd04edafd7e381991e3f8"><code>ae94d57</code></a> 🔖 Release version 0.12.4</li> <li><a href="https://github.com/fastapi/typer/commit/68b34156d86f20424d2d8597bdb0edd0585aca6a"><code>68b3415</code></a> 📝 Update release notes</li> <li><a href="https://github.com/fastapi/typer/commit/218bf89ab53858e2b55c292a5f8c7b2ec1b20a2b"><code>218bf89</code></a> 🐛 Fix support for <code>UnionType</code> (e.g. 
<code>str | None</code>) with Python 3.11 (<a href="https://redirect.github.com/fastapi/typer/issues/548">#548</a>)</li> <li><a href="https://github.com/fastapi/typer/commit/ad421bdf47deea882ac8a7ad194ce81545f47f6f"><code>ad421bd</code></a> 📝 Update release notes</li> <li><a href="https://github.com/fastapi/typer/commit/640fb09c6de03829a51ebc8bdab49618c5083c44"><code>640fb09</code></a> 🐛 Fix <code>zsh</code> autocompletion installation (<a href="https://redirect.github.com/fastapi/typer/issues/237">#237</a>)</li> <li><a href="https://github.com/fastapi/typer/commit/90f3e61d0858eb80926dbd547fcd509726e219a0"><code>90f3e61</code></a> 📝 Update release notes</li> <li><a href="https://github.com/fastapi/typer/commit/ca65b366b70c170743a2e3715fb5bef8b045da09"><code>ca65b36</code></a> 🐛 Fix usage of <code>Annotated</code> with future annotations in Python 3.7+ (<a href="https://redirect.github.com/fastapi/typer/issues/814">#814</a>)</li> <li><a href="https://github.com/fastapi/typer/commit/4efe1bcffef828e79237afa39b8ff6fa7267a912"><code>4efe1bc</code></a> 📝 Update release notes</li> <li><a href="https://github.com/fastapi/typer/commit/dcb45b1a65e80316b2b740e4f6c2968209439714"><code>dcb45b1</code></a> 🐛 Fix <code>shell_complete</code> not working for Arguments (<a href="https://redirect.github.com/fastapi/typer/issues/737">#737</a>)</li> <li><a href="https://github.com/fastapi/typer/commit/76ca3e3397f13993cae4a9d68cc60e51c93253d8"><code>76ca3e3</code></a> 📝 Update release notes</li> <li>Additional commits viewable in <a href="https://github.com/fastapi/typer/compare/0.6.1...0.12.4">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=typer&package-manager=pip&previous-version=0.6.1&new-version=0.12.4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any 
conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 16 ++++++++++++++-- requirements/dev.txt | 15 +++++++++++++-- requirements/lint.txt | 2 +- requirements/test.txt | 13 ++++++++++++- 4 files changed, 40 insertions(+), 6 deletions(-) diff --git a/requirements/constraints.txt 
b/requirements/constraints.txt index 0684b998e32..da844ee27ab 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -109,8 +109,12 @@ jinja2==3.1.4 # via # sphinx # towncrier +markdown-it-py==3.0.0 + # via rich markupsafe==2.1.5 # via jinja2 +mdurl==0.1.2 + # via markdown-it-py multidict==6.0.5 # via # -r requirements/multidict.in @@ -155,7 +159,9 @@ pydantic-core==2.6.0 pyenchant==3.2.2 # via sphinxcontrib-spelling pygments==2.18.0 - # via sphinx + # via + # rich + # sphinx pyjwt==2.9.0 # via # gidgethub @@ -195,8 +201,12 @@ requests==2.31.0 # cherry-picker # python-on-whales # sphinx +rich==13.7.1 + # via typer setuptools-git==1.2 # via -r requirements/test.in +shellingham==1.5.4 + # via typer six==1.16.0 # via python-dateutil slotscheck==0.19.0 @@ -249,7 +259,7 @@ trustme==1.1.0 ; platform_machine != "i686" # via # -r requirements/lint.in # -r requirements/test.in -typer==0.6.1 +typer==0.12.4 # via python-on-whales typing-extensions==4.11.0 # via @@ -259,6 +269,8 @@ typing-extensions==4.11.0 # pydantic # pydantic-core # python-on-whales + # rich + # typer uritemplate==4.1.1 # via gidgethub urllib3==2.2.2 diff --git a/requirements/dev.txt b/requirements/dev.txt index 6c6e238997b..6b95b18276e 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -107,8 +107,12 @@ jinja2==3.1.4 # via # sphinx # towncrier +markdown-it-py==3.0.0 + # via rich markupsafe==2.1.5 # via jinja2 +mdurl==0.1.2 + # via markdown-it-py multidict==6.0.5 # via # -r requirements/runtime-deps.in @@ -150,7 +154,9 @@ pydantic==2.2.0 pydantic-core==2.6.0 # via pydantic pygments==2.18.0 - # via sphinx + # via + # rich + # sphinx pyjwt==2.8.0 # via # gidgethub @@ -190,8 +196,12 @@ requests==2.31.0 # cherry-picker # python-on-whales # sphinx +rich==13.7.1 + # via typer setuptools-git==1.2 # via -r requirements/test.in +shellingham==1.5.4 + # via typer six==1.16.0 # via python-dateutil slotscheck==0.19.0 @@ -241,7 +251,7 @@ trustme==1.1.0 ; platform_machine != "i686" # 
via # -r requirements/lint.in # -r requirements/test.in -typer==0.9.0 +typer==0.12.4 # via python-on-whales typing-extensions==4.11.0 # via @@ -251,6 +261,7 @@ typing-extensions==4.11.0 # pydantic # pydantic-core # python-on-whales + # rich # typer uritemplate==4.1.1 # via gidgethub diff --git a/requirements/lint.txt b/requirements/lint.txt index ccc9d3cf43d..9f0f71afff3 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -103,7 +103,7 @@ tqdm==4.66.5 # via python-on-whales trustme==1.1.0 # via -r requirements/lint.in -typer==0.12.3 +typer==0.12.4 # via python-on-whales typing-extensions==4.11.0 # via diff --git a/requirements/test.txt b/requirements/test.txt index 7c990b70404..e6452d9920a 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -53,6 +53,10 @@ idna==3.4 # yarl iniconfig==2.0.0 # via pytest +markdown-it-py==3.0.0 + # via rich +mdurl==0.1.2 + # via markdown-it-py multidict==6.0.5 # via # -r requirements/runtime-deps.in @@ -77,6 +81,8 @@ pydantic==2.2.0 # via python-on-whales pydantic-core==2.6.0 # via pydantic +pygments==2.18.0 + # via rich pytest==8.3.2 # via # -r requirements/test.in @@ -96,8 +102,12 @@ regex==2024.7.24 # via re-assert requests==2.31.0 # via python-on-whales +rich==13.7.1 + # via typer setuptools-git==1.2 # via -r requirements/test.in +shellingham==1.5.4 + # via typer six==1.16.0 # via python-dateutil tomli==2.0.1 @@ -109,7 +119,7 @@ tqdm==4.66.5 # via python-on-whales trustme==1.1.0 ; platform_machine != "i686" # via -r requirements/test.in -typer==0.9.0 +typer==0.12.4 # via python-on-whales typing-extensions==4.11.0 # via @@ -118,6 +128,7 @@ typing-extensions==4.11.0 # pydantic # pydantic-core # python-on-whales + # rich # typer urllib3==2.2.2 # via requests From 7f64912206fb79113817dfee9a67ce9eff61cc4e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 22 Aug 2024 13:28:05 +0000 Subject: [PATCH 0396/1511] Bump typing-extensions from 4.11.0 to 
4.12.2 (#8839) Bumps [typing-extensions](https://github.com/python/typing_extensions) from 4.11.0 to 4.12.2. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/python/typing_extensions/releases">typing-extensions's releases</a>.</em></p> <blockquote> <h2>4.12.2</h2> <ul> <li>Fix regression in v4.12.0 where specialization of certain generics with an overridden <code>__eq__</code> method would raise errors. Patch by Jelle Zijlstra.</li> <li>Fix tests so they pass on 3.13.0b2</li> </ul> <h2>4.12.1</h2> <ul> <li>Preliminary changes for compatibility with the draft implementation of PEP 649 in Python 3.14. Patch by Jelle Zijlstra.</li> <li>Fix regression in v4.12.0 where nested <code>Annotated</code> types would cause <code>TypeError</code> to be raised if the nested <code>Annotated</code> type had unhashable metadata. Patch by Alex Waygood.</li> </ul> <h2>4.12.0</h2> <p>This release focuses on compatibility with the upcoming release of Python 3.13. Most changes are related to the implementation of type parameter defaults (PEP 696).</p> <p>Thanks to all of the people who contributed patches, especially Alex Waygood, who did most of the work adapting typing-extensions to the CPython PEP 696 implementation.</p> <p>There is a single change since 4.12.0rc1:</p> <ul> <li>Fix incorrect behaviour of <code>typing_extensions.ParamSpec</code> on Python 3.8 and 3.9 that meant that <code>isinstance(typing_extensions.ParamSpec("P"), typing.TypeVar)</code> would have a different result in some situations depending on whether or not a profiling function had been set using <code>sys.setprofile</code>. Patch by Alex Waygood.</li> </ul> <p>Changes included in 4.12.0rc1:</p> <ul> <li>Improve the implementation of type parameter defaults (PEP 696) <ul> <li>Backport the <code>typing.NoDefault</code> sentinel object from Python 3.13. 
TypeVars, ParamSpecs and TypeVarTuples without default values now have their <code>__default__</code> attribute set to this sentinel value.</li> <li>TypeVars, ParamSpecs and TypeVarTuples now have a <code>has_default()</code> method, matching <code>typing.TypeVar</code>, <code>typing.ParamSpec</code> and <code>typing.TypeVarTuple</code> on Python 3.13+.</li> <li>TypeVars, ParamSpecs and TypeVarTuples with <code>default=None</code> passed to their constructors now have their <code>__default__</code> attribute set to <code>None</code> at runtime rather than <code>types.NoneType</code>.</li> <li>Fix most tests for <code>TypeVar</code>, <code>ParamSpec</code> and <code>TypeVarTuple</code> on Python 3.13.0b1 and newer.</li> <li>Backport CPython PR <a href="https://redirect.github.com/python/cpython/pull/118774">#118774</a>, allowing type parameters without default values to follow those with default values in some type parameter lists. Patch by Alex Waygood, backporting a CPython PR by Jelle Zijlstra.</li> <li>It is now disallowed to use a <code>TypeVar</code> with a default value after a <code>TypeVarTuple</code> in a type parameter list. This matches the CPython implementation of PEP 696 on Python 3.13+.</li> <li>Fix bug in PEP-696 implementation where a default value for a <code>ParamSpec</code></li> </ul> </li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/python/typing_extensions/blob/main/CHANGELOG.md">typing-extensions's changelog</a>.</em></p> <blockquote> <h1>Release 4.12.2 (June 7, 2024)</h1> <ul> <li>Fix regression in v4.12.0 where specialization of certain generics with an overridden <code>__eq__</code> method would raise errors. 
Patch by Jelle Zijlstra.</li> <li>Fix tests so they pass on 3.13.0b2</li> </ul> <h1>Release 4.12.1 (June 1, 2024)</h1> <ul> <li>Preliminary changes for compatibility with the draft implementation of PEP 649 in Python 3.14. Patch by Jelle Zijlstra.</li> <li>Fix regression in v4.12.0 where nested <code>Annotated</code> types would cause <code>TypeError</code> to be raised if the nested <code>Annotated</code> type had unhashable metadata. Patch by Alex Waygood.</li> </ul> <h1>Release 4.12.0 (May 23, 2024)</h1> <p>This release is mostly the same as 4.12.0rc1 but fixes one more longstanding bug.</p> <ul> <li>Fix incorrect behaviour of <code>typing_extensions.ParamSpec</code> on Python 3.8 and 3.9 that meant that <code>isinstance(typing_extensions.ParamSpec("P"), typing.TypeVar)</code> would have a different result in some situations depending on whether or not a profiling function had been set using <code>sys.setprofile</code>. Patch by Alex Waygood.</li> </ul> <h1>Release 4.12.0rc1 (May 16, 2024)</h1> <p>This release focuses on compatibility with the upcoming release of Python 3.13. Most changes are related to the implementation of type parameter defaults (PEP 696).</p> <p>Thanks to all of the people who contributed patches, especially Alex Waygood, who did most of the work adapting typing-extensions to the CPython PEP 696 implementation.</p> <p>Full changelog:</p> <ul> <li>Improve the implementation of type parameter defaults (PEP 696) <ul> <li>Backport the <code>typing.NoDefault</code> sentinel object from Python 3.13. 
TypeVars, ParamSpecs and TypeVarTuples without default values now have their <code>__default__</code> attribute set to this sentinel value.</li> <li>TypeVars, ParamSpecs and TypeVarTuples now have a <code>has_default()</code> method, matching <code>typing.TypeVar</code>, <code>typing.ParamSpec</code> and <code>typing.TypeVarTuple</code> on Python 3.13+.</li> <li>TypeVars, ParamSpecs and TypeVarTuples with <code>default=None</code> passed to their constructors now have their <code>__default__</code> attribute set to <code>None</code> at runtime rather than <code>types.NoneType</code>.</li> <li>Fix most tests for <code>TypeVar</code>, <code>ParamSpec</code> and <code>TypeVarTuple</code> on Python 3.13.0b1 and newer.</li> </ul> </li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/python/typing_extensions/commit/e1250ff869e7ee5ad05170d8a4b65469f13801c3"><code>e1250ff</code></a> Prepare release 4.12.2 (<a href="https://redirect.github.com/python/typing_extensions/issues/426">#426</a>)</li> <li><a href="https://github.com/python/typing_extensions/commit/53bcdded534494674f893112f71d3be344d65363"><code>53bcdde</code></a> Avoid error if origin has a buggy <strong>eq</strong> (<a href="https://redirect.github.com/python/typing_extensions/issues/422">#422</a>)</li> <li><a href="https://github.com/python/typing_extensions/commit/726963800030ab35ba5b975fc3a60486c26c5050"><code>7269638</code></a> Prepare release 4.12.1 (<a href="https://redirect.github.com/python/typing_extensions/issues/418">#418</a>)</li> <li><a href="https://github.com/python/typing_extensions/commit/8dfcf3c74a4f5d736a6d2ce8d82c3e85cd0c5b18"><code>8dfcf3c</code></a> Fix <code>TypeError</code> on nested <code>Annotated</code> types where the inner type has unhashab...</li> <li><a href="https://github.com/python/typing_extensions/commit/d76f5911b7d44aa1ff26de22e76047ca6c53f840"><code>d76f591</code></a> 
Switch from flake8 to ruff (<a href="https://redirect.github.com/python/typing_extensions/issues/414">#414</a>)</li> <li><a href="https://github.com/python/typing_extensions/commit/920d60d09e929e23657a4459dd446fb428715981"><code>920d60d</code></a> Support my PEP 649 branch (<a href="https://redirect.github.com/python/typing_extensions/issues/412">#412</a>)</li> <li><a href="https://github.com/python/typing_extensions/commit/e792bce5508dad9f5f00066ad615d231cc1d64c1"><code>e792bce</code></a> Ignore fewer flake8 rules when linting tests (<a href="https://redirect.github.com/python/typing_extensions/issues/413">#413</a>)</li> <li><a href="https://github.com/python/typing_extensions/commit/f90a8dc40b60bf43510b8611a07d8cc570544ffe"><code>f90a8dc</code></a> Prepare release 4.12.0 (<a href="https://redirect.github.com/python/typing_extensions/issues/408">#408</a>)</li> <li><a href="https://github.com/python/typing_extensions/commit/118e1a604a857d54cb70a2a1f930b425676d6cb4"><code>118e1a6</code></a> Make sure <code>isinstance(typing_extensions.ParamSpec("P"), typing.TypeVar)</code> is u...</li> <li><a href="https://github.com/python/typing_extensions/commit/910141ab8295b422851f83ffc46c9eb04bbca719"><code>910141a</code></a> Add security documentation (<a href="https://redirect.github.com/python/typing_extensions/issues/403">#403</a>)</li> <li>Additional commits viewable in <a href="https://github.com/python/typing_extensions/compare/4.11.0...4.12.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=typing-extensions&package-manager=pip&previous-version=4.11.0&new-version=4.12.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. 
You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index da844ee27ab..ee6c6328122 100644 --- a/requirements/constraints.txt +++ 
b/requirements/constraints.txt @@ -261,7 +261,7 @@ trustme==1.1.0 ; platform_machine != "i686" # -r requirements/test.in typer==0.12.4 # via python-on-whales -typing-extensions==4.11.0 +typing-extensions==4.12.2 # via # aioredis # annotated-types diff --git a/requirements/dev.txt b/requirements/dev.txt index 6b95b18276e..d08ca566672 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -253,7 +253,7 @@ trustme==1.1.0 ; platform_machine != "i686" # -r requirements/test.in typer==0.12.4 # via python-on-whales -typing-extensions==4.11.0 +typing-extensions==4.12.2 # via # aioredis # annotated-types diff --git a/requirements/lint.txt b/requirements/lint.txt index 9f0f71afff3..3dc829156d6 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -105,7 +105,7 @@ trustme==1.1.0 # via -r requirements/lint.in typer==0.12.4 # via python-on-whales -typing-extensions==4.11.0 +typing-extensions==4.12.2 # via # aioredis # annotated-types diff --git a/requirements/test.txt b/requirements/test.txt index e6452d9920a..c9eaef76c05 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -121,7 +121,7 @@ trustme==1.1.0 ; platform_machine != "i686" # via -r requirements/test.in typer==0.12.4 # via python-on-whales -typing-extensions==4.11.0 +typing-extensions==4.12.2 # via # annotated-types # mypy From 8a2a8900ea88bd17ba37f9406eeebcfba8cb1fef Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Thu, 22 Aug 2024 14:30:41 +0100 Subject: [PATCH 0397/1511] Fix Python parser chunked handling with multiple Transfer-Encoding values (#8843) (cherry picked from commit faa15fd7d1bea808a64f979c1a7ace8340d68d61) --- CHANGES/8823.bugfix.rst | 1 + aiohttp/http_parser.py | 20 +++++++++++++++----- tests/test_http_parser.py | 35 +++++++++++++++++++++++++++++++++++ 3 files changed, 51 insertions(+), 5 deletions(-) create mode 100644 CHANGES/8823.bugfix.rst diff --git a/CHANGES/8823.bugfix.rst b/CHANGES/8823.bugfix.rst new file mode 100644 index 
00000000000..ea18e65fd4a --- /dev/null +++ b/CHANGES/8823.bugfix.rst @@ -0,0 +1 @@ +Fixed Python parser chunked handling with multiple Transfer-Encoding values -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index b992955a011..f46cf833c03 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -277,8 +277,10 @@ def __init__( ) @abc.abstractmethod - def parse_message(self, lines: List[bytes]) -> _MsgT: - pass + def parse_message(self, lines: List[bytes]) -> _MsgT: ... + + @abc.abstractmethod + def _is_chunked_te(self, te: str) -> bool: ... def feed_eof(self) -> Optional[_MsgT]: if self._payload_parser is not None: @@ -537,10 +539,8 @@ def parse_headers( # chunking te = headers.get(hdrs.TRANSFER_ENCODING) if te is not None: - if "chunked" == te.lower(): + if self._is_chunked_te(te): chunked = True - else: - raise BadHttpMessage("Request has invalid `Transfer-Encoding`") if hdrs.CONTENT_LENGTH in headers: raise BadHttpMessage( @@ -650,6 +650,12 @@ def parse_message(self, lines: List[bytes]) -> RawRequestMessage: url, ) + def _is_chunked_te(self, te: str) -> bool: + if te.rsplit(",", maxsplit=1)[-1].strip(" \t").lower() == "chunked": + return True + # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.3 + raise BadHttpMessage("Request has invalid `Transfer-Encoding`") + class HttpResponseParser(HttpParser[RawResponseMessage]): """Read response status line and headers. 
@@ -735,6 +741,10 @@ def parse_message(self, lines: List[bytes]) -> RawResponseMessage: chunked, ) + def _is_chunked_te(self, te: str) -> bool: + # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.2 + return te.rsplit(",", maxsplit=1)[-1].strip(" \t").lower() == "chunked" + class HttpPayloadParser: def __init__( diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index 74700df4253..78abe528cb0 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -84,6 +84,7 @@ def response(loop: Any, protocol: Any, request: Any): max_line_size=8190, max_headers=32768, max_field_size=8190, + read_until_eof=True, ) @@ -514,6 +515,23 @@ def test_request_te_chunked123(parser: Any) -> None: parser.feed_data(text) +async def test_request_te_last_chunked(parser: Any) -> None: + text = b"GET /test HTTP/1.1\r\nTransfer-Encoding: not, chunked\r\n\r\n1\r\nT\r\n3\r\nest\r\n0\r\n\r\n" + messages, upgrade, tail = parser.feed_data(text) + # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.3 + assert await messages[0][1].read() == b"Test" + + +def test_request_te_first_chunked(parser: Any) -> None: + text = b"GET /test HTTP/1.1\r\nTransfer-Encoding: chunked, not\r\n\r\n1\r\nT\r\n3\r\nest\r\n0\r\n\r\n" + # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.3 + with pytest.raises( + http_exceptions.BadHttpMessage, + match="nvalid `Transfer-Encoding`", + ): + parser.feed_data(text) + + def test_conn_upgrade(parser: Any) -> None: text = ( b"GET /test HTTP/1.1\r\n" @@ -1154,6 +1172,23 @@ async def test_http_response_parser_bad_chunked_strict_c(loop, protocol) -> None response.feed_data(text) +async def test_http_response_parser_notchunked(response) -> None: + text = b"HTTP/1.1 200 OK\r\nTransfer-Encoding: notchunked\r\n\r\n1\r\nT\r\n3\r\nest\r\n0\r\n\r\n" + messages, upgrade, tail = response.feed_data(text) + response.feed_eof() + + # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.2 + assert await messages[0][1].read() == 
b"1\r\nT\r\n3\r\nest\r\n0\r\n\r\n" + + +async def test_http_response_parser_last_chunked(response) -> None: + text = b"HTTP/1.1 200 OK\r\nTransfer-Encoding: not, chunked\r\n\r\n1\r\nT\r\n3\r\nest\r\n0\r\n\r\n" + messages, upgrade, tail = response.feed_data(text) + + # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.2 + assert await messages[0][1].read() == b"Test" + + def test_http_response_parser_bad(response) -> None: with pytest.raises(http_exceptions.BadHttpMessage): response.feed_data(b"HTT/1\r\n\r\n") From 23c9671fdab74956da85d26144d860e774b181e7 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Thu, 22 Aug 2024 14:30:53 +0100 Subject: [PATCH 0398/1511] Fix Python parser chunked handling with multiple Transfer-Encoding values (#8844) (cherry picked from commit faa15fd7d1bea808a64f979c1a7ace8340d68d61) --- CHANGES/8823.bugfix.rst | 1 + aiohttp/http_parser.py | 20 +++++++++++++++----- tests/test_http_parser.py | 35 +++++++++++++++++++++++++++++++++++ 3 files changed, 51 insertions(+), 5 deletions(-) create mode 100644 CHANGES/8823.bugfix.rst diff --git a/CHANGES/8823.bugfix.rst b/CHANGES/8823.bugfix.rst new file mode 100644 index 00000000000..ea18e65fd4a --- /dev/null +++ b/CHANGES/8823.bugfix.rst @@ -0,0 +1 @@ +Fixed Python parser chunked handling with multiple Transfer-Encoding values -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index b992955a011..f46cf833c03 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -277,8 +277,10 @@ def __init__( ) @abc.abstractmethod - def parse_message(self, lines: List[bytes]) -> _MsgT: - pass + def parse_message(self, lines: List[bytes]) -> _MsgT: ... + + @abc.abstractmethod + def _is_chunked_te(self, te: str) -> bool: ... 
def feed_eof(self) -> Optional[_MsgT]: if self._payload_parser is not None: @@ -537,10 +539,8 @@ def parse_headers( # chunking te = headers.get(hdrs.TRANSFER_ENCODING) if te is not None: - if "chunked" == te.lower(): + if self._is_chunked_te(te): chunked = True - else: - raise BadHttpMessage("Request has invalid `Transfer-Encoding`") if hdrs.CONTENT_LENGTH in headers: raise BadHttpMessage( @@ -650,6 +650,12 @@ def parse_message(self, lines: List[bytes]) -> RawRequestMessage: url, ) + def _is_chunked_te(self, te: str) -> bool: + if te.rsplit(",", maxsplit=1)[-1].strip(" \t").lower() == "chunked": + return True + # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.3 + raise BadHttpMessage("Request has invalid `Transfer-Encoding`") + class HttpResponseParser(HttpParser[RawResponseMessage]): """Read response status line and headers. @@ -735,6 +741,10 @@ def parse_message(self, lines: List[bytes]) -> RawResponseMessage: chunked, ) + def _is_chunked_te(self, te: str) -> bool: + # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.2 + return te.rsplit(",", maxsplit=1)[-1].strip(" \t").lower() == "chunked" + class HttpPayloadParser: def __init__( diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index 74700df4253..78abe528cb0 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -84,6 +84,7 @@ def response(loop: Any, protocol: Any, request: Any): max_line_size=8190, max_headers=32768, max_field_size=8190, + read_until_eof=True, ) @@ -514,6 +515,23 @@ def test_request_te_chunked123(parser: Any) -> None: parser.feed_data(text) +async def test_request_te_last_chunked(parser: Any) -> None: + text = b"GET /test HTTP/1.1\r\nTransfer-Encoding: not, chunked\r\n\r\n1\r\nT\r\n3\r\nest\r\n0\r\n\r\n" + messages, upgrade, tail = parser.feed_data(text) + # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.3 + assert await messages[0][1].read() == b"Test" + + +def test_request_te_first_chunked(parser: Any) -> None: + text = b"GET /test 
HTTP/1.1\r\nTransfer-Encoding: chunked, not\r\n\r\n1\r\nT\r\n3\r\nest\r\n0\r\n\r\n" + # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.3 + with pytest.raises( + http_exceptions.BadHttpMessage, + match="nvalid `Transfer-Encoding`", + ): + parser.feed_data(text) + + def test_conn_upgrade(parser: Any) -> None: text = ( b"GET /test HTTP/1.1\r\n" @@ -1154,6 +1172,23 @@ async def test_http_response_parser_bad_chunked_strict_c(loop, protocol) -> None response.feed_data(text) +async def test_http_response_parser_notchunked(response) -> None: + text = b"HTTP/1.1 200 OK\r\nTransfer-Encoding: notchunked\r\n\r\n1\r\nT\r\n3\r\nest\r\n0\r\n\r\n" + messages, upgrade, tail = response.feed_data(text) + response.feed_eof() + + # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.2 + assert await messages[0][1].read() == b"1\r\nT\r\n3\r\nest\r\n0\r\n\r\n" + + +async def test_http_response_parser_last_chunked(response) -> None: + text = b"HTTP/1.1 200 OK\r\nTransfer-Encoding: not, chunked\r\n\r\n1\r\nT\r\n3\r\nest\r\n0\r\n\r\n" + messages, upgrade, tail = response.feed_data(text) + + # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.2 + assert await messages[0][1].read() == b"Test" + + def test_http_response_parser_bad(response) -> None: with pytest.raises(http_exceptions.BadHttpMessage): response.feed_data(b"HTT/1\r\n\r\n") From 15d622cafb730b9874f2dee83f750654d724d89f Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 23 Aug 2024 12:36:24 +0100 Subject: [PATCH 0399/1511] [PR #8804/8156789a backport][3.10] docs: minor grammar fix in client_reference.rst (#8856) **This is a backport of PR #8804 as merged into master (8156789a1a1c6413233986b372dc933f5c13d712).** Co-authored-by: Dima Tisnek <dimaqq@gmail.com> --- .github/workflows/ci-cd.yml | 1 - docs/client_reference.rst | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 
8e56acb497d..17632dba6e6 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -238,7 +238,6 @@ jobs: if: ${{ !cancelled() }} uses: codecov/test-results-action@v1 with: - fail_ci_if_error: true token: ${{ secrets.CODECOV_TOKEN }} check: # This job does nothing and is only used for the branch protection diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 738892c6cc6..91444d117b1 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -125,7 +125,7 @@ The client session supports the context manager protocol for self closing. Automatically call :meth:`ClientResponse.raise_for_status()` for each response, ``False`` by default. - This parameter can be overridden when you making a request, e.g.:: + This parameter can be overridden when making a request, e.g.:: client_session = aiohttp.ClientSession(raise_for_status=True) resp = await client_session.get(url, raise_for_status=False) From 385e4303b4b8aeae6c388d98f50b776d98b53c27 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 23 Aug 2024 12:53:39 +0100 Subject: [PATCH 0400/1511] [PR #8804/8156789a backport][3.11] docs: minor grammar fix in client_reference.rst (#8857) **This is a backport of PR #8804 as merged into master (8156789a1a1c6413233986b372dc933f5c13d712).** Co-authored-by: Dima Tisnek <dimaqq@gmail.com> --- .github/workflows/ci-cd.yml | 1 - docs/client_reference.rst | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index bb4df9b7760..df27a9108d6 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -236,7 +236,6 @@ jobs: if: ${{ !cancelled() }} uses: codecov/test-results-action@v1 with: - fail_ci_if_error: true token: ${{ secrets.CODECOV_TOKEN }} check: # This job does nothing and is only used for the branch protection diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 
738892c6cc6..91444d117b1 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -125,7 +125,7 @@ The client session supports the context manager protocol for self closing. Automatically call :meth:`ClientResponse.raise_for_status()` for each response, ``False`` by default. - This parameter can be overridden when you making a request, e.g.:: + This parameter can be overridden when making a request, e.g.:: client_session = aiohttp.ClientSession(raise_for_status=True) resp = await client_session.get(url, raise_for_status=False) From f1be53e5aeebf9e6eb18ff071c86bed3bd31926d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 23 Aug 2024 12:26:54 +0000 Subject: [PATCH 0401/1511] Bump requests from 2.31.0 to 2.32.3 (#8859) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [requests](https://github.com/psf/requests) from 2.31.0 to 2.32.3. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/psf/requests/releases">requests's releases</a>.</em></p> <blockquote> <h2>v2.32.3</h2> <h2>2.32.3 (2024-05-29)</h2> <p><strong>Bugfixes</strong></p> <ul> <li>Fixed bug breaking the ability to specify custom SSLContexts in sub-classes of HTTPAdapter. (<a href="https://redirect.github.com/psf/requests/issues/6716">#6716</a>)</li> <li>Fixed issue where Requests started failing to run on Python versions compiled without the <code>ssl</code> module. (<a href="https://redirect.github.com/psf/requests/issues/6724">#6724</a>)</li> </ul> <h2>v2.32.2</h2> <h2>2.32.2 (2024-05-21)</h2> <p><strong>Deprecations</strong></p> <ul> <li> <p>To provide a more stable migration for custom HTTPAdapters impacted by the CVE changes in 2.32.0, we've renamed <code>_get_connection</code> to a new public API, <code>get_connection_with_tls_context</code>. Existing custom HTTPAdapters will need to migrate their code to use this new API. 
<code>get_connection</code> is considered deprecated in all versions of Requests>=2.32.0.</p> <p>A minimal (2-line) example has been provided in the linked PR to ease migration, but we strongly urge users to evaluate if their custom adapter is subject to the same issue described in CVE-2024-35195. (<a href="https://redirect.github.com/psf/requests/issues/6710">#6710</a>)</p> </li> </ul> <h2>v2.32.1</h2> <h2>2.32.1 (2024-05-20)</h2> <p><strong>Bugfixes</strong></p> <ul> <li>Add missing test certs to the sdist distributed on PyPI.</li> </ul> <h2>v2.32.0</h2> <h2>2.32.0 (2024-05-20)</h2> <h2>🐍 PYCON US 2024 EDITION 🐍</h2> <p><strong>Security</strong></p> <ul> <li>Fixed an issue where setting <code>verify=False</code> on the first request from a Session will cause subsequent requests to the <em>same origin</em> to also ignore cert verification, regardless of the value of <code>verify</code>. (<a href="https://github.com/psf/requests/security/advisories/GHSA-9wx4-h78v-vm56">https://github.com/psf/requests/security/advisories/GHSA-9wx4-h78v-vm56</a>)</li> </ul> <p><strong>Improvements</strong></p> <ul> <li><code>verify=True</code> now reuses a global SSLContext which should improve request time variance between first and subsequent requests. It should also minimize certificate load time on Windows systems when using a Python version built with OpenSSL 3.x. (<a href="https://redirect.github.com/psf/requests/issues/6667">#6667</a>)</li> <li>Requests now supports optional use of character detection (<code>chardet</code> or <code>charset_normalizer</code>) when repackaged or vendored.</li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/psf/requests/blob/main/HISTORY.md">requests's changelog</a>.</em></p> <blockquote> <h2>2.32.3 (2024-05-29)</h2> <p><strong>Bugfixes</strong></p> <ul> <li>Fixed bug breaking the ability to specify custom SSLContexts in sub-classes of HTTPAdapter. (<a href="https://redirect.github.com/psf/requests/issues/6716">#6716</a>)</li> <li>Fixed issue where Requests started failing to run on Python versions compiled without the <code>ssl</code> module. (<a href="https://redirect.github.com/psf/requests/issues/6724">#6724</a>)</li> </ul> <h2>2.32.2 (2024-05-21)</h2> <p><strong>Deprecations</strong></p> <ul> <li> <p>To provide a more stable migration for custom HTTPAdapters impacted by the CVE changes in 2.32.0, we've renamed <code>_get_connection</code> to a new public API, <code>get_connection_with_tls_context</code>. Existing custom HTTPAdapters will need to migrate their code to use this new API. <code>get_connection</code> is considered deprecated in all versions of Requests>=2.32.0.</p> <p>A minimal (2-line) example has been provided in the linked PR to ease migration, but we strongly urge users to evaluate if their custom adapter is subject to the same issue described in CVE-2024-35195. (<a href="https://redirect.github.com/psf/requests/issues/6710">#6710</a>)</p> </li> </ul> <h2>2.32.1 (2024-05-20)</h2> <p><strong>Bugfixes</strong></p> <ul> <li>Add missing test certs to the sdist distributed on PyPI.</li> </ul> <h2>2.32.0 (2024-05-20)</h2> <p><strong>Security</strong></p> <ul> <li>Fixed an issue where setting <code>verify=False</code> on the first request from a Session will cause subsequent requests to the <em>same origin</em> to also ignore cert verification, regardless of the value of <code>verify</code>. 
(<a href="https://github.com/psf/requests/security/advisories/GHSA-9wx4-h78v-vm56">https://github.com/psf/requests/security/advisories/GHSA-9wx4-h78v-vm56</a>)</li> </ul> <p><strong>Improvements</strong></p> <ul> <li><code>verify=True</code> now reuses a global SSLContext which should improve request time variance between first and subsequent requests. It should also minimize certificate load time on Windows systems when using a Python version built with OpenSSL 3.x. (<a href="https://redirect.github.com/psf/requests/issues/6667">#6667</a>)</li> <li>Requests now supports optional use of character detection (<code>chardet</code> or <code>charset_normalizer</code>) when repackaged or vendored. This enables <code>pip</code> and other projects to minimize their vendoring surface area. The <code>Response.text()</code> and <code>apparent_encoding</code> APIs will default to <code>utf-8</code> if neither library is present. (<a href="https://redirect.github.com/psf/requests/issues/6702">#6702</a>)</li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/psf/requests/commit/0e322af87745eff34caffe4df68456ebc20d9068"><code>0e322af</code></a> v2.32.3</li> <li><a href="https://github.com/psf/requests/commit/e18879932287c2bf4bcee4ddf6ccb8a69b6fc656"><code>e188799</code></a> Don't create default SSLContext if ssl module isn't present (<a href="https://redirect.github.com/psf/requests/issues/6724">#6724</a>)</li> <li><a href="https://github.com/psf/requests/commit/145b5399486b56e00250204f033441f3fdf2f3c9"><code>145b539</code></a> Merge pull request <a href="https://redirect.github.com/psf/requests/issues/6716">#6716</a> from sigmavirus24/bug/6715</li> <li><a href="https://github.com/psf/requests/commit/b1d73ddb509a3a2d3e10744e85f9cdebdbde90f0"><code>b1d73dd</code></a> Don't use default SSLContext with custom poolmanager kwargs</li> <li><a href="https://github.com/psf/requests/commit/6badbac6e0d6b5a53872f26401761ad37a9002b8"><code>6badbac</code></a> Update HISTORY.md</li> <li><a href="https://github.com/psf/requests/commit/a62a2d35d918baa8e793f7aa4fb41527644dfca5"><code>a62a2d3</code></a> Allow for overriding of specific pool key params</li> <li><a href="https://github.com/psf/requests/commit/88dce9d854797c05d0ff296b70e0430535ef8aaf"><code>88dce9d</code></a> v2.32.2</li> <li><a href="https://github.com/psf/requests/commit/c98e4d133ef29c46a9b68cd783087218a8075e05"><code>c98e4d1</code></a> Merge pull request <a href="https://redirect.github.com/psf/requests/issues/6710">#6710</a> from nateprewitt/api_rename</li> <li><a href="https://github.com/psf/requests/commit/92075b330a30b9883f466a43d3f7566ab849f91b"><code>92075b3</code></a> Add deprecation warning</li> <li><a href="https://github.com/psf/requests/commit/aa1461b68aa73e2f6ec0e78c8853b635c76fd099"><code>aa1461b</code></a> Move _get_connection to get_connection_with_tls_context</li> <li>Additional commits viewable in <a 
href="https://github.com/psf/requests/compare/v2.31.0...v2.32.3">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=requests&package-manager=pip&previous-version=2.31.0&new-version=2.32.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index ee6c6328122..eb013753530 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -196,7 +196,7 @@ re-assert==1.1.0 # via -r requirements/test.in regex==2024.7.24 # via re-assert -requests==2.31.0 +requests==2.32.3 # via # cherry-picker # python-on-whales diff --git a/requirements/dev.txt b/requirements/dev.txt index d08ca566672..b4ef0dfe74b 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -191,7 +191,7 @@ re-assert==1.1.0 # via -r requirements/test.in regex==2024.7.24 # via re-assert -requests==2.31.0 +requests==2.32.3 # via # cherry-picker # python-on-whales diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 5b080130969..977da96e6e2 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -50,7 +50,7 @@ pygments==2.18.0 # via 
sphinx pytz==2023.3.post1 # via babel -requests==2.31.0 +requests==2.32.3 # via sphinx snowballstemmer==2.2.0 # via sphinx diff --git a/requirements/doc.txt b/requirements/doc.txt index 44cc009b5c4..371034a8849 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -48,7 +48,7 @@ pygments==2.18.0 # via sphinx pytz==2023.3.post1 # via babel -requests==2.31.0 +requests==2.32.3 # via sphinx snowballstemmer==2.2.0 # via sphinx diff --git a/requirements/lint.txt b/requirements/lint.txt index 3dc829156d6..d0cf9e44248 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -84,7 +84,7 @@ python-on-whales==0.72.0 # via -r requirements/lint.in pyyaml==6.0.1 # via pre-commit -requests==2.31.0 +requests==2.32.3 # via python-on-whales rich==13.7.1 # via typer diff --git a/requirements/test.txt b/requirements/test.txt index c9eaef76c05..dfdcf8f7b1e 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -100,7 +100,7 @@ re-assert==1.1.0 # via -r requirements/test.in regex==2024.7.24 # via re-assert -requests==2.31.0 +requests==2.32.3 # via python-on-whales rich==13.7.1 # via typer From 5319115c6312e777dd7f89fac937ca3b73b31f8c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 23 Aug 2024 12:44:57 +0000 Subject: [PATCH 0402/1511] Bump sphinxcontrib-htmlhelp from 2.0.0 to 2.0.1 (#8860) Bumps [sphinxcontrib-htmlhelp](https://github.com/sphinx-doc/sphinxcontrib-htmlhelp) from 2.0.0 to 2.0.1. 
<details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/sphinx-doc/sphinxcontrib-htmlhelp/blob/master/CHANGES.rst">sphinxcontrib-htmlhelp's changelog</a>.</em></p> <blockquote> <h1>Release 2.0.1 (2023-01-31)</h1> <ul> <li>Drop support for Python 3.7 and lower</li> <li>Fix deprecation warnings from Sphinx 6.1</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/sphinx-doc/sphinxcontrib-htmlhelp/commit/81362a175cf405bc608fe57b303964a0a0cbc16b"><code>81362a1</code></a> Bump to 2.0.1 final</li> <li><a href="https://github.com/sphinx-doc/sphinxcontrib-htmlhelp/commit/547e73d164a98a100650122f8bd577d4b0d1abb1"><code>547e73d</code></a> Update test matrix</li> <li><a href="https://github.com/sphinx-doc/sphinxcontrib-htmlhelp/commit/390d3422b1df58cebdf101e3fa62971bebf8078d"><code>390d342</code></a> Update Tox commands and environment list</li> <li><a href="https://github.com/sphinx-doc/sphinxcontrib-htmlhelp/commit/31f11e076c20ab7920649d33f037f4c07e9848d2"><code>31f11e0</code></a> Fix imports for Sphinx above 6.1</li> <li><a href="https://github.com/sphinx-doc/sphinxcontrib-htmlhelp/commit/2981e43aeb209697fa26677e4adb50760311f729"><code>2981e43</code></a> Merge branch 'pyproject'</li> <li><a href="https://github.com/sphinx-doc/sphinxcontrib-htmlhelp/commit/9003a00e9d6a52b4f1abc045789c952d6c455cec"><code>9003a00</code></a> Move tool configuration to individual files</li> <li><a href="https://github.com/sphinx-doc/sphinxcontrib-htmlhelp/commit/7b2a66abcf5f8a564a97d184ee206316082c42a7"><code>7b2a66a</code></a> Drop support for Python 3.7 and below</li> <li><a href="https://github.com/sphinx-doc/sphinxcontrib-htmlhelp/commit/c077df14171fec7d12e1c56f8a425e6aafc162fe"><code>c077df1</code></a> Use modern <code>pyproject</code> based packaging</li> <li><a href="https://github.com/sphinx-doc/sphinxcontrib-htmlhelp/commit/ced637873c84e1fbb6e31f50dfdaa11e47362aad"><code>ced6378</code></a> git mv 
setup.py pyproject.toml</li> <li><a href="https://github.com/sphinx-doc/sphinxcontrib-htmlhelp/commit/7e05fea72537d9f40d781c38517cd059afd90dc0"><code>7e05fea</code></a> Merge pull request <a href="https://redirect.github.com/sphinx-doc/sphinxcontrib-htmlhelp/issues/16">#16</a> from tk0miya/9457_test</li> <li>Additional commits viewable in <a href="https://github.com/sphinx-doc/sphinxcontrib-htmlhelp/compare/2.0.0...2.0.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=sphinxcontrib-htmlhelp&package-manager=pip&previous-version=2.0.0&new-version=2.0.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index eb013753530..4aa2ea37e3a 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -225,7 +225,7 @@ sphinxcontrib-blockdiag==3.0.0 # via -r requirements/doc.in sphinxcontrib-devhelp==1.0.2 # via sphinx -sphinxcontrib-htmlhelp==2.0.0 +sphinxcontrib-htmlhelp==2.0.1 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx From 4cdbf01d609db0c21da1cba03b1eb529e715bef2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 23 Aug 2024 12:56:58 +0000 Subject: [PATCH 0403/1511] Bump exceptiongroup from 1.1.2 to 1.2.2 (#8861) Bumps [exceptiongroup](https://github.com/agronholm/exceptiongroup) from 1.1.2 to 1.2.2. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/agronholm/exceptiongroup/releases">exceptiongroup's releases</a>.</em></p> <blockquote> <h2>1.2.2</h2> <ul> <li>Removed an <code>assert</code> in <code>exceptiongroup._formatting</code> that caused compatibility issues with Sentry (<a href="https://redirect.github.com/agronholm/exceptiongroup/issues/123">#123</a>)</li> </ul> <h2>1.2.1</h2> <ul> <li>Updated the copying of <code>__notes__</code> to match CPython behavior (PR by CF Bolz-Tereick)</li> <li>Corrected the type annotation of the exception handler callback to accept a <code>BaseExceptionGroup</code> instead of <code>BaseException</code></li> <li>Fixed type errors on Python < 3.10 and the type annotation of <code>suppress()</code> (PR by John Litborn)</li> </ul> <h2>1.2.0</h2> <ul> <li>Added special monkeypatching if <a href="https://github.com/canonical/apport">Apport</a> has overridden <code>sys.excepthook</code> so it will format exception groups correctly (PR by John Litborn)</li> <li>Added a backport of <code>contextlib.suppress()</code> from Python 3.12.1 which also handles suppressing exceptions inside exception groups</li> <li>Fixed bare <code>raise</code> in a handler reraising the original naked exception rather than an exception group which is what is raised when you do a <code>raise</code> in an <code>except*</code> handler</li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/agronholm/exceptiongroup/blob/main/CHANGES.rst">exceptiongroup's changelog</a>.</em></p> <blockquote> <h1>Version history</h1> <p>This library adheres to <code>Semantic Versioning 2.0 <http://semver.org/></code>_.</p> <p><strong>1.2.2</strong></p> <ul> <li>Removed an <code>assert</code> in <code>exceptiongroup._formatting</code> that caused compatibility issues with Sentry (<code>[#123](https://github.com/agronholm/exceptiongroup/issues/123) 
<https://github.com/agronholm/exceptiongroup/issues/123></code>_)</li> </ul> <p><strong>1.2.1</strong></p> <ul> <li>Updated the copying of <code>__notes__</code> to match CPython behavior (PR by CF Bolz-Tereick)</li> <li>Corrected the type annotation of the exception handler callback to accept a <code>BaseExceptionGroup</code> instead of <code>BaseException</code></li> <li>Fixed type errors on Python < 3.10 and the type annotation of <code>suppress()</code> (PR by John Litborn)</li> </ul> <p><strong>1.2.0</strong></p> <ul> <li>Added special monkeypatching if <code>Apport <https://github.com/canonical/apport></code>_ has overridden <code>sys.excepthook</code> so it will format exception groups correctly (PR by John Litborn)</li> <li>Added a backport of <code>contextlib.suppress()</code> from Python 3.12.1 which also handles suppressing exceptions inside exception groups</li> <li>Fixed bare <code>raise</code> in a handler reraising the original naked exception rather than an exception group which is what is raised when you do a <code>raise</code> in an <code>except*</code> handler</li> </ul> <p><strong>1.1.3</strong></p> <ul> <li><code>catch()</code> now raises a <code>TypeError</code> if passed an async exception handler instead of just giving a <code>RuntimeWarning</code> about the coroutine never being awaited. 
(<a href="https://redirect.github.com/agronholm/exceptiongroup/issues/66">#66</a>, PR by John Litborn)</li> <li>Fixed plain <code>raise</code> statement in an exception handler callback to work like a <code>raise</code> in an <code>except*</code> block</li> <li>Fixed new exception group not being chained to the original exception when raising an exception group from exceptions raised in handler callbacks</li> <li>Fixed type annotations of the <code>derive()</code>, <code>subgroup()</code> and <code>split()</code> methods to match the ones in typeshed</li> </ul> <p><strong>1.1.2</strong></p> <ul> <li>Changed handling of exceptions in exception group handler callbacks to not wrap a single exception in an exception group, as per <code>CPython issue 103590 <https://github.com/python/cpython/issues/103590></code>_</li> </ul> <p><strong>1.1.1</strong></p> <ul> <li>Worked around</li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/agronholm/exceptiongroup/commit/2399d5474325017229c93e210df735bc1209ef6b"><code>2399d54</code></a> Added the release version</li> <li><a href="https://github.com/agronholm/exceptiongroup/commit/bec9651b6ed54f12e5fd0aa02d8e8652a45dc96b"><code>bec9651</code></a> Removed problematic assert that caused compatibility issues</li> <li><a href="https://github.com/agronholm/exceptiongroup/commit/f3f0ff6a362f760bc784ff57b625a602511cd018"><code>f3f0ff6</code></a> Updated Ruff configuration</li> <li><a href="https://github.com/agronholm/exceptiongroup/commit/bb43ee0f7293b8ce46a142f8f4bf5e728c7f1045"><code>bb43ee0</code></a> Fixed formatting tests failing on Python 3.13</li> <li><a href="https://github.com/agronholm/exceptiongroup/commit/eb8fbbcfc9b647241448ad87e3e6822fdfc549ad"><code>eb8fbbc</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/agronholm/exceptiongroup/issues/129">#129</a>)</li> <li><a 
href="https://github.com/agronholm/exceptiongroup/commit/6ff8300b5f6c53872007ff37d2a668cae85958e2"><code>6ff8300</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/agronholm/exceptiongroup/issues/128">#128</a>)</li> <li><a href="https://github.com/agronholm/exceptiongroup/commit/761933fdc3a8bfa718132bc4981cd285f5de2f4c"><code>761933f</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/agronholm/exceptiongroup/issues/127">#127</a>)</li> <li><a href="https://github.com/agronholm/exceptiongroup/commit/1b43294bb35e899880371b6dc9d1347057acb6d1"><code>1b43294</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/agronholm/exceptiongroup/issues/125">#125</a>)</li> <li><a href="https://github.com/agronholm/exceptiongroup/commit/dd8701802e22c281fd798984e8d37c4acce6a948"><code>dd87018</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/agronholm/exceptiongroup/issues/124">#124</a>)</li> <li><a href="https://github.com/agronholm/exceptiongroup/commit/54d8b8d9208bfc8f1e89c052e45e89328491ca06"><code>54d8b8d</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/agronholm/exceptiongroup/issues/121">#121</a>)</li> <li>Additional commits viewable in <a href="https://github.com/agronholm/exceptiongroup/compare/1.1.2...1.2.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=exceptiongroup&package-manager=pip&previous-version=1.1.2&new-version=1.2.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 4aa2ea37e3a..15e2e39f670 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -68,7 +68,7 @@ distlib==0.3.8 # via virtualenv docutils==0.20.1 # 
via sphinx -exceptiongroup==1.1.2 +exceptiongroup==1.2.2 # via pytest filelock==3.15.4 # via virtualenv diff --git a/requirements/dev.txt b/requirements/dev.txt index b4ef0dfe74b..06a905e4b1e 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -66,7 +66,7 @@ distlib==0.3.8 # via virtualenv docutils==0.20.1 # via sphinx -exceptiongroup==1.1.2 +exceptiongroup==1.2.2 # via pytest filelock==3.12.2 # via virtualenv diff --git a/requirements/lint.txt b/requirements/lint.txt index d0cf9e44248..6424a9c147a 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -30,7 +30,7 @@ cryptography==43.0.0 # via trustme distlib==0.3.8 # via virtualenv -exceptiongroup==1.1.2 +exceptiongroup==1.2.2 # via pytest filelock==3.12.2 # via virtualenv diff --git a/requirements/test.txt b/requirements/test.txt index dfdcf8f7b1e..61497339cf8 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -36,7 +36,7 @@ coverage==7.6.1 # pytest-cov cryptography==43.0.0 # via trustme -exceptiongroup==1.1.2 +exceptiongroup==1.2.2 # via pytest freezegun==1.5.1 # via -r requirements/test.in From 17fa09f90e3409cf9f12f13f5c84b63446e72b0f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 23 Aug 2024 12:58:50 +0000 Subject: [PATCH 0404/1511] Bump pycparser from 2.21 to 2.22 (#8862) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pycparser](https://github.com/eliben/pycparser) from 2.21 to 2.22. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/eliben/pycparser/releases">pycparser's releases</a>.</em></p> <blockquote> <h2>release_v2.22</h2> <h2>What's Changed</h2> <ul> <li>Add missing SCHAR limit defines by <a href="https://github.com/matamegger"><code>@​matamegger</code></a> in <a href="https://redirect.github.com/eliben/pycparser/pull/449">eliben/pycparser#449</a></li> <li>Use proper SPDX identifier by <a href="https://github.com/Shortfinga"><code>@​Shortfinga</code></a> in <a href="https://redirect.github.com/eliben/pycparser/pull/474">eliben/pycparser#474</a></li> <li>Add Python 3.11 as a supported version by <a href="https://github.com/erlend-aasland"><code>@​erlend-aasland</code></a> in <a href="https://redirect.github.com/eliben/pycparser/pull/469">eliben/pycparser#469</a></li> <li>Fix multi-pragma/single statement blocks (<a href="https://redirect.github.com/eliben/pycparser/issues/479">#479</a>) by <a href="https://github.com/ldore"><code>@​ldore</code></a> in <a href="https://redirect.github.com/eliben/pycparser/pull/480">eliben/pycparser#480</a></li> <li>Add an encoding parameter to parse_file by <a href="https://github.com/jordr"><code>@​jordr</code></a> in <a href="https://redirect.github.com/eliben/pycparser/pull/486">eliben/pycparser#486</a></li> <li>Feature/add pragma support by <a href="https://github.com/jordr"><code>@​jordr</code></a> in <a href="https://redirect.github.com/eliben/pycparser/pull/487">eliben/pycparser#487</a></li> <li>Set up permissions to ci.yml by <a href="https://github.com/joycebrum"><code>@​joycebrum</code></a> in <a href="https://redirect.github.com/eliben/pycparser/pull/492">eliben/pycparser#492</a></li> <li>_build_tables: Invalidate cache before importing generated modules by <a href="https://github.com/mgorny"><code>@​mgorny</code></a> in <a href="https://redirect.github.com/eliben/pycparser/pull/494">eliben/pycparser#494</a></li> <li>Upgrade GitHub Actions by <a 
href="https://github.com/cclauss"><code>@​cclauss</code></a> in <a href="https://redirect.github.com/eliben/pycparser/pull/500">eliben/pycparser#500</a></li> <li>Create a Security Policy by <a href="https://github.com/joycebrum"><code>@​joycebrum</code></a> in <a href="https://redirect.github.com/eliben/pycparser/pull/499">eliben/pycparser#499</a></li> <li>New example to generate AST from scratch by <a href="https://github.com/Andree37"><code>@​Andree37</code></a> in <a href="https://redirect.github.com/eliben/pycparser/pull/507">eliben/pycparser#507</a></li> <li>Add support for Python 3.12 by <a href="https://github.com/hugovk"><code>@​hugovk</code></a> in <a href="https://redirect.github.com/eliben/pycparser/pull/515">eliben/pycparser#515</a></li> <li>ply: Make generated lextab.py deterministic by <a href="https://github.com/jackrosenthal"><code>@​jackrosenthal</code></a> in <a href="https://redirect.github.com/eliben/pycparser/pull/531">eliben/pycparser#531</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/matamegger"><code>@​matamegger</code></a> made their first contribution in <a href="https://redirect.github.com/eliben/pycparser/pull/449">eliben/pycparser#449</a></li> <li><a href="https://github.com/Shortfinga"><code>@​Shortfinga</code></a> made their first contribution in <a href="https://redirect.github.com/eliben/pycparser/pull/474">eliben/pycparser#474</a></li> <li><a href="https://github.com/erlend-aasland"><code>@​erlend-aasland</code></a> made their first contribution in <a href="https://redirect.github.com/eliben/pycparser/pull/469">eliben/pycparser#469</a></li> <li><a href="https://github.com/jordr"><code>@​jordr</code></a> made their first contribution in <a href="https://redirect.github.com/eliben/pycparser/pull/486">eliben/pycparser#486</a></li> <li><a href="https://github.com/joycebrum"><code>@​joycebrum</code></a> made their first contribution in <a 
href="https://redirect.github.com/eliben/pycparser/pull/492">eliben/pycparser#492</a></li> <li><a href="https://github.com/mgorny"><code>@​mgorny</code></a> made their first contribution in <a href="https://redirect.github.com/eliben/pycparser/pull/494">eliben/pycparser#494</a></li> <li><a href="https://github.com/cclauss"><code>@​cclauss</code></a> made their first contribution in <a href="https://redirect.github.com/eliben/pycparser/pull/500">eliben/pycparser#500</a></li> <li><a href="https://github.com/Andree37"><code>@​Andree37</code></a> made their first contribution in <a href="https://redirect.github.com/eliben/pycparser/pull/507">eliben/pycparser#507</a></li> <li><a href="https://github.com/jackrosenthal"><code>@​jackrosenthal</code></a> made their first contribution in <a href="https://redirect.github.com/eliben/pycparser/pull/531">eliben/pycparser#531</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/eliben/pycparser/compare/release_v2.21...release_v2.22">https://github.com/eliben/pycparser/compare/release_v2.21...release_v2.22</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/eliben/pycparser/blob/main/CHANGES">pycparser's changelog</a>.</em></p> <blockquote> <ul> <li>Starting with version 2.22, please use the GitHub UI at <a href="https://github.com/eliben/pycparser/tags">https://github.com/eliben/pycparser/tags</a> to compare tags in order to find out what changed.</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/eliben/pycparser/commit/129d32ef805d715d90a3b2035b13168c17ca63d2"><code>129d32e</code></a> Prepare for release 2.22</li> <li><a href="https://github.com/eliben/pycparser/commit/c3e2644cb26c41a24f387c713202a1a94e70eae6"><code>c3e2644</code></a> update CHANGES file for future changes</li> <li><a 
href="https://github.com/eliben/pycparser/commit/c500fb6de5a073a4bf6452c565abea69bd9932f2"><code>c500fb6</code></a> ply: Make generated lextab.py deterministic (<a href="https://redirect.github.com/eliben/pycparser/issues/531">#531</a>)</li> <li><a href="https://github.com/eliben/pycparser/commit/f7409953060f1f4d0f8988f1e131a49f84c95eba"><code>f740995</code></a> Add support for Python 3.12 (<a href="https://redirect.github.com/eliben/pycparser/issues/515">#515</a>)</li> <li><a href="https://github.com/eliben/pycparser/commit/6cf69df2ccd9af86a6ae1618a10d7136b30b5c49"><code>6cf69df</code></a> New example to generate AST from scratch (<a href="https://redirect.github.com/eliben/pycparser/issues/507">#507</a>)</li> <li><a href="https://github.com/eliben/pycparser/commit/50a26acb3674ff05d16ab174efbff3d2e549dbdb"><code>50a26ac</code></a> Remove unneeded import in an example</li> <li><a href="https://github.com/eliben/pycparser/commit/d86a9e58b78120c6c2fc61654eafc1d958ba8863"><code>d86a9e5</code></a> Remove from <strong>future</strong> imports from all files in this repo</li> <li><a href="https://github.com/eliben/pycparser/commit/a9f073e26922786d08592a7b92f4f56d436e3905"><code>a9f073e</code></a> Remove from <strong>future</strong> imports in examples</li> <li><a href="https://github.com/eliben/pycparser/commit/670979b89a1e84f9c4828a8a08e9187b90674afa"><code>670979b</code></a> Update SECURITY.md</li> <li><a href="https://github.com/eliben/pycparser/commit/9e8cd294f1527d918ec75927260d6ed3ab0331de"><code>9e8cd29</code></a> Create a Security Policy (<a href="https://redirect.github.com/eliben/pycparser/issues/499">#499</a>)</li> <li>Additional commits viewable in <a href="https://github.com/eliben/pycparser/compare/release_v2.21...release_v2.22">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pycparser&package-manager=pip&previous-version=2.21&new-version=2.22)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 113be2767ec..50817d4db17 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -34,7 +34,7 @@ packaging==24.1 # via gunicorn pycares==4.4.0 # via aiodns -pycparser==2.21 +pycparser==2.22 # via cffi uvloop==0.20.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 15e2e39f670..68cf03350d4 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -150,7 +150,7 @@ proxy-py==2.4.7 # via -r requirements/test.in pycares==4.4.0 # via aiodns -pycparser==2.21 +pycparser==2.22 # via cffi pydantic==2.2.0 # via python-on-whales diff --git a/requirements/dev.txt b/requirements/dev.txt index 06a905e4b1e..d2e718798e8 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -147,7 +147,7 @@ proxy-py==2.4.7 # via -r 
requirements/test.in pycares==4.4.0 # via aiodns -pycparser==2.21 +pycparser==2.22 # via cffi pydantic==2.2.0 # via python-on-whales diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 4d1981d5e3b..a8c2eebe1c8 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -30,7 +30,7 @@ multidict==6.0.5 # yarl pycares==4.4.0 # via aiodns -pycparser==2.21 +pycparser==2.22 # via cffi yarl==1.9.4 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 61497339cf8..58d18c54f6f 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -75,7 +75,7 @@ proxy-py==2.4.7 # via -r requirements/test.in pycares==4.4.0 # via aiodns -pycparser==2.21 +pycparser==2.22 # via cffi pydantic==2.2.0 # via python-on-whales From 95e32fb2e1b16ec4348c93e4c8d48fc9445466a2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 23 Aug 2024 12:59:06 +0000 Subject: [PATCH 0405/1511] Bump charset-normalizer from 3.2.0 to 3.3.2 (#8863) Bumps [charset-normalizer](https://github.com/Ousret/charset_normalizer) from 3.2.0 to 3.3.2. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/Ousret/charset_normalizer/releases">charset-normalizer's releases</a>.</em></p> <blockquote> <h2>Version 3.3.2</h2> <h2><a href="https://github.com/Ousret/charset_normalizer/compare/3.3.1...3.3.2">3.3.2</a> (2023-10-31)</h2> <h3>Fixed</h3> <ul> <li>Unintentional memory usage regression when using large payloads that match several encodings (<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/376">#376</a>)</li> <li>Regression on some detection cases showcased in the documentation (<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/371">#371</a>)</li> </ul> <h3>Added</h3> <ul> <li>Noise (md) probe that identifies malformed Arabic representation due to the presence of letters in isolated form (credit to my wife, thanks!)</li> </ul> <h2>Version 3.3.1</h2> <h2><a href="https://github.com/Ousret/charset_normalizer/compare/3.3.0...3.3.1">3.3.1</a> (2023-10-22)</h2> <h3>Changed</h3> <ul> <li>Optional mypyc compilation upgraded to version 1.6.1 for Python >= 3.8</li> <li>Improved the general detection reliability based on reports from the community</li> </ul> <h2>Release 3.3.0</h2> <h2><a href="https://github.com/Ousret/charset_normalizer/compare/3.2.0...3.3.0">3.3.0</a> (2023-09-30)</h2> <h3>Added</h3> <ul> <li>Allow to execute the CLI (e.g. 
normalizer) through <code>python -m charset_normalizer.cli</code> or <code>python -m charset_normalizer</code></li> <li>Support for 9 forgotten encodings that are supported by Python but unlisted in <code>encoding.aliases</code> as they have no alias (<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/323">#323</a>)</li> </ul> <h3>Removed</h3> <ul> <li>(internal) Redundant utils.is_ascii function and unused function is_private_use_only</li> <li>(internal) charset_normalizer.assets is moved inside charset_normalizer.constant</li> </ul> <h3>Changed</h3> <ul> <li>(internal) Unicode code blocks in constants are updated using the latest v15.0.0 definition to improve detection</li> <li>Optional mypyc compilation upgraded to version 1.5.1 for Python >= 3.8</li> </ul> <h3>Fixed</h3> <ul> <li>Unable to properly sort CharsetMatch when both chaos/noise and coherence were close due to an unreachable condition in __lt__ (<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/350">#350</a>)</li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/jawah/charset_normalizer/blob/master/CHANGELOG.md">charset-normalizer's changelog</a>.</em></p> <blockquote> <h2><a href="https://github.com/Ousret/charset_normalizer/compare/3.3.1...3.3.2">3.3.2</a> (2023-10-31)</h2> <h3>Fixed</h3> <ul> <li>Unintentional memory usage regression when using large payload that match several encoding (<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/376">#376</a>)</li> <li>Regression on some detection case showcased in the documentation (<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/371">#371</a>)</li> </ul> <h3>Added</h3> <ul> <li>Noise (md) probe that identify malformed arabic representation due to the presence of letters in isolated form (credit to my wife)</li> </ul> <h2><a href="https://github.com/Ousret/charset_normalizer/compare/3.3.0...3.3.1">3.3.1</a> 
(2023-10-22)</h2> <h3>Changed</h3> <ul> <li>Optional mypyc compilation upgraded to version 1.6.1 for Python >= 3.8</li> <li>Improved the general detection reliability based on reports from the community</li> </ul> <h2><a href="https://github.com/Ousret/charset_normalizer/compare/3.2.0...3.3.0">3.3.0</a> (2023-09-30)</h2> <h3>Added</h3> <ul> <li>Allow to execute the CLI (e.g. normalizer) through <code>python -m charset_normalizer.cli</code> or <code>python -m charset_normalizer</code></li> <li>Support for 9 forgotten encoding that are supported by Python but unlisted in <code>encoding.aliases</code> as they have no alias (<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/323">#323</a>)</li> </ul> <h3>Removed</h3> <ul> <li>(internal) Redundant utils.is_ascii function and unused function is_private_use_only</li> <li>(internal) charset_normalizer.assets is moved inside charset_normalizer.constant</li> </ul> <h3>Changed</h3> <ul> <li>(internal) Unicode code blocks in constants are updated using the latest v15.0.0 definition to improve detection</li> <li>Optional mypyc compilation upgraded to version 1.5.1 for Python >= 3.8</li> </ul> <h3>Fixed</h3> <ul> <li>Unable to properly sort CharsetMatch when both chaos/noise and coherence were close due to an unreachable condition in __lt__ (<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/350">#350</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/jawah/charset_normalizer/commit/79dce4857914fead2ffe55eb787cad6d5cf14643"><code>79dce48</code></a> :bug: Regression on some detection case showcased in the documentation (<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/371">#371</a>)...</li> <li><a href="https://github.com/jawah/charset_normalizer/commit/a4b9b012b0370f216b9bb3b8649ebcd0a3309870"><code>a4b9b01</code></a> Bump github/codeql-action from 2.22.4 to 2.22.5 (<a 
href="https://redirect.github.com/Ousret/charset_normalizer/issues/375">#375</a>)</li> <li><a href="https://github.com/jawah/charset_normalizer/commit/dcc01cc2e760699a63a0521f425b85e5dd21e6c2"><code>dcc01cc</code></a> Bump ossf/scorecard-action from 2.3.0 to 2.3.1 (<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/374">#374</a>)</li> <li><a href="https://github.com/jawah/charset_normalizer/commit/9cd402ca25b56cc8b205c66e2ecfc9e50f069816"><code>9cd402c</code></a> Bump pytest from 7.4.2 to 7.4.3 (<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/373">#373</a>)</li> <li><a href="https://github.com/jawah/charset_normalizer/commit/e274dcc348f77d6687466cc893818ba03117c484"><code>e274dcc</code></a> :bug: Fix unintentional memory usage regression when using large payload that...</li> <li><a href="https://github.com/jawah/charset_normalizer/commit/07f30414f2c2210b45ec15c805d4eb84b93348cd"><code>07f3041</code></a> :arrow_up: Bump github/codeql-action from 2.22.3 to 2.22.4 (<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/370">#370</a>)</li> <li><a href="https://github.com/jawah/charset_normalizer/commit/52086441844a216040e79201eceaf9ca787ffffc"><code>5208644</code></a> :bookmark: Release 3.3.1 (<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/367">#367</a>)</li> <li><a href="https://github.com/jawah/charset_normalizer/commit/66966f1d7911a6ed29bc5d6dd927aa1fd5fac8a1"><code>66966f1</code></a> :sparkle: Improve the detection around some cases (<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/366">#366</a>)</li> <li><a href="https://github.com/jawah/charset_normalizer/commit/49653a69d8ba6e20f62b557794e9ee3b57370afb"><code>49653a6</code></a> :arrow_up: Bump actions/setup-python from 4.7.0 to 4.7.1 (<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/359">#359</a>)</li> <li><a 
href="https://github.com/jawah/charset_normalizer/commit/f6a66ed76f703aa081e97bea80dc34e47ae7e886"><code>f6a66ed</code></a> :arrow_up: Bump pypa/cibuildwheel from 2.16.0 to 2.16.2 (<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/361">#361</a>)</li> <li>Additional commits viewable in <a href="https://github.com/Ousret/charset_normalizer/compare/3.2.0...3.3.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=charset-normalizer&package-manager=pip&previous-version=3.2.0&new-version=3.3.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 68cf03350d4..2ad79ce2686 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -42,7 +42,7 @@ cffi==1.17.0 # pycares cfgv==3.4.0 # via pre-commit -charset-normalizer==3.2.0 +charset-normalizer==3.3.2 # via requests cherry-picker==2.2.0 # via -r requirements/dev.in diff --git a/requirements/dev.txt b/requirements/dev.txt index d2e718798e8..41f343a7adc 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -42,7 +42,7 @@ cffi==1.17.0 # pycares cfgv==3.4.0 # via pre-commit -charset-normalizer==3.2.0 +charset-normalizer==3.3.2 # via requests cherry-picker==2.2.0 # via -r requirements/dev.in diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 977da96e6e2..ebab702f1b1 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -14,7 +14,7 @@ blockdiag==3.0.0 # via 
sphinxcontrib-blockdiag certifi==2024.7.4 # via requests -charset-normalizer==3.3.1 +charset-normalizer==3.3.2 # via requests click==8.1.6 # via towncrier diff --git a/requirements/doc.txt b/requirements/doc.txt index 371034a8849..efa46279ff5 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -14,7 +14,7 @@ blockdiag==3.0.0 # via sphinxcontrib-blockdiag certifi==2024.7.4 # via requests -charset-normalizer==3.3.1 +charset-normalizer==3.3.2 # via requests click==8.1.6 # via towncrier diff --git a/requirements/test.txt b/requirements/test.txt index 58d18c54f6f..05903cfa5a4 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -24,7 +24,7 @@ cffi==1.17.0 # via # cryptography # pycares -charset-normalizer==3.2.0 +charset-normalizer==3.3.2 # via requests click==8.1.6 # via From 7173b33b6026c933d001c3d12d3e8464c6b3812b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 23 Aug 2024 13:25:38 +0000 Subject: [PATCH 0406/1511] Bump pydantic from 2.2.0 to 2.8.2 (#8864) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pydantic](https://github.com/pydantic/pydantic) from 2.2.0 to 2.8.2. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pydantic/pydantic/releases">pydantic's releases</a>.</em></p> <blockquote> <h2>v2.8.2 (2024-07-03)</h2> <h2>What's Changed</h2> <h3>Fixes</h3> <ul> <li>Fix issue with assertion caused by pluggable schema validator by <a href="https://github.com/dmontagu"><code>@​dmontagu</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9838">#9838</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pydantic/pydantic/compare/v2.8.1...v2.8.2">https://github.com/pydantic/pydantic/compare/v2.8.1...v2.8.2</a></p> <h2>v2.8.1 (2024-07-03)</h2> <h3>What's Changed</h3> <h4>Packaging</h4> <ul> <li>Bump <code>ruff</code> to <code>v0.5.0</code> and <code>pyright</code> to <code>v1.1.369</code> by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9801">#9801</a></li> <li>Bump <code>pydantic-core</code> to <code>v2.20.1</code>, <code>pydantic-extra-types</code> to <code>v2.9.0</code> by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9832">#9832</a></li> </ul> <h4>Fixes</h4> <ul> <li>Fix breaking change in <code>to_snake</code> from v2.7 -> v2.8 by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9812">#9812</a></li> <li>Fix list constraint json schema application by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9818">#9818</a></li> <li>Support time duration more than 23 by <a href="https://github.com/nix010"><code>@​nix010</code></a> in <a href="https://redirect.github.com/pydantic/speedate/pull/64">pydantic/speedate#64</a></li> <li>Fix millisecond fraction being handled with the wrong scale by <a 
href="https://github.com/davidhewitt"><code>@​davidhewitt</code></a> in <a href="https://redirect.github.com/pydantic/speedate/pull/65">pydantic/speedate#65</a></li> <li>Handle negative fractional durations correctly by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/speedate/pull/71">pydantic/speedate#71</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/kwint"><code>@​kwint</code></a> made their first contribution in <a href="https://redirect.github.com/pydantic/pydantic/pull/9787">pydantic/pydantic#9787</a></li> <li><a href="https://github.com/seekinginfiniteloop"><code>@​seekinginfiniteloop</code></a> made their first contribution in <a href="https://redirect.github.com/pydantic/pydantic/pull/9822">pydantic/pydantic#9822</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pydantic/pydantic/compare/v2.8.0...v2.8.1">https://github.com/pydantic/pydantic/compare/v2.8.0...v2.8.1</a></p> <h2>v2.8.0 (2024-07-01)</h2> <p>The code released in <code>v2.8.0</code> is functionally identical to that of <code>v2.8.0b1</code>.</p> <p>Check out our <a href="https://pydantic.dev/articles/pydantic-v2-8-release">blog post</a> to learn more about the release highlights!</p> <h2>What's Changed</h2> <h3>Packaging</h3> <ul> <li>Update citation version automatically with new releases by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9673">#9673</a></li> <li>Bump pyright to <code>v1.1.367</code> and add type checking tests for pipeline API by <a href="https://github.com/adriangb"><code>@​adriangb</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9674">#9674</a></li> <li>Update <code>pydantic.v1</code> stub to <code>v1.10.17</code> by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a 
href="https://redirect.github.com/pydantic/pydantic/pull/9707">#9707</a></li> <li>General package updates to prep for <code>v2.8.0b1</code> by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9741">#9741</a></li> <li>Bump <code>pydantic-core</code> to <code>v2.20.0</code> by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9745">#9745</a></li> <li>Add support for Python 3.13 by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9743">#9743</a></li> <li>Update <code>pdm</code> version used for <code>pdm.lock</code> to <code>v2.16.1</code> by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9761">#9761</a></li> <li>Update to <code>ruff</code> <code>v0.4.8</code> by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9585">#9585</a></li> </ul> <h3>New Features</h3> <ul> <li>Experimental: support <code>defer_build</code> for <code>TypeAdapter</code> by <a href="https://github.com/MarkusSintonen"><code>@​MarkusSintonen</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/8939">#8939</a></li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pydantic/pydantic/blob/main/HISTORY.md">pydantic's changelog</a>.</em></p> <blockquote> <h2>v2.8.2 (2024-07-03)</h2> <p><a href="https://github.com/pydantic/pydantic/releases/tag/v2.8.2">GitHub release</a></p> <h3>What's Changed</h3> <h4>Fixes</h4> <ul> <li>Fix issue with assertion caused by pluggable schema validator by <a href="https://github.com/dmontagu"><code>@​dmontagu</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9838">#9838</a></li> </ul> <h2>v2.8.1 (2024-07-03)</h2> <p><a href="https://github.com/pydantic/pydantic/releases/tag/v2.8.1">GitHub release</a></p> <h3>What's Changed</h3> <h4>Packaging</h4> <ul> <li>Bump <code>ruff</code> to <code>v0.5.0</code> and <code>pyright</code> to <code>v1.1.369</code> by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9801">#9801</a></li> <li>Bump <code>pydantic-core</code> to <code>v2.20.1</code>, <code>pydantic-extra-types</code> to <code>v2.9.0</code> by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9832">#9832</a></li> </ul> <h4>Fixes</h4> <ul> <li>Fix breaking change in <code>to_snake</code> from v2.7 -> v2.8 by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9812">#9812</a></li> <li>Fix list constraint json schema application by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9818">#9818</a></li> <li>Support time duration more than 23 by <a href="https://github.com/nix010"><code>@​nix010</code></a> in <a href="https://redirect.github.com/pydantic/speedate/pull/64">pydantic/speedate#64</a></li> <li>Fix millisecond 
fraction being handled with the wrong scale by <a href="https://github.com/davidhewitt"><code>@​davidhewitt</code></a> in <a href="https://redirect.github.com/pydantic/speedate/pull/65">pydantic/speedate#65</a></li> <li>Handle negative fractional durations correctly by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/speedate/pull/71">pydantic/speedate#71</a></li> </ul> <h2>v2.8.0 (2024-07-01)</h2> <p><a href="https://github.com/pydantic/pydantic/releases/tag/v2.8.0">GitHub release</a></p> <p>The code released in v2.8.0 is functionally identical to that of v2.8.0b1.</p> <h3>What's Changed</h3> <h4>Packaging</h4> <ul> <li>Update citation version automatically with new releases by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9673">#9673</a></li> <li>Bump pyright to <code>v1.1.367</code> and add type checking tests for pipeline API by <a href="https://github.com/adriangb"><code>@​adriangb</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9674">#9674</a></li> <li>Update <code>pydantic.v1</code> stub to <code>v1.10.17</code> by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9707">#9707</a></li> <li>General package updates to prep for <code>v2.8.0b1</code> by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9741">#9741</a></li> <li>Bump <code>pydantic-core</code> to <code>v2.20.0</code> by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9745">#9745</a></li> <li>Add support for Python 3.13 by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a 
href="https://redirect.github.com/pydantic/pydantic/pull/9743">#9743</a></li> <li>Update <code>pdm</code> version used for <code>pdm.lock</code> to v2.16.1 by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9761">#9761</a></li> <li>Update to <code>ruff</code> <code>v0.4.8</code> by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9585">#9585</a></li> </ul> <h4>New Features</h4> <ul> <li>Experimental: support <code>defer_build</code> for <code>TypeAdapter</code> by <a href="https://github.com/MarkusSintonen"><code>@​MarkusSintonen</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/8939">#8939</a></li> <li>Implement <code>deprecated</code> field in json schema by <a href="https://github.com/NeevCohen"><code>@​NeevCohen</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9298">#9298</a></li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pydantic/pydantic/commit/4978ee235bf5f0fa11159b2dfe46068ef3deba0a"><code>4978ee2</code></a> update history</li> <li><a href="https://github.com/pydantic/pydantic/commit/03459290582af49a7dd09464a584fe315438c3fc"><code>0345929</code></a> v bump</li> <li><a href="https://github.com/pydantic/pydantic/commit/d390a0413f1317c129bb79e46a57397e2ef32cd1"><code>d390a04</code></a> Fix issue with assertion caused by pluggable schema validator (<a href="https://redirect.github.com/pydantic/pydantic/issues/9838">#9838</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/040865f3b8b9b201282826f92f552efa238b1f2e"><code>040865f</code></a> update history</li> <li><a href="https://github.com/pydantic/pydantic/commit/5a33e3b237285b721a42a1816ea1e67a4c6ecb75"><code>5a33e3b</code></a> bump version</li> <li><a href="https://github.com/pydantic/pydantic/commit/2f9abb29bf15981e9cab0037808808c860d9afbc"><code>2f9abb2</code></a> Bump <code>pydantic-core</code> to <code>v2.20.1</code>, <code>pydantic-extra-types</code> to <code>v2.9.0</code> (<a href="https://redirect.github.com/pydantic/pydantic/issues/9832">#9832</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/ce9c5f78b580755945a323628cdd9f673325133d"><code>ce9c5f7</code></a> Remove spooky meetings file (<a href="https://redirect.github.com/pydantic/pydantic/issues/9824">#9824</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/6bdd6d11440a43b0acff5d7d0e32f5f6ca8d9876"><code>6bdd6d1</code></a> Pedantic typo correction within explanation of Pydantic's root in 'pedantic' ...</li> <li><a href="https://github.com/pydantic/pydantic/commit/701ccdecec02da2b23f216cd6b2e33911b81bbc9"><code>701ccde</code></a> Fix list constraint json schema application (<a href="https://redirect.github.com/pydantic/pydantic/issues/9818">#9818</a>)</li> <li><a 
href="https://github.com/pydantic/pydantic/commit/2a066a26f7515fb99498ad9f5fcaba9c6abcb41e"><code>2a066a2</code></a> Bump <code>ruff</code> to <code>v0.5.0</code> and <code>pyright</code> to <code>v1.1.369</code> (<a href="https://redirect.github.com/pydantic/pydantic/issues/9801">#9801</a>)</li> <li>Additional commits viewable in <a href="https://github.com/pydantic/pydantic/compare/v2.2.0...v2.8.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pydantic&package-manager=pip&previous-version=2.2.0&new-version=2.8.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 4 ++-- requirements/dev.txt | 4 ++-- requirements/lint.txt | 4 ++-- requirements/test.txt | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 2ad79ce2686..9389b1a0701 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -152,9 +152,9 @@ pycares==4.4.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.2.0 +pydantic==2.8.2 # via python-on-whales -pydantic-core==2.6.0 +pydantic-core==2.20.1 # via pydantic pyenchant==3.2.2 # via sphinxcontrib-spelling diff --git a/requirements/dev.txt b/requirements/dev.txt index 41f343a7adc..4c9b1c43f09 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -149,9 +149,9 @@ pycares==4.4.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.2.0 +pydantic==2.8.2 # via python-on-whales -pydantic-core==2.6.0 +pydantic-core==2.20.1 # via pydantic pygments==2.18.0 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index 6424a9c147a..5cfc111450b 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -66,9 +66,9 @@ pycares==4.4.0 # 
via aiodns pycparser==2.22 # via cffi -pydantic==2.7.1 +pydantic==2.8.2 # via python-on-whales -pydantic-core==2.18.2 +pydantic-core==2.20.1 # via pydantic pygments==2.18.0 # via rich diff --git a/requirements/test.txt b/requirements/test.txt index 05903cfa5a4..4886cbb15e6 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -77,9 +77,9 @@ pycares==4.4.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.2.0 +pydantic==2.8.2 # via python-on-whales -pydantic-core==2.6.0 +pydantic-core==2.20.1 # via pydantic pygments==2.18.0 # via rich From a74e4f0ad3824b4808da9816d87cd4637e6eabc5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 23 Aug 2024 13:35:51 +0000 Subject: [PATCH 0407/1511] Bump filelock from 3.12.2 to 3.15.4 (#8867) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [filelock](https://github.com/tox-dev/py-filelock) from 3.12.2 to 3.15.4. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/tox-dev/py-filelock/releases">filelock's releases</a>.</em></p> <blockquote> <h2>3.15.4</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>Pass <code>file_lock</code> as positional argument by <a href="https://github.com/kwist-sgr"><code>@​kwist-sgr</code></a> in <a href="https://redirect.github.com/tox-dev/filelock/pull/347">tox-dev/filelock#347</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/tox-dev/filelock/compare/3.15.3...3.15.4">https://github.com/tox-dev/filelock/compare/3.15.3...3.15.4</a></p> <h2>3.15.3</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>Add test for virtualenv stability by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/tox-dev/filelock/pull/344">tox-dev/filelock#344</a></li> <li>Fix <code>TypeError: _CountedFileLock.__init__() got an unexpected keyword argument 
'timeout'</code> by <a href="https://github.com/kwist-sgr"><code>@​kwist-sgr</code></a> in <a href="https://redirect.github.com/tox-dev/filelock/pull/345">tox-dev/filelock#345</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/tox-dev/filelock/compare/3.15.2...3.15.3">https://github.com/tox-dev/filelock/compare/3.15.2...3.15.3</a></p> <h2>3.15.2</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>Use a metaclass to implement the singleton pattern by <a href="https://github.com/kwist-sgr"><code>@​kwist-sgr</code></a> in <a href="https://redirect.github.com/tox-dev/filelock/pull/340">tox-dev/filelock#340</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/kwist-sgr"><code>@​kwist-sgr</code></a> made their first contribution in <a href="https://redirect.github.com/tox-dev/filelock/pull/340">tox-dev/filelock#340</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/tox-dev/filelock/compare/3.15.1...3.15.2">https://github.com/tox-dev/filelock/compare/3.15.1...3.15.2</a></p> <h2>3.15.1</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>Hotfix: Restore <strong>init</strong> method; more robust initialization for singleton locks by <a href="https://github.com/ethanbb"><code>@​ethanbb</code></a> in <a href="https://redirect.github.com/tox-dev/filelock/pull/338">tox-dev/filelock#338</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/tox-dev/filelock/compare/3.15.0...3.15.1">https://github.com/tox-dev/filelock/compare/3.15.0...3.15.1</a></p> <h2>3.15.0</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>asyncio support by <a href="https://github.com/Ovizro"><code>@​Ovizro</code></a> in <a href="https://redirect.github.com/tox-dev/filelock/pull/332">tox-dev/filelock#332</a></li> <li>Don't initialize BaseFileLock when just returning existing instance by <a href="https://github.com/ethanbb"><code>@​ethanbb</code></a> in <a 
href="https://redirect.github.com/tox-dev/filelock/pull/334">tox-dev/filelock#334</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/Ovizro"><code>@​Ovizro</code></a> made their first contribution in <a href="https://redirect.github.com/tox-dev/filelock/pull/332">tox-dev/filelock#332</a></li> <li><a href="https://github.com/ethanbb"><code>@​ethanbb</code></a> made their first contribution in <a href="https://redirect.github.com/tox-dev/filelock/pull/334">tox-dev/filelock#334</a></li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/tox-dev/filelock/commit/9a979dfe68fab75517c0612952ea401a232055a0"><code>9a979df</code></a> Pass file_lock as positional argument (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/347">#347</a>)</li> <li><a href="https://github.com/tox-dev/filelock/commit/3a79343711eef57b864670dc2cafd352ed749d9b"><code>3a79343</code></a> Fix `TypeError: _CountedFileLock.<strong>init</strong>() got an unexpected keyword argumen...</li> <li><a href="https://github.com/tox-dev/filelock/commit/81d4cf9e9ea98cefdf9f380b024d627dffb28efb"><code>81d4cf9</code></a> Add test for virtualenv stability (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/344">#344</a>)</li> <li><a href="https://github.com/tox-dev/filelock/commit/192f1efcb95924010d785c3f726433273f6ed114"><code>192f1ef</code></a> Use a metaclass to implement the singleton pattern (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/340">#340</a>)</li> <li><a href="https://github.com/tox-dev/filelock/commit/48788c5b09b57ac121566af4aedd6d312afd1ce8"><code>48788c5</code></a> Bump pypa/gh-action-pypi-publish from 1.8.14 to 1.9.0 (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/341">#341</a>)</li> <li><a href="https://github.com/tox-dev/filelock/commit/7bd3b7b8067cbaaee0d2fc4f6a8de2363eb1e915"><code>7bd3b7b</code></a> 
[pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/342">#342</a>)</li> <li><a href="https://github.com/tox-dev/filelock/commit/3d7b1a6276144bdbad0086067feb36e1c1a8d2f7"><code>3d7b1a6</code></a> Hotfix: Restore <strong>init</strong> method; more robust initialization for singleton loc...</li> <li><a href="https://github.com/tox-dev/filelock/commit/c64787f2a50d7dbc9882f4ac9401c2a2e57cd8aa"><code>c64787f</code></a> Don't initialize BaseFileLock when just returning existing instance (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/334">#334</a>)</li> <li><a href="https://github.com/tox-dev/filelock/commit/87453f3dbed64821b164ba2dbd586eae98ff0dc8"><code>87453f3</code></a> asyncio support (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/332">#332</a>)</li> <li><a href="https://github.com/tox-dev/filelock/commit/0ee2e3ca5f3ee2f35928c8129b5aff9f4b2ff1e5"><code>0ee2e3c</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/333">#333</a>)</li> <li>Additional commits viewable in <a href="https://github.com/tox-dev/py-filelock/compare/3.12.2...3.15.4">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=filelock&package-manager=pip&previous-version=3.12.2&new-version=3.15.4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/dev.txt b/requirements/dev.txt index 4c9b1c43f09..3f408533ec4 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -68,7 +68,7 @@ docutils==0.20.1 # via sphinx exceptiongroup==1.2.2 # via pytest -filelock==3.12.2 +filelock==3.15.4 # via virtualenv freezegun==1.5.1 # via diff 
--git a/requirements/lint.txt b/requirements/lint.txt index 5cfc111450b..94091021093 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -32,7 +32,7 @@ distlib==0.3.8 # via virtualenv exceptiongroup==1.2.2 # via pytest -filelock==3.12.2 +filelock==3.15.4 # via virtualenv freezegun==1.5.1 # via -r requirements/lint.in From 3b3156e0ff6fd87cdd4c6f737310ba18a56371e8 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 23 Aug 2024 14:59:30 +0000 Subject: [PATCH 0408/1511] [PR #8846/4dd8c807 backport][3.11] Format status lines with f-strings (#8869) Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/client_reqrep.py | 5 ++--- aiohttp/web_response.py | 4 +--- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index bea76d84c39..0e67607f5d9 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -727,9 +727,8 @@ async def send(self, conn: "Connection") -> "ClientResponse": self.headers[hdrs.CONNECTION] = connection # status + headers - status_line = "{0} {1} HTTP/{v.major}.{v.minor}".format( - self.method, path, v=self.version - ) + v = self.version + status_line = f"{self.method} {path} HTTP/{v.major}.{v.minor}" await writer.write_headers(status_line, self.headers) coro = self.write_bytes(writer, conn) diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 78d3fe32949..ae22d587ab4 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -500,9 +500,7 @@ async def _write_headers(self) -> None: assert writer is not None # status line version = request.version - status_line = "HTTP/{}.{} {} {}".format( - version[0], version[1], self._status, self._reason - ) + status_line = f"HTTP/{version[0]}.{version[1]} {self._status} {self._reason}" await writer.write_headers(status_line, self._headers) async def write(self, data: bytes) -> None: From 5948290685c8a0fb460b3e9d021ef2f5057a8587 Mon Sep 17 
00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 23 Aug 2024 15:45:53 +0000 Subject: [PATCH 0409/1511] [PR #8846/4dd8c807 backport][3.10] Format status lines with f-strings (#8868) **This is a backport of PR #8846 as merged into master (4dd8c807774aee39f9d6fbe16f1ed11388aa9498).** <!-- Thank you for your contribution! --> ## What do these changes do? Format status lines with f-strings ## Are there changes in behavior for the user? no, maybe a tiny performance improvement ## Is it a substantial burden for the maintainers to support this? no Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/client_reqrep.py | 5 ++--- aiohttp/web_response.py | 4 +--- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index bea76d84c39..0e67607f5d9 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -727,9 +727,8 @@ async def send(self, conn: "Connection") -> "ClientResponse": self.headers[hdrs.CONNECTION] = connection # status + headers - status_line = "{0} {1} HTTP/{v.major}.{v.minor}".format( - self.method, path, v=self.version - ) + v = self.version + status_line = f"{self.method} {path} HTTP/{v.major}.{v.minor}" await writer.write_headers(status_line, self.headers) coro = self.write_bytes(writer, conn) diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 78d3fe32949..ae22d587ab4 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -500,9 +500,7 @@ async def _write_headers(self) -> None: assert writer is not None # status line version = request.version - status_line = "HTTP/{}.{} {} {}".format( - version[0], version[1], self._status, self._reason - ) + status_line = f"HTTP/{version[0]}.{version[1]} {self._status} {self._reason}" await writer.write_headers(status_line, self._headers) async def write(self, data: bytes) -> None: From b4092d96a1350d9d18bc2db787fdff8f784dbb3f Mon Sep 17 00:00:00 2001 From: Sam 
Bull <git@sambull.org> Date: Fri, 23 Aug 2024 16:46:01 +0100 Subject: [PATCH 0410/1511] No default Content-Type when no content (#8858) (#8871) (cherry picked from commit 26772ad320c1ee0efa5b91ae35ae5860a37cd709) --- CHANGES/8858.bugfix.rst | 1 + aiohttp/helpers.py | 6 ++++-- aiohttp/web_request.py | 6 +++--- aiohttp/web_response.py | 3 ++- tests/test_client_functional.py | 2 -- tests/test_web_functional.py | 15 +++++++++++++++ tests/test_web_response.py | 2 -- 7 files changed, 25 insertions(+), 10 deletions(-) create mode 100644 CHANGES/8858.bugfix.rst diff --git a/CHANGES/8858.bugfix.rst b/CHANGES/8858.bugfix.rst new file mode 100644 index 00000000000..e4efa91a2fd --- /dev/null +++ b/CHANGES/8858.bugfix.rst @@ -0,0 +1 @@ +Stopped adding a default Content-Type header when response has no content -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index ccfa9d5e2fe..f759bddc099 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -762,7 +762,8 @@ def content_type(self) -> str: raw = self._headers.get(hdrs.CONTENT_TYPE) if self._stored_content_type != raw: self._parse_content_type(raw) - return self._content_type # type: ignore[return-value] + assert self._content_type is not None + return self._content_type @property def charset(self) -> Optional[str]: @@ -770,7 +771,8 @@ def charset(self) -> Optional[str]: raw = self._headers.get(hdrs.CONTENT_TYPE) if self._stored_content_type != raw: self._parse_content_type(raw) - return self._content_dict.get("charset") # type: ignore[union-attr] + assert self._content_dict is not None + return self._content_dict.get("charset") @property def content_length(self) -> Optional[int]: diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index a485f0dcea6..a63d3074ea5 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -79,7 +79,7 @@ class FileField: filename: str file: io.BufferedReader content_type: str - headers: "CIMultiDictProxy[str]" + headers: CIMultiDictProxy[str] 
_TCHAR: Final[str] = string.digits + string.ascii_letters + r"!#$%&'*+.^_`|~-" @@ -169,7 +169,7 @@ def __init__( self._payload_writer = payload_writer self._payload = payload - self._headers = message.headers + self._headers: CIMultiDictProxy[str] = message.headers self._method = message.method self._version = message.version self._cache: Dict[str, Any] = {} @@ -493,7 +493,7 @@ def query_string(self) -> str: return self._rel_url.query_string @reify - def headers(self) -> "MultiMapping[str]": + def headers(self) -> CIMultiDictProxy[str]: """A case-insensitive multidict proxy with all headers.""" return self._headers diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index ae22d587ab4..95028a929ff 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -479,7 +479,8 @@ async def _prepare_headers(self) -> None: # https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-13 if hdrs.TRANSFER_ENCODING in headers: del headers[hdrs.TRANSFER_ENCODING] - else: + elif self.content_length != 0: + # https://www.rfc-editor.org/rfc/rfc9110#section-8.3-5 headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream") headers.setdefault(hdrs.DATE, rfc822_formatted_time()) headers.setdefault(hdrs.SERVER, SERVER_SOFTWARE) diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 872876d4a32..566c47522ce 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -727,7 +727,6 @@ async def handler(request): raw_headers = tuple((bytes(h), bytes(v)) for h, v in resp.raw_headers) assert raw_headers == ( (b"Content-Length", b"0"), - (b"Content-Type", b"application/octet-stream"), (b"Date", mock.ANY), (b"Server", mock.ANY), ) @@ -760,7 +759,6 @@ async def handler(request): assert raw_headers == ( (b"X-Empty", b""), (b"Content-Length", b"0"), - (b"Content-Type", b"application/octet-stream"), (b"Date", mock.ANY), (b"Server", mock.ANY), ) diff --git a/tests/test_web_functional.py b/tests/test_web_functional.py 
index ee61537068b..96a4f82ba9f 100644 --- a/tests/test_web_functional.py +++ b/tests/test_web_functional.py @@ -148,6 +148,21 @@ async def handler(request): assert resp.headers["Content-Length"] == "4" +@pytest.mark.parametrize("status", (201, 204, 404)) +async def test_default_content_type_no_body(aiohttp_client: Any, status: int) -> None: + async def handler(request): + return web.Response(status=status) + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + async with client.get("/") as resp: + assert resp.status == status + assert await resp.read() == b"" + assert "Content-Type" not in resp.headers + + async def test_response_before_complete(aiohttp_client: Any) -> None: async def handler(request): return web.Response(body=b"OK") diff --git a/tests/test_web_response.py b/tests/test_web_response.py index d1b407c090c..ad1286ca91e 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -1139,7 +1139,6 @@ async def test_send_headers_for_empty_body(buf, writer) -> None: Matches( "HTTP/1.1 200 OK\r\n" "Content-Length: 0\r\n" - "Content-Type: application/octet-stream\r\n" "Date: .+\r\n" "Server: .+\r\n\r\n" ) @@ -1182,7 +1181,6 @@ async def test_send_set_cookie_header(buf, writer) -> None: "HTTP/1.1 200 OK\r\n" "Content-Length: 0\r\n" "Set-Cookie: name=value\r\n" - "Content-Type: application/octet-stream\r\n" "Date: .+\r\n" "Server: .+\r\n\r\n" ) From f09f88d1eb7a089d83cb2426811a45256cbc8410 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Fri, 23 Aug 2024 16:46:17 +0100 Subject: [PATCH 0411/1511] No default Content-Type when no content (#8858) (#8872) (cherry picked from commit 26772ad320c1ee0efa5b91ae35ae5860a37cd709) --- CHANGES/8858.bugfix.rst | 1 + aiohttp/helpers.py | 6 ++++-- aiohttp/web_request.py | 6 +++--- aiohttp/web_response.py | 3 ++- tests/test_client_functional.py | 2 -- tests/test_web_functional.py | 15 +++++++++++++++ tests/test_web_response.py | 2 -- 7 files changed, 
25 insertions(+), 10 deletions(-) create mode 100644 CHANGES/8858.bugfix.rst diff --git a/CHANGES/8858.bugfix.rst b/CHANGES/8858.bugfix.rst new file mode 100644 index 00000000000..e4efa91a2fd --- /dev/null +++ b/CHANGES/8858.bugfix.rst @@ -0,0 +1 @@ +Stopped adding a default Content-Type header when response has no content -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index ccfa9d5e2fe..f759bddc099 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -762,7 +762,8 @@ def content_type(self) -> str: raw = self._headers.get(hdrs.CONTENT_TYPE) if self._stored_content_type != raw: self._parse_content_type(raw) - return self._content_type # type: ignore[return-value] + assert self._content_type is not None + return self._content_type @property def charset(self) -> Optional[str]: @@ -770,7 +771,8 @@ def charset(self) -> Optional[str]: raw = self._headers.get(hdrs.CONTENT_TYPE) if self._stored_content_type != raw: self._parse_content_type(raw) - return self._content_dict.get("charset") # type: ignore[union-attr] + assert self._content_dict is not None + return self._content_dict.get("charset") @property def content_length(self) -> Optional[int]: diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index a485f0dcea6..a63d3074ea5 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -79,7 +79,7 @@ class FileField: filename: str file: io.BufferedReader content_type: str - headers: "CIMultiDictProxy[str]" + headers: CIMultiDictProxy[str] _TCHAR: Final[str] = string.digits + string.ascii_letters + r"!#$%&'*+.^_`|~-" @@ -169,7 +169,7 @@ def __init__( self._payload_writer = payload_writer self._payload = payload - self._headers = message.headers + self._headers: CIMultiDictProxy[str] = message.headers self._method = message.method self._version = message.version self._cache: Dict[str, Any] = {} @@ -493,7 +493,7 @@ def query_string(self) -> str: return self._rel_url.query_string @reify - def headers(self) -> 
"MultiMapping[str]": + def headers(self) -> CIMultiDictProxy[str]: """A case-insensitive multidict proxy with all headers.""" return self._headers diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index ae22d587ab4..95028a929ff 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -479,7 +479,8 @@ async def _prepare_headers(self) -> None: # https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-13 if hdrs.TRANSFER_ENCODING in headers: del headers[hdrs.TRANSFER_ENCODING] - else: + elif self.content_length != 0: + # https://www.rfc-editor.org/rfc/rfc9110#section-8.3-5 headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream") headers.setdefault(hdrs.DATE, rfc822_formatted_time()) headers.setdefault(hdrs.SERVER, SERVER_SOFTWARE) diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 872876d4a32..566c47522ce 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -727,7 +727,6 @@ async def handler(request): raw_headers = tuple((bytes(h), bytes(v)) for h, v in resp.raw_headers) assert raw_headers == ( (b"Content-Length", b"0"), - (b"Content-Type", b"application/octet-stream"), (b"Date", mock.ANY), (b"Server", mock.ANY), ) @@ -760,7 +759,6 @@ async def handler(request): assert raw_headers == ( (b"X-Empty", b""), (b"Content-Length", b"0"), - (b"Content-Type", b"application/octet-stream"), (b"Date", mock.ANY), (b"Server", mock.ANY), ) diff --git a/tests/test_web_functional.py b/tests/test_web_functional.py index ee61537068b..96a4f82ba9f 100644 --- a/tests/test_web_functional.py +++ b/tests/test_web_functional.py @@ -148,6 +148,21 @@ async def handler(request): assert resp.headers["Content-Length"] == "4" +@pytest.mark.parametrize("status", (201, 204, 404)) +async def test_default_content_type_no_body(aiohttp_client: Any, status: int) -> None: + async def handler(request): + return web.Response(status=status) + + app = web.Application() + app.router.add_get("/", handler) + 
client = await aiohttp_client(app) + + async with client.get("/") as resp: + assert resp.status == status + assert await resp.read() == b"" + assert "Content-Type" not in resp.headers + + async def test_response_before_complete(aiohttp_client: Any) -> None: async def handler(request): return web.Response(body=b"OK") diff --git a/tests/test_web_response.py b/tests/test_web_response.py index d1b407c090c..ad1286ca91e 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -1139,7 +1139,6 @@ async def test_send_headers_for_empty_body(buf, writer) -> None: Matches( "HTTP/1.1 200 OK\r\n" "Content-Length: 0\r\n" - "Content-Type: application/octet-stream\r\n" "Date: .+\r\n" "Server: .+\r\n\r\n" ) @@ -1182,7 +1181,6 @@ async def test_send_set_cookie_header(buf, writer) -> None: "HTTP/1.1 200 OK\r\n" "Content-Length: 0\r\n" "Set-Cookie: name=value\r\n" - "Content-Type: application/octet-stream\r\n" "Date: .+\r\n" "Server: .+\r\n\r\n" ) From 8fd19cb9f6c42989c540852ccf2fd89efa297ddd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 10:44:00 +0000 Subject: [PATCH 0412/1511] Bump typer from 0.12.4 to 0.12.5 (#8879) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [typer](https://github.com/fastapi/typer) from 0.12.4 to 0.12.5. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/fastapi/typer/releases">typer's releases</a>.</em></p> <blockquote> <h2>0.12.5</h2> <h3>Features</h3> <ul> <li>💄 Unify the width of the Rich console for help and errors. PR <a href="https://redirect.github.com/fastapi/typer/pull/788">#788</a> by <a href="https://github.com/racinmat"><code>@​racinmat</code></a>.</li> <li>🚸 Improve assertion error message if a group is not a valid subclass. 
PR <a href="https://redirect.github.com/fastapi/typer/pull/425">#425</a> by <a href="https://github.com/chrisburr"><code>@​chrisburr</code></a>.</li> </ul> <h3>Fixes</h3> <ul> <li>🐛 Ensure <code>rich_markup_mode=None</code> disables Rich formatting. PR <a href="https://redirect.github.com/fastapi/typer/pull/859">#859</a> by <a href="https://github.com/svlandeg"><code>@​svlandeg</code></a>.</li> <li>🐛 Fix sourcing of completion path for Git Bash. PR <a href="https://redirect.github.com/fastapi/typer/pull/801">#801</a> by <a href="https://github.com/svlandeg"><code>@​svlandeg</code></a>.</li> <li>🐛 Fix PowerShell completion with incomplete word. PR <a href="https://redirect.github.com/fastapi/typer/pull/360">#360</a> by <a href="https://github.com/patricksurry"><code>@​patricksurry</code></a>.</li> </ul> <h3>Refactors</h3> <ul> <li>🔥 Remove Python 3.6 specific code paths. PR <a href="https://redirect.github.com/fastapi/typer/pull/850">#850</a> by <a href="https://github.com/svlandeg"><code>@​svlandeg</code></a>.</li> <li>🔥 Clean up redundant code. PR <a href="https://redirect.github.com/fastapi/typer/pull/858">#858</a> by <a href="https://github.com/svlandeg"><code>@​svlandeg</code></a>.</li> </ul> <h3>Docs</h3> <ul> <li>♻️ Use F-strings in Click examples in docs. PR <a href="https://redirect.github.com/fastapi/typer/pull/891">#891</a> by <a href="https://github.com/svlandeg"><code>@​svlandeg</code></a>.</li> <li>📝Add missing <code>main.py</code> in tutorial on CLI option names. PR <a href="https://redirect.github.com/fastapi/typer/pull/868">#868</a> by <a href="https://github.com/fsramalho"><code>@​fsramalho</code></a>.</li> <li>📝 Fix broken link. PR <a href="https://redirect.github.com/fastapi/typer/pull/835">#835</a> by <a href="https://github.com/OhioDschungel6"><code>@​OhioDschungel6</code></a>.</li> <li>📝 Update package docs with the latest versions of Typer and Poetry. 
PR <a href="https://redirect.github.com/fastapi/typer/pull/781">#781</a> by <a href="https://github.com/kinuax"><code>@​kinuax</code></a>.</li> <li>📝 Update the Progress Bar tutorial with correct output. PR <a href="https://redirect.github.com/fastapi/typer/pull/199">#199</a> by <a href="https://github.com/n1ckdm"><code>@​n1ckdm</code></a>.</li> <li>📝 Add docs and scripts to test completion in different shells. PR <a href="https://redirect.github.com/fastapi/typer/pull/953">#953</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>✏️ Fix a typo in <code>docs/virtual-environments.md</code>. PR <a href="https://redirect.github.com/fastapi/typer/pull/952">#952</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>✏️ Fix typo in <code>docs/contributing.md</code>. PR <a href="https://redirect.github.com/fastapi/typer/pull/947">#947</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>📝 Add docs for virtual environments, environment variables, and update contributing. PR <a href="https://redirect.github.com/fastapi/typer/pull/946">#946</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> </ul> <h3>Internal</h3> <ul> <li>🔨 Pre-install dependencies in Docker so that testing in Docker is faster. PR <a href="https://redirect.github.com/fastapi/typer/pull/954">#954</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>✅ Add <code>needs_bash</code> test fixture. PR <a href="https://redirect.github.com/fastapi/typer/pull/888">#888</a> by <a href="https://github.com/svlandeg"><code>@​svlandeg</code></a>.</li> <li>⬆ Bump mkdocs-material from 9.5.18 to 9.5.33. PR <a href="https://redirect.github.com/fastapi/typer/pull/945">#945</a> by <a href="https://github.com/apps/dependabot"><code>@​dependabot[bot]</code></a>.</li> <li>⬆ Bump pillow from 10.3.0 to 10.4.0. 
PR <a href="https://redirect.github.com/fastapi/typer/pull/939">#939</a> by <a href="https://github.com/apps/dependabot"><code>@​dependabot[bot]</code></a>.</li> <li>👷 Fix issue-manager. PR <a href="https://redirect.github.com/fastapi/typer/pull/948">#948</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>🙈 Remove extra line in .gitignore. PR <a href="https://redirect.github.com/fastapi/typer/pull/936">#936</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>⬆ Update pytest-cov requirement from <!-- raw HTML omitted -->=2.10.0,<6.0.0. PR <a href="https://redirect.github.com/fastapi/typer/pull/844">#844</a> by <a href="https://github.com/apps/dependabot"><code>@​dependabot[bot]</code></a>.</li> <li>⬆ Bump pypa/gh-action-pypi-publish from 1.8.11 to 1.9.0. PR <a href="https://redirect.github.com/fastapi/typer/pull/865">#865</a> by <a href="https://github.com/apps/dependabot"><code>@​dependabot[bot]</code></a>.</li> <li>⬆ Update pytest requirement from <!-- raw HTML omitted -->=4.4.0,<9.0.0. PR <a href="https://redirect.github.com/fastapi/typer/pull/915">#915</a> by <a href="https://github.com/apps/dependabot"><code>@​dependabot[bot]</code></a>.</li> <li>⬆ Update pytest-sugar requirement from <!-- raw HTML omitted -->=0.9.4,<1.1.0. PR <a href="https://redirect.github.com/fastapi/typer/pull/841">#841</a> by <a href="https://github.com/apps/dependabot"><code>@​dependabot[bot]</code></a>.</li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/fastapi/typer/blob/master/docs/release-notes.md">typer's changelog</a>.</em></p> <blockquote> <h2>0.12.5</h2> <h3>Features</h3> <ul> <li>💄 Unify the width of the Rich console for help and errors. 
PR <a href="https://redirect.github.com/fastapi/typer/pull/788">#788</a> by <a href="https://github.com/racinmat"><code>@​racinmat</code></a>.</li> <li>🚸 Improve assertion error message if a group is not a valid subclass. PR <a href="https://redirect.github.com/fastapi/typer/pull/425">#425</a> by <a href="https://github.com/chrisburr"><code>@​chrisburr</code></a>.</li> </ul> <h3>Fixes</h3> <ul> <li>🐛 Ensure <code>rich_markup_mode=None</code> disables Rich formatting. PR <a href="https://redirect.github.com/fastapi/typer/pull/859">#859</a> by <a href="https://github.com/svlandeg"><code>@​svlandeg</code></a>.</li> <li>🐛 Fix sourcing of completion path for Git Bash. PR <a href="https://redirect.github.com/fastapi/typer/pull/801">#801</a> by <a href="https://github.com/svlandeg"><code>@​svlandeg</code></a>.</li> <li>🐛 Fix PowerShell completion with incomplete word. PR <a href="https://redirect.github.com/fastapi/typer/pull/360">#360</a> by <a href="https://github.com/patricksurry"><code>@​patricksurry</code></a>.</li> </ul> <h3>Refactors</h3> <ul> <li>🔥 Remove Python 3.6 specific code paths. PR <a href="https://redirect.github.com/fastapi/typer/pull/850">#850</a> by <a href="https://github.com/svlandeg"><code>@​svlandeg</code></a>.</li> <li>🔥 Clean up redundant code. PR <a href="https://redirect.github.com/fastapi/typer/pull/858">#858</a> by <a href="https://github.com/svlandeg"><code>@​svlandeg</code></a>.</li> </ul> <h3>Docs</h3> <ul> <li>♻️ Use F-strings in Click examples in docs. PR <a href="https://redirect.github.com/fastapi/typer/pull/891">#891</a> by <a href="https://github.com/svlandeg"><code>@​svlandeg</code></a>.</li> <li>📝Add missing <code>main.py</code> in tutorial on CLI option names. PR <a href="https://redirect.github.com/fastapi/typer/pull/868">#868</a> by <a href="https://github.com/fsramalho"><code>@​fsramalho</code></a>.</li> <li>📝 Fix broken link. 
PR <a href="https://redirect.github.com/fastapi/typer/pull/835">#835</a> by <a href="https://github.com/OhioDschungel6"><code>@​OhioDschungel6</code></a>.</li> <li>📝 Update package docs with the latest versions of Typer and Poetry. PR <a href="https://redirect.github.com/fastapi/typer/pull/781">#781</a> by <a href="https://github.com/kinuax"><code>@​kinuax</code></a>.</li> <li>📝 Update the Progress Bar tutorial with correct output. PR <a href="https://redirect.github.com/fastapi/typer/pull/199">#199</a> by <a href="https://github.com/n1ckdm"><code>@​n1ckdm</code></a>.</li> <li>📝 Add docs and scripts to test completion in different shells. PR <a href="https://redirect.github.com/fastapi/typer/pull/953">#953</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>✏️ Fix a typo in <code>docs/virtual-environments.md</code>. PR <a href="https://redirect.github.com/fastapi/typer/pull/952">#952</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>✏️ Fix typo in <code>docs/contributing.md</code>. PR <a href="https://redirect.github.com/fastapi/typer/pull/947">#947</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>📝 Add docs for virtual environments, environment variables, and update contributing. PR <a href="https://redirect.github.com/fastapi/typer/pull/946">#946</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> </ul> <h3>Internal</h3> <ul> <li>🔨 Pre-install dependencies in Docker so that testing in Docker is faster. PR <a href="https://redirect.github.com/fastapi/typer/pull/954">#954</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>✅ Add <code>needs_bash</code> test fixture. PR <a href="https://redirect.github.com/fastapi/typer/pull/888">#888</a> by <a href="https://github.com/svlandeg"><code>@​svlandeg</code></a>.</li> <li>⬆ Bump mkdocs-material from 9.5.18 to 9.5.33. 
PR <a href="https://redirect.github.com/fastapi/typer/pull/945">#945</a> by <a href="https://github.com/apps/dependabot"><code>@​dependabot[bot]</code></a>.</li> <li>⬆ Bump pillow from 10.3.0 to 10.4.0. PR <a href="https://redirect.github.com/fastapi/typer/pull/939">#939</a> by <a href="https://github.com/apps/dependabot"><code>@​dependabot[bot]</code></a>.</li> <li>👷 Fix issue-manager. PR <a href="https://redirect.github.com/fastapi/typer/pull/948">#948</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>🙈 Remove extra line in .gitignore. PR <a href="https://redirect.github.com/fastapi/typer/pull/936">#936</a> by <a href="https://github.com/tiangolo"><code>@​tiangolo</code></a>.</li> <li>⬆ Update pytest-cov requirement from <!-- raw HTML omitted -->=2.10.0,<6.0.0. PR <a href="https://redirect.github.com/fastapi/typer/pull/844">#844</a> by <a href="https://github.com/apps/dependabot"><code>@​dependabot[bot]</code></a>.</li> <li>⬆ Bump pypa/gh-action-pypi-publish from 1.8.11 to 1.9.0. PR <a href="https://redirect.github.com/fastapi/typer/pull/865">#865</a> by <a href="https://github.com/apps/dependabot"><code>@​dependabot[bot]</code></a>.</li> <li>⬆ Update pytest requirement from <!-- raw HTML omitted -->=4.4.0,<9.0.0. PR <a href="https://redirect.github.com/fastapi/typer/pull/915">#915</a> by <a href="https://github.com/apps/dependabot"><code>@​dependabot[bot]</code></a>.</li> <li>⬆ Update pytest-sugar requirement from <!-- raw HTML omitted -->=0.9.4,<1.1.0. 
PR <a href="https://redirect.github.com/fastapi/typer/pull/841">#841</a> by <a href="https://github.com/apps/dependabot"><code>@​dependabot[bot]</code></a>.</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/fastapi/typer/commit/88aefd4492696bd1afa7bdb49477dec2c29d7e77"><code>88aefd4</code></a> 🔖 Release version 0.12.5</li> <li><a href="https://github.com/fastapi/typer/commit/3ac3644d480677ae44c50e436920def381aab915"><code>3ac3644</code></a> 📝 Update release notes</li> <li><a href="https://github.com/fastapi/typer/commit/d93c0ac714386b39df87597e30149bf2c2a7e824"><code>d93c0ac</code></a> 🔨 Pre-install dependencies in Docker so that testing in Docker is faster (<a href="https://redirect.github.com/fastapi/typer/issues/954">#954</a>)</li> <li><a href="https://github.com/fastapi/typer/commit/a3378509cf2cc949473daf3c144ed759051de6c9"><code>a337850</code></a> 📝 Update release notes</li> <li><a href="https://github.com/fastapi/typer/commit/a5b7557140eda1f494050a791d51469ca6483805"><code>a5b7557</code></a> ✅ Add <code>needs_bash</code> test fixture (<a href="https://redirect.github.com/fastapi/typer/issues/888">#888</a>)</li> <li><a href="https://github.com/fastapi/typer/commit/6cc1f9a361414c09db45a0ffd36823fdfd405636"><code>6cc1f9a</code></a> 📝 Update release notes</li> <li><a href="https://github.com/fastapi/typer/commit/fc2c54f0aa9508b4a72ae50cb3bcb3bd0e6c1a21"><code>fc2c54f</code></a> 🐛 Ensure <code>rich_markup_mode=None</code> disables Rich formatting (<a href="https://redirect.github.com/fastapi/typer/issues/859">#859</a>)</li> <li><a href="https://github.com/fastapi/typer/commit/f17bb0675a33d23064b24c67754e8fa78e3dfc41"><code>f17bb06</code></a> 📝 Update release notes</li> <li><a href="https://github.com/fastapi/typer/commit/afac2b80dca716f65e63f539b5885c37e031a0cf"><code>afac2b8</code></a> 🐛 Fix sourcing of completion path for Git Bash (<a 
href="https://redirect.github.com/fastapi/typer/issues/801">#801</a>)</li> <li><a href="https://github.com/fastapi/typer/commit/6b35a70cd128366fe70d019ebaea2556ab50d4f0"><code>6b35a70</code></a> 📝 Update release notes</li> <li>Additional commits viewable in <a href="https://github.com/fastapi/typer/compare/0.12.4...0.12.5">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=typer&package-manager=pip&previous-version=0.12.4&new-version=0.12.5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 9389b1a0701..d825f618dcf 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -259,7 +259,7 @@ trustme==1.1.0 ; platform_machine != "i686" # via # -r requirements/lint.in # -r requirements/test.in -typer==0.12.4 +typer==0.12.5 # via python-on-whales typing-extensions==4.12.2 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 3f408533ec4..2f11c4a77a9 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -251,7 +251,7 @@ trustme==1.1.0 ; platform_machine != "i686" # via # -r requirements/lint.in # -r requirements/test.in -typer==0.12.4 +typer==0.12.5 # via python-on-whales typing-extensions==4.12.2 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index 94091021093..8362d0edadc 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -103,7 +103,7 @@ tqdm==4.66.5 # via python-on-whales trustme==1.1.0 # via -r 
requirements/lint.in -typer==0.12.4 +typer==0.12.5 # via python-on-whales typing-extensions==4.12.2 # via diff --git a/requirements/test.txt b/requirements/test.txt index 4886cbb15e6..ef30207e747 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -119,7 +119,7 @@ tqdm==4.66.5 # via python-on-whales trustme==1.1.0 ; platform_machine != "i686" # via -r requirements/test.in -typer==0.12.4 +typer==0.12.5 # via python-on-whales typing-extensions==4.12.2 # via From 9b136b9934ccaf4f2db8fc941445d62f869d1bc0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 10:49:56 +0000 Subject: [PATCH 0413/1511] Bump pytz from 2023.3.post1 to 2024.1 (#8880) Bumps [pytz](https://github.com/stub42/pytz) from 2023.3.post1 to 2024.1. <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/stub42/pytz/commit/fb43f957c5149e750c3be3cfc72b22ad94db4886"><code>fb43f95</code></a> Github releases</li> <li><a href="https://github.com/stub42/pytz/commit/368095322408e14898640a426db1e4658b8acd8c"><code>3680953</code></a> Bump version numbers to 2024.1 / 2024a</li> <li><a href="https://github.com/stub42/pytz/commit/8cde1786a5b9c25e2945bbc6c37e80fe9292798c"><code>8cde178</code></a> IANA 2024a</li> <li><a href="https://github.com/stub42/pytz/commit/e22640ed537602adb2638951d5a5e6bd5395c4ce"><code>e22640e</code></a> Squashed 'tz/' changes from cc48c2dfa..beeb53a3d</li> <li><a href="https://github.com/stub42/pytz/commit/ebee1ae4ef35f6fe73603b7582b458a732ae9b8a"><code>ebee1ae</code></a> Stop building unused distribution files</li> <li><a href="https://github.com/stub42/pytz/commit/77d9c8c39c17ff80eb82aff6bfe62d2b26b253bb"><code>77d9c8c</code></a> Default to Python 3.12</li> <li><a href="https://github.com/stub42/pytz/commit/8f15f8061c59cb04050c1d524df9b34294d9d78b"><code>8f15f80</code></a> PyPI publication from github</li> <li><a 
href="https://github.com/stub42/pytz/commit/ee2e37b6ae386299ad6a01fbcdc44af5f3739e9c"><code>ee2e37b</code></a> Bump version to 2023.4 (2023d)</li> <li><a href="https://github.com/stub42/pytz/commit/4a37f7cbb98b170d29c7114c832451cb80476675"><code>4a37f7c</code></a> Squashed 'tz/' changes from ddb8cf09d..2bb5bb955</li> <li><a href="https://github.com/stub42/pytz/commit/4a8ca236bd92e0b66c6d612fc256b71f6f47a109"><code>4a8ca23</code></a> IANA 2023d</li> <li>See full diff in <a href="https://github.com/stub42/pytz/compare/release_2023.3.post1...release_2024.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pytz&package-manager=pip&previous-version=2023.3.post1&new-version=2024.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index d825f618dcf..db1b3f97582 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -188,7 +188,7 @@ python-on-whales==0.72.0 # via # -r requirements/lint.in # -r requirements/test.in -pytz==2023.3.post1 +pytz==2024.1 # via babel pyyaml==6.0.1 # via pre-commit diff --git a/requirements/dev.txt b/requirements/dev.txt index 2f11c4a77a9..f867f95a159 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -183,7 +183,7 @@ python-on-whales==0.72.0 # via # -r requirements/lint.in # -r requirements/test.in -pytz==2023.3.post1 +pytz==2024.1 # via babel pyyaml==6.0.1 # via pre-commit diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index ebab702f1b1..4c189f13158 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -48,7 +48,7 @@ pyenchant==3.2.2 # via sphinxcontrib-spelling pygments==2.18.0 # via sphinx 
-pytz==2023.3.post1 +pytz==2024.1 # via babel requests==2.32.3 # via sphinx diff --git a/requirements/doc.txt b/requirements/doc.txt index efa46279ff5..1623f18c928 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -46,7 +46,7 @@ pillow==9.5.0 # blockdiag pygments==2.18.0 # via sphinx -pytz==2023.3.post1 +pytz==2024.1 # via babel requests==2.32.3 # via sphinx From 6dd4c1048bfe839ff18f6714a2fd221df9771b42 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 11:12:57 +0000 Subject: [PATCH 0414/1511] Bump mypy from 1.11.1 to 1.11.2 (#8881) Bumps [mypy](https://github.com/python/mypy) from 1.11.1 to 1.11.2. <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/python/mypy/commit/789f02c83a5d5cb35f5e33ba91df46c8fea6b28e"><code>789f02c</code></a> Bump version to 1.11.2</li> <li><a href="https://github.com/python/mypy/commit/917cc75fd6f1417edb45eb77e449934f794c18fc"><code>917cc75</code></a> An alternative fix for a union-like literal string (<a href="https://redirect.github.com/python/mypy/issues/17639">#17639</a>)</li> <li><a href="https://github.com/python/mypy/commit/7d805b364ee80396e0b9ca906f32f901b2ac7e12"><code>7d805b3</code></a> Unwrap TypedDict item types before storing (<a href="https://redirect.github.com/python/mypy/issues/17640">#17640</a>)</li> <li><a href="https://github.com/python/mypy/commit/32675dddfacccef616557916cb872757605ab493"><code>32675dd</code></a> Revert "Fix Literal strings containing pipe characters" (<a href="https://redirect.github.com/python/mypy/issues/17638">#17638</a>)</li> <li><a href="https://github.com/python/mypy/commit/778542b93a6f5b3c168a8acc03717700ae6f8048"><code>778542b</code></a> Revert "Fix <code>RawExpressionType.accept</code> crash with <code>--cache-fine-grained</code>" (<a href="https://redirect.github.com/python/mypy/issues/1">#1</a>...</li> <li><a 
href="https://github.com/python/mypy/commit/14ab742dec6b58a4e94772115cb3b5c67a4b3d33"><code>14ab742</code></a> Bump version to 1.11.2+dev</li> <li>See full diff in <a href="https://github.com/python/mypy/compare/v1.11.1...v1.11.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=mypy&package-manager=pip&previous-version=1.11.1&new-version=1.11.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index db1b3f97582..7cdc216c1ec 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -120,7 +120,7 @@ multidict==6.0.5 # -r requirements/multidict.in # -r requirements/runtime-deps.in # yarl -mypy==1.11.1 ; implementation_name == "cpython" +mypy==1.11.2 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index f867f95a159..59fe5baa706 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -117,7 +117,7 @@ multidict==6.0.5 # via # -r requirements/runtime-deps.in # yarl -mypy==1.11.1 ; implementation_name == "cpython" +mypy==1.11.2 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 8362d0edadc..917aebf0f0e 100644 --- a/requirements/lint.txt +++ 
b/requirements/lint.txt @@ -48,7 +48,7 @@ markdown-it-py==3.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -mypy==1.11.1 ; implementation_name == "cpython" +mypy==1.11.2 ; implementation_name == "cpython" # via -r requirements/lint.in mypy-extensions==1.0.0 # via mypy diff --git a/requirements/test.txt b/requirements/test.txt index ef30207e747..cc98efc6d0f 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -61,7 +61,7 @@ multidict==6.0.5 # via # -r requirements/runtime-deps.in # yarl -mypy==1.11.1 ; implementation_name == "cpython" +mypy==1.11.2 ; implementation_name == "cpython" # via -r requirements/test.in mypy-extensions==1.0.0 # via mypy From cb9fa5c7424bd4216ed4b56e80ea9fe353a6903e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 11:18:52 +0000 Subject: [PATCH 0415/1511] Bump pyyaml from 6.0.1 to 6.0.2 (#8866) Bumps [pyyaml](https://github.com/yaml/pyyaml) from 6.0.1 to 6.0.2. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/yaml/pyyaml/releases">pyyaml's releases</a>.</em></p> <blockquote> <h2>6.0.2</h2> <h2>What's Changed</h2> <ul> <li>Support for Cython 3.x and Python 3.13.</li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/yaml/pyyaml/compare/6.0.1...6.0.2">https://github.com/yaml/pyyaml/compare/6.0.1...6.0.2</a></p> <h2>6.0.2rc1</h2> <ul> <li>Support for extension build with Cython 3.x</li> <li>Support for Python 3.13</li> <li>Added PyPI wheels for musllinux on aarch64</li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/yaml/pyyaml/blob/main/CHANGES">pyyaml's changelog</a>.</em></p> <blockquote> <p>6.0.2 (2024-08-06)</p> <ul> <li><a href="https://redirect.github.com/yaml/pyyaml/pull/808">yaml/pyyaml#808</a> -- Support for Cython 3.x and Python 3.13</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> 
<li><a href="https://github.com/yaml/pyyaml/commit/41309b0bcb4559edb1d691d47199035ef539d785"><code>41309b0</code></a> Release 6.0.2 (<a href="https://redirect.github.com/yaml/pyyaml/issues/819">#819</a>)</li> <li><a href="https://github.com/yaml/pyyaml/commit/dd9f0e1236775dcce682c91823e009556ce2a271"><code>dd9f0e1</code></a> 6.0.2rc1 (<a href="https://redirect.github.com/yaml/pyyaml/issues/809">#809</a>)</li> <li><a href="https://github.com/yaml/pyyaml/commit/f5527a26d518b3e9c66f9211e0af00c83f09a97e"><code>f5527a2</code></a> disable CI trigger on PR edits</li> <li><a href="https://github.com/yaml/pyyaml/commit/b4d80a742142004490d2da7691d534923820b81c"><code>b4d80a7</code></a> Python 3.12 + musllinux_1_1_x86_64 wheel support</li> <li>See full diff in <a href="https://github.com/yaml/pyyaml/compare/6.0.1...6.0.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pyyaml&package-manager=pip&previous-version=6.0.1&new-version=6.0.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 7cdc216c1ec..025d91d6bc6 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -190,7 +190,7 @@ python-on-whales==0.72.0 # -r requirements/test.in pytz==2024.1 # via babel 
-pyyaml==6.0.1 +pyyaml==6.0.2 # via pre-commit re-assert==1.1.0 # via -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 59fe5baa706..4510091a134 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -185,7 +185,7 @@ python-on-whales==0.72.0 # -r requirements/test.in pytz==2024.1 # via babel -pyyaml==6.0.1 +pyyaml==6.0.2 # via pre-commit re-assert==1.1.0 # via -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 917aebf0f0e..afc0a3e5b0d 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -82,7 +82,7 @@ python-dateutil==2.9.0.post0 # via freezegun python-on-whales==0.72.0 # via -r requirements/lint.in -pyyaml==6.0.1 +pyyaml==6.0.2 # via pre-commit requests==2.32.3 # via python-on-whales From d73e5c4be10b8ea30c05054f71b5d0dc7f3d4736 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 11:35:08 +0000 Subject: [PATCH 0416/1511] Bump platformdirs from 3.10.0 to 4.2.2 (#8883) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [platformdirs](https://github.com/platformdirs/platformdirs) from 3.10.0 to 4.2.2. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/platformdirs/platformdirs/releases">platformdirs's releases</a>.</em></p> <blockquote> <h2>4.2.2</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>Fix android detection when python4android is present by <a href="https://github.com/tmolitor-stud-tu"><code>@​tmolitor-stud-tu</code></a> in <a href="https://redirect.github.com/platformdirs/platformdirs/pull/277">platformdirs/platformdirs#277</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/tmolitor-stud-tu"><code>@​tmolitor-stud-tu</code></a> made their first contribution in <a href="https://redirect.github.com/platformdirs/platformdirs/pull/277">platformdirs/platformdirs#277</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/platformdirs/platformdirs/compare/4.2.1...4.2.2">https://github.com/platformdirs/platformdirs/compare/4.2.1...4.2.2</a></p> <h2>4.2.1</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>Switch to ruff for formatting and use codespell and docformatter by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/platformdirs/platformdirs/pull/261">platformdirs/platformdirs#261</a></li> <li>Use hatch over tox by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/platformdirs/platformdirs/pull/262">platformdirs/platformdirs#262</a></li> <li>chore: various minor fixes by <a href="https://github.com/deronnax"><code>@​deronnax</code></a> in <a href="https://redirect.github.com/platformdirs/platformdirs/pull/263">platformdirs/platformdirs#263</a></li> <li>chore: update dead Microsoft's known folders documentation link by <a href="https://github.com/deronnax"><code>@​deronnax</code></a> in <a href="https://redirect.github.com/platformdirs/platformdirs/pull/267">platformdirs/platformdirs#267</a></li> <li>Allow working 
without ctypes by <a href="https://github.com/youknowone"><code>@​youknowone</code></a> in <a href="https://redirect.github.com/platformdirs/platformdirs/pull/275">platformdirs/platformdirs#275</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/deronnax"><code>@​deronnax</code></a> made their first contribution in <a href="https://redirect.github.com/platformdirs/platformdirs/pull/263">platformdirs/platformdirs#263</a></li> <li><a href="https://github.com/youknowone"><code>@​youknowone</code></a> made their first contribution in <a href="https://redirect.github.com/platformdirs/platformdirs/pull/275">platformdirs/platformdirs#275</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/platformdirs/platformdirs/compare/4.2.0...4.2.1">https://github.com/platformdirs/platformdirs/compare/4.2.0...4.2.1</a></p> <h2>4.2.0</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>Fix 2 typos about XDG_DATA_DIR by <a href="https://github.com/Freed-Wu"><code>@​Freed-Wu</code></a> in <a href="https://redirect.github.com/platformdirs/platformdirs/pull/256">platformdirs/platformdirs#256</a></li> <li>Add convenience methods to <code>PlatformDirsAPI</code> that allow iterating over both user and site dirs/paths. 
by <a href="https://github.com/SpaceshipOperations"><code>@​SpaceshipOperations</code></a> in <a href="https://redirect.github.com/platformdirs/platformdirs/pull/258">platformdirs/platformdirs#258</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/SpaceshipOperations"><code>@​SpaceshipOperations</code></a> made their first contribution in <a href="https://redirect.github.com/platformdirs/platformdirs/pull/258">platformdirs/platformdirs#258</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/platformdirs/platformdirs/compare/4.1.0...4.2.0">https://github.com/platformdirs/platformdirs/compare/4.1.0...4.2.0</a></p> <h2>4.1.0</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>Update changelog for 4.0.0 by <a href="https://github.com/rafalkrupinski"><code>@​rafalkrupinski</code></a> in <a href="https://redirect.github.com/platformdirs/platformdirs/pull/242">platformdirs/platformdirs#242</a></li> <li>docs: fix Linux user_log_dir example in README by <a href="https://github.com/dbohdan"><code>@​dbohdan</code></a> in <a href="https://redirect.github.com/platformdirs/platformdirs/pull/245">platformdirs/platformdirs#245</a></li> <li>Drop support for EOL Python 3.7 by <a href="https://github.com/hugovk"><code>@​hugovk</code></a> in <a href="https://redirect.github.com/platformdirs/platformdirs/pull/246">platformdirs/platformdirs#246</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/rafalkrupinski"><code>@​rafalkrupinski</code></a> made their first contribution in <a href="https://redirect.github.com/platformdirs/platformdirs/pull/242">platformdirs/platformdirs#242</a></li> <li><a href="https://github.com/dbohdan"><code>@​dbohdan</code></a> made their first contribution in <a href="https://redirect.github.com/platformdirs/platformdirs/pull/245">platformdirs/platformdirs#245</a></li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/platformdirs/platformdirs/blob/main/CHANGES.rst">platformdirs's changelog</a>.</em></p> <blockquote> <h1>platformdirs Changelog</h1> <h2>platformdirs 4.1.0 (2024-01-XX)</h2> <ul> <li>Add convenience methods <code>iter_{config,cache,data,runtime}_{dirs,paths}</code>.</li> </ul> <h2>platformdirs 4.0.0 (2023-11-10)</h2> <ul> <li>UNIX: revert site_cache_dir to use <code>/var/cache</code> instead of <code>/var/tmp</code></li> </ul> <h2>platformdirs 3.8.1 (2023-07-06)</h2> <ul> <li>BSD: provide a fallback for <code>user_runtime_dir</code></li> </ul> <h2>platformdirs 3.8.0 (2023-06-22)</h2> <ul> <li>Add missing user media directory docs</li> </ul> <h2>platformdirs 3.7.0 (2023-06-20)</h2> <ul> <li>Have user_runtime_dir return /var/run/user/uid for *BSD</li> </ul> <h2>platformdirs 3.6.0 (2023-06-19)</h2> <ul> <li>introduce <code>user_downloads_dir</code></li> </ul> <h2>platformdirs 3.5.3 (2023-06-09)</h2> <ul> <li>Use ruff</li> </ul> <h2>platformdirs 3.5.2 (2023-05-30)</h2> <ul> <li>test with 3.12.0.b1</li> </ul> <h2>platformdirs 3.5.1 (2023-05-11)</h2> <ul> <li>Add 3.12 support</li> <li>Add tox.ini to sdist</li> <li>removing Windows versions</li> <li>Better handling for UNIX support</li> </ul> <h2>platformdirs 3.5.0 (2023-04-27)</h2> <ul> <li>introduce <code>user_music_dir</code></li> </ul> <h2>platformdirs 3.4.0 (2023-04-26)</h2> <ul> <li>introduce <code>user_videos_dir</code></li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/platformdirs/platformdirs/commit/5ec69d86917fb1ac23207893ccd598f8af805ba5"><code>5ec69d8</code></a> Fix android detection when python4android is present (<a href="https://redirect.github.com/platformdirs/platformdirs/issues/277">#277</a>)</li> <li><a href="https://github.com/platformdirs/platformdirs/commit/dbf360f9f894a8937c624ce4259b5c070465f78d"><code>dbf360f</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/platformdirs/platformdirs/issues/280">#280</a>)</li> <li><a href="https://github.com/platformdirs/platformdirs/commit/b318012877fe1c605adc6dda717db67746cd2e4b"><code>b318012</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/platformdirs/platformdirs/issues/279">#279</a>)</li> <li><a href="https://github.com/platformdirs/platformdirs/commit/120a441c6ef7b4f29ca4936d5b1ad4421fc96f23"><code>120a441</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/platformdirs/platformdirs/issues/278">#278</a>)</li> <li><a href="https://github.com/platformdirs/platformdirs/commit/9781f05834860772fdb96231abf3f23848e6b2b5"><code>9781f05</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/platformdirs/platformdirs/issues/276">#276</a>)</li> <li><a href="https://github.com/platformdirs/platformdirs/commit/b44d57d2610f7d734a428c61ddcf922d3952a0ea"><code>b44d57d</code></a> Allow working without ctypes (<a href="https://redirect.github.com/platformdirs/platformdirs/issues/275">#275</a>)</li> <li><a href="https://github.com/platformdirs/platformdirs/commit/89b6b2bca3eb9dee9bedb1dac7b9fd329eb66e03"><code>89b6b2b</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/platformdirs/platformdirs/issues/274">#274</a>)</li> <li><a 
href="https://github.com/platformdirs/platformdirs/commit/f211ab4e76dad0d575387e5e14297e3f3e65d42c"><code>f211ab4</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/platformdirs/platformdirs/issues/273">#273</a>)</li> <li><a href="https://github.com/platformdirs/platformdirs/commit/077e11e1cb503a9ce96f2cde0600ddca75b570c3"><code>077e11e</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/platformdirs/platformdirs/issues/272">#272</a>)</li> <li><a href="https://github.com/platformdirs/platformdirs/commit/c8870a1ce72db7c57b5d998265b681327e1a6f69"><code>c8870a1</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/platformdirs/platformdirs/issues/271">#271</a>)</li> <li>Additional commits viewable in <a href="https://github.com/platformdirs/platformdirs/compare/3.10.0...4.2.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=platformdirs&package-manager=pip&previous-version=3.10.0&new-version=4.2.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/dev.txt b/requirements/dev.txt index 4510091a134..f9cb6106edf 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -137,7 +137,7 @@ pillow==9.5.0 # blockdiag pip-tools==7.4.1 # via -r requirements/dev.in -platformdirs==3.10.0 +platformdirs==4.2.2 # via virtualenv pluggy==1.5.0 
# via pytest diff --git a/requirements/lint.txt b/requirements/lint.txt index afc0a3e5b0d..1184ed4653b 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -56,7 +56,7 @@ nodeenv==1.9.1 # via pre-commit packaging==24.1 # via pytest -platformdirs==3.10.0 +platformdirs==4.2.2 # via virtualenv pluggy==1.5.0 # via pytest From 996204af9baac83797d30531c154f90fadd98476 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 12:34:33 +0000 Subject: [PATCH 0417/1511] Bump webcolors from 1.11.1 to 24.8.0 (#8865) Bumps [webcolors](https://github.com/ubernostrum/webcolors) from 1.11.1 to 24.8.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/ubernostrum/webcolors/blob/trunk/docs/changelog.rst">webcolors's changelog</a>.</em></p> <blockquote> <p>.. _changelog:</p> <h1>Changelog</h1> <p>This document lists the changes in each release of <code>webcolors</code>.</p> <h2>Version numbering</h2> <p>This library currently tracks its version numbers using the <code>YY.MM.MICRO</code> form of <code>Calendar Versioning <https://calver.org></code>_ ("CalVer"), in which the first two components of the version number are the (two-digit) year and (non-zero-padded) month of the release date, while the third component is an incrementing value for releases occurring in that month. 
For example, the first release issued in January 2025 would have a version number of 25.1.0; a subsequent release in the same month would be 25.1.1; a release the following month (February) would be 25.2.0.</p> <p>The CalVer system was adopted for this library in 2024, and the first release to use a CalVer version number was 24.6.0.</p> <h2>API stability and deprecations</h2> <p>The API stability/deprecation policy for this library is as follows:</p> <ul> <li> <p>The supported stable public API of this library is the set of symbols which are exported by its <code>__all__</code> declaration and which are documented in this documentation. For classes exported there, the supported stable public API is the set of methods and attributes of those classes whose names do not begin with one or more underscore (<code>_</code>) characters and which are documented in this documentation.</p> </li> <li> <p>When a public API is to be removed, or undergo a backwards-incompatible change, it will emit a deprecation warning which serves as notice of the intended removal or change, and which will give a date -- which will always be at least in the next calendar year after the first release which emits the deprecation warning -- past which the removal or change may occur without further warning.</p> </li> <li> <p>Security fixes, and fixes for high-severity bugs (such as those which might cause unrecoverable crash or data loss), are not required to emit deprecation warnings, and may -- if needed -- impose backwards-incompatible change in any release. If this occurs, this changelog document will contain a note explaining why the usual deprecation process could not be followed for that case.</p> </li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/ubernostrum/webcolors/commit/ec0702e3d9e9a2b1d5e2ed53c82668b45f601626"><code>ec0702e</code></a> Release 24.8.0.</li> <li><a href="https://github.com/ubernostrum/webcolors/commit/a9a668a187ba708c7f6bcd45c55d37cb3377121e"><code>a9a668a</code></a> Fix reST formatting in testing docs.</li> <li><a href="https://github.com/ubernostrum/webcolors/commit/dee9d4668976805427bd23dd1f5ddbec52cf547f"><code>dee9d46</code></a> Add first draft of names() function (refs <a href="https://redirect.github.com/ubernostrum/webcolors/issues/20">#20</a>).</li> <li><a href="https://github.com/ubernostrum/webcolors/commit/bb8768639c57167c99b14ce65a42ce400ba8438d"><code>bb87686</code></a> Release 24.6.0.</li> <li><a href="https://github.com/ubernostrum/webcolors/commit/41046b00da5e892809d83cf4c0725d471e36118f"><code>41046b0</code></a> Reorganize and update the FAQ.</li> <li><a href="https://github.com/ubernostrum/webcolors/commit/0f865b27bf4b3ee6bbf851314a32b157b718ee03"><code>0f865b2</code></a> Slight clarification in testing docs.</li> <li><a href="https://github.com/ubernostrum/webcolors/commit/368e2c5b940496f4d11924cb340c4242a88d2d35"><code>368e2c5</code></a> Minor word-order change for version references in docs.</li> <li><a href="https://github.com/ubernostrum/webcolors/commit/516e863425f4592f56ed90c21481cd3cc10a040a"><code>516e863</code></a> Small modernization of Python idioms.</li> <li><a href="https://github.com/ubernostrum/webcolors/commit/68ba427417b6065684a162f1484c45886a401a85"><code>68ba427</code></a> Be absolutely clear about the mappings no longer being supported API.</li> <li><a href="https://github.com/ubernostrum/webcolors/commit/785d6da1f4820029b188ccd3001de2d64c86cc30"><code>785d6da</code></a> Update the code comment on gray/grey choices.</li> <li>Additional commits viewable in <a href="https://github.com/ubernostrum/webcolors/compare/1.11.1...24.8.0">compare 
view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=webcolors&package-manager=pip&previous-version=1.11.1&new-version=24.8.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 025d91d6bc6..f921bb0e786 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -283,7 +283,7 @@ virtualenv==20.26.3 # via pre-commit wait-for-it==2.2.2 # via -r requirements/test.in -webcolors==1.11.1 +webcolors==24.8.0 # via blockdiag wheel==0.37.0 # via pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index f9cb6106edf..35a83a15360 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -275,7 +275,7 @@ virtualenv==20.26.3 # via pre-commit wait-for-it==2.2.2 # via -r requirements/test.in -webcolors==1.13 +webcolors==24.8.0 # via blockdiag wheel==0.41.0 # via pip-tools diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 4c189f13158..57bc9bc47b6 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -88,7 +88,7 @@ towncrier==23.11.0 # sphinxcontrib-towncrier urllib3==2.2.2 # 
via requests -webcolors==1.13 +webcolors==24.8.0 # via blockdiag zipp==3.20.0 # via diff --git a/requirements/doc.txt b/requirements/doc.txt index 1623f18c928..91ad44582d4 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -83,7 +83,7 @@ towncrier==23.11.0 # sphinxcontrib-towncrier urllib3==2.2.2 # via requests -webcolors==1.13 +webcolors==24.8.0 # via blockdiag zipp==3.20.0 # via From 400feb641e51f3b9afe0eee9c73cafcb8a0d3744 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Mon, 26 Aug 2024 15:16:12 +0100 Subject: [PATCH 0418/1511] Return 500 error when handler has wrong return type (#8845) (#8891) (cherry picked from commit 48a5e07ad833bd1a8fcb2ce6f85a41ad0cef9dc6) --- CHANGES/8845.bugfix.rst | 1 + aiohttp/web_protocol.py | 36 ++++++++++++++++++------------------ tests/test_web_functional.py | 17 ++++------------- 3 files changed, 23 insertions(+), 31 deletions(-) create mode 100644 CHANGES/8845.bugfix.rst diff --git a/CHANGES/8845.bugfix.rst b/CHANGES/8845.bugfix.rst new file mode 100644 index 00000000000..ff0016ac14b --- /dev/null +++ b/CHANGES/8845.bugfix.rst @@ -0,0 +1 @@ +Changed behaviour when returning an invalid response to send a 500 response -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index 39e1c8be50e..a4941d103ef 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -38,7 +38,7 @@ from .log import access_logger, server_logger from .streams import EMPTY_PAYLOAD, StreamReader from .tcp_helpers import tcp_keepalive -from .web_exceptions import HTTPException +from .web_exceptions import HTTPException, HTTPInternalServerError from .web_log import AccessLogger from .web_request import BaseRequest from .web_response import Response, StreamResponse @@ -464,16 +464,16 @@ async def _handle_request( self._current_request = None except HTTPException as exc: resp = exc - reset = await self.finish_response(request, resp, start_time) + resp, reset = await self.finish_response(request, resp, start_time) except asyncio.CancelledError: raise except asyncio.TimeoutError as exc: self.log_debug("Request handler timed out.", exc_info=exc) resp = self.handle_error(request, 504) - reset = await self.finish_response(request, resp, start_time) + resp, reset = await self.finish_response(request, resp, start_time) except Exception as exc: resp = self.handle_error(request, 500, exc) - reset = await self.finish_response(request, resp, start_time) + resp, reset = await self.finish_response(request, resp, start_time) else: # Deprecation warning (See #2415) if getattr(resp, "__http_exception__", False): @@ -484,7 +484,7 @@ async def _handle_request( DeprecationWarning, ) - reset = await self.finish_response(request, resp, start_time) + resp, reset = await self.finish_response(request, resp, start_time) finally: self._handler_waiter.set_result(None) @@ -584,10 +584,6 @@ async def start(self) -> None: except asyncio.CancelledError: self.log_debug("Ignored premature client disconnection ") break - except RuntimeError as exc: - if self.debug: - self.log_exception("Unhandled runtime exception", exc_info=exc) - self.force_close() except Exception as exc: self.log_exception("Unhandled exception", 
exc_info=exc) self.force_close() @@ -616,7 +612,7 @@ async def start(self) -> None: async def finish_response( self, request: BaseRequest, resp: StreamResponse, start_time: float - ) -> bool: + ) -> Tuple[StreamResponse, bool]: """Prepare the response and write_eof, then log access. This has to @@ -635,22 +631,26 @@ async def finish_response( prepare_meth = resp.prepare except AttributeError: if resp is None: - raise RuntimeError("Missing return " "statement on request handler") + self.log_exception("Missing return statement on request handler") else: - raise RuntimeError( - "Web-handler should return " - "a response instance, " + self.log_exception( + "Web-handler should return a response instance, " "got {!r}".format(resp) ) + exc = HTTPInternalServerError() + resp = Response( + status=exc.status, reason=exc.reason, text=exc.text, headers=exc.headers + ) + prepare_meth = resp.prepare try: await prepare_meth(request) await resp.write_eof() except ConnectionError: self.log_access(request, resp, start_time) - return True - else: - self.log_access(request, resp, start_time) - return False + return resp, True + + self.log_access(request, resp, start_time) + return resp, False def handle_error( self, diff --git a/tests/test_web_functional.py b/tests/test_web_functional.py index 96a4f82ba9f..431971de288 100644 --- a/tests/test_web_functional.py +++ b/tests/test_web_functional.py @@ -101,12 +101,8 @@ async def handler(request): server = await aiohttp_server(app, logger=logger) client = await aiohttp_client(server) - with pytest.raises(aiohttp.ServerDisconnectedError): - await client.get("/") - - logger.exception.assert_called_with( - "Unhandled runtime exception", exc_info=mock.ANY - ) + async with client.get("/") as resp: + assert resp.status == 500 async def test_handler_returns_none(aiohttp_server, aiohttp_client) -> None: @@ -121,13 +117,8 @@ async def handler(request): server = await aiohttp_server(app, logger=logger) client = await aiohttp_client(server) - with 
pytest.raises(aiohttp.ServerDisconnectedError): - await client.get("/") - - # Actual error text is placed in exc_info - logger.exception.assert_called_with( - "Unhandled runtime exception", exc_info=mock.ANY - ) + async with client.get("/") as resp: + assert resp.status == 500 async def test_head_returns_empty_body(aiohttp_client) -> None: From 86b97776aa454984daf2a9c42b686eb05b65bdf7 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Mon, 26 Aug 2024 15:16:23 +0100 Subject: [PATCH 0419/1511] Return 500 error when handler has wrong return type (#8845) (#8892) (cherry picked from commit 48a5e07ad833bd1a8fcb2ce6f85a41ad0cef9dc6) --- CHANGES/8845.bugfix.rst | 1 + aiohttp/web_protocol.py | 36 ++++++++++++++++++------------------ tests/test_web_functional.py | 17 ++++------------- 3 files changed, 23 insertions(+), 31 deletions(-) create mode 100644 CHANGES/8845.bugfix.rst diff --git a/CHANGES/8845.bugfix.rst b/CHANGES/8845.bugfix.rst new file mode 100644 index 00000000000..ff0016ac14b --- /dev/null +++ b/CHANGES/8845.bugfix.rst @@ -0,0 +1 @@ +Changed behaviour when returning an invalid response to send a 500 response -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index 39e1c8be50e..a4941d103ef 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -38,7 +38,7 @@ from .log import access_logger, server_logger from .streams import EMPTY_PAYLOAD, StreamReader from .tcp_helpers import tcp_keepalive -from .web_exceptions import HTTPException +from .web_exceptions import HTTPException, HTTPInternalServerError from .web_log import AccessLogger from .web_request import BaseRequest from .web_response import Response, StreamResponse @@ -464,16 +464,16 @@ async def _handle_request( self._current_request = None except HTTPException as exc: resp = exc - reset = await self.finish_response(request, resp, start_time) + resp, reset = await self.finish_response(request, resp, start_time) except asyncio.CancelledError: raise except asyncio.TimeoutError as exc: self.log_debug("Request handler timed out.", exc_info=exc) resp = self.handle_error(request, 504) - reset = await self.finish_response(request, resp, start_time) + resp, reset = await self.finish_response(request, resp, start_time) except Exception as exc: resp = self.handle_error(request, 500, exc) - reset = await self.finish_response(request, resp, start_time) + resp, reset = await self.finish_response(request, resp, start_time) else: # Deprecation warning (See #2415) if getattr(resp, "__http_exception__", False): @@ -484,7 +484,7 @@ async def _handle_request( DeprecationWarning, ) - reset = await self.finish_response(request, resp, start_time) + resp, reset = await self.finish_response(request, resp, start_time) finally: self._handler_waiter.set_result(None) @@ -584,10 +584,6 @@ async def start(self) -> None: except asyncio.CancelledError: self.log_debug("Ignored premature client disconnection ") break - except RuntimeError as exc: - if self.debug: - self.log_exception("Unhandled runtime exception", exc_info=exc) - self.force_close() except Exception as exc: self.log_exception("Unhandled exception", 
exc_info=exc) self.force_close() @@ -616,7 +612,7 @@ async def start(self) -> None: async def finish_response( self, request: BaseRequest, resp: StreamResponse, start_time: float - ) -> bool: + ) -> Tuple[StreamResponse, bool]: """Prepare the response and write_eof, then log access. This has to @@ -635,22 +631,26 @@ async def finish_response( prepare_meth = resp.prepare except AttributeError: if resp is None: - raise RuntimeError("Missing return " "statement on request handler") + self.log_exception("Missing return statement on request handler") else: - raise RuntimeError( - "Web-handler should return " - "a response instance, " + self.log_exception( + "Web-handler should return a response instance, " "got {!r}".format(resp) ) + exc = HTTPInternalServerError() + resp = Response( + status=exc.status, reason=exc.reason, text=exc.text, headers=exc.headers + ) + prepare_meth = resp.prepare try: await prepare_meth(request) await resp.write_eof() except ConnectionError: self.log_access(request, resp, start_time) - return True - else: - self.log_access(request, resp, start_time) - return False + return resp, True + + self.log_access(request, resp, start_time) + return resp, False def handle_error( self, diff --git a/tests/test_web_functional.py b/tests/test_web_functional.py index 96a4f82ba9f..431971de288 100644 --- a/tests/test_web_functional.py +++ b/tests/test_web_functional.py @@ -101,12 +101,8 @@ async def handler(request): server = await aiohttp_server(app, logger=logger) client = await aiohttp_client(server) - with pytest.raises(aiohttp.ServerDisconnectedError): - await client.get("/") - - logger.exception.assert_called_with( - "Unhandled runtime exception", exc_info=mock.ANY - ) + async with client.get("/") as resp: + assert resp.status == 500 async def test_handler_returns_none(aiohttp_server, aiohttp_client) -> None: @@ -121,13 +117,8 @@ async def handler(request): server = await aiohttp_server(app, logger=logger) client = await aiohttp_client(server) - with 
pytest.raises(aiohttp.ServerDisconnectedError): - await client.get("/") - - # Actual error text is placed in exc_info - logger.exception.assert_called_with( - "Unhandled runtime exception", exc_info=mock.ANY - ) + async with client.get("/") as resp: + assert resp.status == 500 async def test_head_returns_empty_body(aiohttp_client) -> None: From b2e1294f700eae58a87085b6342d7eb5ed105700 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 15:16:42 +0100 Subject: [PATCH 0420/1511] [PR #5344/bff76313 backport][3.11] Fix StreamResponse.prepared not returning True after EOF is sent (#8894) **This is a backport of PR #5344 as merged into master (bff76313298d8b2c7539f9dc496801ee86ab7097).** --- CHANGES/5343.bugfix | 1 + aiohttp/web_response.py | 2 +- tests/test_web_response.py | 15 ++++++++++----- 3 files changed, 12 insertions(+), 6 deletions(-) create mode 100644 CHANGES/5343.bugfix diff --git a/CHANGES/5343.bugfix b/CHANGES/5343.bugfix new file mode 100644 index 00000000000..4e33071ea94 --- /dev/null +++ b/CHANGES/5343.bugfix @@ -0,0 +1 @@ +Fixed StreamResponse.prepared to return True after EOF is sent -- by :user:`arthurdarcet`. 
diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 95028a929ff..7074542621b 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -102,7 +102,7 @@ def __init__( @property def prepared(self) -> bool: - return self._payload_writer is not None + return self._eof_sent or self._payload_writer is not None @property def task(self) -> "Optional[asyncio.Task[None]]": diff --git a/tests/test_web_response.py b/tests/test_web_response.py index ad1286ca91e..c3dab10c310 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -773,11 +773,8 @@ async def test___repr___after_eof() -> None: resp = StreamResponse() await resp.prepare(make_request("GET", "/")) - assert resp.prepared - await resp.write(b"data") await resp.write_eof() - assert not resp.prepared resp_repr = repr(resp) assert resp_repr == "<StreamResponse OK eof>" @@ -1243,14 +1240,22 @@ def test_content_type_with_set_body() -> None: assert resp.content_type == "application/octet-stream" -def test_started_when_not_started() -> None: +def test_prepared_when_not_started() -> None: resp = StreamResponse() assert not resp.prepared -async def test_started_when_started() -> None: +async def test_prepared_when_started() -> None: + resp = StreamResponse() + await resp.prepare(make_request("GET", "/")) + assert resp.prepared + + +async def test_prepared_after_eof() -> None: resp = StreamResponse() await resp.prepare(make_request("GET", "/")) + await resp.write(b"data") + await resp.write_eof() assert resp.prepared From b78cef374c1e0bcce62faef144b9f5067c3bf53e Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 15:16:55 +0100 Subject: [PATCH 0421/1511] [PR #5344/bff76313 backport][3.10] Fix StreamResponse.prepared not returning True after EOF is sent (#8893) **This is a backport of PR #5344 as merged into master (bff76313298d8b2c7539f9dc496801ee86ab7097).** Co-authored-by: Arthur Darcet <arthur@glose.com> --- 
CHANGES/5343.bugfix | 1 + aiohttp/web_response.py | 2 +- tests/test_web_response.py | 15 ++++++++++----- 3 files changed, 12 insertions(+), 6 deletions(-) create mode 100644 CHANGES/5343.bugfix diff --git a/CHANGES/5343.bugfix b/CHANGES/5343.bugfix new file mode 100644 index 00000000000..4e33071ea94 --- /dev/null +++ b/CHANGES/5343.bugfix @@ -0,0 +1 @@ +Fixed StreamResponse.prepared to return True after EOF is sent -- by :user:`arthurdarcet`. diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 95028a929ff..7074542621b 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -102,7 +102,7 @@ def __init__( @property def prepared(self) -> bool: - return self._payload_writer is not None + return self._eof_sent or self._payload_writer is not None @property def task(self) -> "Optional[asyncio.Task[None]]": diff --git a/tests/test_web_response.py b/tests/test_web_response.py index ad1286ca91e..c3dab10c310 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -773,11 +773,8 @@ async def test___repr___after_eof() -> None: resp = StreamResponse() await resp.prepare(make_request("GET", "/")) - assert resp.prepared - await resp.write(b"data") await resp.write_eof() - assert not resp.prepared resp_repr = repr(resp) assert resp_repr == "<StreamResponse OK eof>" @@ -1243,14 +1240,22 @@ def test_content_type_with_set_body() -> None: assert resp.content_type == "application/octet-stream" -def test_started_when_not_started() -> None: +def test_prepared_when_not_started() -> None: resp = StreamResponse() assert not resp.prepared -async def test_started_when_started() -> None: +async def test_prepared_when_started() -> None: + resp = StreamResponse() + await resp.prepare(make_request("GET", "/")) + assert resp.prepared + + +async def test_prepared_after_eof() -> None: resp = StreamResponse() await resp.prepare(make_request("GET", "/")) + await resp.write(b"data") + await resp.write_eof() assert resp.prepared From 
275985cbbd034e83053a775cf3e21b0a44d71c21 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 16:39:40 +0100 Subject: [PATCH 0422/1511] [PR #8874/d6a677d1 backport][3.11] Update testing utility examples (#8897) **This is a backport of PR #8874 as merged into master (d6a677d1dee563a7e6be7d68da015d824ecbfe80).** Co-authored-by: Sam Bull <git@sambull.org> --- docs/testing.rst | 20 +++++++++----------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/docs/testing.rst b/docs/testing.rst index 828b5072b4d..a7b93e714f6 100644 --- a/docs/testing.rst +++ b/docs/testing.rst @@ -449,14 +449,12 @@ Framework Agnostic Utilities High level test creation:: - from aiohttp.test_utils import TestClient, TestServer, loop_context + from aiohttp.test_utils import TestClient, TestServer from aiohttp import request - # loop_context is provided as a utility. You can use any - # asyncio.BaseEventLoop class in its place. - with loop_context() as loop: + async def test(): app = _create_example_app() - with TestClient(TestServer(app), loop=loop) as client: + async with TestClient(TestServer(app)) as client: async def test_get_route(): nonlocal client @@ -465,7 +463,7 @@ High level test creation:: text = await resp.text() assert "Hello, world" in text - loop.run_until_complete(test_get_route()) + await test_get_route() If it's preferred to handle the creation / teardown on a more granular @@ -473,10 +471,10 @@ basis, the TestClient object can be used directly:: from aiohttp.test_utils import TestClient, TestServer - with loop_context() as loop: + async def test(): app = _create_example_app() - client = TestClient(TestServer(app), loop=loop) - loop.run_until_complete(client.start_server()) + client = TestClient(TestServer(app)) + await client.start_server() root = "http://127.0.0.1:{}".format(port) async def test_get_route(): @@ -485,8 +483,8 @@ basis, the TestClient object can be used directly:: text = await 
resp.text() assert "Hello, world" in text - loop.run_until_complete(test_get_route()) - loop.run_until_complete(client.close()) + await test_get_route() + await client.close() A full list of the utilities provided can be found at the From 4179aba72d570854d69294344f2e12c0d62af144 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 16:39:58 +0100 Subject: [PATCH 0423/1511] [PR #8874/d6a677d1 backport][3.10] Update testing utility examples (#8896) **This is a backport of PR #8874 as merged into master (d6a677d1dee563a7e6be7d68da015d824ecbfe80).** Co-authored-by: Sam Bull <git@sambull.org> --- docs/testing.rst | 20 +++++++++----------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/docs/testing.rst b/docs/testing.rst index 828b5072b4d..a7b93e714f6 100644 --- a/docs/testing.rst +++ b/docs/testing.rst @@ -449,14 +449,12 @@ Framework Agnostic Utilities High level test creation:: - from aiohttp.test_utils import TestClient, TestServer, loop_context + from aiohttp.test_utils import TestClient, TestServer from aiohttp import request - # loop_context is provided as a utility. You can use any - # asyncio.BaseEventLoop class in its place. 
- with loop_context() as loop: + async def test(): app = _create_example_app() - with TestClient(TestServer(app), loop=loop) as client: + async with TestClient(TestServer(app)) as client: async def test_get_route(): nonlocal client @@ -465,7 +463,7 @@ High level test creation:: text = await resp.text() assert "Hello, world" in text - loop.run_until_complete(test_get_route()) + await test_get_route() If it's preferred to handle the creation / teardown on a more granular @@ -473,10 +471,10 @@ basis, the TestClient object can be used directly:: from aiohttp.test_utils import TestClient, TestServer - with loop_context() as loop: + async def test(): app = _create_example_app() - client = TestClient(TestServer(app), loop=loop) - loop.run_until_complete(client.start_server()) + client = TestClient(TestServer(app)) + await client.start_server() root = "http://127.0.0.1:{}".format(port) async def test_get_route(): @@ -485,8 +483,8 @@ basis, the TestClient object can be used directly:: text = await resp.text() assert "Hello, world" in text - loop.run_until_complete(test_get_route()) - loop.run_until_complete(client.close()) + await test_get_route() + await client.close() A full list of the utilities provided can be found at the From a74e8187bf55679772c3771036ce74ca28ddfa36 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Mon, 26 Aug 2024 17:27:33 +0100 Subject: [PATCH 0424/1511] Fix server disconnect when error after 100-continue (#8876) (#8899) (cherry picked from commit e058cbd14dd754c2b0138fdef28df58c5e3a2c5f) --- CHANGES/8876.bugfix.rst | 1 + aiohttp/web_urldispatcher.py | 2 ++ tests/test_web_functional.py | 16 +++++++++++++++- 3 files changed, 18 insertions(+), 1 deletion(-) create mode 100644 CHANGES/8876.bugfix.rst diff --git a/CHANGES/8876.bugfix.rst b/CHANGES/8876.bugfix.rst new file mode 100644 index 00000000000..539eeb4c7d3 --- /dev/null +++ b/CHANGES/8876.bugfix.rst @@ -0,0 +1 @@ +Fixed error handling after 100-continue so server sends 500 response 
instead of disconnecting -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 558fb7d0c9b..aee7aecd2a9 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -341,6 +341,8 @@ async def _default_expect_handler(request: Request) -> None: if request.version == HttpVersion11: if expect.lower() == "100-continue": await request.writer.write(b"HTTP/1.1 100 Continue\r\n\r\n") + # Reset output_size as we haven't started the main body yet. + request.writer.output_size = 0 else: raise HTTPExpectationFailed(text="Unknown Expect: %s" % expect) diff --git a/tests/test_web_functional.py b/tests/test_web_functional.py index 431971de288..6f612ffc011 100644 --- a/tests/test_web_functional.py +++ b/tests/test_web_functional.py @@ -4,7 +4,7 @@ import pathlib import socket import zlib -from typing import Any, Optional +from typing import Any, NoReturn, Optional from unittest import mock import pytest @@ -121,6 +121,20 @@ async def handler(request): assert resp.status == 500 +async def test_handler_returns_not_response_after_100expect( + aiohttp_server, aiohttp_client +) -> None: + async def handler(request: web.Request) -> NoReturn: + raise Exception("foo") + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + async with client.get("/", expect100=True) as resp: + assert resp.status == 500 + + async def test_head_returns_empty_body(aiohttp_client) -> None: async def handler(request): return web.Response(body=b"test") From e2631c7c058796ac8fab88722cffd582300cdc0e Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Mon, 26 Aug 2024 20:01:21 +0100 Subject: [PATCH 0425/1511] Fix server disconnect when error after 100-continue (#8876) (#8900) (cherry picked from commit e058cbd14dd754c2b0138fdef28df58c5e3a2c5f) --- CHANGES/8876.bugfix.rst | 1 + aiohttp/web_urldispatcher.py | 2 ++ tests/test_web_functional.py | 16 +++++++++++++++- 3 files changed, 18 
insertions(+), 1 deletion(-) create mode 100644 CHANGES/8876.bugfix.rst diff --git a/CHANGES/8876.bugfix.rst b/CHANGES/8876.bugfix.rst new file mode 100644 index 00000000000..539eeb4c7d3 --- /dev/null +++ b/CHANGES/8876.bugfix.rst @@ -0,0 +1 @@ +Fixed error handling after 100-continue so server sends 500 response instead of disconnecting -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 558fb7d0c9b..aee7aecd2a9 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -341,6 +341,8 @@ async def _default_expect_handler(request: Request) -> None: if request.version == HttpVersion11: if expect.lower() == "100-continue": await request.writer.write(b"HTTP/1.1 100 Continue\r\n\r\n") + # Reset output_size as we haven't started the main body yet. + request.writer.output_size = 0 else: raise HTTPExpectationFailed(text="Unknown Expect: %s" % expect) diff --git a/tests/test_web_functional.py b/tests/test_web_functional.py index 431971de288..6f612ffc011 100644 --- a/tests/test_web_functional.py +++ b/tests/test_web_functional.py @@ -4,7 +4,7 @@ import pathlib import socket import zlib -from typing import Any, Optional +from typing import Any, NoReturn, Optional from unittest import mock import pytest @@ -121,6 +121,20 @@ async def handler(request): assert resp.status == 500 +async def test_handler_returns_not_response_after_100expect( + aiohttp_server, aiohttp_client +) -> None: + async def handler(request: web.Request) -> NoReturn: + raise Exception("foo") + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + async with client.get("/", expect100=True) as resp: + assert resp.status == 500 + + async def test_head_returns_empty_body(aiohttp_client) -> None: async def handler(request): return web.Response(body=b"test") From 68629b523f4f91651ab75b23a534881794f82a26 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Mon, 26 Aug 2024 09:25:02 -1000 Subject: [PATCH 0426/1511] Cache the hash generation of the ConnectionKey (#8895) --- aiohttp/client_reqrep.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 0e67607f5d9..d8578a5daed 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -209,7 +209,7 @@ def _merge_ssl_params( return ssl -@attr.s(auto_attribs=True, slots=True, frozen=True) +@attr.s(auto_attribs=True, slots=True, frozen=True, cache_hash=True) class ConnectionKey: # the key should contain an information about used proxy / TLS # to prevent reusing wrong connections from a pool From 804e16c39b8cd91cdd092e68ffcd73b2f724e05c Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Mon, 26 Aug 2024 20:59:45 +0100 Subject: [PATCH 0427/1511] Ensure error is raised when reading from closed client response. (#8878) (#8902) --- CHANGES/8878.bugfix.rst | 1 + aiohttp/streams.py | 3 +++ tests/test_client_functional.py | 19 ++++++++++++++++++- 3 files changed, 22 insertions(+), 1 deletion(-) create mode 100644 CHANGES/8878.bugfix.rst diff --git a/CHANGES/8878.bugfix.rst b/CHANGES/8878.bugfix.rst new file mode 100644 index 00000000000..df53dea3c35 --- /dev/null +++ b/CHANGES/8878.bugfix.rst @@ -0,0 +1 @@ +Fixed response reading from closed session to throw an error immediately instead of timing out -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/streams.py b/aiohttp/streams.py index b9b9c3fd96f..c927cfbb1b3 100644 --- a/aiohttp/streams.py +++ b/aiohttp/streams.py @@ -296,6 +296,9 @@ def end_http_chunk_receiving(self) -> None: set_result(waiter, None) async def _wait(self, func_name: str) -> None: + if not self._protocol.connected: + raise RuntimeError("Connection closed.") + # StreamReader uses a future to link the protocol feed_data() method # to a read coroutine. Running two read coroutines at the same time # would have an unexpected behaviour. 
It would not possible to know diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 566c47522ce..18fb5fe9f86 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -29,7 +29,7 @@ SocketTimeoutError, TooManyRedirects, ) -from aiohttp.pytest_plugin import AiohttpClient, TestClient +from aiohttp.pytest_plugin import AiohttpClient, AiohttpServer, TestClient from aiohttp.test_utils import unused_port @@ -3645,3 +3645,20 @@ async def handler(_: web.Request) -> web.Response: session = await aiohttp_client(app, raise_for_status=None) # type: ignore[arg-type] await session.get("/") + + +async def test_exception_when_read_outside_of_session( + aiohttp_server: AiohttpServer, +) -> None: + async def handler(request: web.Request) -> web.Response: + return web.Response(body=b"1" * 1000000) + + app = web.Application() + app.router.add_get("/", handler) + + server = await aiohttp_server(app) + async with aiohttp.ClientSession() as sess: + resp = await sess.get(server.make_url("/")) + + with pytest.raises(RuntimeError, match="Connection closed"): + await resp.read() From 9360a918cdc828c79ce6dd311875768ed3b548af Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 19:59:50 +0000 Subject: [PATCH 0428/1511] [PR #8895/68629b52 backport][3.10] Cache the hash generation of the ConnectionKey (#8903) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/client_reqrep.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 0e67607f5d9..d8578a5daed 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -209,7 +209,7 @@ def _merge_ssl_params( return ssl -@attr.s(auto_attribs=True, slots=True, frozen=True) +@attr.s(auto_attribs=True, slots=True, frozen=True, cache_hash=True) class ConnectionKey: # the key should contain an information about used proxy / TLS # to prevent reusing wrong connections from a pool From 6c7aebbe099c2e4ced4340349eaac7d7ba15a419 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Mon, 26 Aug 2024 21:00:07 +0100 Subject: [PATCH 0429/1511] Ensure error is raised when reading from closed client response. (#8878) (#8901) --- CHANGES/8878.bugfix.rst | 1 + aiohttp/streams.py | 3 +++ tests/test_client_functional.py | 19 ++++++++++++++++++- 3 files changed, 22 insertions(+), 1 deletion(-) create mode 100644 CHANGES/8878.bugfix.rst diff --git a/CHANGES/8878.bugfix.rst b/CHANGES/8878.bugfix.rst new file mode 100644 index 00000000000..df53dea3c35 --- /dev/null +++ b/CHANGES/8878.bugfix.rst @@ -0,0 +1 @@ +Fixed response reading from closed session to throw an error immediately instead of timing out -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/streams.py b/aiohttp/streams.py index b9b9c3fd96f..c927cfbb1b3 100644 --- a/aiohttp/streams.py +++ b/aiohttp/streams.py @@ -296,6 +296,9 @@ def end_http_chunk_receiving(self) -> None: set_result(waiter, None) async def _wait(self, func_name: str) -> None: + if not self._protocol.connected: + raise RuntimeError("Connection closed.") + # StreamReader uses a future to link the protocol feed_data() method # to a read coroutine. Running two read coroutines at the same time # would have an unexpected behaviour. 
It would not possible to know diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 566c47522ce..18fb5fe9f86 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -29,7 +29,7 @@ SocketTimeoutError, TooManyRedirects, ) -from aiohttp.pytest_plugin import AiohttpClient, TestClient +from aiohttp.pytest_plugin import AiohttpClient, AiohttpServer, TestClient from aiohttp.test_utils import unused_port @@ -3645,3 +3645,20 @@ async def handler(_: web.Request) -> web.Response: session = await aiohttp_client(app, raise_for_status=None) # type: ignore[arg-type] await session.get("/") + + +async def test_exception_when_read_outside_of_session( + aiohttp_server: AiohttpServer, +) -> None: + async def handler(request: web.Request) -> web.Response: + return web.Response(body=b"1" * 1000000) + + app = web.Application() + app.router.add_get("/", handler) + + server = await aiohttp_server(app) + async with aiohttp.ClientSession() as sess: + resp = await sess.get(server.make_url("/")) + + with pytest.raises(RuntimeError, match="Connection closed"): + await resp.read() From c19cc7791353b780cb1ca4051230bc9ce50fbcc7 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 20:16:47 +0000 Subject: [PATCH 0430/1511] [PR #8847/6d974274 backport][3.10] Improve performance of handling skip_auto_headers (#8904) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/8847.misc.rst | 1 + aiohttp/client.py | 2 +- aiohttp/client_reqrep.py | 24 ++++++++++++++---------- 3 files changed, 16 insertions(+), 11 deletions(-) create mode 100644 CHANGES/8847.misc.rst diff --git a/CHANGES/8847.misc.rst b/CHANGES/8847.misc.rst new file mode 100644 index 00000000000..58f61d48420 --- /dev/null +++ b/CHANGES/8847.misc.rst @@ -0,0 +1 @@ +Improved performance of making requests when there are no auto headers to skip -- by :user:`bdraco`. 
diff --git a/aiohttp/client.py b/aiohttp/client.py index 3d1045f355a..8edd14d01ff 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -627,7 +627,7 @@ async def _request( url, params=params, headers=headers, - skip_auto_headers=skip_headers, + skip_auto_headers=skip_headers if skip_headers else None, data=data, cookies=all_cookies, auth=auth, diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index d8578a5daed..d055e70e87c 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -265,7 +265,7 @@ def __init__( *, params: Optional[Mapping[str, str]] = None, headers: Optional[LooseHeaders] = None, - skip_auto_headers: Iterable[str] = frozenset(), + skip_auto_headers: Optional[Iterable[str]] = None, data: Any = None, cookies: Optional[LooseCookies] = None, auth: Optional[BasicAuth] = None, @@ -451,12 +451,18 @@ def update_headers(self, headers: Optional[LooseHeaders]) -> None: else: self.headers.add(key, value) - def update_auto_headers(self, skip_auto_headers: Iterable[str]) -> None: - self.skip_auto_headers = CIMultiDict( - (hdr, None) for hdr in sorted(skip_auto_headers) - ) - used_headers = self.headers.copy() - used_headers.extend(self.skip_auto_headers) # type: ignore[arg-type] + def update_auto_headers(self, skip_auto_headers: Optional[Iterable[str]]) -> None: + if skip_auto_headers is not None: + self.skip_auto_headers = CIMultiDict( + (hdr, None) for hdr in sorted(skip_auto_headers) + ) + used_headers = self.headers.copy() + used_headers.extend(self.skip_auto_headers) # type: ignore[arg-type] + else: + # Fast path when there are no headers to skip + # which is the most common case. 
+ self.skip_auto_headers = CIMultiDict() + used_headers = self.headers for hdr, val in self.DEFAULT_HEADERS.items(): if hdr not in used_headers: @@ -573,9 +579,7 @@ def update_body_from_data(self, body: Any) -> None: # copy payload headers assert body.headers for key, value in body.headers.items(): - if key in self.headers: - continue - if key in self.skip_auto_headers: + if key in self.headers or key in self.skip_auto_headers: continue self.headers[key] = value From 770fdcb1080fe02c7903b09eb1abfa76fc3734bb Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 20:17:03 +0000 Subject: [PATCH 0431/1511] [PR #8847/6d974274 backport][3.11] Improve performance of handling skip_auto_headers (#8905) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/8847.misc.rst | 1 + aiohttp/client.py | 2 +- aiohttp/client_reqrep.py | 24 ++++++++++++++---------- 3 files changed, 16 insertions(+), 11 deletions(-) create mode 100644 CHANGES/8847.misc.rst diff --git a/CHANGES/8847.misc.rst b/CHANGES/8847.misc.rst new file mode 100644 index 00000000000..58f61d48420 --- /dev/null +++ b/CHANGES/8847.misc.rst @@ -0,0 +1 @@ +Improved performance of making requests when there are no auto headers to skip -- by :user:`bdraco`. 
diff --git a/aiohttp/client.py b/aiohttp/client.py index 3d1045f355a..8edd14d01ff 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -627,7 +627,7 @@ async def _request( url, params=params, headers=headers, - skip_auto_headers=skip_headers, + skip_auto_headers=skip_headers if skip_headers else None, data=data, cookies=all_cookies, auth=auth, diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index d8578a5daed..d055e70e87c 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -265,7 +265,7 @@ def __init__( *, params: Optional[Mapping[str, str]] = None, headers: Optional[LooseHeaders] = None, - skip_auto_headers: Iterable[str] = frozenset(), + skip_auto_headers: Optional[Iterable[str]] = None, data: Any = None, cookies: Optional[LooseCookies] = None, auth: Optional[BasicAuth] = None, @@ -451,12 +451,18 @@ def update_headers(self, headers: Optional[LooseHeaders]) -> None: else: self.headers.add(key, value) - def update_auto_headers(self, skip_auto_headers: Iterable[str]) -> None: - self.skip_auto_headers = CIMultiDict( - (hdr, None) for hdr in sorted(skip_auto_headers) - ) - used_headers = self.headers.copy() - used_headers.extend(self.skip_auto_headers) # type: ignore[arg-type] + def update_auto_headers(self, skip_auto_headers: Optional[Iterable[str]]) -> None: + if skip_auto_headers is not None: + self.skip_auto_headers = CIMultiDict( + (hdr, None) for hdr in sorted(skip_auto_headers) + ) + used_headers = self.headers.copy() + used_headers.extend(self.skip_auto_headers) # type: ignore[arg-type] + else: + # Fast path when there are no headers to skip + # which is the most common case. 
+ self.skip_auto_headers = CIMultiDict() + used_headers = self.headers for hdr, val in self.DEFAULT_HEADERS.items(): if hdr not in used_headers: @@ -573,9 +579,7 @@ def update_body_from_data(self, body: Any) -> None: # copy payload headers assert body.headers for key, value in body.headers.items(): - if key in self.headers: - continue - if key in self.skip_auto_headers: + if key in self.headers or key in self.skip_auto_headers: continue self.headers[key] = value From adcdf5c9d6e0c0aa2e6f24b3b634eeb965d03738 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 22:00:10 +0100 Subject: [PATCH 0432/1511] [PR #8875/0478f143 backport][3.11] Fix unclosed transport warning (#8907) **This is a backport of PR #8875 as merged into master (0478f143caba33c7f6b355b8513e3c26d8db7d4d).** Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8875.bugfix.rst | 1 + aiohttp/web_protocol.py | 15 +++------------ setup.cfg | 2 -- 3 files changed, 4 insertions(+), 14 deletions(-) create mode 100644 CHANGES/8875.bugfix.rst diff --git a/CHANGES/8875.bugfix.rst b/CHANGES/8875.bugfix.rst new file mode 100644 index 00000000000..fa33df05ae2 --- /dev/null +++ b/CHANGES/8875.bugfix.rst @@ -0,0 +1 @@ +Fixed an unclosed transport ``ResourceWarning`` on web handlers -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index a4941d103ef..a2f159c3b7c 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -260,9 +260,6 @@ async def shutdown(self, timeout: Optional[float] = 15.0) -> None: if self._keepalive_handle is not None: self._keepalive_handle.cancel() - if self._waiter: - self._waiter.cancel() - # Wait for graceful handler completion if self._handler_waiter is not None: with suppress(asyncio.CancelledError, asyncio.TimeoutError): @@ -281,9 +278,7 @@ async def shutdown(self, timeout: Optional[float] = 15.0) -> None: if self._task_handler is not None: self._task_handler.cancel() - if self.transport is not None: - self.transport.close() - self.transport = None + self.force_close() def connection_made(self, transport: asyncio.BaseTransport) -> None: super().connection_made(transport) @@ -307,13 +302,12 @@ def connection_lost(self, exc: Optional[BaseException]) -> None: return self._manager.connection_lost(self, exc) - super().connection_lost(exc) - # Grab value before setting _manager to None. handler_cancellation = self._manager.handler_cancellation + self.force_close() + super().connection_lost(exc) self._manager = None - self._force_close = True self._request_factory = None self._request_handler = None self._request_parser = None @@ -326,9 +320,6 @@ def connection_lost(self, exc: Optional[BaseException]) -> None: exc = ConnectionResetError("Connection lost") self._current_request._cancel(exc) - if self._waiter is not None: - self._waiter.cancel() - if handler_cancellation and self._task_handler is not None: self._task_handler.cancel() diff --git a/setup.cfg b/setup.cfg index 4000b5a40a7..c058fc2f05f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -137,8 +137,6 @@ addopts = filterwarnings = error ignore:module 'ssl' has no attribute 'OP_NO_COMPRESSION'. The Python interpreter is compiled against OpenSSL < 1.0.0. Ref. 
https.//docs.python.org/3/library/ssl.html#ssl.OP_NO_COMPRESSION:UserWarning - ignore:unclosed transport <asyncio.sslproto._SSLProtocolTransport object.*:ResourceWarning - ignore:unclosed transport <_ProactorSocketTransport closing fd=-1>:ResourceWarning ignore:Unclosed client session <aiohttp.client.ClientSession object at 0x:ResourceWarning # Temporarily ignore warnings internal to Python 3.9.7, can be removed again in 3.9.8. ignore:The loop argument is deprecated since Python 3.8, and scheduled for removal in Python 3.10.:DeprecationWarning:asyncio From ade02bf8700dbb13ea7a4038014fd7b54a228120 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 22:05:32 +0100 Subject: [PATCH 0433/1511] [PR #8875/0478f143 backport][3.10] Fix unclosed transport warning (#8906) **This is a backport of PR #8875 as merged into master (0478f143caba33c7f6b355b8513e3c26d8db7d4d).** Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8875.bugfix.rst | 1 + aiohttp/web_protocol.py | 15 +++------------ setup.cfg | 2 -- 3 files changed, 4 insertions(+), 14 deletions(-) create mode 100644 CHANGES/8875.bugfix.rst diff --git a/CHANGES/8875.bugfix.rst b/CHANGES/8875.bugfix.rst new file mode 100644 index 00000000000..fa33df05ae2 --- /dev/null +++ b/CHANGES/8875.bugfix.rst @@ -0,0 +1 @@ +Fixed an unclosed transport ``ResourceWarning`` on web handlers -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index a4941d103ef..a2f159c3b7c 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -260,9 +260,6 @@ async def shutdown(self, timeout: Optional[float] = 15.0) -> None: if self._keepalive_handle is not None: self._keepalive_handle.cancel() - if self._waiter: - self._waiter.cancel() - # Wait for graceful handler completion if self._handler_waiter is not None: with suppress(asyncio.CancelledError, asyncio.TimeoutError): @@ -281,9 +278,7 @@ async def shutdown(self, timeout: Optional[float] = 15.0) -> None: if self._task_handler is not None: self._task_handler.cancel() - if self.transport is not None: - self.transport.close() - self.transport = None + self.force_close() def connection_made(self, transport: asyncio.BaseTransport) -> None: super().connection_made(transport) @@ -307,13 +302,12 @@ def connection_lost(self, exc: Optional[BaseException]) -> None: return self._manager.connection_lost(self, exc) - super().connection_lost(exc) - # Grab value before setting _manager to None. handler_cancellation = self._manager.handler_cancellation + self.force_close() + super().connection_lost(exc) self._manager = None - self._force_close = True self._request_factory = None self._request_handler = None self._request_parser = None @@ -326,9 +320,6 @@ def connection_lost(self, exc: Optional[BaseException]) -> None: exc = ConnectionResetError("Connection lost") self._current_request._cancel(exc) - if self._waiter is not None: - self._waiter.cancel() - if handler_cancellation and self._task_handler is not None: self._task_handler.cancel() diff --git a/setup.cfg b/setup.cfg index 71ed6b98e0e..bd93b00cb2f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -138,8 +138,6 @@ addopts = filterwarnings = error ignore:module 'ssl' has no attribute 'OP_NO_COMPRESSION'. The Python interpreter is compiled against OpenSSL < 1.0.0. Ref. 
https.//docs.python.org/3/library/ssl.html#ssl.OP_NO_COMPRESSION:UserWarning - ignore:unclosed transport <asyncio.sslproto._SSLProtocolTransport object.*:ResourceWarning - ignore:unclosed transport <_ProactorSocketTransport closing fd=-1>:ResourceWarning ignore:Unclosed client session <aiohttp.client.ClientSession object at 0x:ResourceWarning # Temporarily ignore warnings internal to Python 3.9.7, can be removed again in 3.9.8. ignore:The loop argument is deprecated since Python 3.8, and scheduled for removal in Python 3.10.:DeprecationWarning:asyncio From cadffc7064e510d3e660f7829b95700fc99b4092 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 27 Aug 2024 10:32:29 +0000 Subject: [PATCH 0434/1511] Bump wheel from 0.37.0 to 0.44.0 (#8910) Bumps [wheel](https://github.com/pypa/wheel) from 0.37.0 to 0.44.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pypa/wheel/releases">wheel's releases</a>.</em></p> <blockquote> <h2>0.44.0</h2> <ul> <li>Canonicalized requirements in METADATA file (PR by Wim Jeantine-Glenn)</li> <li>Deprecated the <code>bdist_wheel</code> module, as the code was migrated to <code>setuptools</code> itself</li> </ul> <h2>0.43.0</h2> <ul> <li>Dropped support for Python 3.7</li> <li>Updated vendored <code>packaging</code> to 24.0</li> </ul> <h2>0.42.0</h2> <ul> <li>Allowed removing build tag with <code>wheel tags --build ""</code></li> <li>Fixed <code>wheel pack</code> and <code>wheel tags</code> writing updated <code>WHEEL</code> fields after a blank line, causing other tools to ignore them</li> <li>Fixed <code>wheel pack</code> and <code>wheel tags</code> writing <code>WHEEL</code> with CRLF line endings or a mix of CRLF and LF</li> <li>Fixed <code>wheel pack --build-number ""</code> not removing build tag from <code>WHEEL</code> (above changes by Benjamin Gilbert)</li> </ul> <h2>0.41.3</h2> <ul> <li>Updated vendored <code>packaging</code> to 
23.2</li> <li>Fixed ABI tag generation for CPython 3.13a1 on Windows (PR by Sam Gross)</li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/wheel/blob/main/docs/news.rst">wheel's changelog</a>.</em></p> <blockquote> <h1>Release Notes</h1> <p><strong>0.44.0 (2024-08-04)</strong></p> <ul> <li>Canonicalized requirements in METADATA file (PR by Wim Jeantine-Glenn)</li> <li>Deprecated the <code>bdist_wheel</code> module, as the code was migrated to <code>setuptools</code> itself</li> </ul> <p><strong>0.43.0 (2024-03-11)</strong></p> <ul> <li>Dropped support for Python 3.7</li> <li>Updated vendored <code>packaging</code> to 24.0</li> </ul> <p><strong>0.42.0 (2023-11-26)</strong></p> <ul> <li>Allowed removing build tag with <code>wheel tags --build ""</code></li> <li>Fixed <code>wheel pack</code> and <code>wheel tags</code> writing updated <code>WHEEL</code> fields after a blank line, causing other tools to ignore them</li> <li>Fixed <code>wheel pack</code> and <code>wheel tags</code> writing <code>WHEEL</code> with CRLF line endings or a mix of CRLF and LF</li> <li>Fixed <code>wheel pack --build-number ""</code> not removing build tag from <code>WHEEL</code> (above changes by Benjamin Gilbert)</li> </ul> <p><strong>0.41.3 (2023-10-30)</strong></p> <ul> <li>Updated vendored <code>packaging</code> to 23.2</li> <li>Fixed ABI tag generation for CPython 3.13a1 on Windows (PR by Sam Gross)</li> </ul> <p><strong>0.41.2 (2023-08-22)</strong></p> <ul> <li>Fixed platform tag detection for GraalPy and 32-bit python running on an aarch64 kernel (PR by Matthieu Darbois)</li> <li>Fixed <code>wheel tags</code> to not list directories in <code>RECORD</code> files (PR by Mike Taves)</li> <li>Fixed ABI tag generation for GraalPy (PR by Michael Simacek)</li> </ul> <p><strong>0.41.1 (2023-08-05)</strong></p> <ul> <li>Fixed naming of the <code>data_dir</code> directory in the presence of local version segment given 
via <code>egg_info.tag_build</code> (PR by Anderson Bravalheri)</li> <li>Fixed version specifiers in <code>Requires-Dist</code> being wrapped in parentheses</li> </ul> <p><strong>0.41.0 (2023-07-22)</strong></p> <ul> <li>Added full support of the build tag syntax to <code>wheel tags</code> (you can now set a build tag like <code>123mytag</code>)</li> <li>Fixed warning on Python 3.12 about <code>onerror</code> deprecation. (PR by Henry Schreiner)</li> <li>Support testing on Python 3.12 betas (PR by Ewout ter Hoeven)</li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/wheel/commit/7bb46d7727e6e89fe56b3c78297b3af2672bbbe2"><code>7bb46d7</code></a> Created a new release</li> <li><a href="https://github.com/pypa/wheel/commit/0add7d6c5fe6c03d7bec22d7d18f51d8a519862c"><code>0add7d6</code></a> Deprecated bdist_wheel and updated the README (<a href="https://redirect.github.com/pypa/wheel/issues/631">#631</a>)</li> <li><a href="https://github.com/pypa/wheel/commit/46c2389891cb09c1c5373c5920991c9149585ee5"><code>46c2389</code></a> chore: make sure local ruff runs don't touch vendored (<a href="https://redirect.github.com/pypa/wheel/issues/618">#618</a>)</li> <li><a href="https://github.com/pypa/wheel/commit/78b9ea9a14a4e9473b4409a27054e932ddde494e"><code>78b9ea9</code></a> Updated Cirrus CI config to use FreeBSD 14</li> <li><a href="https://github.com/pypa/wheel/commit/3d3916add41a452be1ff7ecb9af2b32fa5f5d39e"><code>3d3916a</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/pypa/wheel/issues/627">#627</a>)</li> <li><a href="https://github.com/pypa/wheel/commit/1e00742acc9fb33f6e71460c3844c2b66532af7f"><code>1e00742</code></a> Revert "Apply new ruff/pyupgrade rule UP032 (<a href="https://redirect.github.com/pypa/wheel/issues/617">#617</a>)"</li> <li><a 
href="https://github.com/pypa/wheel/commit/16206e67f161135cf964eb069296dcee994e4db3"><code>16206e6</code></a> Apply new ruff/pyupgrade rule UP032 (<a href="https://redirect.github.com/pypa/wheel/issues/617">#617</a>)</li> <li><a href="https://github.com/pypa/wheel/commit/0b7771e6bc46b8905d8b6c02d8f825bbf9623cdc"><code>0b7771e</code></a> Updated pre-commit modules and applied ruff fixes</li> <li><a href="https://github.com/pypa/wheel/commit/bd8ab856863226ad58b5a70597f990d16836cefa"><code>bd8ab85</code></a> Extended the ruff rule list and applied fixes</li> <li><a href="https://github.com/pypa/wheel/commit/376373b9690e746c36e3984808759e9c32d5e014"><code>376373b</code></a> Allow bdist_wheel working without ctypes (<a href="https://redirect.github.com/pypa/wheel/issues/613">#613</a>)</li> <li>Additional commits viewable in <a href="https://github.com/pypa/wheel/compare/0.37.0...0.44.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=wheel&package-manager=pip&previous-version=0.37.0&new-version=0.44.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index f921bb0e786..3881dcfc314 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -285,7 +285,7 @@ wait-for-it==2.2.2 # via -r requirements/test.in webcolors==24.8.0 # via blockdiag -wheel==0.37.0 
+wheel==0.44.0 # via pip-tools yarl==1.9.4 # via -r requirements/runtime-deps.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 35a83a15360..314ba878d45 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -277,7 +277,7 @@ wait-for-it==2.2.2 # via -r requirements/test.in webcolors==24.8.0 # via blockdiag -wheel==0.41.0 +wheel==0.44.0 # via pip-tools yarl==1.9.4 # via -r requirements/runtime-deps.in From fe85c54692fcbecbb002e3808277c39878b4c9ba Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 27 Aug 2024 10:39:39 +0000 Subject: [PATCH 0435/1511] Bump pyproject-hooks from 1.0.0 to 1.1.0 (#8911) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pyproject-hooks](https://github.com/pypa/pyproject-hooks) from 1.0.0 to 1.1.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/pyproject-hooks/blob/main/docs/changelog.rst">pyproject-hooks's changelog</a>.</em></p> <blockquote> <h1>Changelog</h1> <h2>v1.1</h2> <ul> <li>Add type annotations to the public API.</li> <li>More careful handling of the <code>backend-path</code> key from <code>pyproject.toml</code>. Previous versions would load the backend and then check that it was loaded from the specified path; the new version only loads it from the specified path. The <code>BackendInvalid</code> exception is now a synonym for :exc:<code>BackendUnavailable</code>, and code should move to using the latter name.</li> </ul> <h2>v1.0</h2> <ul> <li>Rename package to <code>pyproject_hooks</code> (from <code>pep517</code>).</li> <li>Remove deprecated modules (<code>.build</code>, <code>.check</code> and <code>.envbuild</code>). 
Use the <code>build <https://pypa-build.readthedocs.io/en/stable/></code>_ project instead for this higher-level functionality of setting up a temporary build environment.</li> <li>Require Python 3.7 or above.</li> <li>Use <code>tomllib</code> from the standard library on Python 3.11. <code>pyproject_hooks</code> now has no external dependencies when installed in Python 3.11.</li> <li>Avoid chaining exceptions when using the fallback implementation for :meth:<code>.prepare_metadata_for_build_wheel</code>.</li> <li>Fix propagating error message for :exc:<code>.BackendInvalid</code> errors.</li> </ul> <h2>v0.13</h2> <ul> <li>Remove support for end-of-life Pythons. Now requires Python3.6+.</li> <li>Remove support for <code>toml</code> package. Now requires <code>tomli</code>.</li> <li>Rely on preferred "files" API on Python 3.9 and later (<a href="https://redirect.github.com/pypa/pyproject-hooks/issues/140">#140</a>).</li> </ul> <h2>v0.12</h2> <ul> <li>Add method for pip to check if build_editable hook is supported. This is a private API for now.</li> </ul> <h2>v0.11.1</h2> <ul> <li>Fix DeprecationWarning in tomli.</li> </ul> <h2>v0.11</h2> <ul> <li>Support editable hooks (<code>PEP 660 <https://www.python.org/dev/peps/pep-0660/></code>_).</li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/pyproject-hooks/commit/903ad919c7857a7321dee080e708e13afb2bd4d5"><code>903ad91</code></a> Merge pull request <a href="https://redirect.github.com/pypa/pyproject-hooks/issues/189">#189</a> from pypa/ci-rm-mac-py37</li> <li><a href="https://github.com/pypa/pyproject-hooks/commit/9a22b3e052ca8c9ae10445aba20d784ab7aeb63a"><code>9a22b3e</code></a> Exclude Mac + Python 3.7 from CI matrix</li> <li><a href="https://github.com/pypa/pyproject-hooks/commit/d79646fcac2f888a91e2ea9e17cceec19d9a2b24"><code>d79646f</code></a> Merge pull request <a href="https://redirect.github.com/pypa/pyproject-hooks/issues/188">#188</a> from pypa/prepare-1.1</li> <li><a href="https://github.com/pypa/pyproject-hooks/commit/7de0160e5ab3a82d2be393781577736096d6f29e"><code>7de0160</code></a> Bump version: 1.0.0 → 1.1.0</li> <li><a href="https://github.com/pypa/pyproject-hooks/commit/dd15b2a55f4e1a0aa858d6195e07ecf6d6ac44cb"><code>dd15b2a</code></a> Mention backend-path changes in changelog</li> <li><a href="https://github.com/pypa/pyproject-hooks/commit/debf816af9373232fbf79f1efdf37320d2e0853c"><code>debf816</code></a> Add back BackendInvalid as a synonym for BackendUnavailable</li> <li><a href="https://github.com/pypa/pyproject-hooks/commit/c667aeb10e7d6805aed53c95e9727c3ccbaad40e"><code>c667aeb</code></a> Merge pull request <a href="https://redirect.github.com/pypa/pyproject-hooks/issues/187">#187</a> from pypa/gha-trusted-publish</li> <li><a href="https://github.com/pypa/pyproject-hooks/commit/79a2edf4a3d40c842cb7916bf6622981be1751de"><code>79a2edf</code></a> Remove trailing space</li> <li><a href="https://github.com/pypa/pyproject-hooks/commit/f66e6a11097c477aeb08ad5e08c01a5c8823bfda"><code>f66e6a1</code></a> Specify environment for publishing job</li> <li><a href="https://github.com/pypa/pyproject-hooks/commit/1248ac2f0526291771f2cab520cf82ee6323a7ca"><code>1248ac2</code></a> Set up 
trusted publishing for making releases to PyPI</li> <li>Additional commits viewable in <a href="https://github.com/pypa/pyproject-hooks/compare/v1.0.0...v1.1.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pyproject-hooks&package-manager=pip&previous-version=1.0.0&new-version=1.1.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 3 +-- requirements/dev.txt | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 3881dcfc314..831e2b9553e 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -166,7 +166,7 @@ pyjwt==2.9.0 # via # gidgethub # pyjwt -pyproject-hooks==1.0.0 +pyproject-hooks==1.1.0 # via # build # pip-tools @@ -245,7 +245,6 @@ tomli==2.0.1 # incremental # mypy # pip-tools - # pyproject-hooks # pytest # slotscheck # towncrier diff --git a/requirements/dev.txt b/requirements/dev.txt index 314ba878d45..84ee10dc526 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -161,7 +161,7 @@ pyjwt==2.8.0 # via # gidgethub # pyjwt -pyproject-hooks==1.0.0 +pyproject-hooks==1.1.0 # via # build # pip-tools @@ -237,7 +237,6 @@ tomli==2.0.1 # incremental # mypy # pip-tools - # pyproject-hooks # pytest # slotscheck # towncrier From 124181f207c8cfd3257bc2063527322b1ed8ed3a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 27 Aug 2024 11:26:09 +0000 Subject: 
[PATCH 0436/1511] Bump rich from 13.7.1 to 13.8.0 (#8913) Bumps [rich](https://github.com/Textualize/rich) from 13.7.1 to 13.8.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/Textualize/rich/releases">rich's releases</a>.</em></p> <blockquote> <h2>The Thanks for your patience Release</h2> <p>This is a fairly large update. Mostly an accumulation of small fixes and enhancements. Nothing qualifies as a *breaking change (for some definition), but there may be some subtly changes to output. Check below for anything that might affect you!</p> <h2>[13.8.0] - 2024-08-26</h2> <h3>Fixed</h3> <ul> <li>Fixed <code>Table</code> rendering of box elements so "footer" elements truly appear at bottom of table, "mid" elements in main table body.</li> <li>Fixed styles in Panel when Text objects are used for title <a href="https://redirect.github.com/Textualize/rich/pull/3401">Textualize/rich#3401</a></li> <li>Fix pretty repr for <code>collections.deque</code> <a href="https://redirect.github.com/Textualize/rich/pull/2864">Textualize/rich#2864</a></li> <li>Thread used in progress.track will exit if an exception occurs in a generator <a href="https://redirect.github.com/Textualize/rich/pull/3402">Textualize/rich#3402</a></li> <li>Progress track thread is now a daemon thread <a href="https://redirect.github.com/Textualize/rich/pull/3402">Textualize/rich#3402</a></li> <li>Fixed cached hash preservation upon clearing meta and links <a href="https://redirect.github.com/Textualize/rich/issues/2942">Textualize/rich#2942</a></li> <li>Fixed overriding the <code>background_color</code> of <code>Syntax</code> not including padding <a href="https://redirect.github.com/Textualize/rich/issues/3295">Textualize/rich#3295</a></li> <li>Fixed pretty printing of dataclasses with a default repr in Python 3.13 <a href="https://redirect.github.com/Textualize/rich/pull/3455">Textualize/rich#3455</a></li> <li>Fixed selective enabling of highlighting when disabled 
in the <code>Console</code> <a href="https://redirect.github.com/Textualize/rich/issues/3419">Textualize/rich#3419</a></li> <li>Fixed BrokenPipeError writing an error message <a href="https://redirect.github.com/Textualize/rich/pull/3468">Textualize/rich#3468</a></li> <li>Fixed superfluous space above Markdown tables <a href="https://redirect.github.com/Textualize/rich/pull/3469">Textualize/rich#3469</a></li> <li>Fixed issue with record and capture interaction <a href="https://redirect.github.com/Textualize/rich/pull/3470">Textualize/rich#3470</a></li> <li>Fixed control codes breaking in <code>append_tokens</code> <a href="https://redirect.github.com/Textualize/rich/pull/3471">Textualize/rich#3471</a></li> <li>Fixed exception pretty printing a dataclass with missing fields <a href="https://redirect.github.com/Textualize/rich/pull/3472">Textualize/rich#3472</a></li> </ul> <h3>Changed</h3> <ul> <li><code>RichHandler</code> errors and warnings will now use different colors (red and yellow) <a href="https://redirect.github.com/Textualize/rich/issues/2825">Textualize/rich#2825</a></li> <li>Removed the empty line printed in jupyter while using <code>Progress</code> <a href="https://redirect.github.com/Textualize/rich/pull/2616">Textualize/rich#2616</a></li> <li>Running tests in environment with <code>FORCE_COLOR</code> or <code>NO_COLOR</code> environment variables</li> <li>ansi decoder will now strip problematic private escape sequences (like <code>\x1b7</code>) <a href="https://redirect.github.com/Textualize/rich/pull/3278/">Textualize/rich#3278</a></li> <li>Tree's ASCII_GUIDES and TREE_GUIDES constants promoted to class attributes</li> </ul> <h3>Added</h3> <ul> <li>Adds a <code>case_sensitive</code> parameter to <code>prompt.Prompt</code>. This determines if the response is treated as case-sensitive. 
Defaults to <code>True</code>.</li> <li>Added <code>Console.on_broken_pipe</code> <a href="https://redirect.github.com/Textualize/rich/pull/3468">Textualize/rich#3468</a></li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/Textualize/rich/blob/master/CHANGELOG.md">rich's changelog</a>.</em></p> <blockquote> <h2>[13.8.0] - 2024-08-26</h2> <h3>Fixed</h3> <ul> <li>Fixed <code>Table</code> rendering of box elements so "footer" elements truly appear at bottom of table, "mid" elements in main table body.</li> <li>Fixed styles in Panel when Text objects are used for title <a href="https://redirect.github.com/Textualize/rich/pull/3401">Textualize/rich#3401</a></li> <li>Fix pretty repr for <code>collections.deque</code> <a href="https://redirect.github.com/Textualize/rich/pull/2864">Textualize/rich#2864</a></li> <li>Thread used in progress.track will exit if an exception occurs in a generator <a href="https://redirect.github.com/Textualize/rich/pull/3402">Textualize/rich#3402</a></li> <li>Progress track thread is now a daemon thread <a href="https://redirect.github.com/Textualize/rich/pull/3402">Textualize/rich#3402</a></li> <li>Fixed cached hash preservation upon clearing meta and links <a href="https://redirect.github.com/Textualize/rich/issues/2942">Textualize/rich#2942</a></li> <li>Fixed overriding the <code>background_color</code> of <code>Syntax</code> not including padding <a href="https://redirect.github.com/Textualize/rich/issues/3295">Textualize/rich#3295</a></li> <li>Fixed pretty printing of dataclasses with a default repr in Python 3.13 <a href="https://redirect.github.com/Textualize/rich/pull/3455">Textualize/rich#3455</a></li> <li>Fixed selective enabling of highlighting when disabled in the <code>Console</code> <a href="https://redirect.github.com/Textualize/rich/issues/3419">Textualize/rich#3419</a></li> <li>Fixed BrokenPipeError writing an error message <a 
href="https://redirect.github.com/Textualize/rich/pull/3468">Textualize/rich#3468</a></li> <li>Fixed superfluous space above Markdown tables <a href="https://redirect.github.com/Textualize/rich/pull/3469">Textualize/rich#3469</a></li> <li>Fixed issue with record and capture interaction <a href="https://redirect.github.com/Textualize/rich/pull/3470">Textualize/rich#3470</a></li> <li>Fixed control codes breaking in <code>append_tokens</code> <a href="https://redirect.github.com/Textualize/rich/pull/3471">Textualize/rich#3471</a></li> <li>Fixed exception pretty printing a dataclass with missing fields <a href="https://redirect.github.com/Textualize/rich/pull/3472">Textualize/rich#3472</a></li> </ul> <h3>Changed</h3> <ul> <li><code>RichHandler</code> errors and warnings will now use different colors (red and yellow) <a href="https://redirect.github.com/Textualize/rich/issues/2825">Textualize/rich#2825</a></li> <li>Removed the empty line printed in jupyter while using <code>Progress</code> <a href="https://redirect.github.com/Textualize/rich/pull/2616">Textualize/rich#2616</a></li> <li>Running tests in environment with <code>FORCE_COLOR</code> or <code>NO_COLOR</code> environment variables</li> <li>ansi decoder will now strip problematic private escape sequences (like <code>\x1b7</code>) <a href="https://redirect.github.com/Textualize/rich/pull/3278/">Textualize/rich#3278</a></li> <li>Tree's ASCII_GUIDES and TREE_GUIDES constants promoted to class attributes</li> </ul> <h3>Added</h3> <ul> <li>Adds a <code>case_sensitive</code> parameter to <code>prompt.Prompt</code>. This determines if the response is treated as case-sensitive. 
Defaults to <code>True</code>.</li> <li>Added <code>Console.on_broken_pipe</code> <a href="https://redirect.github.com/Textualize/rich/pull/3468">Textualize/rich#3468</a></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/Textualize/rich/commit/9ec419154102acce7363e3958bec6c261864d7fb"><code>9ec4191</code></a> Merge pull request <a href="https://redirect.github.com/Textualize/rich/issues/3473">#3473</a> from Textualize/bump1380</li> <li><a href="https://github.com/Textualize/rich/commit/9c74f03f5c0aa2ed20e1d184cc9b7000cb8f6358"><code>9c74f03</code></a> bump to v13.8.0</li> <li><a href="https://github.com/Textualize/rich/commit/dc7a195acd480326c2449ec0829726ecd91e0c68"><code>dc7a195</code></a> Merge pull request <a href="https://redirect.github.com/Textualize/rich/issues/3472">#3472</a> from Textualize/fix-bad-dataclass</li> <li><a href="https://github.com/Textualize/rich/commit/c93883011aa059b5683ae2b30d02952d21a0bc5d"><code>c938830</code></a> changelog</li> <li><a href="https://github.com/Textualize/rich/commit/6055e2d8ef98e5487949b9c6e5d0dfce6a5b9e28"><code>6055e2d</code></a> fix for missing field in dataclass</li> <li><a href="https://github.com/Textualize/rich/commit/b6f2f7aa5b27f612f391cd28212aff51ad0d42d1"><code>b6f2f7a</code></a> Merge pull request <a href="https://redirect.github.com/Textualize/rich/issues/3454">#3454</a> from subrat-lima/master</li> <li><a href="https://github.com/Textualize/rich/commit/b1397be1e79fc3ac6d7fd4f66474fbe6d5e820bf"><code>b1397be</code></a> Merge pull request <a href="https://redirect.github.com/Textualize/rich/issues/3455">#3455</a> from jjhelmus/dataclasses_3.13</li> <li><a href="https://github.com/Textualize/rich/commit/035f3ea790f5dc6451ce7f15b41f452e6ea523d3"><code>035f3ea</code></a> Merge pull request <a href="https://redirect.github.com/Textualize/rich/issues/3452">#3452</a> from sbraz/typos_examples</li> <li><a 
href="https://github.com/Textualize/rich/commit/d6abebd8bf7adb58cddc6a0652f8f80684b311ee"><code>d6abebd</code></a> Merge branch 'master' into dataclasses_3.13</li> <li><a href="https://github.com/Textualize/rich/commit/1b2dada1c9fa6f16ec319a8d370458c8bde203f7"><code>1b2dada</code></a> Merge pull request <a href="https://redirect.github.com/Textualize/rich/issues/3471">#3471</a> from Textualize/fix-append-tokens</li> <li>Additional commits viewable in <a href="https://github.com/Textualize/rich/compare/v13.7.1...v13.8.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=rich&package-manager=pip&previous-version=13.7.1&new-version=13.8.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 831e2b9553e..6f298fcfed9 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -201,7 +201,7 @@ requests==2.32.3 # cherry-picker # python-on-whales # sphinx -rich==13.7.1 +rich==13.8.0 # via typer setuptools-git==1.2 # via -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 84ee10dc526..a829b815d0b 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -196,7 +196,7 @@ requests==2.32.3 # cherry-picker # python-on-whales # sphinx -rich==13.7.1 +rich==13.8.0 # via typer setuptools-git==1.2 # via -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 1184ed4653b..a643194a1cb 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -86,7 +86,7 @@ pyyaml==6.0.2 # via pre-commit requests==2.32.3 # via python-on-whales -rich==13.7.1 +rich==13.8.0 # via typer shellingham==1.5.4 # via 
typer diff --git a/requirements/test.txt b/requirements/test.txt index cc98efc6d0f..73287916d52 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -102,7 +102,7 @@ regex==2024.7.24 # via re-assert requests==2.32.3 # via python-on-whales -rich==13.7.1 +rich==13.8.0 # via typer setuptools-git==1.2 # via -r requirements/test.in From a0ec9497e271444f13d554383b7f680f11fc1482 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 27 Aug 2024 11:26:13 +0000 Subject: [PATCH 0437/1511] Bump importlib-resources from 6.1.1 to 6.4.4 (#8915) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [importlib-resources](https://github.com/python/importlib_resources) from 6.1.1 to 6.4.4. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/python/importlib_resources/blob/main/NEWS.rst">importlib-resources's changelog</a>.</em></p> <blockquote> <h1>v6.4.4</h1> <p>No significant changes.</p> <h1>v6.4.3</h1> <h2>Bugfixes</h2> <ul> <li>When inferring the caller in <code>files()</code><code>python/cpython#123085</code></li> </ul> <h1>v6.4.2</h1> <h2>Bugfixes</h2> <ul> <li>Merged fix for UTF-16 BOM handling in functional tests. (<a href="https://redirect.github.com/python/importlib_resources/issues/312">#312</a>)</li> </ul> <h1>v6.4.1</h1> <h2>Bugfixes</h2> <ul> <li><code>python/cpython#121735</code></li> </ul> <h1>v6.4.0</h1> <h2>Features</h2> <ul> <li>The functions <code>is_resource()</code>, <code>open_binary()</code>, <code>open_text()</code>, <code>path()</code>, <code>read_binary()</code>, and <code>read_text()</code> are un-deprecated, and support subdirectories via multiple positional arguments. The <code>contents()</code> function also allows subdirectories, but remains deprecated. 
(<a href="https://redirect.github.com/python/importlib_resources/issues/303">#303</a>)</li> <li><code>python/cpython#109829</code></li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/python/importlib_resources/commit/9689f8f82ca838cd58d3d0d80828785ada7798c9"><code>9689f8f</code></a> Finalize</li> <li><a href="https://github.com/python/importlib_resources/commit/f3ad28a14feb85dc2631a8e3658f090ed0b7522d"><code>f3ad28a</code></a> 🧎‍♀️ Genuflect to the types.</li> <li><a href="https://github.com/python/importlib_resources/commit/045dde42b16581b99539667a87d61252bf4d08ac"><code>045dde4</code></a> 🧎‍♀️ Genuflect to the types.</li> <li><a href="https://github.com/python/importlib_resources/commit/2fecb5e596c7b6ff2d3f98af0ac63c9f38401c5a"><code>2fecb5e</code></a> 🧎‍♀️ Genuflect to the types.</li> <li><a href="https://github.com/python/importlib_resources/commit/75b301c95ce3d75626c949a26e650deceba8e62c"><code>75b301c</code></a> 🧎‍♀️ Genuflect to the types.</li> <li><a href="https://github.com/python/importlib_resources/commit/45167451781dd8fc03e5568da40144dd8a4f790d"><code>4516745</code></a> Merge <a href="https://github.com/jaraco/skeleton">https://github.com/jaraco/skeleton</a></li> <li><a href="https://github.com/python/importlib_resources/commit/f1350e413775a9e79e20779cc9705e28a1c55900"><code>f1350e4</code></a> Add upstream and local sections for 'type' extra, since many projects will ha...</li> <li><a href="https://github.com/python/importlib_resources/commit/d02141768b62468e46064614276036ea5c746056"><code>d021417</code></a> Finalize</li> <li><a href="https://github.com/python/importlib_resources/commit/0ecbc3b374ae84ae10ded5e1ad1d8775e12c2dd7"><code>0ecbc3b</code></a> Merge pull request <a href="https://redirect.github.com/python/importlib_resources/issues/314">#314</a> from python/<a 
href="https://redirect.github.com/python/importlib_resources/issues/123085">gh-123085</a>/inferred-compiled</li> <li><a href="https://github.com/python/importlib_resources/commit/79fa62f4b5cbf8f358560651a714b282aee2226c"><code>79fa62f</code></a> Add docstring and reference to the issue.</li> <li>Additional commits viewable in <a href="https://github.com/python/importlib_resources/compare/v6.1.1...v6.4.4">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=importlib-resources&package-manager=pip&previous-version=6.1.1&new-version=6.4.4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 6f298fcfed9..6b9beaf9d5a 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -99,7 +99,7 @@ importlib-metadata==8.4.0 # via # build # sphinx -importlib-resources==6.1.1 +importlib-resources==6.4.4 # via towncrier incremental==24.7.2 # via towncrier diff --git a/requirements/dev.txt b/requirements/dev.txt index a829b815d0b..ec6aea6146a 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -97,7 +97,7 @@ importlib-metadata==8.4.0 # via # build # sphinx -importlib-resources==6.1.1 +importlib-resources==6.4.4 # via towncrier incremental==24.7.2 # via towncrier diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 57bc9bc47b6..4d55d216acb 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -28,7 +28,7 @@ imagesize==1.4.1 # via sphinx importlib-metadata==8.4.0 # via sphinx 
-importlib-resources==6.1.1 +importlib-resources==6.4.4 # via towncrier incremental==24.7.2 # via towncrier diff --git a/requirements/doc.txt b/requirements/doc.txt index 91ad44582d4..fbfc99f686f 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -28,7 +28,7 @@ imagesize==1.4.1 # via sphinx importlib-metadata==8.4.0 # via sphinx -importlib-resources==6.1.1 +importlib-resources==6.4.4 # via towncrier incremental==24.7.2 # via towncrier From 94d2cb51dbae7038cbaece79a2225bab3d024be6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 27 Aug 2024 11:31:28 +0000 Subject: [PATCH 0438/1511] Bump click from 8.0.3 to 8.1.7 (#8917) Bumps [click](https://github.com/pallets/click) from 8.0.3 to 8.1.7. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pallets/click/releases">click's releases</a>.</em></p> <blockquote> <h2>8.1.7</h2> <p>This is a fix release for the 8.1.x feature branch.</p> <ul> <li>Changes: <a href="https://click.palletsprojects.com/en/8.1.x/changes/#version-8-1-7">https://click.palletsprojects.com/en/8.1.x/changes/#version-8-1-7</a></li> <li>Milestone: <a href="https://github.com/pallets/click/milestone/22?closed=1">https://github.com/pallets/click/milestone/22?closed=1</a></li> </ul> <h2>8.1.6</h2> <p>This is a fix release for the 8.1.x feature branch. If you were having issues with type checking tools like pyright or mypy not accepting uses of Click's decorators, this should fix that.</p> <ul> <li>Changes: <a href="https://click.palletsprojects.com/en/8.1.x/changes/#version-8-1-6">https://click.palletsprojects.com/en/8.1.x/changes/#version-8-1-6</a></li> <li>Milestone: <a href="https://github.com/pallets/click/milestone/21?closed=1">https://github.com/pallets/click/milestone/21?closed=1</a></li> </ul> <h2>8.1.5</h2> <p>This is a fix release for the 8.1.x feature branch. 
This fixes an issue with decorator type annotations that caused type checkers to fail for valid code. There are no runtime behavior changes.</p> <ul> <li>Changes: <a href="https://click.palletsprojects.com/en/8.1.x/changes/#version-8-1-5">https://click.palletsprojects.com/en/8.1.x/changes/#version-8-1-5</a></li> <li>Milestone: <a href="https://github.com/pallets/click/milestone/20?closed=1">https://github.com/pallets/click/milestone/20?closed=1</a></li> </ul> <h2>8.1.4</h2> <p>This is a fix release for the 8.1.x feature branch.</p> <ul> <li>Changes: <a href="https://click.palletsprojects.com/en/8.1.x/changes/#version-8-1-4">https://click.palletsprojects.com/en/8.1.x/changes/#version-8-1-4</a></li> <li>Milestone: <a href="https://github.com/pallets/click/milestone/19?closed=1">https://github.com/pallets/click/milestone/19?closed=1</a></li> </ul> <h2>8.1.3</h2> <p>This is a fix release for the <a href="https://github.com/pallets/click/releases/tag/8.1.0">8.1.0</a> feature release.</p> <ul> <li>Changes: <a href="https://click.palletsprojects.com/en/8.1.x/changes/#version-8-1-3">https://click.palletsprojects.com/en/8.1.x/changes/#version-8-1-3</a></li> <li>Milestone: <a href="https://github.com/pallets/click/milestone/18?closed=1">https://github.com/pallets/click/milestone/18?closed=1</a></li> </ul> <h2>8.1.2</h2> <p>This is a fix release for the <a href="https://github.com/pallets/click/releases/tag/8.1.0">8.1.0</a> feature release.</p> <ul> <li>Changes: <a href="https://click.palletsprojects.com/en/8.1.x/changes/#version-8-1-2">https://click.palletsprojects.com/en/8.1.x/changes/#version-8-1-2</a></li> <li>Milestone: <a href="https://github.com/pallets/click/milestone/17?closed=1">https://github.com/pallets/click/milestone/17?closed=1</a></li> </ul> <h2>8.1.1</h2> <p>This is a fix release for the <a href="https://github.com/pallets/click/releases/tag/8.1.0">8.1.0</a> feature release.</p> <ul> <li>Changes: <a 
href="https://click.palletsprojects.com/en/8.1.x/changes/#version-8-1-1">https://click.palletsprojects.com/en/8.1.x/changes/#version-8-1-1</a></li> <li>Milestone: <a href="https://github.com/pallets/click/milestone/14?closed=1">https://github.com/pallets/click/milestone/14?closed=1</a></li> </ul> <h2>8.1.0</h2> <p>This is a feature release, which includes new features and removes previously deprecated features. The 8.1.x branch is now the supported bugfix branch, the 8.0.x branch will become a tag marking the end of support for that branch. We encourage everyone to upgrade, and to use a tool such as <a href="https://pypi.org/project/pip-tools/">pip-tools</a> to pin all dependencies and control upgrades.</p> <ul> <li>Changes: <a href="https://click.palletsprojects.com/en/8.1.x/changes/#version-8-1-0">https://click.palletsprojects.com/en/8.1.x/changes/#version-8-1-0</a></li> <li>Milestone: <a href="https://github.com/pallets/click/milestone/9?closed=1">https://github.com/pallets/click/milestone/9?closed=1</a></li> </ul> <h2>8.0.4</h2> <ul> <li>Changes: <a href="https://click.palletsprojects.com/en/8.0.x/changes/#version-8-0-4">https://click.palletsprojects.com/en/8.0.x/changes/#version-8-0-4</a></li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pallets/click/blob/main/CHANGES.rst">click's changelog</a>.</em></p> <blockquote> <h2>Version 8.1.7</h2> <p>Released 2023-08-17</p> <ul> <li>Fix issue with regex flags in shell completion. :issue:<code>2581</code></li> <li>Bash version detection issues a warning instead of an error. :issue:<code>2574</code></li> <li>Fix issue with completion script for Fish shell. :issue:<code>2567</code></li> </ul> <h2>Version 8.1.6</h2> <p>Released 2023-07-18</p> <ul> <li>Fix an issue with type hints for <code>@click.group()</code>. 
:issue:<code>2558</code></li> </ul> <h2>Version 8.1.5</h2> <p>Released 2023-07-13</p> <ul> <li>Fix an issue with type hints for <code>@click.command()</code>, <code>@click.option()</code>, and other decorators. Introduce typing tests. :issue:<code>2558</code></li> </ul> <h2>Version 8.1.4</h2> <p>Released 2023-07-06</p> <ul> <li>Replace all <code>typing.Dict</code> occurrences to <code>typing.MutableMapping</code> for parameter hints. :issue:<code>2255</code></li> <li>Improve type hinting for decorators and give all generic types parameters. :issue:<code>2398</code></li> <li>Fix return value and type signature of <code>shell_completion.add_completion_class</code> function. :pr:<code>2421</code></li> <li>Bash version detection doesn't fail on Windows. :issue:<code>2461</code></li> <li>Completion works if there is a dot (<code>.</code>) in the program name. :issue:<code>2166</code></li> <li>Improve type annotations for pyright type checker. :issue:<code>2268</code></li> <li>Improve responsiveness of <code>click.clear()</code>. :issue:<code>2284</code></li> <li>Improve command name detection when using Shiv or PEX. :issue:<code>2332</code></li> <li>Avoid showing empty lines if command help text is empty. :issue:<code>2368</code></li> <li>ZSH completion script works when loaded from <code>fpath</code>. :issue:<code>2344</code>.</li> <li><code>EOFError</code> and <code>KeyboardInterrupt</code> tracebacks are not suppressed when <code>standalone_mode</code> is disabled. :issue:<code>2380</code></li> <li><code>@group.command</code> does not fail if the group was created with a custom <code>command_class</code>. :issue:<code>2416</code></li> <li><code>multiple=True</code> is allowed for flag options again and does not require</li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pallets/click/commit/874ca2bc1c30d93a4ac6e36a15ed685eafe89097"><code>874ca2b</code></a> release version 8.1.7</li> <li><a href="https://github.com/pallets/click/commit/6e1f6d3052b6459fa3e43392d728376c423a7d4c"><code>6e1f6d3</code></a> completion(fish): add back ; as line endings in fish script (<a href="https://redirect.github.com/pallets/click/issues/2570">#2570</a>)</li> <li><a href="https://github.com/pallets/click/commit/a955c77719981af390b6d2349730c590c80e11e9"><code>a955c77</code></a> update fish enabling script</li> <li><a href="https://github.com/pallets/click/commit/3c1529e6bc12118ff5a83b2f81fc52f625367736"><code>3c1529e</code></a> add back semicolons in fish script</li> <li><a href="https://github.com/pallets/click/commit/a260ca6fa7675a87dc55c4c79cebad3de2d9ed6d"><code>a260ca6</code></a> Replace bash shell completion version error with warning (<a href="https://redirect.github.com/pallets/click/issues/2576">#2576</a>)</li> <li><a href="https://github.com/pallets/click/commit/d9db70cabdc9cb10cd5584464b8adb9a24545b92"><code>d9db70c</code></a> bash version support shows warning instead of error</li> <li><a href="https://github.com/pallets/click/commit/22b9b1ce7945518fa7838321d991f5e766bceb32"><code>22b9b1c</code></a> Fix incorrect passing of flags to re.sub (<a href="https://redirect.github.com/pallets/click/issues/2581">#2581</a>)</li> <li><a href="https://github.com/pallets/click/commit/d69d2106d821c43507829100f36774bbb7a1092b"><code>d69d210</code></a> fix flake8 finding</li> <li><a href="https://github.com/pallets/click/commit/af2da1ef9b9f7a5a19b6f77d432459afb699f408"><code>af2da1e</code></a> Fix incorrect passing of flags to re.sub</li> <li><a href="https://github.com/pallets/click/commit/bb6a8727aa4aab62e5a388b15c4443fedf422622"><code>bb6a872</code></a> start version 8.1.7</li> <li>Additional commits viewable in <a 
href="https://github.com/pallets/click/compare/8.0.3...8.1.7">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=click&package-manager=pip&previous-version=8.0.3&new-version=8.1.7)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 6b9beaf9d5a..6d7e5c08543 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -46,7 +46,7 @@ charset-normalizer==3.3.2 # via requests cherry-picker==2.2.0 # via -r requirements/dev.in -click==8.0.3 +click==8.1.7 # via # cherry-picker # pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index ec6aea6146a..e1c9f8f57a2 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -46,7 +46,7 @@ charset-normalizer==3.3.2 # via requests cherry-picker==2.2.0 # via -r requirements/dev.in -click==8.1.6 +click==8.1.7 # via # cherry-picker # pip-tools diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 4d55d216acb..5108b774343 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -16,7 +16,7 @@ certifi==2024.7.4 # via requests 
charset-normalizer==3.3.2 # via requests -click==8.1.6 +click==8.1.7 # via towncrier docutils==0.20.1 # via sphinx diff --git a/requirements/doc.txt b/requirements/doc.txt index fbfc99f686f..1382a7bb892 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -16,7 +16,7 @@ certifi==2024.7.4 # via requests charset-normalizer==3.3.2 # via requests -click==8.1.6 +click==8.1.7 # via towncrier docutils==0.20.1 # via sphinx diff --git a/requirements/lint.txt b/requirements/lint.txt index a643194a1cb..6d0647239ee 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -22,7 +22,7 @@ cfgv==3.4.0 # via pre-commit charset-normalizer==3.3.2 # via requests -click==8.1.6 +click==8.1.7 # via # slotscheck # typer diff --git a/requirements/test.txt b/requirements/test.txt index 73287916d52..2c63a08a37f 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -26,7 +26,7 @@ cffi==1.17.0 # pycares charset-normalizer==3.3.2 # via requests -click==8.1.6 +click==8.1.7 # via # typer # wait-for-it From 36647c6d6b5980c8ab969daf0c77a46ad9978f45 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 27 Aug 2024 11:47:44 +0000 Subject: [PATCH 0439/1511] Bump alabaster from 0.7.12 to 0.7.13 (#8918) Bumps [alabaster](https://github.com/sphinx-doc/alabaster) from 0.7.12 to 0.7.13. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/sphinx-doc/alabaster/blob/master/docs/changelog.rst">alabaster's changelog</a>.</em></p> <blockquote> <h2>:git_tag:<code>0.7.13</code> -- 2023-01-13</h2> <ul> <li> <p>Modernized the project: s/Travis/Circle/ for CI, README badges, <code>setup.cfg</code> removal, metadata refresh, etc.</p> </li> <li> <p>Dropped support for Python 2 and Python <3.6. This includes various minor updates to work correctly with modern versions of Sphinx (1.6 at the very least). Thanks to Adam Turner for a pile of patches here.</p> <p>.. 
warning:: This change is backwards incompatible if you're on an old Python version.</p> </li> <li> <p>Tweak CSS somewhat for compatibility with modern Sphinx versions' base stylesheet.</p> </li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/sphinx-doc/alabaster/commit/e5c058385c60901fc6916afe60a6dd26f0a0d6a8"><code>e5c0583</code></a> Cut 0.7.13</li> <li><a href="https://github.com/sphinx-doc/alabaster/commit/af4588b2366247d683908c27241b138338f89cc0"><code>af4588b</code></a> dev-reqs update, including temp invoke@git</li> <li><a href="https://github.com/sphinx-doc/alabaster/commit/c258ede539bc12d6008eb8ca023d972f2a172433"><code>c258ede</code></a> Oh right, no tests = no coverage. copypasta fail!</li> <li><a href="https://github.com/sphinx-doc/alabaster/commit/b492a0c515cedb6063999cb9c718d67265d65e65"><code>b492a0c</code></a> Use newer sphinx on newer pythons (in test matrix)</li> <li><a href="https://github.com/sphinx-doc/alabaster/commit/9dfc356c4be65bed41cdebee875b4d5c910c0ec3"><code>9dfc356</code></a> Use param for base docs job</li> <li><a href="https://github.com/sphinx-doc/alabaster/commit/160d6453632eb0a5d8a3a798c48a6c0b09a22bc6"><code>160d645</code></a> Add black</li> <li><a href="https://github.com/sphinx-doc/alabaster/commit/e630edbea715b4fda23dcd84b836799364eb305c"><code>e630edb</code></a> Matrix far too big lol</li> <li><a href="https://github.com/sphinx-doc/alabaster/commit/610fe18a858edf8fe07c20ac41dbb8a6d5eda745"><code>610fe18</code></a> Migrate to CircleCI, or start to anyhow.</li> <li><a href="https://github.com/sphinx-doc/alabaster/commit/f43d16263bbc1c6c8d0f80fc6546ef280ff98383"><code>f43d162</code></a> extra URLs for pypi</li> <li><a href="https://github.com/sphinx-doc/alabaster/commit/ec2c9c6c73dabc1b32f50213ff162442a5059400"><code>ec2c9c6</code></a> badges in readme</li> <li>Additional commits viewable in <a href="https://github.com/sphinx-doc/alabaster/compare/0.7.12...0.7.13">compare 
view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=alabaster&package-manager=pip&previous-version=0.7.12&new-version=0.7.13)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 6d7e5c08543..56d9f88c633 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -16,7 +16,7 @@ aioredis==2.0.1 # via -r requirements/lint.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in -alabaster==0.7.12 +alabaster==0.7.13 # via sphinx annotated-types==0.7.0 # via pydantic From b0a97da9e1a02a1f3f77d04a6b077061b839f096 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 28 Aug 2024 11:04:19 +0000 Subject: [PATCH 0440/1511] Bump zipp from 3.20.0 to 3.20.1 (#8923) Bumps [zipp](https://github.com/jaraco/zipp) from 3.20.0 to 3.20.1. 
<details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/jaraco/zipp/blob/main/NEWS.rst">zipp's changelog</a>.</em></p> <blockquote> <h1>v3.20.1</h1> <h2>Bugfixes</h2> <ul> <li><code>python/cpython#123270</code></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/jaraco/zipp/commit/c23e5498d156fabfadcb26453dc363ef7d26e51a"><code>c23e549</code></a> Finalize</li> <li><a href="https://github.com/jaraco/zipp/commit/c2b9015366cf3f3b67a4e88c75833b9e3f498826"><code>c2b9015</code></a> Merge pull request <a href="https://redirect.github.com/jaraco/zipp/issues/124">#124</a> from jaraco/bugfix/gh-123270-supported-names</li> <li><a href="https://github.com/jaraco/zipp/commit/774a3ac67f5b827684e8c3b2e03c5f8bbb440593"><code>774a3ac</code></a> Add TODO to consolidate this behavior in CPython.</li> <li><a href="https://github.com/jaraco/zipp/commit/cc61e6140f0dfde2ff372db932442cf6df890f09"><code>cc61e61</code></a> Prefer simpler path.rstrip to consolidate checks for empty or only paths.</li> <li><a href="https://github.com/jaraco/zipp/commit/bec712f098666b1767502d793d36b51afd0d7e94"><code>bec712f</code></a> Mark unused code as uncovered.</li> <li><a href="https://github.com/jaraco/zipp/commit/fde82dcfdea5722c5126e83921773c629a8ba400"><code>fde82dc</code></a> Add news fragment.</li> <li><a href="https://github.com/jaraco/zipp/commit/a421f7e38d88ca9b6b58c89c6b3f141c07fdc588"><code>a421f7e</code></a> Invent DirtyZipInfo to create an unsanitized zipfile with backslashes.</li> <li><a href="https://github.com/jaraco/zipp/commit/0a3a7b4652e417f61de2458506d05570e22df018"><code>0a3a7b4</code></a> Refine expectation that paths with leading slashes are simply not visible.</li> <li><a href="https://github.com/jaraco/zipp/commit/f89b93f0370dd85d23d243e25dfc1f99f4d8de48"><code>f89b93f</code></a> Address infinite loop when zipfile begins with more than one leading slash.</li> <li><a 
href="https://github.com/jaraco/zipp/commit/3cb5609002263eb19f7b5efda82d96f1f57fe876"><code>3cb5609</code></a> Removed SanitizedNames.</li> <li>Additional commits viewable in <a href="https://github.com/jaraco/zipp/compare/v3.20.0...v3.20.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=zipp&package-manager=pip&previous-version=3.20.0&new-version=3.20.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 56d9f88c633..11621911a1e 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -288,7 +288,7 @@ wheel==0.44.0 # via pip-tools yarl==1.9.4 # via -r requirements/runtime-deps.in -zipp==3.20.0 +zipp==3.20.1 # via # importlib-metadata # importlib-resources diff --git a/requirements/dev.txt b/requirements/dev.txt index e1c9f8f57a2..180756490ac 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -280,7 +280,7 @@ wheel==0.44.0 # via pip-tools yarl==1.9.4 # via -r requirements/runtime-deps.in -zipp==3.20.0 +zipp==3.20.1 # via # importlib-metadata # importlib-resources diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 5108b774343..2645cebaad6 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -90,7 +90,7 @@ urllib3==2.2.2 # via requests webcolors==24.8.0 # via blockdiag -zipp==3.20.0 
+zipp==3.20.1 # via # importlib-metadata # importlib-resources diff --git a/requirements/doc.txt b/requirements/doc.txt index 1382a7bb892..1a22efd4b55 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -85,7 +85,7 @@ urllib3==2.2.2 # via requests webcolors==24.8.0 # via blockdiag -zipp==3.20.0 +zipp==3.20.1 # via # importlib-metadata # importlib-resources From a69a25ad962dd9e5f94bcdac6db7eb5f06fa8f6c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 28 Aug 2024 11:17:36 +0000 Subject: [PATCH 0441/1511] Bump setuptools from 73.0.1 to 74.0.0 (#8924) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [setuptools](https://github.com/pypa/setuptools) from 73.0.1 to 74.0.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/setuptools/blob/main/NEWS.rst">setuptools's changelog</a>.</em></p> <blockquote> <h1>v74.0.0</h1> <h2>Features</h2> <ul> <li>Changed the type of error raised by <code>setuptools.command.easy_install.CommandSpec.from_param</code> on unsupported argument from <code>AttributeError</code> to <code>TypeError</code> -- by :user:<code>Avasam</code> (<a href="https://redirect.github.com/pypa/setuptools/issues/4548">#4548</a>)</li> <li>Added detection of ARM64 variant of MSVC -- by :user:<code>saschanaz</code> (<a href="https://redirect.github.com/pypa/setuptools/issues/4553">#4553</a>)</li> <li>Made <code>setuptools.package_index.Credential</code> a <code>typing.NamedTuple</code> -- by :user:<code>Avasam</code> (<a href="https://redirect.github.com/pypa/setuptools/issues/4585">#4585</a>)</li> <li>Reraise error from <code>setuptools.command.easy_install.auto_chmod</code> instead of nonsensical <code>TypeError: 'Exception' object is not subscriptable</code> -- by :user:<code>Avasam</code> (<a href="https://redirect.github.com/pypa/setuptools/issues/4593">#4593</a>)</li> <li>Fully typed all collection 
attributes in <code>pkg_resources</code> -- by :user:<code>Avasam</code> (<a href="https://redirect.github.com/pypa/setuptools/issues/4598">#4598</a>)</li> <li>Automatically exclude <code>.tox|.nox|.venv</code> directories from <code>sdist</code>. (<a href="https://redirect.github.com/pypa/setuptools/issues/4603">#4603</a>)</li> </ul> <h2>Deprecations and Removals</h2> <ul> <li>Removed the monkeypatching of distutils._msvccompiler. Now all compiler logic is consolidated in distutils. (<a href="https://redirect.github.com/pypa/setuptools/issues/4600">#4600</a>)</li> <li>Synced with pypa/distutils@58fe058e4, including consolidating Visual Studio 2017 support (<a href="https://redirect.github.com/pypa/setuptools/issues/4600">#4600</a>, <a href="https://redirect.github.com/pypa/distutils/issues/289">pypa/distutils#289</a><code>pypa/distutils#287</code><a href="https://redirect.github.com/pypa/setuptools/issues/4606">#4606</a>)</li> </ul> <h2>Misc</h2> <ul> <li><a href="https://redirect.github.com/pypa/setuptools/issues/4592">#4592</a></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/setuptools/commit/98ad794354efecf4ed1f629d4e5f02feae00d2ae"><code>98ad794</code></a> Bump version: 73.0.1 → 74.0.0</li> <li><a href="https://github.com/pypa/setuptools/commit/b4fb91796ba9f9473280a69a3c8213066e5bc107"><code>b4fb917</code></a> Merge pull request <a href="https://redirect.github.com/pypa/setuptools/issues/4600">#4600</a> from pypa/debt/msvc-monkey</li> <li><a href="https://github.com/pypa/setuptools/commit/18a44d8f5660df9e23ee823a073b2d3238bc8293"><code>18a44d8</code></a> Add news fragment.</li> <li><a href="https://github.com/pypa/setuptools/commit/5f8215d9888cb555d2a206635f0f59421cc4afa9"><code>5f8215d</code></a> Merge pull request <a href="https://redirect.github.com/pypa/setuptools/issues/4548">#4548</a> from Avasam/from_param-TypeError</li> <li><a 
href="https://github.com/pypa/setuptools/commit/6928048a3c52370363fa47e7c2b9496ff0de0f79"><code>6928048</code></a> Merge branch 'main' into debt/msvc-monkey</li> <li><a href="https://github.com/pypa/setuptools/commit/11a6b596ed1453407061b3e57da04bd49c0adb91"><code>11a6b59</code></a> Merge pull request <a href="https://redirect.github.com/pypa/setuptools/issues/4606">#4606</a> from pypa/distutils-58fe058e4</li> <li><a href="https://github.com/pypa/setuptools/commit/903604bb3a648a26ce268753f6f05ce049336e5c"><code>903604b</code></a> Reraise sensible errors from auto_chmod (<a href="https://redirect.github.com/pypa/setuptools/issues/4593">#4593</a>)</li> <li><a href="https://github.com/pypa/setuptools/commit/8ec5b5aeef7a6cd80ca8c3291ad66acc3986069b"><code>8ec5b5a</code></a> Add missing news fragment for PR 4603</li> <li><a href="https://github.com/pypa/setuptools/commit/e90dfd568eef9dde8aa69a8a0ec1a7e692c532c8"><code>e90dfd5</code></a> Exclude top-level <code>.tox|.nox|.venv</code> from sdist (<a href="https://redirect.github.com/pypa/setuptools/issues/4603">#4603</a>)</li> <li><a href="https://github.com/pypa/setuptools/commit/ef2957a952244e4dfd179d29e1eaaa2cd83a1e26"><code>ef2957a</code></a> Reraise sensible errors from auto_chmod</li> <li>Additional commits viewable in <a href="https://github.com/pypa/setuptools/compare/v73.0.1...v74.0.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=setuptools&package-manager=pip&previous-version=73.0.1&new-version=74.0.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 11621911a1e..30e89bf4ba9 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -296,7 +296,7 @@ zipp==3.20.1 # The following packages are 
considered to be unsafe in a requirements file: pip==24.2 # via pip-tools -setuptools==73.0.1 +setuptools==74.0.0 # via # blockdiag # incremental diff --git a/requirements/dev.txt b/requirements/dev.txt index 180756490ac..a3eea2d8d88 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -288,7 +288,7 @@ zipp==3.20.1 # The following packages are considered to be unsafe in a requirements file: pip==24.2 # via pip-tools -setuptools==73.0.1 +setuptools==74.0.0 # via # blockdiag # incremental diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 2645cebaad6..ae5f3a95597 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -96,7 +96,7 @@ zipp==3.20.1 # importlib-resources # The following packages are considered to be unsafe in a requirements file: -setuptools==73.0.1 +setuptools==74.0.0 # via # blockdiag # incremental diff --git a/requirements/doc.txt b/requirements/doc.txt index 1a22efd4b55..6e247c15b6e 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -91,7 +91,7 @@ zipp==3.20.1 # importlib-resources # The following packages are considered to be unsafe in a requirements file: -setuptools==73.0.1 +setuptools==74.0.0 # via # blockdiag # incremental From 5e498df77b6034455d5b65f7a963f2cb2f605990 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Wed, 28 Aug 2024 16:06:06 +0100 Subject: [PATCH 0442/1511] Support credentials in URL with empty user (#6494) (#6495) (#8926) (cherry picked from commit ce9c4eb0f895f356e775ca268d7ccef908f4c936) Co-authored-by: Chris Shucksmith <chris@shucksmith.co.uk> --- CHANGES/6494.bugfix.rst | 1 + CONTRIBUTORS.txt | 1 + aiohttp/client_reqrep.py | 4 ++-- aiohttp/helpers.py | 4 ++-- tests/test_client_request.py | 7 +++++++ tests/test_helpers.py | 8 ++++++++ 6 files changed, 21 insertions(+), 4 deletions(-) create mode 100644 CHANGES/6494.bugfix.rst diff --git a/CHANGES/6494.bugfix.rst b/CHANGES/6494.bugfix.rst new file mode 100644 index 
00000000000..3827644f0d1 --- /dev/null +++ b/CHANGES/6494.bugfix.rst @@ -0,0 +1 @@ +Added support for URL credentials with empty (zero-length) username, e.g. ``https://:password@host`` -- by :user:`shuckc` diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 202193375dd..57a4d2dbcf3 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -74,6 +74,7 @@ Chih-Yuan Chen Chris AtLee Chris Laws Chris Moore +Chris Shucksmith Christopher Schmitt Claudiu Popa Colin Dunklau diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index d055e70e87c..933f3275e28 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -408,8 +408,8 @@ def update_host(self, url: URL) -> None: # basic auth info username, password = url.user, url.password - if username: - self.auth = helpers.BasicAuth(username, password or "") + if username or password: + self.auth = helpers.BasicAuth(username or "", password or "") def update_version(self, version: Union[http.HttpVersion, str]) -> None: """Convert request version to two elements tuple. 
diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index f759bddc099..0327d31d961 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -164,9 +164,9 @@ def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth" """Create BasicAuth from url.""" if not isinstance(url, URL): raise TypeError("url should be yarl.URL instance") - if url.user is None: + if url.user is None and url.password is None: return None - return cls(url.user, url.password or "", encoding=encoding) + return cls(url.user or "", url.password or "", encoding=encoding) def encode(self) -> str: """Encode credentials.""" diff --git a/tests/test_client_request.py b/tests/test_client_request.py index 7d9f69b52f0..2d70ebdd4f2 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -453,6 +453,13 @@ def test_basic_auth_from_url(make_request) -> None: assert "python.org" == req.host +def test_basic_auth_no_user_from_url(make_request) -> None: + req = make_request("get", "http://:1234@python.org") + assert "AUTHORIZATION" in req.headers + assert "Basic OjEyMzQ=" == req.headers["AUTHORIZATION"] + assert "python.org" == req.host + + def test_basic_auth_from_url_overridden(make_request) -> None: req = make_request( "get", "http://garbage@python.org", auth=aiohttp.BasicAuth("nkim", "1234") diff --git a/tests/test_helpers.py b/tests/test_helpers.py index 67af32dc3be..827a417c299 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -189,6 +189,14 @@ def test_basic_auth_from_url() -> None: assert auth.password == "pass" +def test_basic_auth_no_user_from_url() -> None: + url = URL("http://:pass@example.com") + auth = helpers.BasicAuth.from_url(url) + assert auth is not None + assert auth.login == "" + assert auth.password == "pass" + + def test_basic_auth_from_not_url() -> None: with pytest.raises(TypeError): helpers.BasicAuth.from_url("http://user:pass@example.com") From 672cb20b38b10a329b5dfa8f1845fb022a426bf7 Mon Sep 17 00:00:00 2001 From: Sam Bull 
<git@sambull.org> Date: Wed, 28 Aug 2024 17:34:48 +0100 Subject: [PATCH 0443/1511] Support credentials in URL with empty user (#6494) (#6495) (#8927) (cherry picked from commit ce9c4eb0f895f356e775ca268d7ccef908f4c936) Co-authored-by: Chris Shucksmith <chris@shucksmith.co.uk> --- CHANGES/6494.bugfix.rst | 1 + CONTRIBUTORS.txt | 1 + aiohttp/client_reqrep.py | 4 ++-- aiohttp/helpers.py | 4 ++-- tests/test_client_request.py | 7 +++++++ tests/test_helpers.py | 8 ++++++++ 6 files changed, 21 insertions(+), 4 deletions(-) create mode 100644 CHANGES/6494.bugfix.rst diff --git a/CHANGES/6494.bugfix.rst b/CHANGES/6494.bugfix.rst new file mode 100644 index 00000000000..3827644f0d1 --- /dev/null +++ b/CHANGES/6494.bugfix.rst @@ -0,0 +1 @@ +Added support for URL credentials with empty (zero-length) username, e.g. ``https://:password@host`` -- by :user:`shuckc` diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 202193375dd..57a4d2dbcf3 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -74,6 +74,7 @@ Chih-Yuan Chen Chris AtLee Chris Laws Chris Moore +Chris Shucksmith Christopher Schmitt Claudiu Popa Colin Dunklau diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index d055e70e87c..933f3275e28 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -408,8 +408,8 @@ def update_host(self, url: URL) -> None: # basic auth info username, password = url.user, url.password - if username: - self.auth = helpers.BasicAuth(username, password or "") + if username or password: + self.auth = helpers.BasicAuth(username or "", password or "") def update_version(self, version: Union[http.HttpVersion, str]) -> None: """Convert request version to two elements tuple. 
diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index f759bddc099..0327d31d961 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -164,9 +164,9 @@ def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth" """Create BasicAuth from url.""" if not isinstance(url, URL): raise TypeError("url should be yarl.URL instance") - if url.user is None: + if url.user is None and url.password is None: return None - return cls(url.user, url.password or "", encoding=encoding) + return cls(url.user or "", url.password or "", encoding=encoding) def encode(self) -> str: """Encode credentials.""" diff --git a/tests/test_client_request.py b/tests/test_client_request.py index 7d9f69b52f0..2d70ebdd4f2 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -453,6 +453,13 @@ def test_basic_auth_from_url(make_request) -> None: assert "python.org" == req.host +def test_basic_auth_no_user_from_url(make_request) -> None: + req = make_request("get", "http://:1234@python.org") + assert "AUTHORIZATION" in req.headers + assert "Basic OjEyMzQ=" == req.headers["AUTHORIZATION"] + assert "python.org" == req.host + + def test_basic_auth_from_url_overridden(make_request) -> None: req = make_request( "get", "http://garbage@python.org", auth=aiohttp.BasicAuth("nkim", "1234") diff --git a/tests/test_helpers.py b/tests/test_helpers.py index 67af32dc3be..827a417c299 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -189,6 +189,14 @@ def test_basic_auth_from_url() -> None: assert auth.password == "pass" +def test_basic_auth_no_user_from_url() -> None: + url = URL("http://:pass@example.com") + auth = helpers.BasicAuth.from_url(url) + assert auth is not None + assert auth.login == "" + assert auth.password == "pass" + + def test_basic_auth_from_not_url() -> None: with pytest.raises(TypeError): helpers.BasicAuth.from_url("http://user:pass@example.com") From daa4d5f683f791c6fb3da8204a5d59b6245ef363 Mon Sep 17 00:00:00 2001 From: 
"patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 29 Aug 2024 19:04:22 +0100 Subject: [PATCH 0444/1511] [PR #8930/1575360e backport][3.11] Fix limit docs (#8932) **This is a backport of PR #8930 as merged into master (1575360ef32b5ad63d3d6822fc790a8b084cb0d3).** Co-authored-by: Sam Bull <git@sambull.org> --- docs/client_reference.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 91444d117b1..03e812ff611 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -990,7 +990,7 @@ is controlled by *force_close* constructor's parameter). flag. :param int limit: total number simultaneous connections. If *limit* is - ``None`` the connector has no limit (default: 100). + ``0`` the connector has no limit (default: 100). :param int limit_per_host: limit simultaneous connections to the same endpoint. Endpoints are the same if they are @@ -1035,7 +1035,7 @@ is controlled by *force_close* constructor's parameter). Endpoints are the same if they are have equal ``(host, port, is_ssl)`` triple. - If *limit_per_host* is ``None`` the connector has no limit per host. + If *limit_per_host* is ``0`` the connector has no limit per host. Read-only property. @@ -1130,7 +1130,7 @@ is controlled by *force_close* constructor's parameter). updated refreshing each entry after N seconds. :param int limit: total number simultaneous connections. If *limit* is - ``None`` the connector has no limit (default: 100). + ``0`` the connector has no limit (default: 100). :param int limit_per_host: limit simultaneous connections to the same endpoint. 
Endpoints are the same if they are From b9189df64353acc64e9a758b642c5310012ef3eb Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 29 Aug 2024 19:32:44 +0100 Subject: [PATCH 0445/1511] [PR #8930/1575360e backport][3.10] Fix limit docs (#8931) **This is a backport of PR #8930 as merged into master (1575360ef32b5ad63d3d6822fc790a8b084cb0d3).** Co-authored-by: Sam Bull <git@sambull.org> --- docs/client_reference.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 91444d117b1..03e812ff611 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -990,7 +990,7 @@ is controlled by *force_close* constructor's parameter). flag. :param int limit: total number simultaneous connections. If *limit* is - ``None`` the connector has no limit (default: 100). + ``0`` the connector has no limit (default: 100). :param int limit_per_host: limit simultaneous connections to the same endpoint. Endpoints are the same if they are @@ -1035,7 +1035,7 @@ is controlled by *force_close* constructor's parameter). Endpoints are the same if they are have equal ``(host, port, is_ssl)`` triple. - If *limit_per_host* is ``None`` the connector has no limit per host. + If *limit_per_host* is ``0`` the connector has no limit per host. Read-only property. @@ -1130,7 +1130,7 @@ is controlled by *force_close* constructor's parameter). updated refreshing each entry after N seconds. :param int limit: total number simultaneous connections. If *limit* is - ``None`` the connector has no limit (default: 100). + ``0`` the connector has no limit (default: 100). :param int limit_per_host: limit simultaneous connections to the same endpoint. 
Endpoints are the same if they are From 6c6d45b7ca392f0d2b5d9baba7f37002cfeaf1e0 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 29 Aug 2024 21:02:31 +0100 Subject: [PATCH 0446/1511] [PR #8929/c0c3376e backport][3.10] Fix Site.name with empty host (#8934) **This is a backport of PR #8929 as merged into master (c0c3376e6699d0741424c43ac1b25beea366b1fd).** Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8929.bugfix.rst | 1 + aiohttp/web_runner.py | 2 +- tests/test_web_runner.py | 7 +++++++ 3 files changed, 9 insertions(+), 1 deletion(-) create mode 100644 CHANGES/8929.bugfix.rst diff --git a/CHANGES/8929.bugfix.rst b/CHANGES/8929.bugfix.rst new file mode 100644 index 00000000000..229d5abd0e7 --- /dev/null +++ b/CHANGES/8929.bugfix.rst @@ -0,0 +1 @@ +Fixed ``Site.name`` when host is an empty string -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/web_runner.py b/aiohttp/web_runner.py index 2fe229c4e50..0a237ede2c5 100644 --- a/aiohttp/web_runner.py +++ b/aiohttp/web_runner.py @@ -108,7 +108,7 @@ def __init__( @property def name(self) -> str: scheme = "https" if self._ssl_context else "http" - host = "0.0.0.0" if self._host is None else self._host + host = "0.0.0.0" if not self._host else self._host return str(URL.build(scheme=scheme, host=host, port=self._port)) async def start(self) -> None: diff --git a/tests/test_web_runner.py b/tests/test_web_runner.py index c7c94263234..b71c34fe912 100644 --- a/tests/test_web_runner.py +++ b/tests/test_web_runner.py @@ -165,6 +165,13 @@ async def mock_create_server(*args, **kwargs): assert port == 8080 +async def test_tcpsite_empty_str_host(make_runner: Any) -> None: + runner = make_runner() + await runner.setup() + site = web.TCPSite(runner, host="") + assert site.name == "http://0.0.0.0:8080" + + def test_run_after_asyncio_run() -> None: async def nothing(): pass From 37b2604d10e78b96b1fed51bab7f22221c868fc9 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" 
<45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 29 Aug 2024 21:02:53 +0100 Subject: [PATCH 0447/1511] [PR #8929/c0c3376e backport][3.11] Fix Site.name with empty host (#8935) **This is a backport of PR #8929 as merged into master (c0c3376e6699d0741424c43ac1b25beea366b1fd).** Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8929.bugfix.rst | 1 + aiohttp/web_runner.py | 2 +- tests/test_web_runner.py | 7 +++++++ 3 files changed, 9 insertions(+), 1 deletion(-) create mode 100644 CHANGES/8929.bugfix.rst diff --git a/CHANGES/8929.bugfix.rst b/CHANGES/8929.bugfix.rst new file mode 100644 index 00000000000..229d5abd0e7 --- /dev/null +++ b/CHANGES/8929.bugfix.rst @@ -0,0 +1 @@ +Fixed ``Site.name`` when host is an empty string -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/web_runner.py b/aiohttp/web_runner.py index 2fe229c4e50..0a237ede2c5 100644 --- a/aiohttp/web_runner.py +++ b/aiohttp/web_runner.py @@ -108,7 +108,7 @@ def __init__( @property def name(self) -> str: scheme = "https" if self._ssl_context else "http" - host = "0.0.0.0" if self._host is None else self._host + host = "0.0.0.0" if not self._host else self._host return str(URL.build(scheme=scheme, host=host, port=self._port)) async def start(self) -> None: diff --git a/tests/test_web_runner.py b/tests/test_web_runner.py index c7c94263234..b71c34fe912 100644 --- a/tests/test_web_runner.py +++ b/tests/test_web_runner.py @@ -165,6 +165,13 @@ async def mock_create_server(*args, **kwargs): assert port == 8080 +async def test_tcpsite_empty_str_host(make_runner: Any) -> None: + runner = make_runner() + await runner.setup() + site = web.TCPSite(runner, host="") + assert site.name == "http://0.0.0.0:8080" + + def test_run_after_asyncio_run() -> None: async def nothing(): pass From 1dc3cd6d733f5707285afc623bd81d109acff812 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Thu, 29 Aug 2024 22:40:26 +0100 Subject: [PATCH 0448/1511] Fix Response.text when body is Payload (#6485) (#8937) 
(cherry picked from commit 9418a4a1486beed0ae8a5c47277ecd67758eb5e2) --- CHANGES/6485.bugfix.rst | 1 + aiohttp/multipart.py | 17 ++++++++++++++ aiohttp/payload.py | 36 ++++++++++++++++++++++++++-- aiohttp/payload_streamer.py | 3 +++ aiohttp/web_response.py | 27 +++++++++------------ tests/test_payload.py | 3 +++ tests/test_web_response.py | 47 ++++++++++++++++++++++++++++++++++++- 7 files changed, 115 insertions(+), 19 deletions(-) create mode 100644 CHANGES/6485.bugfix.rst diff --git a/CHANGES/6485.bugfix.rst b/CHANGES/6485.bugfix.rst new file mode 100644 index 00000000000..b1d912f1579 --- /dev/null +++ b/CHANGES/6485.bugfix.rst @@ -0,0 +1 @@ +Fixed ``Response.text`` when body is a ``Payload`` -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index e3680a7b2a1..965e4f279d3 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -561,6 +561,8 @@ def filename(self) -> Optional[str]: @payload_type(BodyPartReader, order=Order.try_first) class BodyPartReaderPayload(Payload): + _value: BodyPartReader + def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None: super().__init__(value, *args, **kwargs) @@ -573,6 +575,9 @@ def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None: if params: self.set_content_disposition("attachment", True, **params) + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + raise TypeError("Unable to decode.") + async def write(self, writer: Any) -> None: field = self._value chunk = await field.read_chunk(size=2**16) @@ -790,6 +795,8 @@ async def _maybe_release_last_part(self) -> None: class MultipartWriter(Payload): """Multipart body writer.""" + _value: None + def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> None: boundary = boundary if boundary is not None else uuid.uuid4().hex # The underlying Payload API demands a str (utf-8), not bytes, @@ -970,6 +977,16 @@ def size(self) -> Optional[int]: total += 2 + 
len(self._boundary) + 4 # b'--'+self._boundary+b'--\r\n' return total + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + return "".join( + "--" + + self.boundary + + "\n" + + part._binary_headers.decode(encoding, errors) + + part.decode() + for part, _e, _te in self._parts + ) + async def write(self, writer: Any, close_boundary: bool = True) -> None: """Write body.""" for part, encoding, te_encoding in self._parts: diff --git a/aiohttp/payload.py b/aiohttp/payload.py index 5271393612a..e7039b46d1f 100644 --- a/aiohttp/payload.py +++ b/aiohttp/payload.py @@ -207,6 +207,13 @@ def set_content_disposition( disptype, quote_fields=quote_fields, _charset=_charset, **params ) + @abstractmethod + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + """Return string representation of the value. + + This is named decode() to allow compatibility with bytes objects. + """ + @abstractmethod async def write(self, writer: AbstractStreamWriter) -> None: """Write payload. 
@@ -216,6 +223,8 @@ async def write(self, writer: AbstractStreamWriter) -> None: class BytesPayload(Payload): + _value: bytes + def __init__( self, value: Union[bytes, bytearray, memoryview], *args: Any, **kwargs: Any ) -> None: @@ -242,6 +251,9 @@ def __init__( **kwargs, ) + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + return self._value.decode(encoding, errors) + async def write(self, writer: AbstractStreamWriter) -> None: await writer.write(self._value) @@ -283,7 +295,7 @@ def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None: class IOBasePayload(Payload): - _value: IO[Any] + _value: io.IOBase def __init__( self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any @@ -307,9 +319,12 @@ async def write(self, writer: AbstractStreamWriter) -> None: finally: await loop.run_in_executor(None, self._value.close) + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + return "".join(r.decode(encoding, errors) for r in self._value.readlines()) + class TextIOPayload(IOBasePayload): - _value: TextIO + _value: io.TextIOBase def __init__( self, @@ -346,6 +361,9 @@ def size(self) -> Optional[int]: except OSError: return None + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + return self._value.read() + async def write(self, writer: AbstractStreamWriter) -> None: loop = asyncio.get_event_loop() try: @@ -363,6 +381,8 @@ async def write(self, writer: AbstractStreamWriter) -> None: class BytesIOPayload(IOBasePayload): + _value: io.BytesIO + @property def size(self) -> int: position = self._value.tell() @@ -370,8 +390,13 @@ def size(self) -> int: self._value.seek(position) return end - position + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + return self._value.read().decode(encoding, errors) + class BufferedReaderPayload(IOBasePayload): + _value: io.BufferedIOBase + @property def size(self) -> Optional[int]: try: @@ -381,6 +406,9 @@ 
def size(self) -> Optional[int]: # io.BufferedReader(io.BytesIO(b'data')) return None + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + return self._value.read().decode(encoding, errors) + class JsonPayload(BytesPayload): def __init__( @@ -417,6 +445,7 @@ def __init__( class AsyncIterablePayload(Payload): _iter: Optional[_AsyncIterator] = None + _value: _AsyncIterable def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None: if not isinstance(value, AsyncIterable): @@ -444,6 +473,9 @@ async def write(self, writer: AbstractStreamWriter) -> None: except StopAsyncIteration: self._iter = None + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + raise TypeError("Unable to decode.") + class StreamReaderPayload(AsyncIterablePayload): def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None: diff --git a/aiohttp/payload_streamer.py b/aiohttp/payload_streamer.py index 364f763ae74..831fdc0a77f 100644 --- a/aiohttp/payload_streamer.py +++ b/aiohttp/payload_streamer.py @@ -65,6 +65,9 @@ class StreamWrapperPayload(Payload): async def write(self, writer: AbstractStreamWriter) -> None: await self._value(writer) + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + raise TypeError("Unable to decode.") + @payload_type(streamer) class StreamPayload(StreamWrapperPayload): diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 7074542621b..f583789d82e 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -72,6 +72,8 @@ class StreamResponse(BaseClass, HeadersMixin): _length_check = True + _body: Union[None, bytes, bytearray, Payload] + def __init__( self, *, @@ -650,21 +652,17 @@ def body(self) -> Optional[Union[bytes, Payload]]: return self._body @body.setter - def body(self, body: bytes) -> None: + def body(self, body: Any) -> None: if body is None: - self._body: Optional[bytes] = None - self._body_payload: bool = False + self._body = None 
elif isinstance(body, (bytes, bytearray)): self._body = body - self._body_payload = False else: try: self._body = body = payload.PAYLOAD_REGISTRY.get(body) except payload.LookupError: raise ValueError("Unsupported body type %r" % type(body)) - self._body_payload = True - headers = self._headers # set content-type @@ -697,7 +695,6 @@ def text(self, text: str) -> None: self.charset = "utf-8" self._body = text.encode(self.charset) - self._body_payload = False self._compressed_body = None @property @@ -711,7 +708,7 @@ def content_length(self) -> Optional[int]: if self._compressed_body is not None: # Return length of the compressed body return len(self._compressed_body) - elif self._body_payload: + elif isinstance(self._body, Payload): # A payload without content length, or a compressed payload return None elif self._body is not None: @@ -736,9 +733,8 @@ async def write_eof(self, data: bytes = b"") -> None: if body is not None: if self._must_be_empty_body: await super().write_eof() - elif self._body_payload: - payload = cast(Payload, body) - await payload.write(self._payload_writer) + elif isinstance(self._body, Payload): + await self._body.write(self._payload_writer) await super().write_eof() else: await super().write_eof(cast(bytes, body)) @@ -750,10 +746,9 @@ async def _start(self, request: "BaseRequest") -> AbstractStreamWriter: if hdrs.CONTENT_LENGTH in self._headers: del self._headers[hdrs.CONTENT_LENGTH] elif not self._chunked and hdrs.CONTENT_LENGTH not in self._headers: - if self._body_payload: - size = cast(Payload, self._body).size - if size is not None: - self._headers[hdrs.CONTENT_LENGTH] = str(size) + if isinstance(self._body, Payload): + if self._body.size is not None: + self._headers[hdrs.CONTENT_LENGTH] = str(self._body.size) else: body_len = len(self._body) if self._body else "0" # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-7 @@ -765,7 +760,7 @@ async def _start(self, request: "BaseRequest") -> AbstractStreamWriter: return await 
super()._start(request) async def _do_start_compression(self, coding: ContentCoding) -> None: - if self._body_payload or self._chunked: + if self._chunked or isinstance(self._body, Payload): return await super()._do_start_compression(coding) if coding != ContentCoding.identity: diff --git a/tests/test_payload.py b/tests/test_payload.py index c8681cb5ebe..0e2db91135b 100644 --- a/tests/test_payload.py +++ b/tests/test_payload.py @@ -17,6 +17,9 @@ def registry(): class Payload(payload.Payload): + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + assert False + async def write(self, writer): pass diff --git a/tests/test_web_response.py b/tests/test_web_response.py index c3dab10c310..2e1e332e0a5 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -1,8 +1,10 @@ import collections.abc import datetime import gzip +import io import json from concurrent.futures import ThreadPoolExecutor +from typing import AsyncIterator, Optional from unittest import mock import aiosignal @@ -13,7 +15,8 @@ from aiohttp import HttpVersion, HttpVersion10, HttpVersion11, hdrs from aiohttp.helpers import ETag from aiohttp.http_writer import StreamWriter, _serialize_headers -from aiohttp.payload import BytesPayload +from aiohttp.multipart import BodyPartReader, MultipartWriter +from aiohttp.payload import BytesPayload, StringPayload from aiohttp.test_utils import make_mocked_coro, make_mocked_request from aiohttp.web import ContentCoding, Response, StreamResponse, json_response @@ -1119,6 +1122,48 @@ def test_assign_nonstr_text() -> None: assert 4 == resp.content_length +mpwriter = MultipartWriter(boundary="x") +mpwriter.append_payload(StringPayload("test")) + + +async def async_iter() -> AsyncIterator[str]: + yield "foo" # pragma: no cover + + +class CustomIO(io.IOBase): + def __init__(self): + self._lines = [b"", b"", b"test"] + + def read(self, size: int = -1) -> bytes: + return self._lines.pop() + + +@pytest.mark.parametrize( + 
"payload,expected", + ( + ("test", "test"), + (CustomIO(), "test"), + (io.StringIO("test"), "test"), + (io.TextIOWrapper(io.BytesIO(b"test")), "test"), + (io.BytesIO(b"test"), "test"), + (io.BufferedReader(io.BytesIO(b"test")), "test"), + (async_iter(), None), + (BodyPartReader("x", CIMultiDictProxy(CIMultiDict()), mock.Mock()), None), + ( + mpwriter, + "--x\nContent-Type: text/plain; charset=utf-8\r\nContent-Length: 4\r\n\r\ntest", + ), + ), +) +def test_payload_body_get_text(payload, expected: Optional[str]) -> None: + resp = Response(body=payload) + if expected is None: + with pytest.raises(TypeError): + resp.text + else: + assert resp.text == expected + + def test_response_set_content_length() -> None: resp = Response() with pytest.raises(RuntimeError): From 948ca8c1063c4b9cb880137f11bf428d2e3d0de1 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Thu, 29 Aug 2024 22:40:37 +0100 Subject: [PATCH 0449/1511] Fix Response.text when body is Payload (#6485) (#8938) (cherry picked from commit 9418a4a1486beed0ae8a5c47277ecd67758eb5e2) --- CHANGES/6485.bugfix.rst | 1 + aiohttp/multipart.py | 17 ++++++++++++++ aiohttp/payload.py | 36 ++++++++++++++++++++++++++-- aiohttp/payload_streamer.py | 3 +++ aiohttp/web_response.py | 27 +++++++++------------ tests/test_payload.py | 3 +++ tests/test_web_response.py | 47 ++++++++++++++++++++++++++++++++++++- 7 files changed, 115 insertions(+), 19 deletions(-) create mode 100644 CHANGES/6485.bugfix.rst diff --git a/CHANGES/6485.bugfix.rst b/CHANGES/6485.bugfix.rst new file mode 100644 index 00000000000..b1d912f1579 --- /dev/null +++ b/CHANGES/6485.bugfix.rst @@ -0,0 +1 @@ +Fixed ``Response.text`` when body is a ``Payload`` -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index e3680a7b2a1..965e4f279d3 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -561,6 +561,8 @@ def filename(self) -> Optional[str]: @payload_type(BodyPartReader, order=Order.try_first) class BodyPartReaderPayload(Payload): + _value: BodyPartReader + def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None: super().__init__(value, *args, **kwargs) @@ -573,6 +575,9 @@ def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None: if params: self.set_content_disposition("attachment", True, **params) + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + raise TypeError("Unable to decode.") + async def write(self, writer: Any) -> None: field = self._value chunk = await field.read_chunk(size=2**16) @@ -790,6 +795,8 @@ async def _maybe_release_last_part(self) -> None: class MultipartWriter(Payload): """Multipart body writer.""" + _value: None + def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> None: boundary = boundary if boundary is not None else uuid.uuid4().hex # The underlying Payload API demands a str (utf-8), not bytes, @@ -970,6 +977,16 @@ def size(self) -> Optional[int]: total += 2 + len(self._boundary) + 4 # b'--'+self._boundary+b'--\r\n' return total + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + return "".join( + "--" + + self.boundary + + "\n" + + part._binary_headers.decode(encoding, errors) + + part.decode() + for part, _e, _te in self._parts + ) + async def write(self, writer: Any, close_boundary: bool = True) -> None: """Write body.""" for part, encoding, te_encoding in self._parts: diff --git a/aiohttp/payload.py b/aiohttp/payload.py index 5271393612a..e7039b46d1f 100644 --- a/aiohttp/payload.py +++ b/aiohttp/payload.py @@ -207,6 +207,13 @@ def set_content_disposition( disptype, quote_fields=quote_fields, _charset=_charset, **params ) + @abstractmethod + def 
decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + """Return string representation of the value. + + This is named decode() to allow compatibility with bytes objects. + """ + @abstractmethod async def write(self, writer: AbstractStreamWriter) -> None: """Write payload. @@ -216,6 +223,8 @@ async def write(self, writer: AbstractStreamWriter) -> None: class BytesPayload(Payload): + _value: bytes + def __init__( self, value: Union[bytes, bytearray, memoryview], *args: Any, **kwargs: Any ) -> None: @@ -242,6 +251,9 @@ def __init__( **kwargs, ) + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + return self._value.decode(encoding, errors) + async def write(self, writer: AbstractStreamWriter) -> None: await writer.write(self._value) @@ -283,7 +295,7 @@ def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None: class IOBasePayload(Payload): - _value: IO[Any] + _value: io.IOBase def __init__( self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any @@ -307,9 +319,12 @@ async def write(self, writer: AbstractStreamWriter) -> None: finally: await loop.run_in_executor(None, self._value.close) + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + return "".join(r.decode(encoding, errors) for r in self._value.readlines()) + class TextIOPayload(IOBasePayload): - _value: TextIO + _value: io.TextIOBase def __init__( self, @@ -346,6 +361,9 @@ def size(self) -> Optional[int]: except OSError: return None + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + return self._value.read() + async def write(self, writer: AbstractStreamWriter) -> None: loop = asyncio.get_event_loop() try: @@ -363,6 +381,8 @@ async def write(self, writer: AbstractStreamWriter) -> None: class BytesIOPayload(IOBasePayload): + _value: io.BytesIO + @property def size(self) -> int: position = self._value.tell() @@ -370,8 +390,13 @@ def size(self) -> int: self._value.seek(position) return 
end - position + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + return self._value.read().decode(encoding, errors) + class BufferedReaderPayload(IOBasePayload): + _value: io.BufferedIOBase + @property def size(self) -> Optional[int]: try: @@ -381,6 +406,9 @@ def size(self) -> Optional[int]: # io.BufferedReader(io.BytesIO(b'data')) return None + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + return self._value.read().decode(encoding, errors) + class JsonPayload(BytesPayload): def __init__( @@ -417,6 +445,7 @@ def __init__( class AsyncIterablePayload(Payload): _iter: Optional[_AsyncIterator] = None + _value: _AsyncIterable def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None: if not isinstance(value, AsyncIterable): @@ -444,6 +473,9 @@ async def write(self, writer: AbstractStreamWriter) -> None: except StopAsyncIteration: self._iter = None + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + raise TypeError("Unable to decode.") + class StreamReaderPayload(AsyncIterablePayload): def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None: diff --git a/aiohttp/payload_streamer.py b/aiohttp/payload_streamer.py index 364f763ae74..831fdc0a77f 100644 --- a/aiohttp/payload_streamer.py +++ b/aiohttp/payload_streamer.py @@ -65,6 +65,9 @@ class StreamWrapperPayload(Payload): async def write(self, writer: AbstractStreamWriter) -> None: await self._value(writer) + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + raise TypeError("Unable to decode.") + @payload_type(streamer) class StreamPayload(StreamWrapperPayload): diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 7074542621b..f583789d82e 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -72,6 +72,8 @@ class StreamResponse(BaseClass, HeadersMixin): _length_check = True + _body: Union[None, bytes, bytearray, Payload] + def __init__( self, *, @@ 
-650,21 +652,17 @@ def body(self) -> Optional[Union[bytes, Payload]]: return self._body @body.setter - def body(self, body: bytes) -> None: + def body(self, body: Any) -> None: if body is None: - self._body: Optional[bytes] = None - self._body_payload: bool = False + self._body = None elif isinstance(body, (bytes, bytearray)): self._body = body - self._body_payload = False else: try: self._body = body = payload.PAYLOAD_REGISTRY.get(body) except payload.LookupError: raise ValueError("Unsupported body type %r" % type(body)) - self._body_payload = True - headers = self._headers # set content-type @@ -697,7 +695,6 @@ def text(self, text: str) -> None: self.charset = "utf-8" self._body = text.encode(self.charset) - self._body_payload = False self._compressed_body = None @property @@ -711,7 +708,7 @@ def content_length(self) -> Optional[int]: if self._compressed_body is not None: # Return length of the compressed body return len(self._compressed_body) - elif self._body_payload: + elif isinstance(self._body, Payload): # A payload without content length, or a compressed payload return None elif self._body is not None: @@ -736,9 +733,8 @@ async def write_eof(self, data: bytes = b"") -> None: if body is not None: if self._must_be_empty_body: await super().write_eof() - elif self._body_payload: - payload = cast(Payload, body) - await payload.write(self._payload_writer) + elif isinstance(self._body, Payload): + await self._body.write(self._payload_writer) await super().write_eof() else: await super().write_eof(cast(bytes, body)) @@ -750,10 +746,9 @@ async def _start(self, request: "BaseRequest") -> AbstractStreamWriter: if hdrs.CONTENT_LENGTH in self._headers: del self._headers[hdrs.CONTENT_LENGTH] elif not self._chunked and hdrs.CONTENT_LENGTH not in self._headers: - if self._body_payload: - size = cast(Payload, self._body).size - if size is not None: - self._headers[hdrs.CONTENT_LENGTH] = str(size) + if isinstance(self._body, Payload): + if self._body.size is not None: + 
self._headers[hdrs.CONTENT_LENGTH] = str(self._body.size) else: body_len = len(self._body) if self._body else "0" # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-7 @@ -765,7 +760,7 @@ async def _start(self, request: "BaseRequest") -> AbstractStreamWriter: return await super()._start(request) async def _do_start_compression(self, coding: ContentCoding) -> None: - if self._body_payload or self._chunked: + if self._chunked or isinstance(self._body, Payload): return await super()._do_start_compression(coding) if coding != ContentCoding.identity: diff --git a/tests/test_payload.py b/tests/test_payload.py index c8681cb5ebe..0e2db91135b 100644 --- a/tests/test_payload.py +++ b/tests/test_payload.py @@ -17,6 +17,9 @@ def registry(): class Payload(payload.Payload): + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + assert False + async def write(self, writer): pass diff --git a/tests/test_web_response.py b/tests/test_web_response.py index c3dab10c310..2e1e332e0a5 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -1,8 +1,10 @@ import collections.abc import datetime import gzip +import io import json from concurrent.futures import ThreadPoolExecutor +from typing import AsyncIterator, Optional from unittest import mock import aiosignal @@ -13,7 +15,8 @@ from aiohttp import HttpVersion, HttpVersion10, HttpVersion11, hdrs from aiohttp.helpers import ETag from aiohttp.http_writer import StreamWriter, _serialize_headers -from aiohttp.payload import BytesPayload +from aiohttp.multipart import BodyPartReader, MultipartWriter +from aiohttp.payload import BytesPayload, StringPayload from aiohttp.test_utils import make_mocked_coro, make_mocked_request from aiohttp.web import ContentCoding, Response, StreamResponse, json_response @@ -1119,6 +1122,48 @@ def test_assign_nonstr_text() -> None: assert 4 == resp.content_length +mpwriter = MultipartWriter(boundary="x") +mpwriter.append_payload(StringPayload("test")) + + +async def 
async_iter() -> AsyncIterator[str]: + yield "foo" # pragma: no cover + + +class CustomIO(io.IOBase): + def __init__(self): + self._lines = [b"", b"", b"test"] + + def read(self, size: int = -1) -> bytes: + return self._lines.pop() + + +@pytest.mark.parametrize( + "payload,expected", + ( + ("test", "test"), + (CustomIO(), "test"), + (io.StringIO("test"), "test"), + (io.TextIOWrapper(io.BytesIO(b"test")), "test"), + (io.BytesIO(b"test"), "test"), + (io.BufferedReader(io.BytesIO(b"test")), "test"), + (async_iter(), None), + (BodyPartReader("x", CIMultiDictProxy(CIMultiDict()), mock.Mock()), None), + ( + mpwriter, + "--x\nContent-Type: text/plain; charset=utf-8\r\nContent-Length: 4\r\n\r\ntest", + ), + ), +) +def test_payload_body_get_text(payload, expected: Optional[str]) -> None: + resp = Response(body=payload) + if expected is None: + with pytest.raises(TypeError): + resp.text + else: + assert resp.text == expected + + def test_response_set_content_length() -> None: resp = Response() with pytest.raises(RuntimeError): From 817393235285ac0fd497ee2f14549471929a68df Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 29 Aug 2024 23:02:01 +0100 Subject: [PATCH 0450/1511] [PR #8920/5cf5db56 backport][3.11] Re-enable keep-alive on proxies (#8939) **This is a backport of PR #8920 as merged into master (5cf5db569c8f32b5ecd6a1586e4b8c349767d941).** Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8920.misc.rst | 1 + aiohttp/client_proto.py | 3 --- aiohttp/connector.py | 5 ----- 3 files changed, 1 insertion(+), 8 deletions(-) create mode 100644 CHANGES/8920.misc.rst diff --git a/CHANGES/8920.misc.rst b/CHANGES/8920.misc.rst new file mode 100644 index 00000000000..2e8640593a4 --- /dev/null +++ b/CHANGES/8920.misc.rst @@ -0,0 +1 @@ +Enabled keep-alive support on proxies (which was originally disabled several years ago) -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index f8c83240209..e612450c746 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -62,9 +62,6 @@ def should_close(self) -> bool: or bool(self._tail) ) - def force_close(self) -> None: - self._should_close = True - def close(self) -> None: transport = self.transport if transport is not None: diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 04115c36a24..91174e319ab 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -1376,11 +1376,6 @@ async def _create_proxy_connection( proxy_req, [], timeout, client_error=ClientProxyConnectionError ) - # Many HTTP proxies has buggy keepalive support. Let's not - # reuse connection but close it after processing every - # response. - proto.force_close() - auth = proxy_req.headers.pop(hdrs.AUTHORIZATION, None) if auth is not None: if not req.is_ssl(): From 297ddaec57bbe07c1c51bbe5909a01eedab2ff87 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Fri, 30 Aug 2024 12:07:30 +0100 Subject: [PATCH 0451/1511] Fix tarfile file-like objects used as data (#6747) (#8940) (cherry picked from commit 768123537ec988ea629a829623e8d72e7aec1c41) Co-authored-by: Xavier Halloran <75104372+ReallyReivax@users.noreply.github.com> --- CHANGES/6732.bugfix | 1 + CONTRIBUTORS.txt | 1 + aiohttp/payload.py | 4 ++- tests/test_client_functional.py | 57 +++++++++++++++++++++++++++++++++ 4 files changed, 62 insertions(+), 1 deletion(-) create mode 100644 CHANGES/6732.bugfix diff --git a/CHANGES/6732.bugfix b/CHANGES/6732.bugfix new file mode 100644 index 00000000000..a460d7cd695 --- /dev/null +++ b/CHANGES/6732.bugfix @@ -0,0 +1 @@ +Fixed handling of some file-like objects (e.g. ``tarfile.extractfile()``) which raise ``AttributeError`` instead of ``OSError`` when ``fileno`` fails for streaming payload data -- by :user:`ReallyReivax`. 
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 57a4d2dbcf3..8f387459948 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -353,6 +353,7 @@ William Grzybowski William S. Wilson Ong wouter bolsterlee +Xavier Halloran Xiang Li Yang Zhou Yannick Koechlin diff --git a/aiohttp/payload.py b/aiohttp/payload.py index e7039b46d1f..27636977774 100644 --- a/aiohttp/payload.py +++ b/aiohttp/payload.py @@ -401,9 +401,11 @@ class BufferedReaderPayload(IOBasePayload): def size(self) -> Optional[int]: try: return os.fstat(self._value.fileno()).st_size - self._value.tell() - except OSError: + except (OSError, AttributeError): # data.fileno() is not supported, e.g. # io.BufferedReader(io.BytesIO(b'data')) + # For some file-like objects (e.g. tarfile), the fileno() attribute may + # not exist at all, and will instead raise an AttributeError. return None def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 18fb5fe9f86..1f9173bd3f7 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -9,7 +9,9 @@ import socket import ssl import sys +import tarfile import time +import zipfile from typing import Any, AsyncIterator, Type from unittest import mock @@ -511,6 +513,61 @@ async def handler(request): assert 200 == resp.status +async def test_post_data_zipfile_filelike(aiohttp_client: AiohttpClient) -> None: + data = b"This is a zip file payload text file." + + async def handler(request: web.Request) -> web.Response: + val = await request.read() + assert data == val, "Transmitted zipfile member failed to match original data." 
+ return web.Response() + + app = web.Application() + app.router.add_route("POST", "/", handler) + client = await aiohttp_client(app) + + buf = io.BytesIO() + with zipfile.ZipFile(file=buf, mode="w") as zf: + with zf.open("payload1.txt", mode="w") as zip_filelike_writing: + zip_filelike_writing.write(data) + + buf.seek(0) + zf = zipfile.ZipFile(file=buf, mode="r") + resp = await client.post("/", data=zf.open("payload1.txt")) + assert 200 == resp.status + + +async def test_post_data_tarfile_filelike(aiohttp_client: AiohttpClient) -> None: + data = b"This is a tar file payload text file." + + async def handler(request: web.Request) -> web.Response: + val = await request.read() + assert data == val, "Transmitted tarfile member failed to match original data." + return web.Response() + + app = web.Application() + app.router.add_route("POST", "/", handler) + client = await aiohttp_client(app) + + buf = io.BytesIO() + with tarfile.open(fileobj=buf, mode="w") as tf: + ti = tarfile.TarInfo(name="payload1.txt") + ti.size = len(data) + tf.addfile(tarinfo=ti, fileobj=io.BytesIO(data)) + + # Random-access tarfile. + buf.seek(0) + tf = tarfile.open(fileobj=buf, mode="r:") + resp = await client.post("/", data=tf.extractfile("payload1.txt")) + assert 200 == resp.status + + # Streaming tarfile. 
+ buf.seek(0) + tf = tarfile.open(fileobj=buf, mode="r|") + for entry in tf: + resp = await client.post("/", data=tf.extractfile(entry)) + assert 200 == resp.status + + async def test_ssl_client( aiohttp_server, ssl_ctx, From 950f72c18266b55a9fcb93717b32828eae37719c Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Fri, 30 Aug 2024 12:07:46 +0100 Subject: [PATCH 0452/1511] Fix tarfile file-like objects used as data (#6747) (#8941) (cherry picked from commit 768123537ec988ea629a829623e8d72e7aec1c41) Co-authored-by: Xavier Halloran <75104372+ReallyReivax@users.noreply.github.com> --- CHANGES/6732.bugfix | 1 + CONTRIBUTORS.txt | 1 + aiohttp/payload.py | 4 ++- tests/test_client_functional.py | 57 +++++++++++++++++++++++++++++++++ 4 files changed, 62 insertions(+), 1 deletion(-) create mode 100644 CHANGES/6732.bugfix diff --git a/CHANGES/6732.bugfix b/CHANGES/6732.bugfix new file mode 100644 index 00000000000..a460d7cd695 --- /dev/null +++ b/CHANGES/6732.bugfix @@ -0,0 +1 @@ +Fixed handling of some file-like objects (e.g. ``tarfile.extractfile()``) which raise ``AttributeError`` instead of ``OSError`` when ``fileno`` fails for streaming payload data -- by :user:`ReallyReivax`. diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 57a4d2dbcf3..8f387459948 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -353,6 +353,7 @@ William Grzybowski William S. Wilson Ong wouter bolsterlee +Xavier Halloran Xiang Li Yang Zhou Yannick Koechlin diff --git a/aiohttp/payload.py b/aiohttp/payload.py index e7039b46d1f..27636977774 100644 --- a/aiohttp/payload.py +++ b/aiohttp/payload.py @@ -401,9 +401,11 @@ class BufferedReaderPayload(IOBasePayload): def size(self) -> Optional[int]: try: return os.fstat(self._value.fileno()).st_size - self._value.tell() - except OSError: + except (OSError, AttributeError): # data.fileno() is not supported, e.g. # io.BufferedReader(io.BytesIO(b'data')) + # For some file-like objects (e.g. 
tarfile), the fileno() attribute may + # not exist at all, and will instead raise an AttributeError. return None def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 18fb5fe9f86..1f9173bd3f7 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -9,7 +9,9 @@ import socket import ssl import sys +import tarfile import time +import zipfile from typing import Any, AsyncIterator, Type from unittest import mock @@ -511,6 +513,61 @@ async def handler(request): assert 200 == resp.status +async def test_post_data_zipfile_filelike(aiohttp_client: AiohttpClient) -> None: + data = b"This is a zip file payload text file." + + async def handler(request: web.Request) -> web.Response: + val = await request.read() + assert data == val, "Transmitted zipfile member failed to match original data." + return web.Response() + + app = web.Application() + app.router.add_route("POST", "/", handler) + client = await aiohttp_client(app) + + buf = io.BytesIO() + with zipfile.ZipFile(file=buf, mode="w") as zf: + with zf.open("payload1.txt", mode="w") as zip_filelike_writing: + zip_filelike_writing.write(data) + + buf.seek(0) + zf = zipfile.ZipFile(file=buf, mode="r") + resp = await client.post("/", data=zf.open("payload1.txt")) + assert 200 == resp.status + + +async def test_post_data_tarfile_filelike(aiohttp_client: AiohttpClient) -> None: + data = b"This is a tar file payload text file." + + async def handler(request: web.Request) -> web.Response: + val = await request.read() + assert data == val, "Transmitted tarfile member failed to match original data." 
+ return web.Response() + + app = web.Application() + app.router.add_route("POST", "/", handler) + client = await aiohttp_client(app) + + buf = io.BytesIO() + with tarfile.open(fileobj=buf, mode="w") as tf: + ti = tarfile.TarInfo(name="payload1.txt") + ti.size = len(data) + tf.addfile(tarinfo=ti, fileobj=io.BytesIO(data)) + + # Random-access tarfile. + buf.seek(0) + tf = tarfile.open(fileobj=buf, mode="r:") + resp = await client.post("/", data=tf.extractfile("payload1.txt")) + assert 200 == resp.status + + # Streaming tarfile. + buf.seek(0) + tf = tarfile.open(fileobj=buf, mode="r|") + for entry in tf: + resp = await client.post("/", data=tf.extractfile(entry)) + assert 200 == resp.status + + async def test_ssl_client( aiohttp_server, ssl_ctx, From 45f8212d75bdfb0f02d4b005e7167c8933bf9fc5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 30 Aug 2024 11:23:25 +0000 Subject: [PATCH 0453/1511] Bump certifi from 2024.7.4 to 2024.8.30 (#8943) Bumps [certifi](https://github.com/certifi/python-certifi) from 2024.7.4 to 2024.8.30. 
<details> <summary>Commits</summary> <ul> <li><a href="https://github.com/certifi/python-certifi/commit/325c2fde4f8eec10d682b09f3b0414dc05e69a81"><code>325c2fd</code></a> 2024.08.30 (<a href="https://redirect.github.com/certifi/python-certifi/issues/304">#304</a>)</li> <li><a href="https://github.com/certifi/python-certifi/commit/d66bf5fccbb2b13b033841ef86ad261ab9915833"><code>d66bf5f</code></a> Bump actions/upload-artifact from 4.3.5 to 4.3.6 (<a href="https://redirect.github.com/certifi/python-certifi/issues/302">#302</a>)</li> <li><a href="https://github.com/certifi/python-certifi/commit/2150f23ee178c923fb05913e516d168dd841f9e3"><code>2150f23</code></a> Bump actions/upload-artifact from 4.3.4 to 4.3.5 (<a href="https://redirect.github.com/certifi/python-certifi/issues/301">#301</a>)</li> <li><a href="https://github.com/certifi/python-certifi/commit/fc9b771c1e5bd5f0f97534464c16a6ab785d5592"><code>fc9b771</code></a> Bump actions/setup-python from 5.1.0 to 5.1.1 (<a href="https://redirect.github.com/certifi/python-certifi/issues/300">#300</a>)</li> <li><a href="https://github.com/certifi/python-certifi/commit/965b2391df4bdce03fb07bf8cc19003585b43599"><code>965b239</code></a> Bump actions/download-artifact from 4.1.7 to 4.1.8 (<a href="https://redirect.github.com/certifi/python-certifi/issues/297">#297</a>)</li> <li><a href="https://github.com/certifi/python-certifi/commit/c1f50ccd010b428caeb105255638e67be7c64f5c"><code>c1f50cc</code></a> Bump actions/upload-artifact from 4.3.3 to 4.3.4 (<a href="https://redirect.github.com/certifi/python-certifi/issues/296">#296</a>)</li> <li>See full diff in <a href="https://github.com/certifi/python-certifi/compare/2024.07.04...2024.08.30">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=certifi&package-manager=pip&previous-version=2024.7.4&new-version=2024.8.30)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 30e89bf4ba9..479a31fd4b8 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -34,7 +34,7 @@ brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in build==1.2.1 # via pip-tools -certifi==2024.7.4 +certifi==2024.8.30 # via requests cffi==1.17.0 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index a3eea2d8d88..91c137d42e0 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -34,7 +34,7 @@ brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in build==1.2.1 # via pip-tools -certifi==2024.7.4 +certifi==2024.8.30 # via requests cffi==1.17.0 # via diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index ae5f3a95597..b0b6bb6d469 100644 --- a/requirements/doc-spelling.txt 
+++ b/requirements/doc-spelling.txt @@ -12,7 +12,7 @@ babel==2.16.0 # via sphinx blockdiag==3.0.0 # via sphinxcontrib-blockdiag -certifi==2024.7.4 +certifi==2024.8.30 # via requests charset-normalizer==3.3.2 # via requests diff --git a/requirements/doc.txt b/requirements/doc.txt index 6e247c15b6e..172dedd5016 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -12,7 +12,7 @@ babel==2.16.0 # via sphinx blockdiag==3.0.0 # via sphinxcontrib-blockdiag -certifi==2024.7.4 +certifi==2024.8.30 # via requests charset-normalizer==3.3.2 # via requests diff --git a/requirements/lint.txt b/requirements/lint.txt index 6d0647239ee..82efbf9a4ee 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -12,7 +12,7 @@ annotated-types==0.7.0 # via pydantic async-timeout==4.0.3 # via aioredis -certifi==2024.7.4 +certifi==2024.8.30 # via requests cffi==1.17.0 # via diff --git a/requirements/test.txt b/requirements/test.txt index 2c63a08a37f..ae1b806487f 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -18,7 +18,7 @@ attrs==24.2.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in -certifi==2024.7.4 +certifi==2024.8.30 # via requests cffi==1.17.0 # via From aa90362bca7a93c71d7a39a009c50dc60d8bb2ae Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 30 Aug 2024 15:12:28 +0100 Subject: [PATCH 0454/1511] [PR #8936/76a00d1e backport][3.11] Test coverage of TextIOWrapper (#8945) **This is a backport of PR #8936 as merged into master (76a00d1ed405c87cf33c66f91d410c95e57c0312).** --------- Co-authored-by: Sam Bull <git@sambull.org> --- tests/test_formdata.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/tests/test_formdata.py b/tests/test_formdata.py index 4bb8aa07587..db1a3861c56 100644 --- a/tests/test_formdata.py +++ b/tests/test_formdata.py @@ -1,3 +1,4 @@ +import io from unittest import mock 
import pytest @@ -46,6 +47,16 @@ def test_invalid_formdata_params2() -> None: FormData("as") # 2-char str is not allowed +async def test_formdata_textio_charset(buf: bytearray, writer) -> None: + form = FormData() + body = io.TextIOWrapper(io.BytesIO(b"\xe6\x97\xa5\xe6\x9c\xac"), encoding="utf-8") + form.add_field("foo", body, content_type="text/plain; charset=shift-jis") + payload = form() + await payload.write(writer) + assert b"charset=shift-jis" in buf + assert b"\x93\xfa\x96{" in buf + + def test_invalid_formdata_content_type() -> None: form = FormData() invalid_vals = [0, 0.1, {}, [], b"foo"] From 181c042ed7e2c2a7fac9d1a23c7d580eee354200 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 30 Aug 2024 15:12:40 +0100 Subject: [PATCH 0455/1511] [PR #8936/76a00d1e backport][3.10] Test coverage of TextIOWrapper (#8944) **This is a backport of PR #8936 as merged into master (76a00d1ed405c87cf33c66f91d410c95e57c0312).** --------- Co-authored-by: Sam Bull <git@sambull.org> --- tests/test_formdata.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/tests/test_formdata.py b/tests/test_formdata.py index 4bb8aa07587..db1a3861c56 100644 --- a/tests/test_formdata.py +++ b/tests/test_formdata.py @@ -1,3 +1,4 @@ +import io from unittest import mock import pytest @@ -46,6 +47,16 @@ def test_invalid_formdata_params2() -> None: FormData("as") # 2-char str is not allowed +async def test_formdata_textio_charset(buf: bytearray, writer) -> None: + form = FormData() + body = io.TextIOWrapper(io.BytesIO(b"\xe6\x97\xa5\xe6\x9c\xac"), encoding="utf-8") + form.add_field("foo", body, content_type="text/plain; charset=shift-jis") + payload = form() + await payload.write(writer) + assert b"charset=shift-jis" in buf + assert b"\x93\xfa\x96{" in buf + + def test_invalid_formdata_content_type() -> None: form = FormData() invalid_vals = [0, 0.1, {}, [], b"foo"] From a40dbad6f9ab2562892259716ff3fcda77ec759a Mon Sep 17 
00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 30 Aug 2024 19:55:32 +0000 Subject: [PATCH 0456/1511] [PR #8948/cc6d7632 backport][3.10] Make n argument clearer (#8949) Co-authored-by: Sam Bull <git@sambull.org> --- docs/streams.rst | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/docs/streams.rst b/docs/streams.rst index 10eec6d6a43..9d49a80f1b6 100644 --- a/docs/streams.rst +++ b/docs/streams.rst @@ -26,13 +26,17 @@ Reading Methods .. method:: StreamReader.read(n=-1) :async: - Read up to *n* bytes. If *n* is not provided, or set to ``-1``, read until - EOF and return all read bytes. + Read up to a maximum of *n* bytes. If *n* is not provided, or set to ``-1``, + read until EOF and return all read bytes. + + When *n* is provided, data will be returned as soon as it is available. + Therefore it will return less than *n* bytes if there are less than *n* + bytes in the buffer. If the EOF was received and the internal buffer is empty, return an empty bytes object. - :param int n: how many bytes to read, ``-1`` for the whole stream. + :param int n: maximum number of bytes to read, ``-1`` for the whole stream. :return bytes: the given data @@ -127,6 +131,14 @@ size limit and over any available data. async for data in response.content.iter_chunked(1024): print(data) + To get chunks that are exactly *n* bytes, you could use the + `asyncstdlib.itertools <https://asyncstdlib.readthedocs.io/en/stable/source/api/itertools.html>`_ + module:: + + chunks = batched(chain.from_iterable(response.content.iter_chunked(n)), n) + async for data in chunks: + print(data) + .. 
method:: StreamReader.iter_any() :async: From 784604e480eef695179afd9f97bc5fb948db6244 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 30 Aug 2024 20:01:47 +0000 Subject: [PATCH 0457/1511] [PR #8948/cc6d7632 backport][3.11] Make n argument clearer (#8950) Co-authored-by: Sam Bull <git@sambull.org> --- docs/streams.rst | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/docs/streams.rst b/docs/streams.rst index 10eec6d6a43..9d49a80f1b6 100644 --- a/docs/streams.rst +++ b/docs/streams.rst @@ -26,13 +26,17 @@ Reading Methods .. method:: StreamReader.read(n=-1) :async: - Read up to *n* bytes. If *n* is not provided, or set to ``-1``, read until - EOF and return all read bytes. + Read up to a maximum of *n* bytes. If *n* is not provided, or set to ``-1``, + read until EOF and return all read bytes. + + When *n* is provided, data will be returned as soon as it is available. + Therefore it will return less than *n* bytes if there are less than *n* + bytes in the buffer. If the EOF was received and the internal buffer is empty, return an empty bytes object. - :param int n: how many bytes to read, ``-1`` for the whole stream. + :param int n: maximum number of bytes to read, ``-1`` for the whole stream. :return bytes: the given data @@ -127,6 +131,14 @@ size limit and over any available data. async for data in response.content.iter_chunked(1024): print(data) + To get chunks that are exactly *n* bytes, you could use the + `asyncstdlib.itertools <https://asyncstdlib.readthedocs.io/en/stable/source/api/itertools.html>`_ + module:: + + chunks = batched(chain.from_iterable(response.content.iter_chunked(n)), n) + async for data in chunks: + print(data) + .. 
method:: StreamReader.iter_any() :async: From e8d66381f44d36d628a9e19d7c116be6909ce639 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 31 Aug 2024 16:38:40 +0100 Subject: [PATCH 0458/1511] [PR #8953/51ea3b39 backport][3.11] Add clarification about auth parameter in ClientSession (#8955) **This is a backport of PR #8953 as merged into master (51ea3b39a454993dcea4bb9d3d3baa4f1a515eb0).** Co-authored-by: Maxim Zemskov <m.zemskov1@gmail.com> --- CHANGES/6764.doc.rst | 1 + docs/client_reference.rst | 5 ++++- 2 files changed, 5 insertions(+), 1 deletion(-) create mode 100644 CHANGES/6764.doc.rst diff --git a/CHANGES/6764.doc.rst b/CHANGES/6764.doc.rst new file mode 100644 index 00000000000..dea2019fc76 --- /dev/null +++ b/CHANGES/6764.doc.rst @@ -0,0 +1 @@ +Clarified that auth parameter in ClientSession will persist and be included with any request to any origin, even during redirects to different origins. -- by :user:`MaximZemskov`. diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 03e812ff611..4e63552cd5c 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -99,7 +99,10 @@ The client session supports the context manager protocol for self closing. Iterable of :class:`str` or :class:`~multidict.istr` (optional) :param aiohttp.BasicAuth auth: an object that represents HTTP Basic - Authorization (optional) + Authorization (optional). It will be included + with any request to any origin and will not be + removed, event during redirect to a different + origin. :param version: supported HTTP version, ``HTTP 1.1`` by default. 
From 96c8e3e7edd7b68fd8f582dda19bad238f4f5a3c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 31 Aug 2024 16:38:54 +0100 Subject: [PATCH 0459/1511] [PR #8953/51ea3b39 backport][3.10] Add clarification about auth parameter in ClientSession (#8954) **This is a backport of PR #8953 as merged into master (51ea3b39a454993dcea4bb9d3d3baa4f1a515eb0).** Co-authored-by: Maxim Zemskov <m.zemskov1@gmail.com> --- CHANGES/6764.doc.rst | 1 + docs/client_reference.rst | 5 ++++- 2 files changed, 5 insertions(+), 1 deletion(-) create mode 100644 CHANGES/6764.doc.rst diff --git a/CHANGES/6764.doc.rst b/CHANGES/6764.doc.rst new file mode 100644 index 00000000000..dea2019fc76 --- /dev/null +++ b/CHANGES/6764.doc.rst @@ -0,0 +1 @@ +Clarified that auth parameter in ClientSession will persist and be included with any request to any origin, even during redirects to different origins. -- by :user:`MaximZemskov`. diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 03e812ff611..4e63552cd5c 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -99,7 +99,10 @@ The client session supports the context manager protocol for self closing. Iterable of :class:`str` or :class:`~multidict.istr` (optional) :param aiohttp.BasicAuth auth: an object that represents HTTP Basic - Authorization (optional) + Authorization (optional). It will be included + with any request to any origin and will not be + removed, event during redirect to a different + origin. :param version: supported HTTP version, ``HTTP 1.1`` by default. 
From 747750379ac3c4d39a666306778f4adb3e2abce7 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Sat, 31 Aug 2024 19:50:55 +0100 Subject: [PATCH 0460/1511] Add parameters to del_cookie() (#8956) (#8959) (cherry picked from commit 97384260b53ac7d77ee4e5d00456a5a1962972a9) --- CHANGES/8956.feature.rst | 1 + aiohttp/web_response.py | 12 +++++++++++- 2 files changed, 12 insertions(+), 1 deletion(-) create mode 100644 CHANGES/8956.feature.rst diff --git a/CHANGES/8956.feature.rst b/CHANGES/8956.feature.rst new file mode 100644 index 00000000000..245b481089a --- /dev/null +++ b/CHANGES/8956.feature.rst @@ -0,0 +1 @@ +Added ``secure``/``httponly``/``samesite`` parameters to ``.del_cookie()`` -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index f583789d82e..0020afd46c8 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -251,7 +251,14 @@ def set_cookie( c["samesite"] = samesite def del_cookie( - self, name: str, *, domain: Optional[str] = None, path: str = "/" + self, + name: str, + *, + domain: Optional[str] = None, + path: str = "/", + secure: Optional[bool] = None, + httponly: Optional[bool] = None, + samesite: Optional[str] = None, ) -> None: """Delete cookie. @@ -266,6 +273,9 @@ def del_cookie( expires="Thu, 01 Jan 1970 00:00:00 GMT", domain=domain, path=path, + secure=secure, + httponly=httponly, + samesite=samesite, ) @property From 9ac6853114cde3ff0865fd5039dc2c19d1c25859 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Sat, 31 Aug 2024 20:28:37 +0100 Subject: [PATCH 0461/1511] Fix router matching pre-encoded URLs (#8898) (#8960) Co-authored-by: J. 
Nick Koston <nick@koston.org> (cherry picked from commit 6be94520ea46fe1829e6c9d986e7fc9f7db50cad) --- CHANGES/8898.bugfix.rst | 1 + aiohttp/web_urldispatcher.py | 10 +++++----- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- tests/test_urldispatch.py | 29 +++++++++++++++++------------ tests/test_web_urldispatcher.py | 12 ++++-------- 9 files changed, 32 insertions(+), 30 deletions(-) create mode 100644 CHANGES/8898.bugfix.rst diff --git a/CHANGES/8898.bugfix.rst b/CHANGES/8898.bugfix.rst new file mode 100644 index 00000000000..0de6646c8cb --- /dev/null +++ b/CHANGES/8898.bugfix.rst @@ -0,0 +1 @@ +Fixed web router not matching pre-encoded URLs (requires yarl 1.9.6+) -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index aee7aecd2a9..a1df64b8e61 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -381,7 +381,7 @@ def register_route(self, route: "ResourceRoute") -> None: async def resolve(self, request: Request) -> _Resolve: allowed_methods: Set[str] = set() - match_dict = self._match(request.rel_url.raw_path) + match_dict = self._match(request.rel_url.path) if match_dict is None: return None, allowed_methods @@ -650,7 +650,7 @@ def set_options_route(self, handler: Handler) -> None: ) async def resolve(self, request: Request) -> _Resolve: - path = request.rel_url.raw_path + path = request.rel_url.path method = request.method allowed_methods = set(self._routes) if not path.startswith(self._prefix2) and path != self._prefix: @@ -1040,7 +1040,7 @@ async def resolve(self, request: Request) -> UrlMappingMatchInfo: # candidates for a given url part because there are multiple resources # registered for the same canonical path, we resolve them in a linear # fashion to ensure registration order is respected. 
- url_part = request.rel_url.raw_path + url_part = request.rel_url.path while url_part: for candidate in resource_index.get(url_part, ()): match_dict, allowed = await candidate.resolve(request) @@ -1165,7 +1165,7 @@ def add_resource(self, path: str, *, name: Optional[str] = None) -> Resource: if resource.name == name and resource.raw_match(path): return cast(Resource, resource) if not ("{" in path or "}" in path or ROUTE_RE.search(path)): - resource = PlainResource(_requote_path(path), name=name) + resource = PlainResource(path, name=name) self.register_resource(resource) return resource resource = DynamicResource(path, name=name) @@ -1292,7 +1292,7 @@ def _quote_path(value: str) -> str: def _unquote_path(value: str) -> str: - return URL.build(path=value, encoded=True).path + return URL.build(path=value, encoded=True).path.replace("%2F", "/") def _requote_path(value: str) -> str: diff --git a/requirements/base.txt b/requirements/base.txt index 50817d4db17..18b5f471150 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -38,5 +38,5 @@ pycparser==2.22 # via cffi uvloop==0.20.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in -yarl==1.9.4 +yarl==1.9.6 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 479a31fd4b8..869bb3d1b34 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -286,7 +286,7 @@ webcolors==24.8.0 # via blockdiag wheel==0.44.0 # via pip-tools -yarl==1.9.4 +yarl==1.9.6 # via -r requirements/runtime-deps.in zipp==3.20.1 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 91c137d42e0..9555838a4bf 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -278,7 +278,7 @@ webcolors==24.8.0 # via blockdiag wheel==0.44.0 # via pip-tools -yarl==1.9.4 +yarl==1.9.6 # via -r requirements/runtime-deps.in zipp==3.20.1 # via diff --git a/requirements/runtime-deps.txt 
b/requirements/runtime-deps.txt index a8c2eebe1c8..c876071fc19 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -32,5 +32,5 @@ pycares==4.4.0 # via aiodns pycparser==2.22 # via cffi -yarl==1.9.4 +yarl==1.9.6 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index ae1b806487f..3caac33849b 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -136,5 +136,5 @@ uvloop==0.20.0 ; platform_system != "Windows" and implementation_name == "cpytho # via -r requirements/base.in wait-for-it==2.2.2 # via -r requirements/test.in -yarl==1.9.4 +yarl==1.9.6 # via -r requirements/runtime-deps.in diff --git a/tests/test_urldispatch.py b/tests/test_urldispatch.py index 2453ab5a235..f06f73edc21 100644 --- a/tests/test_urldispatch.py +++ b/tests/test_urldispatch.py @@ -457,7 +457,7 @@ def test_add_static_quoting(router) -> None: ) assert router["static"] is resource url = resource.url_for(filename="/1 2/файл%2F.txt") - assert url.path == "/пре /фикс/1 2/файл%2F.txt" + assert url.path == "/пре %2Fфикс/1 2/файл%2F.txt" assert str(url) == ( "/%D0%BF%D1%80%D0%B5%20%2F%D1%84%D0%B8%D0%BA%D1%81" "/1%202/%D1%84%D0%B0%D0%B9%D0%BB%252F.txt" @@ -530,19 +530,24 @@ def test_static_remove_trailing_slash(router) -> None: assert "/prefix" == route._prefix -async def test_add_route_with_re(router) -> None: +@pytest.mark.parametrize( + "pattern,url,expected", + ( + (r"{to:\d+}", r"1234", {"to": "1234"}), + ("{name}.html", "test.html", {"name": "test"}), + (r"{fn:\w+ \d+}", "abc 123", {"fn": "abc 123"}), + (r"{fn:\w+\s\d+}", "abc 123", {"fn": "abc 123"}), + ), +) +async def test_add_route_with_re( + router: web.UrlDispatcher, pattern: str, url: str, expected +) -> None: handler = make_handler() - router.add_route("GET", r"/handler/{to:\d+}", handler) - - req = make_mocked_request("GET", "/handler/1234") + router.add_route("GET", f"/handler/{pattern}", handler) + req = make_mocked_request("GET", f"/handler/{url}") 
info = await router.resolve(req) assert info is not None - assert {"to": "1234"} == info - - router.add_route("GET", r"/handler/{name}.html", handler) - req = make_mocked_request("GET", "/handler/test.html") - info = await router.resolve(req) - assert {"name": "test"} == info + assert info == expected async def test_add_route_with_re_and_slashes(router) -> None: @@ -625,7 +630,7 @@ def test_route_dynamic_quoting(router) -> None: route = router.add_route("GET", r"/пре %2Fфикс/{arg}", handler) url = route.url_for(arg="1 2/текст%2F") - assert url.path == "/пре /фикс/1 2/текст%2F" + assert url.path == "/пре %2Fфикс/1 2/текст%2F" assert str(url) == ( "/%D0%BF%D1%80%D0%B5%20%2F%D1%84%D0%B8%D0%BA%D1%81" "/1%202/%D1%82%D0%B5%D0%BA%D1%81%D1%82%252F" diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 3a45b9355f5..8a97acf504d 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -856,18 +856,15 @@ async def get_foobar(request: web.Request) -> web.Response: assert (await resp.text()) == "success!" 
-@pytest.mark.xfail( - raises=AssertionError, - reason="Regression in v3.7: https://github.com/aio-libs/aiohttp/issues/5621", -) @pytest.mark.parametrize( ("route_definition", "urlencoded_path", "expected_http_resp_status"), ( ("/467,802,24834/hello", "/467%2C802%2C24834/hello", 200), ("/{user_ids:([0-9]+)(,([0-9]+))*}/hello", "/467%2C802%2C24834/hello", 200), + ("/467,802,24834/hello", "/467,802,24834/hello", 200), + ("/{user_ids:([0-9]+)(,([0-9]+))*}/hello", "/467,802,24834/hello", 200), ("/1%2C3/hello", "/1%2C3/hello", 404), ), - ids=("urldecoded_route", "urldecoded_route_with_regex", "urlencoded_route"), ) async def test_decoded_url_match( aiohttp_client: AiohttpClient, @@ -883,9 +880,8 @@ async def handler(request: web.Request) -> web.Response: app.router.add_get(route_definition, handler) client = await aiohttp_client(app) - r = await client.get(yarl.URL(urlencoded_path, encoded=True)) - assert r.status == expected_http_resp_status - await r.release() + async with client.get(yarl.URL(urlencoded_path, encoded=True)) as resp: + assert resp.status == expected_http_resp_status async def test_order_is_preserved(aiohttp_client: AiohttpClient) -> None: From b8f1b1272fe04dad41a3807199f8e1331301c7b1 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Sat, 31 Aug 2024 20:36:19 +0100 Subject: [PATCH 0462/1511] Fix router matching pre-encoded URLs (#8898) (#8961) Co-authored-by: J. 
Nick Koston <nick@koston.org> (cherry picked from commit 6be94520ea46fe1829e6c9d986e7fc9f7db50cad) --- CHANGES/8898.bugfix.rst | 1 + aiohttp/web_urldispatcher.py | 10 +++++----- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- tests/test_urldispatch.py | 29 +++++++++++++++++------------ tests/test_web_urldispatcher.py | 12 ++++-------- 9 files changed, 32 insertions(+), 30 deletions(-) create mode 100644 CHANGES/8898.bugfix.rst diff --git a/CHANGES/8898.bugfix.rst b/CHANGES/8898.bugfix.rst new file mode 100644 index 00000000000..0de6646c8cb --- /dev/null +++ b/CHANGES/8898.bugfix.rst @@ -0,0 +1 @@ +Fixed web router not matching pre-encoded URLs (requires yarl 1.9.6+) -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index aee7aecd2a9..a1df64b8e61 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -381,7 +381,7 @@ def register_route(self, route: "ResourceRoute") -> None: async def resolve(self, request: Request) -> _Resolve: allowed_methods: Set[str] = set() - match_dict = self._match(request.rel_url.raw_path) + match_dict = self._match(request.rel_url.path) if match_dict is None: return None, allowed_methods @@ -650,7 +650,7 @@ def set_options_route(self, handler: Handler) -> None: ) async def resolve(self, request: Request) -> _Resolve: - path = request.rel_url.raw_path + path = request.rel_url.path method = request.method allowed_methods = set(self._routes) if not path.startswith(self._prefix2) and path != self._prefix: @@ -1040,7 +1040,7 @@ async def resolve(self, request: Request) -> UrlMappingMatchInfo: # candidates for a given url part because there are multiple resources # registered for the same canonical path, we resolve them in a linear # fashion to ensure registration order is respected. 
- url_part = request.rel_url.raw_path + url_part = request.rel_url.path while url_part: for candidate in resource_index.get(url_part, ()): match_dict, allowed = await candidate.resolve(request) @@ -1165,7 +1165,7 @@ def add_resource(self, path: str, *, name: Optional[str] = None) -> Resource: if resource.name == name and resource.raw_match(path): return cast(Resource, resource) if not ("{" in path or "}" in path or ROUTE_RE.search(path)): - resource = PlainResource(_requote_path(path), name=name) + resource = PlainResource(path, name=name) self.register_resource(resource) return resource resource = DynamicResource(path, name=name) @@ -1292,7 +1292,7 @@ def _quote_path(value: str) -> str: def _unquote_path(value: str) -> str: - return URL.build(path=value, encoded=True).path + return URL.build(path=value, encoded=True).path.replace("%2F", "/") def _requote_path(value: str) -> str: diff --git a/requirements/base.txt b/requirements/base.txt index fbdc31772a4..0ab66407cde 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -38,5 +38,5 @@ pycparser==2.21 # via cffi uvloop==0.19.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in -yarl==1.9.4 +yarl==1.9.6 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index d638e1c2a46..45a2ce5dea1 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -274,7 +274,7 @@ webcolors==1.11.1 # via blockdiag wheel==0.37.0 # via pip-tools -yarl==1.9.4 +yarl==1.9.6 # via -r requirements/runtime-deps.in zipp==3.17.0 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index a4e341be912..d7b0fbc81c7 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -261,7 +261,7 @@ webcolors==1.13 # via blockdiag wheel==0.41.0 # via pip-tools -yarl==1.9.4 +yarl==1.9.6 # via -r requirements/runtime-deps.in zipp==3.17.0 # via diff --git a/requirements/runtime-deps.txt 
b/requirements/runtime-deps.txt index 4c48153d142..3a0b956d3de 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -32,5 +32,5 @@ pycares==4.3.0 # via aiodns pycparser==2.21 # via cffi -yarl==1.9.4 +yarl==1.9.6 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index c25b3b8ea44..daae1ddb6ef 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -125,5 +125,5 @@ uvloop==0.19.0 ; platform_system != "Windows" and implementation_name == "cpytho # via -r requirements/base.in wait-for-it==2.2.2 # via -r requirements/test.in -yarl==1.9.4 +yarl==1.9.6 # via -r requirements/runtime-deps.in diff --git a/tests/test_urldispatch.py b/tests/test_urldispatch.py index 2453ab5a235..f06f73edc21 100644 --- a/tests/test_urldispatch.py +++ b/tests/test_urldispatch.py @@ -457,7 +457,7 @@ def test_add_static_quoting(router) -> None: ) assert router["static"] is resource url = resource.url_for(filename="/1 2/файл%2F.txt") - assert url.path == "/пре /фикс/1 2/файл%2F.txt" + assert url.path == "/пре %2Fфикс/1 2/файл%2F.txt" assert str(url) == ( "/%D0%BF%D1%80%D0%B5%20%2F%D1%84%D0%B8%D0%BA%D1%81" "/1%202/%D1%84%D0%B0%D0%B9%D0%BB%252F.txt" @@ -530,19 +530,24 @@ def test_static_remove_trailing_slash(router) -> None: assert "/prefix" == route._prefix -async def test_add_route_with_re(router) -> None: +@pytest.mark.parametrize( + "pattern,url,expected", + ( + (r"{to:\d+}", r"1234", {"to": "1234"}), + ("{name}.html", "test.html", {"name": "test"}), + (r"{fn:\w+ \d+}", "abc 123", {"fn": "abc 123"}), + (r"{fn:\w+\s\d+}", "abc 123", {"fn": "abc 123"}), + ), +) +async def test_add_route_with_re( + router: web.UrlDispatcher, pattern: str, url: str, expected +) -> None: handler = make_handler() - router.add_route("GET", r"/handler/{to:\d+}", handler) - - req = make_mocked_request("GET", "/handler/1234") + router.add_route("GET", f"/handler/{pattern}", handler) + req = make_mocked_request("GET", f"/handler/{url}") 
info = await router.resolve(req) assert info is not None - assert {"to": "1234"} == info - - router.add_route("GET", r"/handler/{name}.html", handler) - req = make_mocked_request("GET", "/handler/test.html") - info = await router.resolve(req) - assert {"name": "test"} == info + assert info == expected async def test_add_route_with_re_and_slashes(router) -> None: @@ -625,7 +630,7 @@ def test_route_dynamic_quoting(router) -> None: route = router.add_route("GET", r"/пре %2Fфикс/{arg}", handler) url = route.url_for(arg="1 2/текст%2F") - assert url.path == "/пре /фикс/1 2/текст%2F" + assert url.path == "/пре %2Fфикс/1 2/текст%2F" assert str(url) == ( "/%D0%BF%D1%80%D0%B5%20%2F%D1%84%D0%B8%D0%BA%D1%81" "/1%202/%D1%82%D0%B5%D0%BA%D1%81%D1%82%252F" diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 3a45b9355f5..8a97acf504d 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -856,18 +856,15 @@ async def get_foobar(request: web.Request) -> web.Response: assert (await resp.text()) == "success!" 
-@pytest.mark.xfail( - raises=AssertionError, - reason="Regression in v3.7: https://github.com/aio-libs/aiohttp/issues/5621", -) @pytest.mark.parametrize( ("route_definition", "urlencoded_path", "expected_http_resp_status"), ( ("/467,802,24834/hello", "/467%2C802%2C24834/hello", 200), ("/{user_ids:([0-9]+)(,([0-9]+))*}/hello", "/467%2C802%2C24834/hello", 200), + ("/467,802,24834/hello", "/467,802,24834/hello", 200), + ("/{user_ids:([0-9]+)(,([0-9]+))*}/hello", "/467,802,24834/hello", 200), ("/1%2C3/hello", "/1%2C3/hello", 404), ), - ids=("urldecoded_route", "urldecoded_route_with_regex", "urlencoded_route"), ) async def test_decoded_url_match( aiohttp_client: AiohttpClient, @@ -883,9 +880,8 @@ async def handler(request: web.Request) -> web.Response: app.router.add_get(route_definition, handler) client = await aiohttp_client(app) - r = await client.get(yarl.URL(urlencoded_path, encoded=True)) - assert r.status == expected_http_resp_status - await r.release() + async with client.get(yarl.URL(urlencoded_path, encoded=True)) as resp: + assert resp.status == expected_http_resp_status async def test_order_is_preserved(aiohttp_client: AiohttpClient) -> None: From 4614dea10aafd1c778fd5239144b4838fd6345e3 Mon Sep 17 00:00:00 2001 From: Hadock <49624805+Hadock-is-ok@users.noreply.github.com> Date: Sat, 31 Aug 2024 21:08:59 +0100 Subject: [PATCH 0463/1511] =?UTF-8?q?Add=20aiohttp.tracing.TraceRequestHea?= =?UTF-8?q?dersSentParams=20into=20the=20top=20level=20=E2=80=A6=20(3.11)?= =?UTF-8?q?=20(#8963)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit …file. 
(#8947) (cherry picked from commit e2437a48ec31f830100606a62bb822e44bab1114) --- CHANGES/8947.misc.rst | 1 + aiohttp/__init__.py | 2 ++ 2 files changed, 3 insertions(+) create mode 100644 CHANGES/8947.misc.rst diff --git a/CHANGES/8947.misc.rst b/CHANGES/8947.misc.rst new file mode 100644 index 00000000000..277ba915c50 --- /dev/null +++ b/CHANGES/8947.misc.rst @@ -0,0 +1 @@ +Exported ``aiohttp.TraceRequestHeadersSentParams`` -- by :user:`Hadock-is-ok`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 79f40d6f8f3..043a83eef68 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -106,6 +106,7 @@ TraceRequestChunkSentParams as TraceRequestChunkSentParams, TraceRequestEndParams as TraceRequestEndParams, TraceRequestExceptionParams as TraceRequestExceptionParams, + TraceRequestHeadersSentParams as TraceRequestHeadersSentParams, TraceRequestRedirectParams as TraceRequestRedirectParams, TraceRequestStartParams as TraceRequestStartParams, TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams, @@ -224,6 +225,7 @@ "TraceRequestChunkSentParams", "TraceRequestEndParams", "TraceRequestExceptionParams", + "TraceRequestHeadersSentParams", "TraceRequestRedirectParams", "TraceRequestStartParams", "TraceResponseChunkReceivedParams", From 6881e0de763de8220a02ea9c504a2241ce26e42c Mon Sep 17 00:00:00 2001 From: Hadock <49624805+Hadock-is-ok@users.noreply.github.com> Date: Sat, 31 Aug 2024 21:09:34 +0100 Subject: [PATCH 0464/1511] =?UTF-8?q?Add=20aiohttp.tracing.TraceRequestHea?= =?UTF-8?q?dersSentParams=20into=20the=20top=20level=20=E2=80=A6=20(3.10)?= =?UTF-8?q?=20(#8964)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit …file. 
(#8947) (cherry picked from commit e2437a48ec31f830100606a62bb822e44bab1114) --- CHANGES/8947.misc.rst | 1 + aiohttp/__init__.py | 2 ++ 2 files changed, 3 insertions(+) create mode 100644 CHANGES/8947.misc.rst diff --git a/CHANGES/8947.misc.rst b/CHANGES/8947.misc.rst new file mode 100644 index 00000000000..277ba915c50 --- /dev/null +++ b/CHANGES/8947.misc.rst @@ -0,0 +1 @@ +Exported ``aiohttp.TraceRequestHeadersSentParams`` -- by :user:`Hadock-is-ok`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index bcc73f51ccd..f321cdaba45 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -106,6 +106,7 @@ TraceRequestChunkSentParams as TraceRequestChunkSentParams, TraceRequestEndParams as TraceRequestEndParams, TraceRequestExceptionParams as TraceRequestExceptionParams, + TraceRequestHeadersSentParams as TraceRequestHeadersSentParams, TraceRequestRedirectParams as TraceRequestRedirectParams, TraceRequestStartParams as TraceRequestStartParams, TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams, @@ -224,6 +225,7 @@ "TraceRequestChunkSentParams", "TraceRequestEndParams", "TraceRequestExceptionParams", + "TraceRequestHeadersSentParams", "TraceRequestRedirectParams", "TraceRequestStartParams", "TraceResponseChunkReceivedParams", From b73a4c170c64e2691331542d30133df3d54032c0 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Sun, 1 Sep 2024 02:24:32 +0100 Subject: [PATCH 0465/1511] Deprecate obsolete timeout in ClientSession.ws_connect() (#3946) (#8965) (cherry picked from commit c09c538e036619ae549280fb9051fd6084e8252c) --------- Co-authored-by: Artem Yushkovskiy <ajuszkowski@ya.ru> --- CHANGES/3945.deprecation.rst | 1 + CHANGES/8612.feature.rst | 1 + aiohttp/__init__.py | 2 + aiohttp/client.py | 46 +++++++++++++++---- aiohttp/client_ws.py | 19 +++++--- docs/client_reference.rst | 34 ++++++++++---- tests/test_client_ws.py | 7 +-- tests/test_client_ws_functional.py | 71 +++++++++++++++++++++++++++--- 8 files changed, 150 
insertions(+), 31 deletions(-) create mode 100644 CHANGES/3945.deprecation.rst create mode 100644 CHANGES/8612.feature.rst diff --git a/CHANGES/3945.deprecation.rst b/CHANGES/3945.deprecation.rst new file mode 100644 index 00000000000..07f8566881a --- /dev/null +++ b/CHANGES/3945.deprecation.rst @@ -0,0 +1 @@ +Deprecate obsolete `timeout: float` and `receive_timeout: Optional[float]` in `ClientSession.ws_connect()`. Change default websocket receive timeout from `None` to `10.0`. diff --git a/CHANGES/8612.feature.rst b/CHANGES/8612.feature.rst new file mode 100644 index 00000000000..96adcf6dc4c --- /dev/null +++ b/CHANGES/8612.feature.rst @@ -0,0 +1 @@ +Exported ``ClientWSTimeout`` to top-level namespace -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 043a83eef68..15602a7dc85 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -21,6 +21,7 @@ ClientSSLError, ClientTimeout, ClientWebSocketResponse, + ClientWSTimeout, ConnectionTimeoutError, ContentTypeError, Fingerprint, @@ -139,6 +140,7 @@ "ClientSession", "ClientTimeout", "ClientWebSocketResponse", + "ClientWSTimeout", "ConnectionTimeoutError", "ContentTypeError", "Fingerprint", diff --git a/aiohttp/client.py b/aiohttp/client.py index 8edd14d01ff..f9e3a5c5f65 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -73,7 +73,11 @@ RequestInfo as RequestInfo, _merge_ssl_params, ) -from .client_ws import ClientWebSocketResponse as ClientWebSocketResponse +from .client_ws import ( + DEFAULT_WS_CLIENT_TIMEOUT, + ClientWebSocketResponse as ClientWebSocketResponse, + ClientWSTimeout as ClientWSTimeout, +) from .connector import ( HTTP_AND_EMPTY_SCHEMA_SET, BaseConnector as BaseConnector, @@ -142,6 +146,7 @@ # client "ClientSession", "ClientTimeout", + "ClientWSTimeout", "request", ) @@ -820,7 +825,7 @@ def ws_connect( *, method: str = hdrs.METH_GET, protocols: Iterable[str] = (), - timeout: float = 10.0, + timeout: Union[ClientWSTimeout, _SENTINEL] = sentinel, 
receive_timeout: Optional[float] = None, autoclose: bool = True, autoping: bool = True, @@ -872,7 +877,7 @@ async def _ws_connect( *, method: str = hdrs.METH_GET, protocols: Iterable[str] = (), - timeout: float = 10.0, + timeout: Union[ClientWSTimeout, _SENTINEL] = sentinel, receive_timeout: Optional[float] = None, autoclose: bool = True, autoping: bool = True, @@ -891,6 +896,29 @@ async def _ws_connect( compress: int = 0, max_msg_size: int = 4 * 1024 * 1024, ) -> ClientWebSocketResponse: + if timeout is not sentinel: + if isinstance(timeout, ClientWSTimeout): + ws_timeout = timeout + else: + warnings.warn( + "parameter 'timeout' of type 'float' " + "is deprecated, please use " + "'timeout=ClientWSTimeout(ws_close=...)'", + DeprecationWarning, + stacklevel=2, + ) + ws_timeout = ClientWSTimeout(ws_close=timeout) + else: + ws_timeout = DEFAULT_WS_CLIENT_TIMEOUT + if receive_timeout is not None: + warnings.warn( + "float parameter 'receive_timeout' " + "is deprecated, please use parameter " + "'timeout=ClientWSTimeout(ws_receive=...)'", + DeprecationWarning, + stacklevel=2, + ) + ws_timeout = attr.evolve(ws_timeout, ws_receive=receive_timeout) if headers is None: real_headers: CIMultiDict[str] = CIMultiDict() @@ -1021,12 +1049,13 @@ async def _ws_connect( # For WS connection the read_timeout must be either receive_timeout or greater # None == no timeout, i.e. 
infinite timeout, so None is the max timeout possible - if receive_timeout is None: + if ws_timeout.ws_receive is None: # Reset regardless - conn_proto.read_timeout = receive_timeout + conn_proto.read_timeout = None elif conn_proto.read_timeout is not None: - # If read_timeout was set check which wins - conn_proto.read_timeout = max(receive_timeout, conn_proto.read_timeout) + conn_proto.read_timeout = max( + ws_timeout.ws_receive, conn_proto.read_timeout + ) transport = conn.transport assert transport is not None @@ -1050,11 +1079,10 @@ async def _ws_connect( writer, protocol, resp, - timeout, + ws_timeout, autoclose, autoping, self._loop, - receive_timeout=receive_timeout, heartbeat=heartbeat, compress=compress, client_notakeover=notakeover, diff --git a/aiohttp/client_ws.py b/aiohttp/client_ws.py index 7b3a5bf952d..6246234b8e0 100644 --- a/aiohttp/client_ws.py +++ b/aiohttp/client_ws.py @@ -4,6 +4,8 @@ import sys from typing import Any, Optional, cast +import attr + from .client_exceptions import ClientError, ServerTimeoutError from .client_reqrep import ClientResponse from .helpers import calculate_timeout_when, set_result @@ -30,6 +32,15 @@ import async_timeout +@attr.s(frozen=True, slots=True) +class ClientWSTimeout: + ws_receive = attr.ib(type=Optional[float], default=None) + ws_close = attr.ib(type=Optional[float], default=None) + + +DEFAULT_WS_CLIENT_TIMEOUT = ClientWSTimeout(ws_receive=None, ws_close=10.0) + + class ClientWebSocketResponse: def __init__( self, @@ -37,12 +48,11 @@ def __init__( writer: WebSocketWriter, protocol: Optional[str], response: ClientResponse, - timeout: float, + timeout: ClientWSTimeout, autoclose: bool, autoping: bool, loop: asyncio.AbstractEventLoop, *, - receive_timeout: Optional[float] = None, heartbeat: Optional[float] = None, compress: int = 0, client_notakeover: bool = False, @@ -57,7 +67,6 @@ def __init__( self._closing = False self._close_code: Optional[int] = None self._timeout = timeout - self._receive_timeout = 
receive_timeout self._autoclose = autoclose self._autoping = autoping self._heartbeat = heartbeat @@ -268,7 +277,7 @@ async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bo while True: try: - async with async_timeout.timeout(self._timeout): + async with async_timeout.timeout(self._timeout.ws_close): msg = await self._reader.read() except asyncio.CancelledError: self._close_code = WSCloseCode.ABNORMAL_CLOSURE @@ -288,7 +297,7 @@ async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bo return False async def receive(self, timeout: Optional[float] = None) -> WSMessage: - receive_timeout = timeout or self._receive_timeout + receive_timeout = timeout or self._timeout.ws_receive while True: if self._waiting: diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 4e63552cd5c..f64df336755 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -690,8 +690,8 @@ The client session supports the context manager protocol for self closing. <ClientResponse>` object. .. method:: ws_connect(url, *, method='GET', \ - protocols=(), timeout=10.0,\ - receive_timeout=None,\ + protocols=(), \ + timeout=sentinel,\ auth=None,\ autoclose=True,\ autoping=True,\ @@ -714,12 +714,11 @@ The client session supports the context manager protocol for self closing. :param tuple protocols: Websocket protocols - :param float timeout: Timeout for websocket to close. ``10`` seconds - by default - - :param float receive_timeout: Timeout for websocket to receive - complete message. ``None`` (unlimited) - seconds by default + :param timeout: a :class:`ClientWSTimeout` timeout for websocket. + By default, the value + `ClientWSTimeout(ws_receive=None, ws_close=10.0)` is used + (``10.0`` seconds for the websocket to close). + ``None`` means no timeout will be used. :param aiohttp.BasicAuth auth: an object that represents HTTP Basic Authorization (optional) @@ -1760,7 +1759,24 @@ Utilities :class:`float`, ``None`` by default. 
- .. versionadded:: 3.3 + +.. class:: ClientWSTimeout(*, ws_receive=None, ws_close=None) + + A data class for websocket client timeout settings. + + .. attribute:: ws_receive + + A timeout for websocket to receive a complete message. + + :class:`float`, ``None`` by default. + + .. attribute:: ws_close + + A timeout for the websocket to close. + + :class:`float`, ``10.0`` by default. + + .. versionadded:: 4.0 .. note:: diff --git a/tests/test_client_ws.py b/tests/test_client_ws.py index a790fba43ec..31ec7576c97 100644 --- a/tests/test_client_ws.py +++ b/tests/test_client_ws.py @@ -91,7 +91,7 @@ async def test_ws_connect_read_timeout_stays_inf( res = await aiohttp.ClientSession().ws_connect( "http://test.org", protocols=("t1", "t2", "chat"), - receive_timeout=0.5, + timeout=aiohttp.ClientWSTimeout(0.5), ) assert isinstance(res, client.ClientWebSocketResponse) @@ -122,7 +122,7 @@ async def test_ws_connect_read_timeout_reset_to_max( res = await aiohttp.ClientSession().ws_connect( "http://test.org", protocols=("t1", "t2", "chat"), - receive_timeout=1.0, + timeout=aiohttp.ClientWSTimeout(1.0), ) assert isinstance(res, client.ClientWebSocketResponse) @@ -600,8 +600,9 @@ async def test_reader_read_exception(ws_key, key_data, loop) -> None: async def test_receive_runtime_err(loop) -> None: + timeout = aiohttp.ClientWSTimeout(ws_receive=10.0) resp = client.ClientWebSocketResponse( - mock.Mock(), mock.Mock(), mock.Mock(), mock.Mock(), 10.0, True, True, loop + mock.Mock(), mock.Mock(), mock.Mock(), mock.Mock(), timeout, True, True, loop ) resp._waiting = True diff --git a/tests/test_client_ws_functional.py b/tests/test_client_ws_functional.py index 274092a189a..30da0dca802 100644 --- a/tests/test_client_ws_functional.py +++ b/tests/test_client_ws_functional.py @@ -7,6 +7,7 @@ import aiohttp from aiohttp import ServerTimeoutError, WSMsgType, hdrs, web +from aiohttp.client_ws import ClientWSTimeout from aiohttp.http import WSCloseCode from aiohttp.pytest_plugin import 
AiohttpClient @@ -394,7 +395,7 @@ async def handler(request): assert resp.closed -async def test_close_timeout(aiohttp_client) -> None: +async def test_close_timeout_sock_close_read(aiohttp_client) -> None: async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) @@ -406,7 +407,39 @@ async def handler(request): app = web.Application() app.router.add_route("GET", "/", handler) client = await aiohttp_client(app) - resp = await client.ws_connect("/", timeout=0.2, autoclose=False) + timeout = ClientWSTimeout(ws_close=0.2) + resp = await client.ws_connect("/", timeout=timeout, autoclose=False) + + await resp.send_bytes(b"ask") + + msg = await resp.receive() + assert msg.data == "test" + assert msg.type == aiohttp.WSMsgType.TEXT + + msg = await resp.close() + assert resp.closed + assert isinstance(resp.exception(), asyncio.TimeoutError) + + +async def test_close_timeout_deprecated(aiohttp_client) -> None: + async def handler(request): + ws = web.WebSocketResponse() + await ws.prepare(request) + await ws.receive_bytes() + await ws.send_str("test") + await asyncio.sleep(1) + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + with pytest.warns( + DeprecationWarning, + match="parameter 'timeout' of type 'float' " + "is deprecated, please use " + r"'timeout=ClientWSTimeout\(ws_close=...\)'", + ): + resp = await client.ws_connect("/", timeout=0.2, autoclose=False) await resp.send_bytes(b"ask") @@ -535,7 +568,7 @@ async def handler(request): await resp.close() -async def test_receive_timeout(aiohttp_client) -> None: +async def test_receive_timeout_sock_read(aiohttp_client) -> None: async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) @@ -547,10 +580,38 @@ async def handler(request): app.router.add_route("GET", "/", handler) client = await aiohttp_client(app) - resp = await client.ws_connect("/", receive_timeout=0.1) + receive_timeout = 
ClientWSTimeout(ws_receive=0.1) + resp = await client.ws_connect("/", timeout=receive_timeout) with pytest.raises(asyncio.TimeoutError): - await resp.receive(0.05) + await resp.receive(timeout=0.05) + + await resp.close() + + +async def test_receive_timeout_deprecation(aiohttp_client) -> None: + + async def handler(request): + ws = web.WebSocketResponse() + await ws.prepare(request) + await ws.receive() + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + + client = await aiohttp_client(app) + with pytest.warns( + DeprecationWarning, + match="float parameter 'receive_timeout' " + "is deprecated, please use parameter " + r"'timeout=ClientWSTimeout\(ws_receive=...\)'", + ): + resp = await client.ws_connect("/", receive_timeout=0.1) + + with pytest.raises(asyncio.TimeoutError): + await resp.receive(timeout=0.05) await resp.close() From 9503cf7bc07bf23d9b53621f696c6e6c6867d25e Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 1 Sep 2024 19:00:44 +0100 Subject: [PATCH 0466/1511] [PR #7168/8a525d98 backport][3.11] Fix `test_utils.make_mocked_request` behaviour for empty payload (#8970) **This is a backport of PR #7168 as merged into master (8a525d98c512e624311b16e9891cac9050cc8b8c).** Co-authored-by: Rahul Nahata <rahul.nahata96@gmail.com> --- CHANGES/7167.bugfix.rst | 1 + CONTRIBUTORS.txt | 1 + aiohttp/test_utils.py | 6 ++---- tests/test_test_utils.py | 5 +++++ 4 files changed, 9 insertions(+), 4 deletions(-) create mode 100644 CHANGES/7167.bugfix.rst diff --git a/CHANGES/7167.bugfix.rst b/CHANGES/7167.bugfix.rst new file mode 100644 index 00000000000..766f1438b66 --- /dev/null +++ b/CHANGES/7167.bugfix.rst @@ -0,0 +1 @@ +Changed ``make_mocked_request()`` to use empty payload by default -- by :user:`rahulnht`. 
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 8f387459948..e7214dfedd4 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -274,6 +274,7 @@ Philipp A. Pieter van Beek Qiao Han Rafael Viotti +Rahul Nahata Raphael Bialon Raúl Cumplido Required Field diff --git a/aiohttp/test_utils.py b/aiohttp/test_utils.py index 97c1469dd2a..328561fb6a7 100644 --- a/aiohttp/test_utils.py +++ b/aiohttp/test_utils.py @@ -31,6 +31,7 @@ from .client_ws import ClientWebSocketResponse from .helpers import sentinel from .http import HttpVersion, RawRequestMessage +from .streams import EMPTY_PAYLOAD, StreamReader from .typedefs import StrOrURL from .web import ( Application, @@ -631,7 +632,7 @@ def make_mocked_request( writer: Any = sentinel, protocol: Any = sentinel, transport: Any = sentinel, - payload: Any = sentinel, + payload: StreamReader = EMPTY_PAYLOAD, sslcontext: Optional[SSLContext] = None, client_max_size: int = 1024**2, loop: Any = ..., @@ -700,9 +701,6 @@ def make_mocked_request( protocol.transport = transport protocol.writer = writer - if payload is sentinel: - payload = mock.Mock() - req = Request( message, payload, protocol, writer, task, loop, client_max_size=client_max_size ) diff --git a/tests/test_test_utils.py b/tests/test_test_utils.py index 328f83c3fd4..77349246616 100644 --- a/tests/test_test_utils.py +++ b/tests/test_test_utils.py @@ -239,6 +239,11 @@ def test_make_mocked_request_content() -> None: assert req.content is payload +async def test_make_mocked_request_empty_payload() -> None: + req = make_mocked_request("GET", "/") + assert await req.read() == b"" + + def test_make_mocked_request_transport() -> None: transport = mock.Mock() req = make_mocked_request("GET", "/", transport=transport) From d6d0a22cc6c33df97dcb1b6165d53bf529107124 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 1 Sep 2024 19:01:05 +0100 Subject: [PATCH 0467/1511] [PR #7168/8a525d98 backport][3.10] Fix 
`test_utils.make_mocked_request` behaviour for empty payload (#8969) **This is a backport of PR #7168 as merged into master (8a525d98c512e624311b16e9891cac9050cc8b8c).** Co-authored-by: Rahul Nahata <rahul.nahata96@gmail.com> --- CHANGES/7167.bugfix.rst | 1 + CONTRIBUTORS.txt | 1 + aiohttp/test_utils.py | 6 ++---- tests/test_test_utils.py | 5 +++++ 4 files changed, 9 insertions(+), 4 deletions(-) create mode 100644 CHANGES/7167.bugfix.rst diff --git a/CHANGES/7167.bugfix.rst b/CHANGES/7167.bugfix.rst new file mode 100644 index 00000000000..766f1438b66 --- /dev/null +++ b/CHANGES/7167.bugfix.rst @@ -0,0 +1 @@ +Changed ``make_mocked_request()`` to use empty payload by default -- by :user:`rahulnht`. diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 8f387459948..e7214dfedd4 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -274,6 +274,7 @@ Philipp A. Pieter van Beek Qiao Han Rafael Viotti +Rahul Nahata Raphael Bialon Raúl Cumplido Required Field diff --git a/aiohttp/test_utils.py b/aiohttp/test_utils.py index 97c1469dd2a..328561fb6a7 100644 --- a/aiohttp/test_utils.py +++ b/aiohttp/test_utils.py @@ -31,6 +31,7 @@ from .client_ws import ClientWebSocketResponse from .helpers import sentinel from .http import HttpVersion, RawRequestMessage +from .streams import EMPTY_PAYLOAD, StreamReader from .typedefs import StrOrURL from .web import ( Application, @@ -631,7 +632,7 @@ def make_mocked_request( writer: Any = sentinel, protocol: Any = sentinel, transport: Any = sentinel, - payload: Any = sentinel, + payload: StreamReader = EMPTY_PAYLOAD, sslcontext: Optional[SSLContext] = None, client_max_size: int = 1024**2, loop: Any = ..., @@ -700,9 +701,6 @@ def make_mocked_request( protocol.transport = transport protocol.writer = writer - if payload is sentinel: - payload = mock.Mock() - req = Request( message, payload, protocol, writer, task, loop, client_max_size=client_max_size ) diff --git a/tests/test_test_utils.py b/tests/test_test_utils.py index 
328f83c3fd4..77349246616 100644 --- a/tests/test_test_utils.py +++ b/tests/test_test_utils.py @@ -239,6 +239,11 @@ def test_make_mocked_request_content() -> None: assert req.content is payload +async def test_make_mocked_request_empty_payload() -> None: + req = make_mocked_request("GET", "/") + assert await req.read() == b"" + + def test_make_mocked_request_transport() -> None: transport = mock.Mock() req = make_mocked_request("GET", "/", transport=transport) From 0fdcc291d6f4a35352d8feba39a9acf5f822c1a9 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Sun, 1 Sep 2024 20:59:15 +0100 Subject: [PATCH 0468/1511] Fix CancelledError stopping other cleanup contexts completing (#8908) (#8972) (cherry picked from commit 032fb571f2c73605d855d4f17026b303c7441823) --- CHANGES/8908.bugfix.rst | 1 + aiohttp/web_app.py | 2 +- docs/web_advanced.rst | 14 ++++++-------- tests/test_web_app.py | 9 ++++++--- 4 files changed, 14 insertions(+), 12 deletions(-) create mode 100644 CHANGES/8908.bugfix.rst diff --git a/CHANGES/8908.bugfix.rst b/CHANGES/8908.bugfix.rst new file mode 100644 index 00000000000..0eb450431db --- /dev/null +++ b/CHANGES/8908.bugfix.rst @@ -0,0 +1 @@ +Fixed ``CancelledError`` from one cleanup context stopping other contexts from completing -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/web_app.py b/aiohttp/web_app.py index 3b4b6489e60..3510bffda60 100644 --- a/aiohttp/web_app.py +++ b/aiohttp/web_app.py @@ -579,7 +579,7 @@ async def _on_cleanup(self, app: Application) -> None: await it.__anext__() except StopAsyncIteration: pass - except Exception as exc: + except (Exception, asyncio.CancelledError) as exc: errors.append(exc) else: errors.append(RuntimeError(f"{it!r} has more than one 'yield'")) diff --git a/docs/web_advanced.rst b/docs/web_advanced.rst index dc94bea33bf..070bae34f10 100644 --- a/docs/web_advanced.rst +++ b/docs/web_advanced.rst @@ -1064,13 +1064,10 @@ below:: async with client.pubsub() as pubsub: await pubsub.subscribe(channel) while True: - try: - msg = await pubsub.get_message(ignore_subscribe_messages=True) - if msg is not None: - for ws in app["websockets"]: - await ws.send_str("{}: {}".format(channel, msg)) - except asyncio.CancelledError: - break + msg = await pubsub.get_message(ignore_subscribe_messages=True) + if msg is not None: + for ws in app["websockets"]: + await ws.send_str("{}: {}".format(channel, msg)) async def background_tasks(app): @@ -1079,7 +1076,8 @@ below:: yield app[redis_listener].cancel() - await app[redis_listener] + with contextlib.suppress(asyncio.CancelledError): + await app[redis_listener] app = web.Application() diff --git a/tests/test_web_app.py b/tests/test_web_app.py index 3d3aa2479f6..6a86a3458a3 100644 --- a/tests/test_web_app.py +++ b/tests/test_web_app.py @@ -1,6 +1,6 @@ import asyncio import gc -from typing import AsyncIterator, Callable, Iterator, NoReturn +from typing import AsyncIterator, Callable, Iterator, NoReturn, Type from unittest import mock import pytest @@ -476,7 +476,10 @@ async def fail_ctx(app: web.Application) -> AsyncIterator[NoReturn]: assert ctx_state == "CLEAN" -async def test_cleanup_ctx_exception_on_cleanup_multiple() -> None: +@pytest.mark.parametrize("exc_cls", (Exception, asyncio.CancelledError)) +async def 
test_cleanup_ctx_exception_on_cleanup_multiple( + exc_cls: Type[BaseException], +) -> None: app = web.Application() out = [] @@ -488,7 +491,7 @@ async def inner(app: web.Application) -> AsyncIterator[None]: yield None out.append("post_" + str(num)) if fail: - raise Exception("fail_" + str(num)) + raise exc_cls("fail_" + str(num)) return inner From 87342c791dfd4c916877ba3dffafb9345bb0491f Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Sun, 1 Sep 2024 20:59:35 +0100 Subject: [PATCH 0469/1511] Fix CancelledError stopping other cleanup contexts completing (#8908) (#8973) (cherry picked from commit 032fb571f2c73605d855d4f17026b303c7441823) --- CHANGES/8908.bugfix.rst | 1 + aiohttp/web_app.py | 2 +- docs/web_advanced.rst | 14 ++++++-------- tests/test_web_app.py | 9 ++++++--- 4 files changed, 14 insertions(+), 12 deletions(-) create mode 100644 CHANGES/8908.bugfix.rst diff --git a/CHANGES/8908.bugfix.rst b/CHANGES/8908.bugfix.rst new file mode 100644 index 00000000000..0eb450431db --- /dev/null +++ b/CHANGES/8908.bugfix.rst @@ -0,0 +1 @@ +Fixed ``CancelledError`` from one cleanup context stopping other contexts from completing -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/web_app.py b/aiohttp/web_app.py index 3b4b6489e60..3510bffda60 100644 --- a/aiohttp/web_app.py +++ b/aiohttp/web_app.py @@ -579,7 +579,7 @@ async def _on_cleanup(self, app: Application) -> None: await it.__anext__() except StopAsyncIteration: pass - except Exception as exc: + except (Exception, asyncio.CancelledError) as exc: errors.append(exc) else: errors.append(RuntimeError(f"{it!r} has more than one 'yield'")) diff --git a/docs/web_advanced.rst b/docs/web_advanced.rst index dc94bea33bf..070bae34f10 100644 --- a/docs/web_advanced.rst +++ b/docs/web_advanced.rst @@ -1064,13 +1064,10 @@ below:: async with client.pubsub() as pubsub: await pubsub.subscribe(channel) while True: - try: - msg = await pubsub.get_message(ignore_subscribe_messages=True) - if msg is not None: - for ws in app["websockets"]: - await ws.send_str("{}: {}".format(channel, msg)) - except asyncio.CancelledError: - break + msg = await pubsub.get_message(ignore_subscribe_messages=True) + if msg is not None: + for ws in app["websockets"]: + await ws.send_str("{}: {}".format(channel, msg)) async def background_tasks(app): @@ -1079,7 +1076,8 @@ below:: yield app[redis_listener].cancel() - await app[redis_listener] + with contextlib.suppress(asyncio.CancelledError): + await app[redis_listener] app = web.Application() diff --git a/tests/test_web_app.py b/tests/test_web_app.py index 3d3aa2479f6..6a86a3458a3 100644 --- a/tests/test_web_app.py +++ b/tests/test_web_app.py @@ -1,6 +1,6 @@ import asyncio import gc -from typing import AsyncIterator, Callable, Iterator, NoReturn +from typing import AsyncIterator, Callable, Iterator, NoReturn, Type from unittest import mock import pytest @@ -476,7 +476,10 @@ async def fail_ctx(app: web.Application) -> AsyncIterator[NoReturn]: assert ctx_state == "CLEAN" -async def test_cleanup_ctx_exception_on_cleanup_multiple() -> None: +@pytest.mark.parametrize("exc_cls", (Exception, asyncio.CancelledError)) +async def 
test_cleanup_ctx_exception_on_cleanup_multiple( + exc_cls: Type[BaseException], +) -> None: app = web.Application() out = [] @@ -488,7 +491,7 @@ async def inner(app: web.Application) -> AsyncIterator[None]: yield None out.append("post_" + str(num)) if fail: - raise Exception("fail_" + str(num)) + raise exc_cls("fail_" + str(num)) return inner From 306e42d8de2a9f4c2a911eab5fce6da36f949f11 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Sun, 1 Sep 2024 21:20:08 +0100 Subject: [PATCH 0470/1511] Avoid logging exception from run_app() that is also raised (#8951) (#8974) (cherry picked from commit 45d6e4f14572bfc4cfc3f32a2c7c72a9cc28f125) --- CHANGES/6807.bugfix.rst | 1 + aiohttp/web.py | 13 +++++++++---- tests/test_run_app.py | 19 ++++++++++++++++++- 3 files changed, 28 insertions(+), 5 deletions(-) create mode 100644 CHANGES/6807.bugfix.rst diff --git a/CHANGES/6807.bugfix.rst b/CHANGES/6807.bugfix.rst new file mode 100644 index 00000000000..4eb07b9e0da --- /dev/null +++ b/CHANGES/6807.bugfix.rst @@ -0,0 +1 @@ +Stopped logging exceptions from ``web.run_app()`` that would be raised regardless -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/web.py b/aiohttp/web.py index 8708f1fcbec..88bf14bf828 100644 --- a/aiohttp/web.py +++ b/aiohttp/web.py @@ -6,6 +6,7 @@ import warnings from argparse import ArgumentParser from collections.abc import Iterable +from contextlib import suppress from importlib import import_module from typing import ( Any, @@ -519,10 +520,14 @@ def run_app( except (GracefulExit, KeyboardInterrupt): # pragma: no cover pass finally: - _cancel_tasks({main_task}, loop) - _cancel_tasks(asyncio.all_tasks(loop), loop) - loop.run_until_complete(loop.shutdown_asyncgens()) - loop.close() + try: + main_task.cancel() + with suppress(asyncio.CancelledError): + loop.run_until_complete(main_task) + finally: + _cancel_tasks(asyncio.all_tasks(loop), loop) + loop.run_until_complete(loop.shutdown_asyncgens()) + loop.close() def main(argv: List[str]) -> None: diff --git a/tests/test_run_app.py b/tests/test_run_app.py index c1d5f8e14f4..74d8c79bf55 100644 --- a/tests/test_run_app.py +++ b/tests/test_run_app.py @@ -9,7 +9,7 @@ import subprocess import sys import time -from typing import Callable, NoReturn, Set +from typing import AsyncIterator, Callable, NoReturn, Set from unittest import mock from uuid import uuid4 @@ -906,6 +906,23 @@ async def init(): assert count == 3 +def test_run_app_raises_exception(patched_loop: asyncio.AbstractEventLoop) -> None: + async def context(app: web.Application) -> AsyncIterator[None]: + raise RuntimeError("foo") + yield # pragma: no cover + + app = web.Application() + app.cleanup_ctx.append(context) + + with mock.patch.object( + patched_loop, "call_exception_handler", autospec=True, spec_set=True + ) as m: + with pytest.raises(RuntimeError, match="foo"): + web.run_app(app, loop=patched_loop) + + assert not m.called + + class TestShutdown: def raiser(self) -> NoReturn: raise KeyboardInterrupt From 1c1c0ea353041c8814a6131c3a92978dc2373e52 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Sun, 1 Sep 2024 21:37:29 +0100 Subject: [PATCH 
0471/1511] Avoid logging exception from run_app() that is also raised (#8951) (#8975) (cherry picked from commit 45d6e4f14572bfc4cfc3f32a2c7c72a9cc28f125) --- CHANGES/6807.bugfix.rst | 1 + aiohttp/web.py | 13 +++++++++---- tests/test_run_app.py | 19 ++++++++++++++++++- 3 files changed, 28 insertions(+), 5 deletions(-) create mode 100644 CHANGES/6807.bugfix.rst diff --git a/CHANGES/6807.bugfix.rst b/CHANGES/6807.bugfix.rst new file mode 100644 index 00000000000..4eb07b9e0da --- /dev/null +++ b/CHANGES/6807.bugfix.rst @@ -0,0 +1 @@ +Stopped logging exceptions from ``web.run_app()`` that would be raised regardless -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/web.py b/aiohttp/web.py index 8708f1fcbec..88bf14bf828 100644 --- a/aiohttp/web.py +++ b/aiohttp/web.py @@ -6,6 +6,7 @@ import warnings from argparse import ArgumentParser from collections.abc import Iterable +from contextlib import suppress from importlib import import_module from typing import ( Any, @@ -519,10 +520,14 @@ def run_app( except (GracefulExit, KeyboardInterrupt): # pragma: no cover pass finally: - _cancel_tasks({main_task}, loop) - _cancel_tasks(asyncio.all_tasks(loop), loop) - loop.run_until_complete(loop.shutdown_asyncgens()) - loop.close() + try: + main_task.cancel() + with suppress(asyncio.CancelledError): + loop.run_until_complete(main_task) + finally: + _cancel_tasks(asyncio.all_tasks(loop), loop) + loop.run_until_complete(loop.shutdown_asyncgens()) + loop.close() def main(argv: List[str]) -> None: diff --git a/tests/test_run_app.py b/tests/test_run_app.py index c1d5f8e14f4..74d8c79bf55 100644 --- a/tests/test_run_app.py +++ b/tests/test_run_app.py @@ -9,7 +9,7 @@ import subprocess import sys import time -from typing import Callable, NoReturn, Set +from typing import AsyncIterator, Callable, NoReturn, Set from unittest import mock from uuid import uuid4 @@ -906,6 +906,23 @@ async def init(): assert count == 3 +def test_run_app_raises_exception(patched_loop: asyncio.AbstractEventLoop) -> 
None: + async def context(app: web.Application) -> AsyncIterator[None]: + raise RuntimeError("foo") + yield # pragma: no cover + + app = web.Application() + app.cleanup_ctx.append(context) + + with mock.patch.object( + patched_loop, "call_exception_handler", autospec=True, spec_set=True + ) as m: + with pytest.raises(RuntimeError, match="foo"): + web.run_app(app, loop=patched_loop) + + assert not m.called + + class TestShutdown: def raiser(self) -> NoReturn: raise KeyboardInterrupt From be27f7926f0da9b4129d98cf628eea99dc9e3411 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 1 Sep 2024 22:18:00 +0100 Subject: [PATCH 0472/1511] [PR #8966/f569894c backport][3.11] Fix auth reset logic during redirects to different origin when _base_url set (#8976) **This is a backport of PR #8966 as merged into master (f569894caa7cfbc2ec03fb5eed6021b9899dc4b4).** --------- Co-authored-by: Maxim Zemskov <m.zemskov1@gmail.com> Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8966.feature.rst | 1 + aiohttp/client.py | 5 +- docs/client_reference.rst | 8 +- tests/test_client_functional.py | 132 ++++++++++++++++++++++++++++++++ 4 files changed, 142 insertions(+), 4 deletions(-) create mode 100644 CHANGES/8966.feature.rst diff --git a/CHANGES/8966.feature.rst b/CHANGES/8966.feature.rst new file mode 100644 index 00000000000..ab1dc45b60e --- /dev/null +++ b/CHANGES/8966.feature.rst @@ -0,0 +1 @@ +Updated ClientSession's auth logic to include default auth only if the request URL's origin matches _base_url; otherwise, the auth will not be included -- by :user:`MaximZemskov` diff --git a/aiohttp/client.py b/aiohttp/client.py index f9e3a5c5f65..f3c60d31f08 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -597,7 +597,10 @@ async def _request( if auth is None: auth = auth_from_url - if auth is None: + + if auth is None and ( + not self._base_url or self._base_url.origin() == url.origin() + ): auth = self._default_auth # It 
would be confusing if we support explicit # Authorization header with auth argument diff --git a/docs/client_reference.rst b/docs/client_reference.rst index f64df336755..afad40e2d83 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -100,9 +100,11 @@ The client session supports the context manager protocol for self closing. :param aiohttp.BasicAuth auth: an object that represents HTTP Basic Authorization (optional). It will be included - with any request to any origin and will not be - removed, event during redirect to a different - origin. + with any request. However, if the + ``_base_url`` parameter is set, the request + URL's origin must match the base URL's origin; + otherwise, the default auth will not be + included. :param version: supported HTTP version, ``HTTP 1.1`` by default. diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 1f9173bd3f7..c7c31c739b1 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -2905,6 +2905,138 @@ async def close(self): assert resp.status == 200 +async def test_auth_persist_on_redirect_to_other_host_with_global_auth( + create_server_for_url_and_handler, +) -> None: + url_from = URL("http://host1.com/path1") + url_to = URL("http://host2.com/path2") + + async def srv_from(request: web.Request): + assert request.host == url_from.host + assert request.headers["Authorization"] == "Basic dXNlcjpwYXNz" + raise web.HTTPFound(url_to) + + async def srv_to(request: web.Request) -> web.Response: + assert request.host == url_to.host + assert "Authorization" in request.headers, "Header was dropped" + return web.Response() + + server_from = await create_server_for_url_and_handler(url_from, srv_from) + server_to = await create_server_for_url_and_handler(url_to, srv_to) + + assert ( + url_from.host != url_to.host or server_from.scheme != server_to.scheme + ), "Invalid test case, host or scheme must differ" + + protocol_port_map = { + "http": 80, + "https": 
443, + } + etc_hosts = { + (url_from.host, protocol_port_map[server_from.scheme]): server_from, + (url_to.host, protocol_port_map[server_to.scheme]): server_to, + } + + class FakeResolver(AbstractResolver): + async def resolve( + self, + host: str, + port: int = 0, + family: socket.AddressFamily = socket.AF_INET, + ): + server = etc_hosts[(host, port)] + assert server.port is not None + + return [ + { + "hostname": host, + "host": server.host, + "port": server.port, + "family": socket.AF_INET, + "proto": 0, + "flags": socket.AI_NUMERICHOST, + } + ] + + async def close(self) -> None: + """Dummy""" + + connector = aiohttp.TCPConnector(resolver=FakeResolver(), ssl=False) + + async with aiohttp.ClientSession( + connector=connector, auth=aiohttp.BasicAuth("user", "pass") + ) as client: + resp = await client.get(url_from) + assert resp.status == 200 + + +async def test_drop_auth_on_redirect_to_other_host_with_global_auth_and_base_url( + create_server_for_url_and_handler, +) -> None: + url_from = URL("http://host1.com/path1") + url_to = URL("http://host2.com/path2") + + async def srv_from(request: web.Request): + assert request.host == url_from.host + assert request.headers["Authorization"] == "Basic dXNlcjpwYXNz" + raise web.HTTPFound(url_to) + + async def srv_to(request: web.Request) -> web.Response: + assert request.host == url_to.host + assert "Authorization" not in request.headers, "Header was not dropped" + return web.Response() + + server_from = await create_server_for_url_and_handler(url_from, srv_from) + server_to = await create_server_for_url_and_handler(url_to, srv_to) + + assert ( + url_from.host != url_to.host or server_from.scheme != server_to.scheme + ), "Invalid test case, host or scheme must differ" + + protocol_port_map = { + "http": 80, + "https": 443, + } + etc_hosts = { + (url_from.host, protocol_port_map[server_from.scheme]): server_from, + (url_to.host, protocol_port_map[server_to.scheme]): server_to, + } + + class FakeResolver(AbstractResolver): + 
async def resolve( + self, + host: str, + port: int = 0, + family: socket.AddressFamily = socket.AF_INET, + ): + server = etc_hosts[(host, port)] + assert server.port is not None + + return [ + { + "hostname": host, + "host": server.host, + "port": server.port, + "family": socket.AF_INET, + "proto": 0, + "flags": socket.AI_NUMERICHOST, + } + ] + + async def close(self) -> None: + """Dummy""" + + connector = aiohttp.TCPConnector(resolver=FakeResolver(), ssl=False) + + async with aiohttp.ClientSession( + connector=connector, + base_url="http://host1.com", + auth=aiohttp.BasicAuth("user", "pass"), + ) as client: + resp = await client.get("/path1") + assert resp.status == 200 + + async def test_async_with_session() -> None: async with aiohttp.ClientSession() as session: pass From 5dd9534ba860f1167ab4e84e1a5e65e5732f76d3 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 1 Sep 2024 13:36:27 -1000 Subject: [PATCH 0473/1511] [PR #8933/8f3b1f44 backport][3.10] Small cleanups to the websocket frame sender (#8978) Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/http_websocket.py | 45 +++++++++++++++++++++++++++++---------- 1 file changed, 34 insertions(+), 11 deletions(-) diff --git a/aiohttp/http_websocket.py b/aiohttp/http_websocket.py index db0cb429d83..2ea2c9191e1 100644 --- a/aiohttp/http_websocket.py +++ b/aiohttp/http_websocket.py @@ -115,6 +115,7 @@ class WSMsgType(IntEnum): PACK_RANDBITS = Struct("!L").pack MSG_SIZE: Final[int] = 2**14 DEFAULT_LIMIT: Final[int] = 2**16 +MASK_LEN: Final[int] = 4 class WSMessage(NamedTuple): @@ -625,12 +626,18 @@ async def _send_frame( if self._closing and not (opcode & WSMsgType.CLOSE): raise ConnectionResetError("Cannot write to closing transport") + # RSV are the reserved bits in the frame header. They are used to + # indicate that the frame is using an extension. 
+ # https://datatracker.ietf.org/doc/html/rfc6455#section-5.2 rsv = 0 - # Only compress larger packets (disabled) # Does small packet needs to be compressed? # if self.compress and opcode < 8 and len(message) > 124: if (compress or self.compress) and opcode < 8: + # RSV1 (rsv = 0x40) is set for compressed frames + # https://datatracker.ietf.org/doc/html/rfc7692#section-7.2.3.1 + rsv = 0x40 + if compress: # Do not set self._compress if compressing is for this frame compressobj = self._make_compress_obj(compress) @@ -649,28 +656,39 @@ async def _send_frame( ) if message.endswith(_WS_DEFLATE_TRAILING): message = message[:-4] - rsv = rsv | 0x40 msg_length = len(message) use_mask = self.use_mask - if use_mask: - mask_bit = 0x80 - else: - mask_bit = 0 + mask_bit = 0x80 if use_mask else 0 + # Depending on the message length, the header is assembled differently. + # The first byte is reserved for the opcode and the RSV bits. + first_byte = 0x80 | rsv | opcode if msg_length < 126: - header = PACK_LEN1(0x80 | rsv | opcode, msg_length | mask_bit) + header = PACK_LEN1(first_byte, msg_length | mask_bit) + header_len = 2 elif msg_length < (1 << 16): - header = PACK_LEN2(0x80 | rsv | opcode, 126 | mask_bit, msg_length) + header = PACK_LEN2(first_byte, 126 | mask_bit, msg_length) + header_len = 4 else: - header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length) + header = PACK_LEN3(first_byte, 127 | mask_bit, msg_length) + header_len = 10 + + # https://datatracker.ietf.org/doc/html/rfc6455#section-5.3 + # If we are using a mask, we need to generate it randomly + # and apply it to the message before sending it. A mask is + # a 32-bit value that is applied to the message using a + # bitwise XOR operation. It is used to prevent certain types + # of attacks on the websocket protocol. The mask is only used + # when aiohttp is acting as a client. Servers do not use a mask. 
if use_mask: mask = PACK_RANDBITS(self.get_random_bits()) message = bytearray(message) _websocket_mask(mask, message) self._write(header + mask + message) - self._output_size += len(header) + len(mask) + msg_length + self._output_size += header_len + MASK_LEN + msg_length + else: if msg_length > MSG_SIZE: self._write(header) @@ -678,11 +696,16 @@ async def _send_frame( else: self._write(header + message) - self._output_size += len(header) + msg_length + self._output_size += header_len + msg_length # It is safe to return control to the event loop when using compression # after this point as we have already sent or buffered all the data. + # Once we have written output_size up to the limit, we call the + # drain helper which waits for the transport to be ready to accept + # more data. This is a flow control mechanism to prevent the buffer + # from growing too large. The drain helper will return right away + # if the writer is not paused. if self._output_size > self._limit: self._output_size = 0 await self.protocol._drain_helper() From 2af23ab33a4ae3727e2345b2e60c4341243dee3d Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 1 Sep 2024 23:41:12 +0000 Subject: [PATCH 0474/1511] [PR #8933/8f3b1f44 backport][3.11] Small cleanups to the websocket frame sender (#8979) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/http_websocket.py | 45 +++++++++++++++++++++++++++++---------- 1 file changed, 34 insertions(+), 11 deletions(-) diff --git a/aiohttp/http_websocket.py b/aiohttp/http_websocket.py index db0cb429d83..2ea2c9191e1 100644 --- a/aiohttp/http_websocket.py +++ b/aiohttp/http_websocket.py @@ -115,6 +115,7 @@ class WSMsgType(IntEnum): PACK_RANDBITS = Struct("!L").pack MSG_SIZE: Final[int] = 2**14 DEFAULT_LIMIT: Final[int] = 2**16 +MASK_LEN: Final[int] = 4 class WSMessage(NamedTuple): @@ -625,12 +626,18 @@ async def _send_frame( if self._closing and not (opcode & WSMsgType.CLOSE): raise ConnectionResetError("Cannot write to closing transport") + # RSV are the reserved bits in the frame header. They are used to + # indicate that the frame is using an extension. + # https://datatracker.ietf.org/doc/html/rfc6455#section-5.2 rsv = 0 - # Only compress larger packets (disabled) # Does small packet needs to be compressed? # if self.compress and opcode < 8 and len(message) > 124: if (compress or self.compress) and opcode < 8: + # RSV1 (rsv = 0x40) is set for compressed frames + # https://datatracker.ietf.org/doc/html/rfc7692#section-7.2.3.1 + rsv = 0x40 + if compress: # Do not set self._compress if compressing is for this frame compressobj = self._make_compress_obj(compress) @@ -649,28 +656,39 @@ async def _send_frame( ) if message.endswith(_WS_DEFLATE_TRAILING): message = message[:-4] - rsv = rsv | 0x40 msg_length = len(message) use_mask = self.use_mask - if use_mask: - mask_bit = 0x80 - else: - mask_bit = 0 + mask_bit = 0x80 if use_mask else 0 + # Depending on the message length, the header is assembled differently. + # The first byte is reserved for the opcode and the RSV bits. 
+ first_byte = 0x80 | rsv | opcode if msg_length < 126: - header = PACK_LEN1(0x80 | rsv | opcode, msg_length | mask_bit) + header = PACK_LEN1(first_byte, msg_length | mask_bit) + header_len = 2 elif msg_length < (1 << 16): - header = PACK_LEN2(0x80 | rsv | opcode, 126 | mask_bit, msg_length) + header = PACK_LEN2(first_byte, 126 | mask_bit, msg_length) + header_len = 4 else: - header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length) + header = PACK_LEN3(first_byte, 127 | mask_bit, msg_length) + header_len = 10 + + # https://datatracker.ietf.org/doc/html/rfc6455#section-5.3 + # If we are using a mask, we need to generate it randomly + # and apply it to the message before sending it. A mask is + # a 32-bit value that is applied to the message using a + # bitwise XOR operation. It is used to prevent certain types + # of attacks on the websocket protocol. The mask is only used + # when aiohttp is acting as a client. Servers do not use a mask. if use_mask: mask = PACK_RANDBITS(self.get_random_bits()) message = bytearray(message) _websocket_mask(mask, message) self._write(header + mask + message) - self._output_size += len(header) + len(mask) + msg_length + self._output_size += header_len + MASK_LEN + msg_length + else: if msg_length > MSG_SIZE: self._write(header) @@ -678,11 +696,16 @@ async def _send_frame( else: self._write(header + message) - self._output_size += len(header) + msg_length + self._output_size += header_len + msg_length # It is safe to return control to the event loop when using compression # after this point as we have already sent or buffered all the data. + # Once we have written output_size up to the limit, we call the + # drain helper which waits for the transport to be ready to accept + # more data. This is a flow control mechanism to prevent the buffer + # from growing too large. The drain helper will return right away + # if the writer is not paused. 
if self._output_size > self._limit: self._output_size = 0 await self.protocol._drain_helper() From 553e311e0e66548ab8a4b90f7e7561c0901b25c3 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 2 Sep 2024 01:57:51 +0100 Subject: [PATCH 0475/1511] [PR #8968/8daecf5c backport][3.11] List specific timeouts for each exception (#8982) **This is a backport of PR #8968 as merged into master (8daecf5cc357518957deec45a526dfd1703ec48b).** Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8968.doc.rst | 1 + docs/client_reference.rst | 7 +++++-- 2 files changed, 6 insertions(+), 2 deletions(-) create mode 100644 CHANGES/8968.doc.rst diff --git a/CHANGES/8968.doc.rst b/CHANGES/8968.doc.rst new file mode 100644 index 00000000000..3420794586f --- /dev/null +++ b/CHANGES/8968.doc.rst @@ -0,0 +1 @@ +Clarified which timeout exceptions happen on which timeouts -- by :user:`Dreamsorcerer`. diff --git a/docs/client_reference.rst b/docs/client_reference.rst index afad40e2d83..887b196fa62 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -2327,17 +2327,20 @@ Connection errors Server operation timeout: read timeout, etc. + To catch all timeouts, including the ``total`` timeout, use + :exc:`asyncio.TimeoutError`. + Derived from :exc:`ServerConnectionError` and :exc:`asyncio.TimeoutError` .. class:: ConnectionTimeoutError - Connection timeout on request: e.g. read timeout. + Connection timeout on ``connect`` and ``sock_connect`` timeouts. Derived from :exc:`ServerTimeoutError` .. class:: SocketTimeoutError - Reading from socket timeout. + Reading from socket timeout on ``sock_read`` timeout. 
Derived from :exc:`ServerTimeoutError` From c733c68d62220f304266bcf997b57f91a871b8a5 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 2 Sep 2024 01:58:11 +0100 Subject: [PATCH 0476/1511] [PR #8968/8daecf5c backport][3.10] List specific timeouts for each exception (#8981) **This is a backport of PR #8968 as merged into master (8daecf5cc357518957deec45a526dfd1703ec48b).** Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8968.doc.rst | 1 + docs/client_reference.rst | 7 +++++-- 2 files changed, 6 insertions(+), 2 deletions(-) create mode 100644 CHANGES/8968.doc.rst diff --git a/CHANGES/8968.doc.rst b/CHANGES/8968.doc.rst new file mode 100644 index 00000000000..3420794586f --- /dev/null +++ b/CHANGES/8968.doc.rst @@ -0,0 +1 @@ +Clarified which timeout exceptions happen on which timeouts -- by :user:`Dreamsorcerer`. diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 4e63552cd5c..1af1cde68ba 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -2309,17 +2309,20 @@ Connection errors Server operation timeout: read timeout, etc. + To catch all timeouts, including the ``total`` timeout, use + :exc:`asyncio.TimeoutError`. + Derived from :exc:`ServerConnectionError` and :exc:`asyncio.TimeoutError` .. class:: ConnectionTimeoutError - Connection timeout on request: e.g. read timeout. + Connection timeout on ``connect`` and ``sock_connect`` timeouts. Derived from :exc:`ServerTimeoutError` .. class:: SocketTimeoutError - Reading from socket timeout. + Reading from socket timeout on ``sock_read`` timeout. 
Derived from :exc:`ServerTimeoutError` From ea316336f0b247bb85fd7ee0e4e41f5eac682228 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Mon, 2 Sep 2024 02:20:25 +0100 Subject: [PATCH 0477/1511] Fix resolve_host "Task was destroyed but it is pending" errors (#8967) (#8980) (cherry picked from commit cd761a347be2609deca503646b9b5fb3585b2fda) --- CHANGES/8967.bugfix.rst | 1 + aiohttp/connector.py | 6 ++++++ tests/test_connector.py | 38 ++++++++++++++++++++++++++++++++++++-- 3 files changed, 43 insertions(+), 2 deletions(-) create mode 100644 CHANGES/8967.bugfix.rst diff --git a/CHANGES/8967.bugfix.rst b/CHANGES/8967.bugfix.rst new file mode 100644 index 00000000000..1046f36bd8b --- /dev/null +++ b/CHANGES/8967.bugfix.rst @@ -0,0 +1 @@ +Fixed resolve_host() 'Task was destroyed but is pending' errors -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 91174e319ab..93c78c62b08 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -824,12 +824,16 @@ def __init__( self._local_addr_infos = aiohappyeyeballs.addr_to_addr_infos(local_addr) self._happy_eyeballs_delay = happy_eyeballs_delay self._interleave = interleave + self._resolve_host_tasks: Set["asyncio.Task[List[ResolveResult]]"] = set() def close(self) -> Awaitable[None]: """Close all ongoing DNS calls.""" for ev in self._throttle_dns_events.values(): ev.cancel() + for t in self._resolve_host_tasks: + t.cancel() + return super().close() @property @@ -907,6 +911,8 @@ async def _resolve_host( resolved_host_task = asyncio.create_task( self._resolve_host_with_throttle(key, host, port, traces) ) + self._resolve_host_tasks.add(resolved_host_task) + resolved_host_task.add_done_callback(self._resolve_host_tasks.discard) try: return await asyncio.shield(resolved_host_task) except asyncio.CancelledError: diff --git a/tests/test_connector.py b/tests/test_connector.py index 8dd7a294b30..0129f0cc330 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ 
-9,7 +9,7 @@ import sys import uuid from collections import deque -from contextlib import closing +from contextlib import closing, suppress from typing import Any, List, Optional, Type from unittest import mock @@ -1667,7 +1667,41 @@ async def test_close_cancels_cleanup_handle(loop) -> None: assert conn._cleanup_handle is None -async def test_close_abort_closed_transports(loop) -> None: +async def test_close_cancels_resolve_host(loop: asyncio.AbstractEventLoop) -> None: + cancelled = False + + async def delay_resolve_host(*args: object) -> None: + """Delay _resolve_host() task in order to test cancellation.""" + nonlocal cancelled + try: + await asyncio.sleep(10) + except asyncio.CancelledError: + cancelled = True + raise + + conn = aiohttp.TCPConnector() + req = ClientRequest( + "GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock() + ) + with mock.patch.object(conn, "_resolve_host_with_throttle", delay_resolve_host): + t = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + # Let it create the internal task + await asyncio.sleep(0) + # Let that task start running + await asyncio.sleep(0) + + # We now have a task being tracked and can ensure that .close() cancels it. 
+ assert len(conn._resolve_host_tasks) == 1 + await conn.close() + await asyncio.sleep(0.01) + assert cancelled + assert len(conn._resolve_host_tasks) == 0 + + with suppress(asyncio.CancelledError): + await t + + +async def test_close_abort_closed_transports(loop: asyncio.AbstractEventLoop) -> None: tr = mock.Mock() conn = aiohttp.BaseConnector(loop=loop) From 64a3d30a57952ec86caa2688d4a98696a486d3ea Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 2 Sep 2024 11:48:42 +0100 Subject: [PATCH 0478/1511] [PR #8980/ea316336 backport][3.10] Fix resolve_host "Task was destroyed but it is pending" errors (#8967) (#8984) **This is a backport of PR #8980 as merged into 3.11 (ea316336f0b247bb85fd7ee0e4e41f5eac682228).** (cherry picked from commit cd761a347be2609deca503646b9b5fb3585b2fda) Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8967.bugfix.rst | 1 + aiohttp/connector.py | 6 ++++++ tests/test_connector.py | 38 ++++++++++++++++++++++++++++++++++++-- 3 files changed, 43 insertions(+), 2 deletions(-) create mode 100644 CHANGES/8967.bugfix.rst diff --git a/CHANGES/8967.bugfix.rst b/CHANGES/8967.bugfix.rst new file mode 100644 index 00000000000..1046f36bd8b --- /dev/null +++ b/CHANGES/8967.bugfix.rst @@ -0,0 +1 @@ +Fixed resolve_host() 'Task was destroyed but is pending' errors -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 04115c36a24..7c6e747695e 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -824,12 +824,16 @@ def __init__( self._local_addr_infos = aiohappyeyeballs.addr_to_addr_infos(local_addr) self._happy_eyeballs_delay = happy_eyeballs_delay self._interleave = interleave + self._resolve_host_tasks: Set["asyncio.Task[List[ResolveResult]]"] = set() def close(self) -> Awaitable[None]: """Close all ongoing DNS calls.""" for ev in self._throttle_dns_events.values(): ev.cancel() + for t in self._resolve_host_tasks: + t.cancel() + return super().close() @property @@ -907,6 +911,8 @@ async def _resolve_host( resolved_host_task = asyncio.create_task( self._resolve_host_with_throttle(key, host, port, traces) ) + self._resolve_host_tasks.add(resolved_host_task) + resolved_host_task.add_done_callback(self._resolve_host_tasks.discard) try: return await asyncio.shield(resolved_host_task) except asyncio.CancelledError: diff --git a/tests/test_connector.py b/tests/test_connector.py index 8dd7a294b30..0129f0cc330 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -9,7 +9,7 @@ import sys import uuid from collections import deque -from contextlib import closing +from contextlib import closing, suppress from typing import Any, List, Optional, Type from unittest import mock @@ -1667,7 +1667,41 @@ async def test_close_cancels_cleanup_handle(loop) -> None: assert conn._cleanup_handle is None -async def test_close_abort_closed_transports(loop) -> None: +async def test_close_cancels_resolve_host(loop: asyncio.AbstractEventLoop) -> None: + cancelled = False + + async def delay_resolve_host(*args: object) -> None: + """Delay _resolve_host() task in order to test cancellation.""" + nonlocal cancelled + try: + await asyncio.sleep(10) + except asyncio.CancelledError: + cancelled = True + raise + + conn = aiohttp.TCPConnector() + req = ClientRequest( + "GET", URL("http://localhost:80"), loop=loop, 
response_class=mock.Mock() + ) + with mock.patch.object(conn, "_resolve_host_with_throttle", delay_resolve_host): + t = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + # Let it create the internal task + await asyncio.sleep(0) + # Let that task start running + await asyncio.sleep(0) + + # We now have a task being tracked and can ensure that .close() cancels it. + assert len(conn._resolve_host_tasks) == 1 + await conn.close() + await asyncio.sleep(0.01) + assert cancelled + assert len(conn._resolve_host_tasks) == 0 + + with suppress(asyncio.CancelledError): + await t + + +async def test_close_abort_closed_transports(loop: asyncio.AbstractEventLoop) -> None: tr = mock.Mock() conn = aiohttp.BaseConnector(loop=loop) From a86689fb25754f239193c08e7f2afbaebd31c49d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 2 Sep 2024 11:16:07 +0000 Subject: [PATCH 0479/1511] Bump yarl from 1.9.6 to 1.9.7 (#8986) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [yarl](https://github.com/aio-libs/yarl) from 1.9.6 to 1.9.7. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/yarl/releases">yarl's releases</a>.</em></p> <blockquote> <h2>1.9.7</h2> <h2>Removals and backward incompatible breaking changes</h2> <ul> <li> <p>Removed support :rfc:<code>3986#section-3.2.3</code> port normalization when the scheme is not one of <code>http</code>, <code>https</code>, <code>wss</code>, or <code>ws</code> -- by :user:<code>bdraco</code>.</p> <p>Support for port normalization was recently added in <a href="https://redirect.github.com/aio-libs/yarl/issues/1033">#1033</a> and contained code that would do blocking I/O if the scheme was not one of the four listed above. 
The code has been removed because this library is intended to be safe for usage with <code>asyncio</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/yarl/issues/1076">#1076</a>.</p> </li> </ul> <h2>Miscellaneous internal changes</h2> <ul> <li> <p>Improved performance of property caching -- by :user:<code>bdraco</code>.</p> <p>The <code>reify</code> implementation from <code>aiohttp</code> was adapted to replace the internal <code>cached_property</code> implementation.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/yarl/issues/1070">#1070</a>.</p> </li> </ul> <hr /> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/yarl/blob/master/CHANGES.rst">yarl's changelog</a>.</em></p> <blockquote> <h1>1.9.7</h1> <p><em>(2024-09-01)</em></p> <h2>Removals and backward incompatible breaking changes</h2> <ul> <li> <p>Removed support :rfc:<code>3986#section-3.2.3</code> port normalization when the scheme is not one of <code>http</code>, <code>https</code>, <code>wss</code>, or <code>ws</code> -- by :user:<code>bdraco</code>.</p> <p>Support for port normalization was recently added in :issue:<code>1033</code> and contained code that would do blocking I/O if the scheme was not one of the four listed above. 
The code has been removed because this library is intended to be safe for usage with :mod:<code>asyncio</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1076</code>.</p> </li> </ul> <h2>Miscellaneous internal changes</h2> <ul> <li> <p>Improved performance of property caching -- by :user:<code>bdraco</code>.</p> <p>The <code>reify</code> implementation from <code>aiohttp</code> was adapted to replace the internal <code>cached_property</code> implementation.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1070</code>.</p> </li> </ul> <hr /> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/yarl/commit/845f017dc525151e2a02a0bc2e855d68d761d25a"><code>845f017</code></a> Release 1.9.7</li> <li><a href="https://github.com/aio-libs/yarl/commit/7c1220bc0930afbfe52021d47b83a3ea9995df60"><code>7c1220b</code></a> Remove fallback to getservbyname in _get_default_port (<a href="https://redirect.github.com/aio-libs/yarl/issues/1076">#1076</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/6339581f4fb2ad5d6d8a06470d7b09c69da0e8ef"><code>6339581</code></a> Adapt aiohttp reify implementation to replace internal cached_property (<a href="https://redirect.github.com/aio-libs/yarl/issues/1070">#1070</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/2a3235ebd5c33c7ce8ea4de76e47d489a1be8d60"><code>2a3235e</code></a> Add additional coverage for joining urls (<a href="https://redirect.github.com/aio-libs/yarl/issues/1066">#1066</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/94b6b518195adac8da9275e36535bea6a9be9aad"><code>94b6b51</code></a> 🧪🚑 Fix coverage.py-included paths @ XML (<a href="https://redirect.github.com/aio-libs/yarl/issues/1074">#1074</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/3d5ce652b5aab6cf3c5e1b42727420db1cd31298"><code>3d5ce65</code></a> 🧪 Bump MyPy to v1.11.2</li> <li><a 
href="https://github.com/aio-libs/yarl/commit/8aa3733b5cdda8658c1d30cdd627f577663a71ee"><code>8aa3733</code></a> 🧪 Add a MyPy run against Python 3.13 code paths</li> <li><a href="https://github.com/aio-libs/yarl/commit/cbf0b9ee12105cb15e02b012f2af1a699b7b2048"><code>cbf0b9e</code></a> 🧪💅 Lower-case bools in MyPy config</li> <li><a href="https://github.com/aio-libs/yarl/commit/b253381a87517a2cd78ffa56b8e539ce27478492"><code>b253381</code></a> 🧪 Drop leftover MyPy checked paths from config</li> <li><a href="https://github.com/aio-libs/yarl/commit/a7f80077d2da94c2ddc597cc2afbdde4246931dd"><code>a7f8007</code></a> 🧪 Stop auto-installing MyPy type stubs</li> <li>Additional commits viewable in <a href="https://github.com/aio-libs/yarl/compare/v1.9.6...v1.9.7">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=yarl&package-manager=pip&previous-version=1.9.6&new-version=1.9.7)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 18b5f471150..5876b5881a7 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -38,5 +38,5 @@ pycparser==2.22 # via cffi uvloop==0.20.0 
; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in -yarl==1.9.6 +yarl==1.9.7 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 869bb3d1b34..a2deb71956d 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -286,7 +286,7 @@ webcolors==24.8.0 # via blockdiag wheel==0.44.0 # via pip-tools -yarl==1.9.6 +yarl==1.9.7 # via -r requirements/runtime-deps.in zipp==3.20.1 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 9555838a4bf..2e40f0bb153 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -278,7 +278,7 @@ webcolors==24.8.0 # via blockdiag wheel==0.44.0 # via pip-tools -yarl==1.9.6 +yarl==1.9.7 # via -r requirements/runtime-deps.in zipp==3.20.1 # via diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index c876071fc19..00927852825 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -32,5 +32,5 @@ pycares==4.4.0 # via aiodns pycparser==2.22 # via cffi -yarl==1.9.6 +yarl==1.9.7 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 3caac33849b..a2bfc72a0a9 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -136,5 +136,5 @@ uvloop==0.20.0 ; platform_system != "Windows" and implementation_name == "cpytho # via -r requirements/base.in wait-for-it==2.2.2 # via -r requirements/test.in -yarl==1.9.6 +yarl==1.9.7 # via -r requirements/runtime-deps.in From ed5576dba118886c52d044dc2a73f291112ca30a Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 2 Sep 2024 16:12:21 +0100 Subject: [PATCH 0480/1511] [PR #8987/5c3d50f9 backport][3.11] Move ContentDisposition in reference docs (#8989) **This is a backport of PR #8987 as merged into master (5c3d50f9006a0e86927cfc3f092ec2a30b490871).** Co-authored-by: Sam Bull <git@sambull.org> --- 
docs/client_reference.rst | 39 ++++++++++++++++++++------------------- 1 file changed, 20 insertions(+), 19 deletions(-) diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 887b196fa62..c2d6b6ac979 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -1820,6 +1820,26 @@ Utilities .. versionadded:: 3.8 +.. class:: ContentDisposition + + A data class to represent the Content-Disposition header, + available as :attr:`ClientResponse.content_disposition` attribute. + + .. attribute:: type + + A :class:`str` instance. Value of Content-Disposition header + itself, e.g. ``attachment``. + + .. attribute:: filename + + A :class:`str` instance. Content filename extracted from + parameters. May be ``None``. + + .. attribute:: parameters + + Read-only mapping contains all parameters. + + .. class:: RequestInfo() A data class with request URL and headers from :class:`~aiohttp.ClientRequest` @@ -2170,25 +2190,6 @@ All exceptions are available as members of *aiohttp* module. Derived from :exc:`RedirectClientError` and :exc:`NonHttpUrlClientError` - -.. class:: ContentDisposition - - Represent Content-Disposition header - - .. attribute:: type - - A :class:`str` instance. Value of Content-Disposition header - itself, e.g. ``attachment``. - - .. attribute:: filename - - A :class:`str` instance. Content filename extracted from - parameters. May be ``None``. - - .. attribute:: parameters - - Read-only mapping contains all parameters. 
- Response errors ^^^^^^^^^^^^^^^ From 7c3ff649f66c45450c121e9f506b4a947861016f Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 2 Sep 2024 16:12:36 +0100 Subject: [PATCH 0481/1511] [PR #8987/5c3d50f9 backport][3.10] Move ContentDisposition in reference docs (#8988) **This is a backport of PR #8987 as merged into master (5c3d50f9006a0e86927cfc3f092ec2a30b490871).** Co-authored-by: Sam Bull <git@sambull.org> --- docs/client_reference.rst | 39 ++++++++++++++++++++------------------- 1 file changed, 20 insertions(+), 19 deletions(-) diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 1af1cde68ba..9de1ea401c7 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -1802,6 +1802,26 @@ Utilities .. versionadded:: 3.8 +.. class:: ContentDisposition + + A data class to represent the Content-Disposition header, + available as :attr:`ClientResponse.content_disposition` attribute. + + .. attribute:: type + + A :class:`str` instance. Value of Content-Disposition header + itself, e.g. ``attachment``. + + .. attribute:: filename + + A :class:`str` instance. Content filename extracted from + parameters. May be ``None``. + + .. attribute:: parameters + + Read-only mapping contains all parameters. + + .. class:: RequestInfo() A data class with request URL and headers from :class:`~aiohttp.ClientRequest` @@ -2152,25 +2172,6 @@ All exceptions are available as members of *aiohttp* module. Derived from :exc:`RedirectClientError` and :exc:`NonHttpUrlClientError` - -.. class:: ContentDisposition - - Represent Content-Disposition header - - .. attribute:: type - - A :class:`str` instance. Value of Content-Disposition header - itself, e.g. ``attachment``. - - .. attribute:: filename - - A :class:`str` instance. Content filename extracted from - parameters. May be ``None``. - - .. attribute:: parameters - - Read-only mapping contains all parameters. 
- Response errors ^^^^^^^^^^^^^^^ From effb57523e8cb705bc0d3f1cb1832c02b244f986 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 3 Sep 2024 00:49:08 +0100 Subject: [PATCH 0482/1511] [PR #8991/1ba30112 backport][3.11] Update ClientSession reference (#8995) **This is a backport of PR #8991 as merged into master (1ba3011253e60e3bc35ea7dc93ab6a903e22cef0).** Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8991.doc.rst | 1 + docs/client_reference.rst | 145 ++++++++++++++------------------------ 2 files changed, 53 insertions(+), 93 deletions(-) create mode 100644 CHANGES/8991.doc.rst diff --git a/CHANGES/8991.doc.rst b/CHANGES/8991.doc.rst new file mode 100644 index 00000000000..c29850c4f3c --- /dev/null +++ b/CHANGES/8991.doc.rst @@ -0,0 +1 @@ +Updated ``ClientSession`` parameters to match current code -- by :user:`Dreamsorcerer`. diff --git a/docs/client_reference.rst b/docs/client_reference.rst index c2d6b6ac979..77230a755c6 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -41,17 +41,21 @@ The client session supports the context manager protocol for self closing. connector=None, cookies=None, \ headers=None, skip_auto_headers=None, \ auth=None, json_serialize=json.dumps, \ + request_class=ClientRequest, \ + response_class=ClientResponse, \ + ws_response_class=ClientWebSocketResponse, \ version=aiohttp.HttpVersion11, \ - cookie_jar=None, read_timeout=None, \ - conn_timeout=None, \ - timeout=sentinel, \ - raise_for_status=False, \ + cookie_jar=None, \ connector_owner=True, \ + raise_for_status=False, \ + timeout=sentinel, \ auto_decompress=True, \ - read_bufsize=2**16, \ - requote_redirect_url=True, \ trust_env=False, \ + requote_redirect_url=True, \ trace_configs=None, \ + read_bufsize=2**16, \ + max_line_size=8190, \ + max_field_size=8190, \ fallback_charset_resolver=lambda r, b: "utf-8") The class for creating client sessions and making requests. 
@@ -67,17 +71,6 @@ The client session supports the context manager protocol for self closing. :param aiohttp.BaseConnector connector: BaseConnector sub-class instance to support connection pooling. - :param loop: :ref:`event loop<asyncio-event-loop>` used for - processing HTTP requests. - - If *loop* is ``None`` the constructor - borrows it from *connector* if specified. - - :func:`asyncio.get_event_loop` is used for getting default event - loop otherwise. - - .. deprecated:: 2.0 - :param dict cookies: Cookies to send with the request (optional) :param headers: HTTP Headers to send with every request (optional). @@ -106,6 +99,16 @@ The client session supports the context manager protocol for self closing. otherwise, the default auth will not be included. + :param collections.abc.Callable json_serialize: Json *serializer* callable. + + By default :func:`json.dumps` function. + + :param aiohttp.ClientRequest request_class: Custom class to use for client requests. + + :param ClientResponse response_class: Custom class to use for client responses. + + :param ClientWebSocketResponse ws_response_class: Custom class to use for websocket responses. + :param version: supported HTTP version, ``HTTP 1.1`` by default. :param cookie_jar: Cookie Jar, :class:`~aiohttp.abc.AbstractCookieJar` instance. @@ -121,9 +124,13 @@ The client session supports the context manager protocol for self closing. :class:`aiohttp.DummyCookieJar` instance can be provided. - :param collections.abc.Callable json_serialize: Json *serializer* callable. + :param bool connector_owner: - By default :func:`json.dumps` function. + Close connector instance on session closing. + + Setting the parameter to ``False`` allows to share + connection pool between sessions without sharing session state: + cookies etc. :param bool raise_for_status: @@ -163,39 +170,10 @@ The client session supports the context manager protocol for self closing. .. 
versionadded:: 3.3 - :param float read_timeout: Request operations timeout. ``read_timeout`` is - cumulative for all request operations (request, redirects, responses, - data consuming). By default, the read timeout is 5*60 seconds. - Use ``None`` or ``0`` to disable timeout checks. - - .. deprecated:: 3.3 - - Use ``timeout`` parameter instead. - - :param float conn_timeout: timeout for connection establishing - (optional). Values ``0`` or ``None`` mean no timeout. - - .. deprecated:: 3.3 - - Use ``timeout`` parameter instead. - - :param bool connector_owner: - - Close connector instance on session closing. - - Setting the parameter to ``False`` allows to share - connection pool between sessions without sharing session state: - cookies etc. - :param bool auto_decompress: Automatically decompress response body (``True`` by default). .. versionadded:: 2.3 - :param int read_bufsize: Size of the read buffer (:attr:`ClientResponse.content`). - 64 KiB by default. - - .. versionadded:: 3.7 - :param bool trust_env: Trust environment settings for proxy configuration if the parameter is ``True`` (``False`` by default). See :ref:`aiohttp-client-proxy-support` for more information. @@ -232,6 +210,15 @@ The client session supports the context manager protocol for self closing. disabling. See :ref:`aiohttp-client-tracing-reference` for more information. + :param int read_bufsize: Size of the read buffer (:attr:`ClientResponse.content`). + 64 KiB by default. + + .. versionadded:: 3.7 + + :param int max_line_size: Maximum allowed size of lines in responses. + + :param int max_field_size: Maximum allowed size of header fields in responses. + :param Callable[[ClientResponse,bytes],str] fallback_charset_resolver: A :term:`callable` that accepts a :class:`ClientResponse` and the :class:`bytes` contents, and returns a :class:`str` which will be used as @@ -376,12 +363,15 @@ The client session supports the context manager protocol for self closing. 
max_redirects=10,\ compress=None, chunked=None, expect100=False, raise_for_status=None,\ read_until_eof=True, \ - read_bufsize=None, \ proxy=None, proxy_auth=None,\ timeout=sentinel, ssl=True, \ - verify_ssl=None, fingerprint=None, \ - ssl_context=None, proxy_headers=None, \ - server_hostname=None, auto_decompress=None) + server_hostname=None, \ + proxy_headers=None, \ + trace_request_ctx=None, \ + read_bufsize=None, \ + auto_decompress=None, \ + max_line_size=None, \ + max_field_size=None) :async: :noindexentry: @@ -475,12 +465,6 @@ The client session supports the context manager protocol for self closing. does not have Content-Length header. ``True`` by default (optional). - :param int read_bufsize: Size of the read buffer (:attr:`ClientResponse.content`). - ``None`` by default, - it means that the session global value is used. - - .. versionadded:: 3.7 - :param proxy: Proxy URL, :class:`str` or :class:`~yarl.URL` (optional) :param aiohttp.BasicAuth proxy_auth: an object that represents proxy HTTP @@ -508,29 +492,6 @@ The client session supports the context manager protocol for self closing. .. versionadded:: 3.0 - :param bool verify_ssl: Perform SSL certificate validation for - *HTTPS* requests (enabled by default). May be disabled to - skip validation for sites with invalid certificates. - - .. versionadded:: 2.3 - - .. deprecated:: 3.0 - - Use ``ssl=False`` - - :param bytes fingerprint: Pass the SHA256 digest of the expected - certificate in DER format to verify that the certificate the - server presents matches. Useful for `certificate pinning - <https://en.wikipedia.org/wiki/HTTP_Public_Key_Pinning>`_. - - Warning: use of MD5 or SHA1 digests is insecure and removed. - - .. versionadded:: 2.3 - - .. deprecated:: 3.0 - - Use ``ssl=aiohttp.Fingerprint(digest)`` - :param str server_hostname: Sets or overrides the host name that the target server’s certificate will be matched against. 
@@ -538,18 +499,6 @@ The client session supports the context manager protocol for self closing. .. versionadded:: 3.9 - :param ssl.SSLContext ssl_context: ssl context used for processing - *HTTPS* requests (optional). - - *ssl_context* may be used for configuring certification - authority channel, supported SSL options etc. - - .. versionadded:: 2.3 - - .. deprecated:: 3.0 - - Use ``ssl=ssl_context`` - :param collections.abc.Mapping proxy_headers: HTTP headers to send to the proxy if the parameter proxy has been provided. @@ -562,10 +511,20 @@ The client session supports the context manager protocol for self closing. .. versionadded:: 3.0 + :param int read_bufsize: Size of the read buffer (:attr:`ClientResponse.content`). + ``None`` by default, + it means that the session global value is used. + + .. versionadded:: 3.7 + :param bool auto_decompress: Automatically decompress response body. Overrides :attr:`ClientSession.auto_decompress`. May be used to enable/disable auto decompression on a per-request basis. + :param int max_line_size: Maximum allowed size of lines in responses. + + :param int max_field_size: Maximum allowed size of header fields in responses. + :return ClientResponse: a :class:`client response <ClientResponse>` object. 
From 1adf397e8b5a626bd4bb5553edb15d6297c6f764 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 3 Sep 2024 00:49:26 +0100 Subject: [PATCH 0483/1511] [PR #8991/1ba30112 backport][3.10] Update ClientSession reference (#8994) **This is a backport of PR #8991 as merged into master (1ba3011253e60e3bc35ea7dc93ab6a903e22cef0).** Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8991.doc.rst | 1 + docs/client_reference.rst | 145 ++++++++++++++------------------------ 2 files changed, 53 insertions(+), 93 deletions(-) create mode 100644 CHANGES/8991.doc.rst diff --git a/CHANGES/8991.doc.rst b/CHANGES/8991.doc.rst new file mode 100644 index 00000000000..c29850c4f3c --- /dev/null +++ b/CHANGES/8991.doc.rst @@ -0,0 +1 @@ +Updated ``ClientSession`` parameters to match current code -- by :user:`Dreamsorcerer`. diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 9de1ea401c7..bcd2108c1eb 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -41,17 +41,21 @@ The client session supports the context manager protocol for self closing. connector=None, cookies=None, \ headers=None, skip_auto_headers=None, \ auth=None, json_serialize=json.dumps, \ + request_class=ClientRequest, \ + response_class=ClientResponse, \ + ws_response_class=ClientWebSocketResponse, \ version=aiohttp.HttpVersion11, \ - cookie_jar=None, read_timeout=None, \ - conn_timeout=None, \ - timeout=sentinel, \ - raise_for_status=False, \ + cookie_jar=None, \ connector_owner=True, \ + raise_for_status=False, \ + timeout=sentinel, \ auto_decompress=True, \ - read_bufsize=2**16, \ - requote_redirect_url=True, \ trust_env=False, \ + requote_redirect_url=True, \ trace_configs=None, \ + read_bufsize=2**16, \ + max_line_size=8190, \ + max_field_size=8190, \ fallback_charset_resolver=lambda r, b: "utf-8") The class for creating client sessions and making requests. 
@@ -67,17 +71,6 @@ The client session supports the context manager protocol for self closing. :param aiohttp.BaseConnector connector: BaseConnector sub-class instance to support connection pooling. - :param loop: :ref:`event loop<asyncio-event-loop>` used for - processing HTTP requests. - - If *loop* is ``None`` the constructor - borrows it from *connector* if specified. - - :func:`asyncio.get_event_loop` is used for getting default event - loop otherwise. - - .. deprecated:: 2.0 - :param dict cookies: Cookies to send with the request (optional) :param headers: HTTP Headers to send with every request (optional). @@ -104,6 +97,16 @@ The client session supports the context manager protocol for self closing. removed, event during redirect to a different origin. + :param collections.abc.Callable json_serialize: Json *serializer* callable. + + By default :func:`json.dumps` function. + + :param aiohttp.ClientRequest request_class: Custom class to use for client requests. + + :param ClientResponse response_class: Custom class to use for client responses. + + :param ClientWebSocketResponse ws_response_class: Custom class to use for websocket responses. + :param version: supported HTTP version, ``HTTP 1.1`` by default. :param cookie_jar: Cookie Jar, :class:`~aiohttp.abc.AbstractCookieJar` instance. @@ -119,9 +122,13 @@ The client session supports the context manager protocol for self closing. :class:`aiohttp.DummyCookieJar` instance can be provided. - :param collections.abc.Callable json_serialize: Json *serializer* callable. + :param bool connector_owner: - By default :func:`json.dumps` function. + Close connector instance on session closing. + + Setting the parameter to ``False`` allows to share + connection pool between sessions without sharing session state: + cookies etc. :param bool raise_for_status: @@ -161,39 +168,10 @@ The client session supports the context manager protocol for self closing. .. 
versionadded:: 3.3 - :param float read_timeout: Request operations timeout. ``read_timeout`` is - cumulative for all request operations (request, redirects, responses, - data consuming). By default, the read timeout is 5*60 seconds. - Use ``None`` or ``0`` to disable timeout checks. - - .. deprecated:: 3.3 - - Use ``timeout`` parameter instead. - - :param float conn_timeout: timeout for connection establishing - (optional). Values ``0`` or ``None`` mean no timeout. - - .. deprecated:: 3.3 - - Use ``timeout`` parameter instead. - - :param bool connector_owner: - - Close connector instance on session closing. - - Setting the parameter to ``False`` allows to share - connection pool between sessions without sharing session state: - cookies etc. - :param bool auto_decompress: Automatically decompress response body (``True`` by default). .. versionadded:: 2.3 - :param int read_bufsize: Size of the read buffer (:attr:`ClientResponse.content`). - 64 KiB by default. - - .. versionadded:: 3.7 - :param bool trust_env: Trust environment settings for proxy configuration if the parameter is ``True`` (``False`` by default). See :ref:`aiohttp-client-proxy-support` for more information. @@ -230,6 +208,15 @@ The client session supports the context manager protocol for self closing. disabling. See :ref:`aiohttp-client-tracing-reference` for more information. + :param int read_bufsize: Size of the read buffer (:attr:`ClientResponse.content`). + 64 KiB by default. + + .. versionadded:: 3.7 + + :param int max_line_size: Maximum allowed size of lines in responses. + + :param int max_field_size: Maximum allowed size of header fields in responses. + :param Callable[[ClientResponse,bytes],str] fallback_charset_resolver: A :term:`callable` that accepts a :class:`ClientResponse` and the :class:`bytes` contents, and returns a :class:`str` which will be used as @@ -374,12 +361,15 @@ The client session supports the context manager protocol for self closing. 
max_redirects=10,\ compress=None, chunked=None, expect100=False, raise_for_status=None,\ read_until_eof=True, \ - read_bufsize=None, \ proxy=None, proxy_auth=None,\ timeout=sentinel, ssl=True, \ - verify_ssl=None, fingerprint=None, \ - ssl_context=None, proxy_headers=None, \ - server_hostname=None, auto_decompress=None) + server_hostname=None, \ + proxy_headers=None, \ + trace_request_ctx=None, \ + read_bufsize=None, \ + auto_decompress=None, \ + max_line_size=None, \ + max_field_size=None) :async: :noindexentry: @@ -473,12 +463,6 @@ The client session supports the context manager protocol for self closing. does not have Content-Length header. ``True`` by default (optional). - :param int read_bufsize: Size of the read buffer (:attr:`ClientResponse.content`). - ``None`` by default, - it means that the session global value is used. - - .. versionadded:: 3.7 - :param proxy: Proxy URL, :class:`str` or :class:`~yarl.URL` (optional) :param aiohttp.BasicAuth proxy_auth: an object that represents proxy HTTP @@ -506,29 +490,6 @@ The client session supports the context manager protocol for self closing. .. versionadded:: 3.0 - :param bool verify_ssl: Perform SSL certificate validation for - *HTTPS* requests (enabled by default). May be disabled to - skip validation for sites with invalid certificates. - - .. versionadded:: 2.3 - - .. deprecated:: 3.0 - - Use ``ssl=False`` - - :param bytes fingerprint: Pass the SHA256 digest of the expected - certificate in DER format to verify that the certificate the - server presents matches. Useful for `certificate pinning - <https://en.wikipedia.org/wiki/HTTP_Public_Key_Pinning>`_. - - Warning: use of MD5 or SHA1 digests is insecure and removed. - - .. versionadded:: 2.3 - - .. deprecated:: 3.0 - - Use ``ssl=aiohttp.Fingerprint(digest)`` - :param str server_hostname: Sets or overrides the host name that the target server’s certificate will be matched against. 
@@ -536,18 +497,6 @@ The client session supports the context manager protocol for self closing. .. versionadded:: 3.9 - :param ssl.SSLContext ssl_context: ssl context used for processing - *HTTPS* requests (optional). - - *ssl_context* may be used for configuring certification - authority channel, supported SSL options etc. - - .. versionadded:: 2.3 - - .. deprecated:: 3.0 - - Use ``ssl=ssl_context`` - :param collections.abc.Mapping proxy_headers: HTTP headers to send to the proxy if the parameter proxy has been provided. @@ -560,10 +509,20 @@ The client session supports the context manager protocol for self closing. .. versionadded:: 3.0 + :param int read_bufsize: Size of the read buffer (:attr:`ClientResponse.content`). + ``None`` by default, + it means that the session global value is used. + + .. versionadded:: 3.7 + :param bool auto_decompress: Automatically decompress response body. Overrides :attr:`ClientSession.auto_decompress`. May be used to enable/disable auto decompression on a per-request basis. + :param int max_line_size: Maximum allowed size of lines in responses. + + :param int max_field_size: Maximum allowed size of header fields in responses. + :return ClientResponse: a :class:`client response <ClientResponse>` object. 
From fa628a2170cc03b82f32ea4e3812f41d015125e3 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 3 Sep 2024 00:49:45 +0100 Subject: [PATCH 0484/1511] [PR #8990/731ba4dd backport][3.11] Fix changing scheme/host in Response.clone() for absolute URLs (#8997) **This is a backport of PR #8990 as merged into master (731ba4dd399c26fd470dde5ea50e011e058b798a).** Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8990.bugfix.rst | 1 + aiohttp/web_request.py | 12 ++++++++---- tests/test_web_request.py | 21 +++++++++++++++++++++ 3 files changed, 30 insertions(+), 4 deletions(-) create mode 100644 CHANGES/8990.bugfix.rst diff --git a/CHANGES/8990.bugfix.rst b/CHANGES/8990.bugfix.rst new file mode 100644 index 00000000000..9a9783103fd --- /dev/null +++ b/CHANGES/8990.bugfix.rst @@ -0,0 +1 @@ +Fixed changing scheme/host in ``Response.clone()`` for absolute URLs -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index a63d3074ea5..1d94c576794 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -175,6 +175,10 @@ def __init__( self._cache: Dict[str, Any] = {} url = message.url if url.is_absolute(): + if scheme is not None: + url = url.with_scheme(scheme) + if host is not None: + url = url.with_host(host) # absolute URL is given, # override auto-calculating url, host, and scheme # all other properties should be good @@ -184,6 +188,10 @@ def __init__( self._rel_url = url.relative() else: self._rel_url = message.url + if scheme is not None: + self._cache["scheme"] = scheme + if host is not None: + self._cache["host"] = host self._post: Optional[MultiDictProxy[Union[str, bytes, FileField]]] = None self._read_bytes: Optional[bytes] = None @@ -197,10 +205,6 @@ def __init__( self._transport_sslcontext = transport.get_extra_info("sslcontext") self._transport_peername = transport.get_extra_info("peername") - if scheme is not None: - self._cache["scheme"] = scheme - if host 
is not None: - self._cache["host"] = host if remote is not None: self._cache["remote"] = remote diff --git a/tests/test_web_request.py b/tests/test_web_request.py index c6398ac1836..ba12d6f54e7 100644 --- a/tests/test_web_request.py +++ b/tests/test_web_request.py @@ -169,6 +169,22 @@ def test_absolute_url() -> None: assert req.rel_url == URL.build(path="/path/to", query={"a": "1"}) +def test_clone_absolute_scheme() -> None: + req = make_mocked_request("GET", "https://example.com/path/to?a=1") + assert req.scheme == "https" + req2 = req.clone(scheme="http") + assert req2.scheme == "http" + assert req2.url.scheme == "http" + + +def test_clone_absolute_host() -> None: + req = make_mocked_request("GET", "https://example.com/path/to?a=1") + assert req.host == "example.com" + req2 = req.clone(host="foo.test") + assert req2.host == "foo.test" + assert req2.url.host == "foo.test" + + def test_content_length() -> None: req = make_mocked_request("Get", "/", CIMultiDict([("CONTENT-LENGTH", "123")])) @@ -684,18 +700,23 @@ def test_save_state_on_clone() -> None: def test_clone_scheme() -> None: req = make_mocked_request("GET", "/") + assert req.scheme == "http" req2 = req.clone(scheme="https") assert req2.scheme == "https" + assert req2.url.scheme == "https" def test_clone_host() -> None: req = make_mocked_request("GET", "/") + assert req.host != "example.com" req2 = req.clone(host="example.com") assert req2.host == "example.com" + assert req2.url.host == "example.com" def test_clone_remote() -> None: req = make_mocked_request("GET", "/") + assert req.remote != "11.11.11.11" req2 = req.clone(remote="11.11.11.11") assert req2.remote == "11.11.11.11" From af8900de0e06c187a1235acd465c4b1ed2e5e6a0 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 3 Sep 2024 00:57:48 +0100 Subject: [PATCH 0485/1511] [PR #8990/731ba4dd backport][3.10] Fix changing scheme/host in Response.clone() for absolute URLs (#8996) **This is a 
backport of PR #8990 as merged into master (731ba4dd399c26fd470dde5ea50e011e058b798a).** Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8990.bugfix.rst | 1 + aiohttp/web_request.py | 12 ++++++++---- tests/test_web_request.py | 21 +++++++++++++++++++++ 3 files changed, 30 insertions(+), 4 deletions(-) create mode 100644 CHANGES/8990.bugfix.rst diff --git a/CHANGES/8990.bugfix.rst b/CHANGES/8990.bugfix.rst new file mode 100644 index 00000000000..9a9783103fd --- /dev/null +++ b/CHANGES/8990.bugfix.rst @@ -0,0 +1 @@ +Fixed changing scheme/host in ``Response.clone()`` for absolute URLs -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index a63d3074ea5..1d94c576794 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -175,6 +175,10 @@ def __init__( self._cache: Dict[str, Any] = {} url = message.url if url.is_absolute(): + if scheme is not None: + url = url.with_scheme(scheme) + if host is not None: + url = url.with_host(host) # absolute URL is given, # override auto-calculating url, host, and scheme # all other properties should be good @@ -184,6 +188,10 @@ def __init__( self._rel_url = url.relative() else: self._rel_url = message.url + if scheme is not None: + self._cache["scheme"] = scheme + if host is not None: + self._cache["host"] = host self._post: Optional[MultiDictProxy[Union[str, bytes, FileField]]] = None self._read_bytes: Optional[bytes] = None @@ -197,10 +205,6 @@ def __init__( self._transport_sslcontext = transport.get_extra_info("sslcontext") self._transport_peername = transport.get_extra_info("peername") - if scheme is not None: - self._cache["scheme"] = scheme - if host is not None: - self._cache["host"] = host if remote is not None: self._cache["remote"] = remote diff --git a/tests/test_web_request.py b/tests/test_web_request.py index c6398ac1836..ba12d6f54e7 100644 --- a/tests/test_web_request.py +++ b/tests/test_web_request.py @@ -169,6 +169,22 @@ def test_absolute_url() -> None: assert 
req.rel_url == URL.build(path="/path/to", query={"a": "1"}) +def test_clone_absolute_scheme() -> None: + req = make_mocked_request("GET", "https://example.com/path/to?a=1") + assert req.scheme == "https" + req2 = req.clone(scheme="http") + assert req2.scheme == "http" + assert req2.url.scheme == "http" + + +def test_clone_absolute_host() -> None: + req = make_mocked_request("GET", "https://example.com/path/to?a=1") + assert req.host == "example.com" + req2 = req.clone(host="foo.test") + assert req2.host == "foo.test" + assert req2.url.host == "foo.test" + + def test_content_length() -> None: req = make_mocked_request("Get", "/", CIMultiDict([("CONTENT-LENGTH", "123")])) @@ -684,18 +700,23 @@ def test_save_state_on_clone() -> None: def test_clone_scheme() -> None: req = make_mocked_request("GET", "/") + assert req.scheme == "http" req2 = req.clone(scheme="https") assert req2.scheme == "https" + assert req2.url.scheme == "https" def test_clone_host() -> None: req = make_mocked_request("GET", "/") + assert req.host != "example.com" req2 = req.clone(host="example.com") assert req2.host == "example.com" + assert req2.url.host == "example.com" def test_clone_remote() -> None: req = make_mocked_request("GET", "/") + assert req.remote != "11.11.11.11" req2 = req.clone(remote="11.11.11.11") assert req2.remote == "11.11.11.11" From ba201c4a89ed5f51bba55d255e22ffff8954b18a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 3 Sep 2024 11:32:25 +0000 Subject: [PATCH 0486/1511] Bump setuptools from 74.0.0 to 74.1.0 (#9001) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [setuptools](https://github.com/pypa/setuptools) from 74.0.0 to 74.1.0. 
<details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/setuptools/blob/main/NEWS.rst">setuptools's changelog</a>.</em></p> <blockquote> <h1>v74.1.0</h1> <h2>Features</h2> <ul> <li>Added support for defining <code>ext-modules</code> via <code>pyproject.toml</code> (<strong>EXPERIMENTAL</strong>, may change in future releases). (<a href="https://redirect.github.com/pypa/setuptools/issues/4568">#4568</a>)</li> </ul> <h2>Bugfixes</h2> <ul> <li>Merge with pypa/distutils@3dcdf8567, removing the duplicate vendored copy of packaging. (<a href="https://redirect.github.com/pypa/setuptools/issues/4622">#4622</a>)</li> <li>Restored <code>setuptools.msvc.Environmentinfo</code> as it is used externally. (<a href="https://redirect.github.com/pypa/setuptools/issues/4625">#4625</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/setuptools/commit/1a9d87308dc0d8aabeaae0dce989b35dfb7699f0"><code>1a9d873</code></a> Bump version: 74.0.0 → 74.1.0</li> <li><a href="https://github.com/pypa/setuptools/commit/4d9a750695c08fae4d2a40a3b94b718f8ee28c2f"><code>4d9a750</code></a> Merge pull request <a href="https://redirect.github.com/pypa/setuptools/issues/4626">#4626</a> from pypa/bugfix/msvc-EnvironmentInfo</li> <li><a href="https://github.com/pypa/setuptools/commit/a16582be2518bb53f413e1a4bf76f7014a55e806"><code>a16582b</code></a> Add a test for construction of EnvironmentInfo.</li> <li><a href="https://github.com/pypa/setuptools/commit/616d8735ebe890691b3c78470c40992a7532f11a"><code>616d873</code></a> Add news fragment.</li> <li><a href="https://github.com/pypa/setuptools/commit/242388806a0ac0f47f49020c67befd685a8e4a52"><code>2423888</code></a> Remove only the monkeypatching, leaving EnvironmentInfo in place.</li> <li><a href="https://github.com/pypa/setuptools/commit/7ecbcb0eb8a67a0408e1f6062f6591ea412f4e06"><code>7ecbcb0</code></a> Revert "Remove monkeypatching of _msvccompiler."</li> 
<li><a href="https://github.com/pypa/setuptools/commit/5d4473ed6f511ccfd14d65a908f1290e1300cbf5"><code>5d4473e</code></a> Implement declarative <code>ext-modules</code> in <code>pyproject.toml</code> ("experimental") (<a href="https://redirect.github.com/pypa/setuptools/issues/4568">#4568</a>)</li> <li><a href="https://github.com/pypa/setuptools/commit/592d089d2eb8b1e50fdd45d1939f77fe3832a307"><code>592d089</code></a> Add news fragment</li> <li><a href="https://github.com/pypa/setuptools/commit/11731e2950342a3c1d0138fe27d23a4e1e652119"><code>11731e2</code></a> Add docs about ext-modules in pyproject.toml</li> <li><a href="https://github.com/pypa/setuptools/commit/bf768e0cfb2194e554bb268801e249c3e1af2ca6"><code>bf768e0</code></a> Add experimental warning to ext-modules in pyproject.toml</li> <li>Additional commits viewable in <a href="https://github.com/pypa/setuptools/compare/v74.0.0...v74.1.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=setuptools&package-manager=pip&previous-version=74.0.0&new-version=74.1.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index a2deb71956d..428cfff3d7f 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -296,7 +296,7 @@ zipp==3.20.1 # The following packages are 
considered to be unsafe in a requirements file: pip==24.2 # via pip-tools -setuptools==74.0.0 +setuptools==74.1.0 # via # blockdiag # incremental diff --git a/requirements/dev.txt b/requirements/dev.txt index 2e40f0bb153..55695f7eb64 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -288,7 +288,7 @@ zipp==3.20.1 # The following packages are considered to be unsafe in a requirements file: pip==24.2 # via pip-tools -setuptools==74.0.0 +setuptools==74.1.0 # via # blockdiag # incremental diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index b0b6bb6d469..4ef41521136 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -96,7 +96,7 @@ zipp==3.20.1 # importlib-resources # The following packages are considered to be unsafe in a requirements file: -setuptools==74.0.0 +setuptools==74.1.0 # via # blockdiag # incremental diff --git a/requirements/doc.txt b/requirements/doc.txt index 172dedd5016..804cb6e129d 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -91,7 +91,7 @@ zipp==3.20.1 # importlib-resources # The following packages are considered to be unsafe in a requirements file: -setuptools==74.0.0 +setuptools==74.1.0 # via # blockdiag # incremental From a3fa8d85ff227f18270d83af215b8974386989f0 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Tue, 3 Sep 2024 14:06:44 +0100 Subject: [PATCH 0487/1511] Fix cancelled payload send leading to hung connection (#8992) (#9002) (cherry picked from commit 5c0b8e4a7897f48063f684ac16e7dd18f6218274) --- CHANGES/8992.bugfix.rst | 1 + aiohttp/client_reqrep.py | 3 ++- tests/test_client_functional.py | 35 ++++++++++++++++++++------------- 3 files changed, 24 insertions(+), 15 deletions(-) create mode 100644 CHANGES/8992.bugfix.rst diff --git a/CHANGES/8992.bugfix.rst b/CHANGES/8992.bugfix.rst new file mode 100644 index 00000000000..bc41d5feb81 --- /dev/null +++ b/CHANGES/8992.bugfix.rst @@ -0,0 +1 @@ +Fixed client incorrectly reusing a connection 
when the previous message had not been fully sent -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 933f3275e28..d2c5f16df2b 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -659,7 +659,8 @@ async def write_bytes( set_exception(protocol, reraised_exc, underlying_exc) except asyncio.CancelledError: - await writer.write_eof() + # Body hasn't been fully sent, so connection can't be reused. + conn.close() except Exception as underlying_exc: set_exception( protocol, diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index c7c31c739b1..95f73d84ec0 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -341,10 +341,11 @@ async def data_gen(): async with client.get("/") as resp: assert 200 == resp.status - # Connection should have been reused + # First connection should have been closed, otherwise server won't know if it + # received the full message. conns = next(iter(client.session.connector._conns.values())) assert len(conns) == 1 - assert conns[0][0] is conn + assert conns[0][0] is not conn async def test_stream_request_on_server_eof_nested(aiohttp_client) -> None: @@ -362,14 +363,21 @@ async def data_gen(): yield b"just data" await asyncio.sleep(0.1) + assert client.session.connector is not None async with client.put("/", data=data_gen()) as resp: + first_conn = next(iter(client.session.connector._acquired)) assert 200 == resp.status - async with client.get("/") as resp: - assert 200 == resp.status + + async with client.get("/") as resp2: + assert 200 == resp2.status # Should be 2 separate connections conns = next(iter(client.session.connector._conns.values())) - assert len(conns) == 2 + assert len(conns) == 1 + + assert first_conn is not None + assert not first_conn.is_connected() + assert first_conn is not conns[0][0] async def test_HTTP_304_WITH_BODY(aiohttp_client) -> None: @@ -3783,9 +3791,10 @@ async def handler(request): assert 
resp.reason == "x" * 8191 -@pytest.mark.xfail(raises=asyncio.TimeoutError, reason="#7599") -async def test_rejected_upload(aiohttp_client, tmp_path) -> None: - async def ok_handler(request): +async def test_rejected_upload( + aiohttp_client: AiohttpClient, tmp_path: pathlib.Path +) -> None: + async def ok_handler(request: web.Request) -> web.Response: return web.Response() async def not_ok_handler(request): @@ -3802,13 +3811,11 @@ async def not_ok_handler(request): with open(file_path, "rb") as file: data = {"file": file} - async with await client.post("/not_ok", data=data) as resp_not_ok: - assert 400 == resp_not_ok.status + async with client.post("/not_ok", data=data) as resp_not_ok: + assert resp_not_ok.status == 400 - async with await client.get( - "/ok", timeout=aiohttp.ClientTimeout(total=0.01) - ) as resp_ok: - assert 200 == resp_ok.status + async with client.get("/ok", timeout=aiohttp.ClientTimeout(total=1)) as resp_ok: + assert resp_ok.status == 200 @pytest.mark.parametrize( From fecb85a9291d4cae6c06999c388421cde02c9860 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Tue, 3 Sep 2024 14:06:55 +0100 Subject: [PATCH 0488/1511] Fix cancelled payload send leading to hung connection (#8992) (#9003) (cherry picked from commit 5c0b8e4a7897f48063f684ac16e7dd18f6218274) --- CHANGES/8992.bugfix.rst | 1 + aiohttp/client_reqrep.py | 3 ++- tests/test_client_functional.py | 35 ++++++++++++++++++++------------- 3 files changed, 24 insertions(+), 15 deletions(-) create mode 100644 CHANGES/8992.bugfix.rst diff --git a/CHANGES/8992.bugfix.rst b/CHANGES/8992.bugfix.rst new file mode 100644 index 00000000000..bc41d5feb81 --- /dev/null +++ b/CHANGES/8992.bugfix.rst @@ -0,0 +1 @@ +Fixed client incorrectly reusing a connection when the previous message had not been fully sent -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 933f3275e28..d2c5f16df2b 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -659,7 +659,8 @@ async def write_bytes( set_exception(protocol, reraised_exc, underlying_exc) except asyncio.CancelledError: - await writer.write_eof() + # Body hasn't been fully sent, so connection can't be reused. + conn.close() except Exception as underlying_exc: set_exception( protocol, diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 1f9173bd3f7..7de195264ac 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -341,10 +341,11 @@ async def data_gen(): async with client.get("/") as resp: assert 200 == resp.status - # Connection should have been reused + # First connection should have been closed, otherwise server won't know if it + # received the full message. conns = next(iter(client.session.connector._conns.values())) assert len(conns) == 1 - assert conns[0][0] is conn + assert conns[0][0] is not conn async def test_stream_request_on_server_eof_nested(aiohttp_client) -> None: @@ -362,14 +363,21 @@ async def data_gen(): yield b"just data" await asyncio.sleep(0.1) + assert client.session.connector is not None async with client.put("/", data=data_gen()) as resp: + first_conn = next(iter(client.session.connector._acquired)) assert 200 == resp.status - async with client.get("/") as resp: - assert 200 == resp.status + + async with client.get("/") as resp2: + assert 200 == resp2.status # Should be 2 separate connections conns = next(iter(client.session.connector._conns.values())) - assert len(conns) == 2 + assert len(conns) == 1 + + assert first_conn is not None + assert not first_conn.is_connected() + assert first_conn is not conns[0][0] async def test_HTTP_304_WITH_BODY(aiohttp_client) -> None: @@ -3651,9 +3659,10 @@ async def handler(request): assert resp.reason == "x" * 8191 -@pytest.mark.xfail(raises=asyncio.TimeoutError, 
reason="#7599") -async def test_rejected_upload(aiohttp_client, tmp_path) -> None: - async def ok_handler(request): +async def test_rejected_upload( + aiohttp_client: AiohttpClient, tmp_path: pathlib.Path +) -> None: + async def ok_handler(request: web.Request) -> web.Response: return web.Response() async def not_ok_handler(request): @@ -3670,13 +3679,11 @@ async def not_ok_handler(request): with open(file_path, "rb") as file: data = {"file": file} - async with await client.post("/not_ok", data=data) as resp_not_ok: - assert 400 == resp_not_ok.status + async with client.post("/not_ok", data=data) as resp_not_ok: + assert resp_not_ok.status == 400 - async with await client.get( - "/ok", timeout=aiohttp.ClientTimeout(total=0.01) - ) as resp_ok: - assert 200 == resp_ok.status + async with client.get("/ok", timeout=aiohttp.ClientTimeout(total=1)) as resp_ok: + assert resp_ok.status == 200 @pytest.mark.parametrize( From 3ee1cd670c03b66a5ca68f627fb2320944712c68 Mon Sep 17 00:00:00 2001 From: "Justin \"J.R.\" Hill" <justin@so.dang.cool> Date: Tue, 3 Sep 2024 11:41:07 -0700 Subject: [PATCH 0489/1511] Add flake8-no-implicit-concat (#7731) (#9006) (cherry picked from commit 1d170d37f476df705a9dcc3588e192e8ccb871c0) --- .pre-commit-config.yaml | 1 + CHANGES/7731.misc.rst | 1 + aiohttp/client.py | 6 +-- aiohttp/client_reqrep.py | 12 +++-- aiohttp/connector.py | 6 +-- aiohttp/cookiejar.py | 2 +- aiohttp/formdata.py | 8 ++-- aiohttp/helpers.py | 10 ++--- aiohttp/http_websocket.py | 4 +- aiohttp/multipart.py | 4 +- aiohttp/streams.py | 2 +- aiohttp/test_utils.py | 2 +- aiohttp/web.py | 2 +- aiohttp/web_app.py | 6 +-- aiohttp/web_request.py | 4 +- aiohttp/web_response.py | 18 ++++---- aiohttp/web_routedef.py | 4 +- aiohttp/web_runner.py | 2 +- aiohttp/web_urldispatcher.py | 8 ++-- setup.cfg | 9 +++- tests/test_client_exceptions.py | 6 +-- tests/test_client_functional.py | 10 ++--- tests/test_client_proto.py | 2 +- tests/test_helpers.py | 3 +- tests/test_http_exceptions.py | 2 +- 
tests/test_http_parser.py | 62 +++++++++++++------------- tests/test_http_writer.py | 10 ++--- tests/test_multipart.py | 8 ++-- tests/test_multipart_helpers.py | 8 ++-- tests/test_urldispatch.py | 6 +-- tests/test_web_cli.py | 4 +- tests/test_web_functional.py | 8 +--- tests/test_web_request.py | 4 +- tests/test_web_websocket_functional.py | 2 +- tests/test_websocket_parser.py | 2 +- 35 files changed, 118 insertions(+), 130 deletions(-) create mode 100644 CHANGES/7731.misc.rst diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index dc3e65cf52f..0edf03d8db7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -107,6 +107,7 @@ repos: - id: flake8 additional_dependencies: - flake8-docstrings==1.6.0 + - flake8-no-implicit-concat==0.3.4 - flake8-requirements==1.7.8 exclude: "^docs/" - repo: https://github.com/Lucas-C/pre-commit-hooks-markup diff --git a/CHANGES/7731.misc.rst b/CHANGES/7731.misc.rst new file mode 100644 index 00000000000..f46ffa5816b --- /dev/null +++ b/CHANGES/7731.misc.rst @@ -0,0 +1 @@ +Added flake8 settings to avoid some forms of implicit concatenation. -- by :user:`booniepepper`. 
diff --git a/aiohttp/client.py b/aiohttp/client.py index f3c60d31f08..3c4a0f97c04 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -310,7 +310,7 @@ def __init__( self._timeout = DEFAULT_TIMEOUT if read_timeout is not sentinel: warnings.warn( - "read_timeout is deprecated, " "use timeout argument instead", + "read_timeout is deprecated, use timeout argument instead", DeprecationWarning, stacklevel=2, ) @@ -318,7 +318,7 @@ def __init__( if conn_timeout is not None: self._timeout = attr.evolve(self._timeout, connect=conn_timeout) warnings.warn( - "conn_timeout is deprecated, " "use timeout argument instead", + "conn_timeout is deprecated, use timeout argument instead", DeprecationWarning, stacklevel=2, ) @@ -1255,7 +1255,7 @@ def requote_redirect_url(self) -> bool: def requote_redirect_url(self, val: bool) -> None: """Do URL requoting on redirection handling.""" warnings.warn( - "session.requote_redirect_url modification " "is deprecated #2778", + "session.requote_redirect_url modification is deprecated #2778", DeprecationWarning, stacklevel=2, ) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index d2c5f16df2b..d7d5f63ec18 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -127,9 +127,7 @@ def __init__(self, fingerprint: bytes) -> None: if not hashfunc: raise ValueError("fingerprint has invalid length") elif hashfunc is md5 or hashfunc is sha1: - raise ValueError( - "md5 and sha1 are insecure and " "not supported. Use sha256." - ) + raise ValueError("md5 and sha1 are insecure and not supported. 
Use sha256.") self._hashfunc = hashfunc self._fingerprint = fingerprint @@ -190,7 +188,7 @@ def _merge_ssl_params( ssl = ssl_context if fingerprint is not None: warnings.warn( - "fingerprint is deprecated, " "use ssl=Fingerprint(fingerprint) instead", + "fingerprint is deprecated, use ssl=Fingerprint(fingerprint) instead", DeprecationWarning, stacklevel=3, ) @@ -505,7 +503,7 @@ def update_content_encoding(self, data: Any) -> None: if enc: if self.compress: raise ValueError( - "compress can not be set " "if Content-Encoding header is set" + "compress can not be set if Content-Encoding header is set" ) elif self.compress: if not isinstance(self.compress, str): @@ -527,7 +525,7 @@ def update_transfer_encoding(self) -> None: elif self.chunked: if hdrs.CONTENT_LENGTH in self.headers: raise ValueError( - "chunked can not be set " "if Content-Length header is set" + "chunked can not be set if Content-Length header is set" ) self.headers[hdrs.TRANSFER_ENCODING] = "chunked" @@ -1205,7 +1203,7 @@ async def json( self.history, status=self.status, message=( - "Attempt to decode JSON with " "unexpected mimetype: %s" % ctype + "Attempt to decode JSON with unexpected mimetype: %s" % ctype ), headers=self.headers, ) diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 93c78c62b08..360eabc7bb2 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -243,7 +243,7 @@ def __init__( if force_close: if keepalive_timeout is not None and keepalive_timeout is not sentinel: raise ValueError( - "keepalive_timeout cannot " "be set if force_close is True" + "keepalive_timeout cannot be set if force_close is True" ) else: if keepalive_timeout is sentinel: @@ -853,7 +853,7 @@ def clear_dns_cache( if host is not None and port is not None: self._cached_hosts.remove((host, port)) elif host is not None or port is not None: - raise ValueError("either both host and port " "or none of them are allowed") + raise ValueError("either both host and port or none of them are allowed") else: 
self._cached_hosts.clear() @@ -1575,7 +1575,7 @@ def __init__( self._loop, asyncio.ProactorEventLoop # type: ignore[attr-defined] ): raise RuntimeError( - "Named Pipes only available in proactor " "loop under windows" + "Named Pipes only available in proactor loop under windows" ) self._path = path diff --git a/aiohttp/cookiejar.py b/aiohttp/cookiejar.py index e3eefc9c656..c57604b5e59 100644 --- a/aiohttp/cookiejar.py +++ b/aiohttp/cookiejar.py @@ -54,7 +54,7 @@ class CookieJar(AbstractCookieJar): DATE_DAY_OF_MONTH_RE = re.compile(r"(\d{1,2})") DATE_MONTH_RE = re.compile( - "(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|" "(aug)|(sep)|(oct)|(nov)|(dec)", + "(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|(aug)|(sep)|(oct)|(nov)|(dec)", re.I, ) diff --git a/aiohttp/formdata.py b/aiohttp/formdata.py index 2b75b3de72c..39ca8539acc 100644 --- a/aiohttp/formdata.py +++ b/aiohttp/formdata.py @@ -64,9 +64,7 @@ def add_field( type_options: MultiDict[str] = MultiDict({"name": name}) if filename is not None and not isinstance(filename, str): - raise TypeError( - "filename must be an instance of str. " "Got: %s" % filename - ) + raise TypeError("filename must be an instance of str. Got: %s" % filename) if filename is None and isinstance(value, io.IOBase): filename = guess_filename(value, name) if filename is not None: @@ -77,7 +75,7 @@ def add_field( if content_type is not None: if not isinstance(content_type, str): raise TypeError( - "content_type must be an instance of str. " "Got: %s" % content_type + "content_type must be an instance of str. 
Got: %s" % content_type ) headers[hdrs.CONTENT_TYPE] = content_type self._is_multipart = True @@ -131,7 +129,7 @@ def _gen_form_urlencoded(self) -> payload.BytesPayload: if charset == "utf-8": content_type = "application/x-www-form-urlencoded" else: - content_type = "application/x-www-form-urlencoded; " "charset=%s" % charset + content_type = "application/x-www-form-urlencoded; charset=%s" % charset return payload.BytesPayload( urlencode(data, doseq=True, encoding=charset).encode(), diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index 0327d31d961..bf9e135bb3c 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -393,16 +393,14 @@ def content_disposition_header( params is a dict with disposition params. """ if not disptype or not (TOKEN > set(disptype)): - raise ValueError("bad content disposition type {!r}" "".format(disptype)) + raise ValueError(f"bad content disposition type {disptype!r}") value = disptype if params: lparams = [] for key, val in params.items(): if not key or not (TOKEN > set(key)): - raise ValueError( - "bad content disposition parameter" " {!r}={!r}".format(key, val) - ) + raise ValueError(f"bad content disposition parameter {key!r}={val!r}") if quote_fields: if key.lower() == "filename": qval = quote(val, "", encoding=_charset) @@ -690,9 +688,7 @@ def __enter__(self) -> BaseTimerContext: task = asyncio.current_task(loop=self._loop) if task is None: - raise RuntimeError( - "Timeout context manager should be used " "inside a task" - ) + raise RuntimeError("Timeout context manager should be used inside a task") if self._cancelled: raise asyncio.TimeoutError from None diff --git a/aiohttp/http_websocket.py b/aiohttp/http_websocket.py index 2ea2c9191e1..9d03d2773c7 100644 --- a/aiohttp/http_websocket.py +++ b/aiohttp/http_websocket.py @@ -267,7 +267,7 @@ def ws_ext_gen( # compress wbit 8 does not support in zlib if compress < 9 or compress > 15: raise ValueError( - "Compress wbits must between 9 and 15, " "zlib does not support wbits=8" 
+ "Compress wbits must between 9 and 15, zlib does not support wbits=8" ) enabledext = ["permessage-deflate"] if not isserver: @@ -512,7 +512,7 @@ def parse_frame( if opcode > 0x7 and length > 125: raise WebSocketError( WSCloseCode.PROTOCOL_ERROR, - "Control frame payload cannot be " "larger than 125 bytes", + "Control frame payload cannot be larger than 125 bytes", ) # Set compress status if last package is FIN diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index 965e4f279d3..e0bcce07449 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -530,9 +530,7 @@ def _decode_content_transfer(self, data: bytes) -> bytes: elif encoding in ("binary", "8bit", "7bit"): return data else: - raise RuntimeError( - "unknown content transfer encoding: {}" "".format(encoding) - ) + raise RuntimeError(f"unknown content transfer encoding: {encoding}") def get_charset(self, default: str) -> str: """Returns charset parameter from Content-Type header or default.""" diff --git a/aiohttp/streams.py b/aiohttp/streams.py index c927cfbb1b3..1ed78ce5db0 100644 --- a/aiohttp/streams.py +++ b/aiohttp/streams.py @@ -261,7 +261,7 @@ def begin_http_chunk_receiving(self) -> None: if self._http_chunk_splits is None: if self.total_bytes: raise RuntimeError( - "Called begin_http_chunk_receiving when" "some data was already fed" + "Called begin_http_chunk_receiving when some data was already fed" ) self._http_chunk_splits = [] diff --git a/aiohttp/test_utils.py b/aiohttp/test_utils.py index 328561fb6a7..13b6f4d9c50 100644 --- a/aiohttp/test_utils.py +++ b/aiohttp/test_utils.py @@ -266,7 +266,7 @@ def __init__( ) -> None: if not isinstance(server, BaseTestServer): raise TypeError( - "server must be TestServer " "instance, found type: %r" % type(server) + "server must be TestServer instance, found type: %r" % type(server) ) self._server = server self._loop = loop diff --git a/aiohttp/web.py b/aiohttp/web.py index 88bf14bf828..1d18691f401 100644 --- a/aiohttp/web.py +++ 
b/aiohttp/web.py @@ -581,7 +581,7 @@ def main(argv: List[str]) -> None: # Compatibility logic if args.path is not None and not hasattr(socket, "AF_UNIX"): arg_parser.error( - "file system paths not supported by your operating" " environment" + "file system paths not supported by your operating environment" ) logging.basicConfig(level=logging.DEBUG) diff --git a/aiohttp/web_app.py b/aiohttp/web_app.py index 3510bffda60..8403bbbc826 100644 --- a/aiohttp/web_app.py +++ b/aiohttp/web_app.py @@ -195,7 +195,7 @@ def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any: def _check_frozen(self) -> None: if self._frozen: warnings.warn( - "Changing state of started or joined " "application is deprecated", + "Changing state of started or joined application is deprecated", DeprecationWarning, stacklevel=3, ) @@ -433,7 +433,7 @@ def make_handler( ) -> Server: warnings.warn( - "Application.make_handler(...) is deprecated, " "use AppRunner API instead", + "Application.make_handler(...) is deprecated, use AppRunner API instead", DeprecationWarning, stacklevel=2, ) @@ -492,7 +492,7 @@ def _prepare_middleware(self) -> Iterator[Tuple[Middleware, bool]]: yield m, True else: warnings.warn( - 'old-style middleware "{!r}" deprecated, ' "see #2252".format(m), + f'old-style middleware "{m!r}" deprecated, see #2252', DeprecationWarning, stacklevel=2, ) diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index 1d94c576794..2465e6655ad 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -226,7 +226,7 @@ def clone( will reuse the one from the current request object. 
""" if self._read_bytes: - raise RuntimeError("Cannot clone request " "after reading its content") + raise RuntimeError("Cannot clone request after reading its content") dct: Dict[str, Any] = {} if method is not sentinel: @@ -773,7 +773,7 @@ async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]": ) else: raise ValueError( - "To decode nested multipart you need " "to use custom reader", + "To decode nested multipart you need to use custom reader", ) field = await multipart.next() diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 0020afd46c8..24ea9f5b46b 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -134,9 +134,9 @@ def set_status( status: int, reason: Optional[str] = None, ) -> None: - assert not self.prepared, ( - "Cannot change the response status code after " "the headers have been sent" - ) + assert ( + not self.prepared + ), "Cannot change the response status code after the headers have been sent" self._status = int(status) if reason is None: try: @@ -168,7 +168,7 @@ def enable_chunked_encoding(self, chunk_size: Optional[int] = None) -> None: if hdrs.CONTENT_LENGTH in self._headers: raise RuntimeError( - "You can't enable chunked encoding when " "a content length is set" + "You can't enable chunked encoding when a content length is set" ) if chunk_size is not None: warnings.warn("Chunk size is deprecated #1615", DeprecationWarning) @@ -184,9 +184,9 @@ def enable_compression( "Using boolean for force is deprecated #3318", DeprecationWarning ) elif force is not None: - assert isinstance(force, ContentCoding), ( - "force should one of " "None, bool or " "ContentEncoding" - ) + assert isinstance( + force, ContentCoding + ), "force should one of None, bool or ContentEncoding" self._compression = True self._compression_force = force @@ -289,7 +289,7 @@ def content_length(self, value: Optional[int]) -> None: value = int(value) if self._chunked: raise RuntimeError( - "You can't set content length when " 
"chunked encoding is enable" + "You can't set content length when chunked encoding is enable" ) self._headers[hdrs.CONTENT_LENGTH] = str(value) else: @@ -611,7 +611,7 @@ def __init__( real_headers = headers # = cast('CIMultiDict[str]', headers) if content_type is not None and "charset" in content_type: - raise ValueError("charset must not be in content_type " "argument") + raise ValueError("charset must not be in content_type argument") if text is not None: if hdrs.CONTENT_TYPE in real_headers: diff --git a/aiohttp/web_routedef.py b/aiohttp/web_routedef.py index 93802141c56..f51b6cd0081 100644 --- a/aiohttp/web_routedef.py +++ b/aiohttp/web_routedef.py @@ -66,7 +66,7 @@ def __repr__(self) -> str: info = [] for name, value in sorted(self.kwargs.items()): info.append(f", {name}={value!r}") - return "<RouteDef {method} {path} -> {handler.__name__!r}" "{info}>".format( + return "<RouteDef {method} {path} -> {handler.__name__!r}{info}>".format( method=self.method, path=self.path, handler=self.handler, info="".join(info) ) @@ -90,7 +90,7 @@ def __repr__(self) -> str: info = [] for name, value in sorted(self.kwargs.items()): info.append(f", {name}={value!r}") - return "<StaticDef {prefix} -> {path}" "{info}>".format( + return "<StaticDef {prefix} -> {path}{info}>".format( prefix=self.prefix, path=self.path, info="".join(info) ) diff --git a/aiohttp/web_runner.py b/aiohttp/web_runner.py index 0a237ede2c5..f8933383435 100644 --- a/aiohttp/web_runner.py +++ b/aiohttp/web_runner.py @@ -176,7 +176,7 @@ def __init__( loop, asyncio.ProactorEventLoop # type: ignore[attr-defined] ): raise RuntimeError( - "Named Pipes only available in proactor" "loop under windows" + "Named Pipes only available in proactor loop under windows" ) super().__init__(runner, shutdown_timeout=shutdown_timeout) self._path = path diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index a1df64b8e61..765f8500c0e 100644 --- a/aiohttp/web_urldispatcher.py +++ 
b/aiohttp/web_urldispatcher.py @@ -194,14 +194,14 @@ def __init__( pass elif inspect.isgeneratorfunction(handler): warnings.warn( - "Bare generators are deprecated, " "use @coroutine wrapper", + "Bare generators are deprecated, use @coroutine wrapper", DeprecationWarning, ) elif isinstance(handler, type) and issubclass(handler, AbstractView): pass else: warnings.warn( - "Bare functions are deprecated, " "use async ones", DeprecationWarning + "Bare functions are deprecated, use async ones", DeprecationWarning ) @wraps(handler) @@ -777,7 +777,7 @@ def _add_prefix_to_resources(self, prefix: str) -> None: router.index_resource(resource) def url_for(self, *args: str, **kwargs: str) -> URL: - raise RuntimeError(".url_for() is not supported " "by sub-application root") + raise RuntimeError(".url_for() is not supported by sub-application root") def get_info(self) -> _InfoDict: return {"app": self._app, "prefix": self._prefix} @@ -900,7 +900,7 @@ async def resolve(self, request: Request) -> _Resolve: return match_info, methods def __repr__(self) -> str: - return "<MatchedSubAppResource -> {app!r}>" "".format(app=self._app) + return f"<MatchedSubAppResource -> {self._app!r}>" class ResourceRoute(AbstractRoute): diff --git a/setup.cfg b/setup.cfg index c058fc2f05f..cd1602880e6 100644 --- a/setup.cfg +++ b/setup.cfg @@ -86,9 +86,14 @@ max-line-length=79 zip_ok = false [flake8] -extend-select = B950 +extend-select = + B950, + # NIC001 -- "Implicitly concatenated str literals on one line" + NIC001, + # NIC101 -- "Implicitly concatenated bytes literals on one line" + NIC101, # TODO: don't disable D*, fix up issues instead -ignore = N801,N802,N803,E203,E226,E305,W504,E252,E301,E302,E501,E704,W503,W504,D1,D4 +ignore = N801,N802,N803,NIC002,NIC102,E203,E226,E305,W504,E252,E301,E302,E501,E704,W503,W504,D1,D4 max-line-length = 88 per-file-ignores = # I900: Shouldn't appear in requirements for examples. 
diff --git a/tests/test_client_exceptions.py b/tests/test_client_exceptions.py index 85e71a3508b..f14cfefde51 100644 --- a/tests/test_client_exceptions.py +++ b/tests/test_client_exceptions.py @@ -83,9 +83,7 @@ def test_str(self) -> None: message="Something wrong", headers=CIMultiDict(), ) - assert str(err) == ( - "400, message='Something wrong', " "url='http://example.com'" - ) + assert str(err) == ("400, message='Something wrong', url='http://example.com'") def test_response_status() -> None: @@ -254,7 +252,7 @@ def test_pickle(self) -> None: def test_repr(self) -> None: err = client.ServerDisconnectedError() - assert repr(err) == ("ServerDisconnectedError" "('Server disconnected')") + assert repr(err) == ("ServerDisconnectedError('Server disconnected')") err = client.ServerDisconnectedError(message="No connection") assert repr(err) == "ServerDisconnectedError('No connection')" diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 95f73d84ec0..74c4d99765e 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -2361,7 +2361,7 @@ async def handler(request): ret.set_cookie("c2", "cookie2") ret.headers.add( "Set-Cookie", - "c3=cookie3; " "HttpOnly; Path=/" " Expires=Tue, 1 Jan 1980 12:00:00 GMT; ", + "c3=cookie3; HttpOnly; Path=/ Expires=Tue, 1 Jan 1980 12:00:00 GMT; ", ) return ret @@ -2380,7 +2380,7 @@ async def handler(request): ret = web.Response() ret.set_cookie("c1", "cookie1") ret.set_cookie("c2", "cookie2") - ret.headers.add("Set-Cookie", "c3=cookie3; " "HttpOnly; Path=/" " Max-Age=1; ") + ret.headers.add("Set-Cookie", "c3=cookie3; HttpOnly; Path=/ Max-Age=1; ") return ret app = web.Application() @@ -2401,7 +2401,7 @@ async def handler(request): ret = web.Response() ret.headers.add( "Set-Cookie", - "overflow=overflow; " "HttpOnly; Path=/" " Max-Age=" + str(overflow) + "; ", + "overflow=overflow; HttpOnly; Path=/ Max-Age=" + str(overflow) + "; ", ) return ret @@ -3379,9 +3379,7 @@ def 
connection_made(self, transport): def data_received(self, data): self.data += data if data.endswith(b"\r\n\r\n"): - self.transp.write( - b"HTTP/1.1 200 OK\r\n" b"CONTENT-LENGTH: 2\r\n" b"\r\n" b"ok" - ) + self.transp.write(b"HTTP/1.1 200 OK\r\nCONTENT-LENGTH: 2\r\n\r\nok") self.transp.close() def connection_lost(self, exc): diff --git a/tests/test_client_proto.py b/tests/test_client_proto.py index d8ffac0059c..ba45d6a6839 100644 --- a/tests/test_client_proto.py +++ b/tests/test_client_proto.py @@ -50,7 +50,7 @@ async def test_uncompleted_message(loop) -> None: proto.set_response_params(read_until_eof=True) proto.data_received( - b"HTTP/1.1 301 Moved Permanently\r\n" b"Location: http://python.org/" + b"HTTP/1.1 301 Moved Permanently\r\nLocation: http://python.org/" ) proto.connection_lost(None) diff --git a/tests/test_helpers.py b/tests/test_helpers.py index 827a417c299..656364f43aa 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -325,7 +325,8 @@ def test_ipv6_addresses() -> None: def test_host_addresses() -> None: hosts = [ - "www.four.part.host" "www.python.org", + "www.four.part.host", + "www.python.org", "foo.bar", "localhost", ] diff --git a/tests/test_http_exceptions.py b/tests/test_http_exceptions.py index 24944d9fc4e..cd3b08f59db 100644 --- a/tests/test_http_exceptions.py +++ b/tests/test_http_exceptions.py @@ -81,7 +81,7 @@ def test_pickle(self) -> None: pickled = pickle.dumps(err, proto) err2 = pickle.loads(pickled) assert err2.code == 400 - assert err2.message == ("Got more than 10 bytes (12) " "when reading spam.") + assert err2.message == ("Got more than 10 bytes (12) when reading spam.") assert err2.headers is None assert err2.foo == "bar" diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index 78abe528cb0..75276df1a07 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -443,49 +443,49 @@ def test_conn_default_1_1(parser) -> None: def test_conn_close(parser) -> None: - text = b"GET /test 
HTTP/1.1\r\n" b"connection: close\r\n\r\n" + text = b"GET /test HTTP/1.1\r\nconnection: close\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg.should_close def test_conn_close_1_0(parser) -> None: - text = b"GET /test HTTP/1.0\r\n" b"connection: close\r\n\r\n" + text = b"GET /test HTTP/1.0\r\nconnection: close\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg.should_close def test_conn_keep_alive_1_0(parser) -> None: - text = b"GET /test HTTP/1.0\r\n" b"connection: keep-alive\r\n\r\n" + text = b"GET /test HTTP/1.0\r\nconnection: keep-alive\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert not msg.should_close def test_conn_keep_alive_1_1(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"connection: keep-alive\r\n\r\n" + text = b"GET /test HTTP/1.1\r\nconnection: keep-alive\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert not msg.should_close def test_conn_other_1_0(parser) -> None: - text = b"GET /test HTTP/1.0\r\n" b"connection: test\r\n\r\n" + text = b"GET /test HTTP/1.0\r\nconnection: test\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg.should_close def test_conn_other_1_1(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"connection: test\r\n\r\n" + text = b"GET /test HTTP/1.1\r\nconnection: test\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert not msg.should_close def test_request_chunked(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"transfer-encoding: chunked\r\n\r\n" + text = b"GET /test HTTP/1.1\r\ntransfer-encoding: chunked\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg, payload = messages[0] assert msg.chunked @@ -507,7 +507,7 @@ def test_request_te_chunked_with_content_length(parser: Any) -> None: def test_request_te_chunked123(parser: Any) -> None: - text = b"GET /test HTTP/1.1\r\n" 
b"transfer-encoding: chunked123\r\n\r\n" + text = b"GET /test HTTP/1.1\r\ntransfer-encoding: chunked123\r\n\r\n" with pytest.raises( http_exceptions.BadHttpMessage, match="Request has invalid `Transfer-Encoding`", @@ -555,21 +555,21 @@ def test_bad_upgrade(parser) -> None: def test_compression_empty(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"content-encoding: \r\n\r\n" + text = b"GET /test HTTP/1.1\r\ncontent-encoding: \r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg.compression is None def test_compression_deflate(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"content-encoding: deflate\r\n\r\n" + text = b"GET /test HTTP/1.1\r\ncontent-encoding: deflate\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg.compression == "deflate" def test_compression_gzip(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"content-encoding: gzip\r\n\r\n" + text = b"GET /test HTTP/1.1\r\ncontent-encoding: gzip\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg.compression == "gzip" @@ -577,21 +577,21 @@ def test_compression_gzip(parser) -> None: @pytest.mark.skipif(brotli is None, reason="brotli is not installed") def test_compression_brotli(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"content-encoding: br\r\n\r\n" + text = b"GET /test HTTP/1.1\r\ncontent-encoding: br\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg.compression == "br" def test_compression_unknown(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"content-encoding: compress\r\n\r\n" + text = b"GET /test HTTP/1.1\r\ncontent-encoding: compress\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg.compression is None def test_url_connect(parser: Any) -> None: - text = b"CONNECT www.google.com HTTP/1.1\r\n" b"content-length: 0\r\n\r\n" + text = b"CONNECT www.google.com HTTP/1.1\r\ncontent-length: 
0\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg, payload = messages[0] assert upgrade @@ -599,7 +599,7 @@ def test_url_connect(parser: Any) -> None: def test_headers_connect(parser: Any) -> None: - text = b"CONNECT www.google.com HTTP/1.1\r\n" b"content-length: 0\r\n\r\n" + text = b"CONNECT www.google.com HTTP/1.1\r\ncontent-length: 0\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg, payload = messages[0] assert upgrade @@ -619,21 +619,21 @@ def test_url_absolute(parser: Any) -> None: def test_headers_old_websocket_key1(parser: Any) -> None: - text = b"GET /test HTTP/1.1\r\n" b"SEC-WEBSOCKET-KEY1: line\r\n\r\n" + text = b"GET /test HTTP/1.1\r\nSEC-WEBSOCKET-KEY1: line\r\n\r\n" with pytest.raises(http_exceptions.BadHttpMessage): parser.feed_data(text) def test_headers_content_length_err_1(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"content-length: line\r\n\r\n" + text = b"GET /test HTTP/1.1\r\ncontent-length: line\r\n\r\n" with pytest.raises(http_exceptions.BadHttpMessage): parser.feed_data(text) def test_headers_content_length_err_2(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"content-length: -1\r\n\r\n" + text = b"GET /test HTTP/1.1\r\ncontent-length: -1\r\n\r\n" with pytest.raises(http_exceptions.BadHttpMessage): parser.feed_data(text) @@ -656,7 +656,7 @@ def test_headers_content_length_err_2(parser) -> None: @pytest.mark.parametrize("pad2", _pad.keys(), ids=["post-" + n for n in _pad.values()]) @pytest.mark.parametrize("pad1", _pad.keys(), ids=["pre-" + n for n in _pad.values()]) def test_invalid_header_spacing(parser, pad1: bytes, pad2: bytes, hdr: bytes) -> None: - text = b"GET /test HTTP/1.1\r\n" b"%s%s%s: value\r\n\r\n" % (pad1, hdr, pad2) + text = b"GET /test HTTP/1.1\r\n%s%s%s: value\r\n\r\n" % (pad1, hdr, pad2) expectation = pytest.raises(http_exceptions.BadHttpMessage) if pad1 == pad2 == b"" and hdr != b"": # one entry in param matrix is correct: non-empty name, not padded @@ -666,19 +666,19 @@ def 
test_invalid_header_spacing(parser, pad1: bytes, pad2: bytes, hdr: bytes) -> def test_empty_header_name(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b":test\r\n\r\n" + text = b"GET /test HTTP/1.1\r\n:test\r\n\r\n" with pytest.raises(http_exceptions.BadHttpMessage): parser.feed_data(text) def test_invalid_header(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"test line\r\n\r\n" + text = b"GET /test HTTP/1.1\r\ntest line\r\n\r\n" with pytest.raises(http_exceptions.BadHttpMessage): parser.feed_data(text) def test_invalid_name(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"test[]: line\r\n\r\n" + text = b"GET /test HTTP/1.1\r\ntest[]: line\r\n\r\n" with pytest.raises(http_exceptions.BadHttpMessage): parser.feed_data(text) @@ -715,7 +715,7 @@ def test_max_header_field_size_under_limit(parser) -> None: @pytest.mark.parametrize("size", [40960, 8191]) def test_max_header_value_size(parser, size) -> None: name = b"t" * size - text = b"GET /test HTTP/1.1\r\n" b"data:" + name + b"\r\n\r\n" + text = b"GET /test HTTP/1.1\r\ndata:" + name + b"\r\n\r\n" match = f"400, message:\n Got more than 8190 bytes \\({size}\\) when reading" with pytest.raises(http_exceptions.LineTooLong, match=match): @@ -724,7 +724,7 @@ def test_max_header_value_size(parser, size) -> None: def test_max_header_value_size_under_limit(parser) -> None: value = b"A" * 8190 - text = b"GET /test HTTP/1.1\r\n" b"data:" + value + b"\r\n\r\n" + text = b"GET /test HTTP/1.1\r\ndata:" + value + b"\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] @@ -1216,7 +1216,7 @@ def test_http_response_parser_code_not_ascii(response, nonascii_digit: bytes) -> def test_http_request_chunked_payload(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"transfer-encoding: chunked\r\n\r\n" + text = b"GET /test HTTP/1.1\r\ntransfer-encoding: chunked\r\n\r\n" msg, payload = parser.feed_data(text)[0][0] assert msg.chunked @@ -1231,7 +1231,7 @@ def test_http_request_chunked_payload(parser) 
-> None: def test_http_request_chunked_payload_and_next_message(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"transfer-encoding: chunked\r\n\r\n" + text = b"GET /test HTTP/1.1\r\ntransfer-encoding: chunked\r\n\r\n" msg, payload = parser.feed_data(text)[0][0] messages, upgraded, tail = parser.feed_data( @@ -1253,7 +1253,7 @@ def test_http_request_chunked_payload_and_next_message(parser) -> None: def test_http_request_chunked_payload_chunks(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"transfer-encoding: chunked\r\n\r\n" + text = b"GET /test HTTP/1.1\r\ntransfer-encoding: chunked\r\n\r\n" msg, payload = parser.feed_data(text)[0][0] parser.feed_data(b"4\r\ndata\r") @@ -1275,7 +1275,7 @@ def test_http_request_chunked_payload_chunks(parser) -> None: def test_parse_chunked_payload_chunk_extension(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"transfer-encoding: chunked\r\n\r\n" + text = b"GET /test HTTP/1.1\r\ntransfer-encoding: chunked\r\n\r\n" msg, payload = parser.feed_data(text)[0][0] parser.feed_data(b"4;test\r\ndata\r\n4\r\nline\r\n0\r\ntest: test\r\n\r\n") @@ -1295,14 +1295,14 @@ def test_parse_no_length_or_te_on_post(loop: Any, protocol: Any, request_cls: An def test_parse_payload_response_without_body(loop, protocol, response_cls) -> None: parser = response_cls(protocol, loop, 2**16, response_with_body=False) - text = b"HTTP/1.1 200 Ok\r\n" b"content-length: 10\r\n\r\n" + text = b"HTTP/1.1 200 Ok\r\ncontent-length: 10\r\n\r\n" msg, payload = parser.feed_data(text)[0][0] assert payload.is_eof() def test_parse_length_payload(response) -> None: - text = b"HTTP/1.1 200 Ok\r\n" b"content-length: 4\r\n\r\n" + text = b"HTTP/1.1 200 Ok\r\ncontent-length: 4\r\n\r\n" msg, payload = response.feed_data(text)[0][0] assert not payload.is_eof() @@ -1627,7 +1627,7 @@ async def test_parse_chunked_payload_split_end_trailers3(self, protocol) -> None async def test_parse_chunked_payload_split_end_trailers4(self, protocol) -> None: out = 
aiohttp.StreamReader(protocol, 2**16, loop=None) p = HttpPayloadParser(out, chunked=True) - p.feed_data(b"4\r\nasdf\r\n0\r\n" b"C") + p.feed_data(b"4\r\nasdf\r\n0\r\nC") p.feed_data(b"ontent-MD5: 912ec803b2ce49e4a541068d495ab570\r\n\r\n") assert out.is_eof() diff --git a/tests/test_http_writer.py b/tests/test_http_writer.py index 5649f32f792..db50ad65f67 100644 --- a/tests/test_http_writer.py +++ b/tests/test_http_writer.py @@ -108,7 +108,7 @@ async def test_write_payload_chunked_filter_mutiple_chunks(protocol, transport, await msg.write_eof() content = b"".join([c[1][0] for c in list(write.mock_calls)]) assert content.endswith( - b"2\r\nda\r\n2\r\nta\r\n2\r\n1d\r\n2\r\nat\r\n" b"2\r\na2\r\n0\r\n\r\n" + b"2\r\nda\r\n2\r\nta\r\n2\r\n1d\r\n2\r\nat\r\n2\r\na2\r\n0\r\n\r\n" ) @@ -136,7 +136,7 @@ async def test_write_payload_deflate_and_chunked(buf, protocol, transport, loop) await msg.write(b"ta") await msg.write_eof() - thing = b"2\r\nx\x9c\r\n" b"a\r\nKI,I\x04\x00\x04\x00\x01\x9b\r\n" b"0\r\n\r\n" + thing = b"2\r\nx\x9c\r\na\r\nKI,I\x04\x00\x04\x00\x01\x9b\r\n0\r\n\r\n" assert thing == buf @@ -163,8 +163,8 @@ async def test_write_payload_short_ints_memoryview(buf, protocol, transport, loo await msg.write_eof() endians = ( - (b"6\r\n" b"\x00A\x00B\x00C\r\n" b"0\r\n\r\n"), - (b"6\r\n" b"A\x00B\x00C\x00\r\n" b"0\r\n\r\n"), + (b"6\r\n\x00A\x00B\x00C\r\n0\r\n\r\n"), + (b"6\r\nA\x00B\x00C\x00\r\n0\r\n\r\n"), ) assert buf in endians @@ -179,7 +179,7 @@ async def test_write_payload_2d_shape_memoryview(buf, protocol, transport, loop) await msg.write(payload) await msg.write_eof() - thing = b"6\r\n" b"ABCDEF\r\n" b"0\r\n\r\n" + thing = b"6\r\nABCDEF\r\n0\r\n\r\n" assert thing == buf diff --git a/tests/test_multipart.py b/tests/test_multipart.py index bbbc1c666ca..8576998962e 100644 --- a/tests/test_multipart.py +++ b/tests/test_multipart.py @@ -463,7 +463,7 @@ async def test_read_text_guess_encoding(self) -> None: assert data == result async def test_read_text_compressed(self) 
-> None: - data = b"\x0b\xc9\xccMU(\xc9W\x08J\xcdI\xacP\x04\x00" b"%s--:--" % newline + data = b"\x0b\xc9\xccMU(\xc9W\x08J\xcdI\xacP\x04\x00%s--:--" % newline with Stream(data) as stream: obj = aiohttp.BodyPartReader( BOUNDARY, @@ -515,9 +515,7 @@ async def test_read_json_guess_encoding(self) -> None: assert {"тест": "пассед"} == result async def test_read_json_compressed(self) -> None: - with Stream( - b"\xabV*I-.Q\xb2RP*H,.NMQ\xaa\x05\x00" b"%s--:--" % newline - ) as stream: + with Stream(b"\xabV*I-.Q\xb2RP*H,.NMQ\xaa\x05\x00%s--:--" % newline) as stream: obj = aiohttp.BodyPartReader( BOUNDARY, {CONTENT_ENCODING: "deflate", CONTENT_TYPE: "application/json"}, @@ -712,7 +710,7 @@ def test_dispatch_multipart(self) -> None: b"----:--", b"", b"passed", - b"----:----" b"--:--", + b"----:------:--", ] ) ) as stream: diff --git a/tests/test_multipart_helpers.py b/tests/test_multipart_helpers.py index 9516751cba9..d4fb610a22c 100644 --- a/tests/test_multipart_helpers.py +++ b/tests/test_multipart_helpers.py @@ -555,10 +555,10 @@ def test_attfncontqs(self) -> None: def test_attfncontenc(self) -> None: disptype, params = parse_content_disposition( - "attachment; filename*0*=UTF-8" 'foo-%c3%a4; filename*1=".html"' + "attachment; filename*0*=UTF-8" + 'foo-%c3%a4; filename*1=".html"' ) assert "attachment" == disptype - assert {"filename*0*": "UTF-8" "foo-%c3%a4", "filename*1": ".html"} == params + assert {"filename*0*": "UTF-8foo-%c3%a4", "filename*1": ".html"} == params def test_attfncontlz(self) -> None: disptype, params = parse_content_disposition( @@ -590,14 +590,14 @@ def test_attfncontord(self) -> None: def test_attfnboth(self) -> None: disptype, params = parse_content_disposition( - 'attachment; filename="foo-ae.html";' " filename*=UTF-8''foo-%c3%a4.html" + 'attachment; filename="foo-ae.html";' + " filename*=UTF-8''foo-%c3%a4.html" ) assert "attachment" == disptype assert {"filename": "foo-ae.html", "filename*": "foo-ä.html"} == params def test_attfnboth2(self) -> None: 
disptype, params = parse_content_disposition( - "attachment; filename*=UTF-8''foo-%c3%a4.html;" ' filename="foo-ae.html"' + "attachment; filename*=UTF-8''foo-%c3%a4.html;" + ' filename="foo-ae.html"' ) assert "attachment" == disptype assert {"filename": "foo-ae.html", "filename*": "foo-ä.html"} == params diff --git a/tests/test_urldispatch.py b/tests/test_urldispatch.py index f06f73edc21..d0efa91593e 100644 --- a/tests/test_urldispatch.py +++ b/tests/test_urldispatch.py @@ -368,7 +368,7 @@ def test_add_static_append_version(router) -> None: resource = router.add_static("/st", pathlib.Path(__file__).parent, name="static") url = resource.url_for(filename="/data.unknown_mime_type", append_version=True) expect_url = ( - "/st/data.unknown_mime_type?" "v=aUsn8CHEhhszc81d28QmlcBW0KQpfS2F4trgQKhOYd8%3D" + "/st/data.unknown_mime_type?v=aUsn8CHEhhszc81d28QmlcBW0KQpfS2F4trgQKhOYd8%3D" ) assert expect_url == str(url) @@ -379,7 +379,7 @@ def test_add_static_append_version_set_from_constructor(router) -> None: ) url = resource.url_for(filename="/data.unknown_mime_type") expect_url = ( - "/st/data.unknown_mime_type?" "v=aUsn8CHEhhszc81d28QmlcBW0KQpfS2F4trgQKhOYd8%3D" + "/st/data.unknown_mime_type?v=aUsn8CHEhhszc81d28QmlcBW0KQpfS2F4trgQKhOYd8%3D" ) assert expect_url == str(url) @@ -397,7 +397,7 @@ def test_add_static_append_version_filename_without_slash(router) -> None: resource = router.add_static("/st", pathlib.Path(__file__).parent, name="static") url = resource.url_for(filename="data.unknown_mime_type", append_version=True) expect_url = ( - "/st/data.unknown_mime_type?" 
"v=aUsn8CHEhhszc81d28QmlcBW0KQpfS2F4trgQKhOYd8%3D" + "/st/data.unknown_mime_type?v=aUsn8CHEhhszc81d28QmlcBW0KQpfS2F4trgQKhOYd8%3D" ) assert expect_url == str(url) diff --git a/tests/test_web_cli.py b/tests/test_web_cli.py index 12a01dff577..381aaf6cd82 100644 --- a/tests/test_web_cli.py +++ b/tests/test_web_cli.py @@ -90,7 +90,7 @@ def test_path_when_unsupported(mocker, monkeypatch) -> None: web.main(argv) error.assert_called_with( - "file system paths not supported by your" " operating environment" + "file system paths not supported by your operating environment" ) @@ -107,7 +107,7 @@ def test_entry_func_call(mocker) -> None: web.main(argv) module.func.assert_called_with( - ("--extra-optional-eins --extra-optional-zwei extra positional " "args").split() + ("--extra-optional-eins --extra-optional-zwei extra positional args").split() ) diff --git a/tests/test_web_functional.py b/tests/test_web_functional.py index 6f612ffc011..ad9e7c288fc 100644 --- a/tests/test_web_functional.py +++ b/tests/test_web_functional.py @@ -1651,9 +1651,7 @@ async def handler(request): resp = await client.post("/", data=data) assert 413 == resp.status resp_text = await resp.text() - assert ( - "Maximum request body size 1048576 exceeded, " "actual body size" in resp_text - ) + assert "Maximum request body size 1048576 exceeded, actual body size" in resp_text # Maximum request body size X exceeded, actual body size X body_size = int(resp_text.split()[-1]) assert body_size >= max_size @@ -1685,9 +1683,7 @@ async def handler(request): resp = await client.post("/", data=too_large_data) assert 413 == resp.status resp_text = await resp.text() - assert ( - "Maximum request body size 2097152 exceeded, " "actual body size" in resp_text - ) + assert "Maximum request body size 2097152 exceeded, actual body size" in resp_text # Maximum request body size X exceeded, actual body size X body_size = int(resp_text.split()[-1]) assert body_size >= custom_max_size diff --git a/tests/test_web_request.py 
b/tests/test_web_request.py index ba12d6f54e7..ff22e19d5b4 100644 --- a/tests/test_web_request.py +++ b/tests/test_web_request.py @@ -628,7 +628,7 @@ async def test_multipart_formdata(protocol) -> None: b"-----------------------------326931944431359--\r\n" ) content_type = ( - "multipart/form-data; boundary=" "---------------------------326931944431359" + "multipart/form-data; boundary=---------------------------326931944431359" ) payload.feed_eof() req = make_mocked_request( @@ -649,7 +649,7 @@ async def test_multipart_formdata_file(protocol) -> None: b"-----------------------------326931944431359--\r\n" ) content_type = ( - "multipart/form-data; boundary=" "---------------------------326931944431359" + "multipart/form-data; boundary=---------------------------326931944431359" ) payload.feed_eof() req = make_mocked_request( diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index 2be54486ee9..0ebd41db502 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -273,7 +273,7 @@ async def handler(request): await asyncio.sleep(0.08) assert await aborted - assert elapsed < 0.25, "close() should have returned before " "at most 2x timeout." + assert elapsed < 0.25, "close() should have returned before at most 2x timeout." 
await ws.close() diff --git a/tests/test_websocket_parser.py b/tests/test_websocket_parser.py index 3bdd8108e35..13b46803a76 100644 --- a/tests/test_websocket_parser.py +++ b/tests/test_websocket_parser.py @@ -382,7 +382,7 @@ def test_continuation_with_close_empty(out, parser) -> None: websocket_mask_data = b"some very long data for masking by websocket" websocket_mask_mask = b"1234" websocket_mask_masked = ( - b"B]^Q\x11DVFH\x12_[_U\x13PPFR\x14W]A\x14\\S@_X" b"\\T\x14SK\x13CTP@[RYV@" + b"B]^Q\x11DVFH\x12_[_U\x13PPFR\x14W]A\x14\\S@_X\\T\x14SK\x13CTP@[RYV@" ) From ff902d7977994f89138080a2e0603fcef45ca0ac Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Tue, 3 Sep 2024 19:41:23 +0100 Subject: [PATCH 0490/1511] Fix test_client_session_timeout_zero to not use internet connection (#9004) (#9007) (cherry picked from commit 3bbe1a5d9229f66a3f1a723d22e1c8bc0779230d) --- CHANGES/9004.packaging.rst | 1 + tests/conftest.py | 3 ++- tests/test_client_session.py | 31 +++++++++++++++++++++++++------ 3 files changed, 28 insertions(+), 7 deletions(-) create mode 100644 CHANGES/9004.packaging.rst diff --git a/CHANGES/9004.packaging.rst b/CHANGES/9004.packaging.rst new file mode 100644 index 00000000000..f6b0f8ff2a3 --- /dev/null +++ b/CHANGES/9004.packaging.rst @@ -0,0 +1 @@ +Fixed ``test_client_session_timeout_zero`` to not require internet access -- by :user:`Dreamsorcerer`. 
diff --git a/tests/conftest.py b/tests/conftest.py index 1cb64b3a6f8..85fcac94138 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -13,6 +13,7 @@ import pytest +from aiohttp.client_proto import ResponseHandler from aiohttp.http import WS_KEY from aiohttp.test_utils import loop_context @@ -174,7 +175,7 @@ def pipe_name(): @pytest.fixture def create_mocked_conn(loop: Any): def _proto_factory(conn_closing_result=None, **kwargs): - proto = mock.Mock(**kwargs) + proto = mock.create_autospec(ResponseHandler, **kwargs) proto.closed = loop.create_future() proto.closed.set_result(conn_closing_result) return proto diff --git a/tests/test_client_session.py b/tests/test_client_session.py index 051c0aeba24..86f3a1b6c6e 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -20,6 +20,7 @@ from aiohttp.client_reqrep import ClientRequest from aiohttp.connector import BaseConnector, Connection, TCPConnector, UnixConnector from aiohttp.helpers import DEBUG +from aiohttp.http import RawResponseMessage from aiohttp.test_utils import make_mocked_coro from aiohttp.tracing import Trace @@ -934,13 +935,31 @@ async def test_client_session_timeout_default_args(loop) -> None: await session1.close() -async def test_client_session_timeout_zero() -> None: +async def test_client_session_timeout_zero( + create_mocked_conn: Callable[[], ResponseHandler] +) -> None: + async def create_connection( + req: object, traces: object, timeout: object + ) -> ResponseHandler: + await asyncio.sleep(0.01) + conn = create_mocked_conn() + conn.connected = True # type: ignore[misc] + assert conn.transport is not None + conn.transport.is_closing.return_value = False # type: ignore[attr-defined] + msg = mock.create_autospec(RawResponseMessage, spec_set=True, code=200) + conn.read.return_value = (msg, mock.Mock()) # type: ignore[attr-defined] + return conn + timeout = client.ClientTimeout(total=10, connect=0, sock_connect=0, sock_read=0) - try: - async with 
ClientSession(timeout=timeout) as session: - await session.get("http://example.com") - except asyncio.TimeoutError: - pytest.fail("0 should disable timeout.") + async with ClientSession(timeout=timeout) as session: + with mock.patch.object( + session._connector, "_create_connection", create_connection + ): + try: + resp = await session.get("http://example.com") + except asyncio.TimeoutError: # pragma: no cover + pytest.fail("0 should disable timeout.") + resp.close() async def test_client_session_timeout_bad_argument() -> None: From 9b60e5984d8ea7c92fd43824d5be37956dded9d6 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Tue, 3 Sep 2024 19:41:33 +0100 Subject: [PATCH 0491/1511] Fix test_client_session_timeout_zero to not use internet connection (#9004) (#9008) (cherry picked from commit 3bbe1a5d9229f66a3f1a723d22e1c8bc0779230d) --- CHANGES/9004.packaging.rst | 1 + tests/conftest.py | 3 ++- tests/test_client_session.py | 31 +++++++++++++++++++++++++------ 3 files changed, 28 insertions(+), 7 deletions(-) create mode 100644 CHANGES/9004.packaging.rst diff --git a/CHANGES/9004.packaging.rst b/CHANGES/9004.packaging.rst new file mode 100644 index 00000000000..f6b0f8ff2a3 --- /dev/null +++ b/CHANGES/9004.packaging.rst @@ -0,0 +1 @@ +Fixed ``test_client_session_timeout_zero`` to not require internet access -- by :user:`Dreamsorcerer`. 
diff --git a/tests/conftest.py b/tests/conftest.py index 1cb64b3a6f8..85fcac94138 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -13,6 +13,7 @@ import pytest +from aiohttp.client_proto import ResponseHandler from aiohttp.http import WS_KEY from aiohttp.test_utils import loop_context @@ -174,7 +175,7 @@ def pipe_name(): @pytest.fixture def create_mocked_conn(loop: Any): def _proto_factory(conn_closing_result=None, **kwargs): - proto = mock.Mock(**kwargs) + proto = mock.create_autospec(ResponseHandler, **kwargs) proto.closed = loop.create_future() proto.closed.set_result(conn_closing_result) return proto diff --git a/tests/test_client_session.py b/tests/test_client_session.py index 051c0aeba24..86f3a1b6c6e 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -20,6 +20,7 @@ from aiohttp.client_reqrep import ClientRequest from aiohttp.connector import BaseConnector, Connection, TCPConnector, UnixConnector from aiohttp.helpers import DEBUG +from aiohttp.http import RawResponseMessage from aiohttp.test_utils import make_mocked_coro from aiohttp.tracing import Trace @@ -934,13 +935,31 @@ async def test_client_session_timeout_default_args(loop) -> None: await session1.close() -async def test_client_session_timeout_zero() -> None: +async def test_client_session_timeout_zero( + create_mocked_conn: Callable[[], ResponseHandler] +) -> None: + async def create_connection( + req: object, traces: object, timeout: object + ) -> ResponseHandler: + await asyncio.sleep(0.01) + conn = create_mocked_conn() + conn.connected = True # type: ignore[misc] + assert conn.transport is not None + conn.transport.is_closing.return_value = False # type: ignore[attr-defined] + msg = mock.create_autospec(RawResponseMessage, spec_set=True, code=200) + conn.read.return_value = (msg, mock.Mock()) # type: ignore[attr-defined] + return conn + timeout = client.ClientTimeout(total=10, connect=0, sock_connect=0, sock_read=0) - try: - async with 
ClientSession(timeout=timeout) as session: - await session.get("http://example.com") - except asyncio.TimeoutError: - pytest.fail("0 should disable timeout.") + async with ClientSession(timeout=timeout) as session: + with mock.patch.object( + session._connector, "_create_connection", create_connection + ): + try: + resp = await session.get("http://example.com") + except asyncio.TimeoutError: # pragma: no cover + pytest.fail("0 should disable timeout.") + resp.close() async def test_client_session_timeout_bad_argument() -> None: From ff9212b8c2c4eb28774ee83cf8eea8b750e5e8a1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 4 Sep 2024 10:46:31 +0000 Subject: [PATCH 0492/1511] Bump setuptools from 74.1.0 to 74.1.1 (#9011) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [setuptools](https://github.com/pypa/setuptools) from 74.1.0 to 74.1.1. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/setuptools/blob/main/NEWS.rst">setuptools's changelog</a>.</em></p> <blockquote> <h1>v74.1.1</h1> <h2>Bugfixes</h2> <ul> <li>Fixed TypeError in <code>msvc.EnvironmentInfo.return_env</code> when no runtime redistributables are installed. 
(<a href="https://redirect.github.com/pypa/setuptools/issues/1902">#1902</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/setuptools/commit/7ee29bd9ade19515c9521911c2d243817c021ee8"><code>7ee29bd</code></a> Bump version: 74.1.0 → 74.1.1</li> <li><a href="https://github.com/pypa/setuptools/commit/bf5d08c5994e0b4999f1062128e1fe74dd6fffb2"><code>bf5d08c</code></a> In msvc, use os.path namespace.</li> <li><a href="https://github.com/pypa/setuptools/commit/60d2d560193a11d89d5a641ab1d6ed40b2b43282"><code>60d2d56</code></a> Merge pull request <a href="https://redirect.github.com/pypa/setuptools/issues/4628">#4628</a> from pypa/bugfix/1902-msvc-vcruntimeredist</li> <li><a href="https://github.com/pypa/setuptools/commit/13bd961b52a16c5baba67da0e6603e4f3f0126a5"><code>13bd961</code></a> Add news fragment.</li> <li><a href="https://github.com/pypa/setuptools/commit/1a810033bb65790af48ebdd2f1f8944d28400fb2"><code>1a81003</code></a> Add type annotation for VCRuntimeRedist and update the docstring to reflect t...</li> <li><a href="https://github.com/pypa/setuptools/commit/f4adb80536eec233ee4a017303b9298bc253573b"><code>f4adb80</code></a> In return_env, avoid checking <code>isfile</code>.</li> <li><a href="https://github.com/pypa/setuptools/commit/26292805c3e624fd771ffb7bb61f73c9ac5ce422"><code>2629280</code></a> Prefer generator expression in VCRuntimeRedist</li> <li>See full diff in <a href="https://github.com/pypa/setuptools/compare/v74.1.0...v74.1.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=setuptools&package-manager=pip&previous-version=74.1.0&new-version=74.1.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. 
You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 428cfff3d7f..fe25fac4393 100644 --- a/requirements/constraints.txt +++ 
b/requirements/constraints.txt @@ -296,7 +296,7 @@ zipp==3.20.1 # The following packages are considered to be unsafe in a requirements file: pip==24.2 # via pip-tools -setuptools==74.1.0 +setuptools==74.1.1 # via # blockdiag # incremental diff --git a/requirements/dev.txt b/requirements/dev.txt index 55695f7eb64..b84fcf6ec74 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -288,7 +288,7 @@ zipp==3.20.1 # The following packages are considered to be unsafe in a requirements file: pip==24.2 # via pip-tools -setuptools==74.1.0 +setuptools==74.1.1 # via # blockdiag # incremental diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 4ef41521136..086fadafc7a 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -96,7 +96,7 @@ zipp==3.20.1 # importlib-resources # The following packages are considered to be unsafe in a requirements file: -setuptools==74.1.0 +setuptools==74.1.1 # via # blockdiag # incremental diff --git a/requirements/doc.txt b/requirements/doc.txt index 804cb6e129d..82dd26a5e5b 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -91,7 +91,7 @@ zipp==3.20.1 # importlib-resources # The following packages are considered to be unsafe in a requirements file: -setuptools==74.1.0 +setuptools==74.1.1 # via # blockdiag # incremental From 13f899e6ee1f14cef7a68cc8639774f1f75301fd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 4 Sep 2024 11:07:00 +0000 Subject: [PATCH 0493/1511] Bump cryptography from 43.0.0 to 43.0.1 (#9012) Bumps [cryptography](https://github.com/pyca/cryptography) from 43.0.0 to 43.0.1. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst">cryptography's changelog</a>.</em></p> <blockquote> <p>43.0.1 - 2024-09-03</p> <pre><code> * Updated Windows, macOS, and Linux wheels to be compiled with OpenSSL 3.3.2. <p>.. 
_v43-0-0:<br /> </code></pre></p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pyca/cryptography/commit/a7733878281ca261c4ada04022fc706ba5de9d8b"><code>a773387</code></a> bump for 43.0.1 (<a href="https://redirect.github.com/pyca/cryptography/issues/11533">#11533</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/0393fef5758e55e3c7b3a3e6e5b77821c594a87f"><code>0393fef</code></a> Backport setuptools version ban (<a href="https://redirect.github.com/pyca/cryptography/issues/11526">#11526</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/6687bab97aef31d6ee6cc94ecc87a972137b5d4a"><code>6687bab</code></a> Bump openssl from 0.10.65 to 0.10.66 in /src/rust (<a href="https://redirect.github.com/pyca/cryptography/issues/11320">#11320</a>) (<a href="https://redirect.github.com/pyca/cryptography/issues/11324">#11324</a>)</li> <li>See full diff in <a href="https://github.com/pyca/cryptography/compare/43.0.0...43.0.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=cryptography&package-manager=pip&previous-version=43.0.0&new-version=43.0.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index fe25fac4393..ce4a3459572 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -58,7 +58,7 @@ coverage==7.6.1 # via # -r requirements/test.in # 
pytest-cov -cryptography==43.0.0 +cryptography==43.0.1 # via # pyjwt # trustme diff --git a/requirements/dev.txt b/requirements/dev.txt index b84fcf6ec74..5d8116c9d2f 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -58,7 +58,7 @@ coverage==7.6.1 # via # -r requirements/test.in # pytest-cov -cryptography==43.0.0 +cryptography==43.0.1 # via # pyjwt # trustme diff --git a/requirements/lint.txt b/requirements/lint.txt index 82efbf9a4ee..00015ec3b96 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -26,7 +26,7 @@ click==8.1.7 # via # slotscheck # typer -cryptography==43.0.0 +cryptography==43.0.1 # via trustme distlib==0.3.8 # via virtualenv diff --git a/requirements/test.txt b/requirements/test.txt index a2bfc72a0a9..6f34e0b9bf3 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -34,7 +34,7 @@ coverage==7.6.1 # via # -r requirements/test.in # pytest-cov -cryptography==43.0.0 +cryptography==43.0.1 # via trustme exceptiongroup==1.2.2 # via pytest From 37ca369453a0f4badee85c5b8476f5a22db2d452 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 4 Sep 2024 14:03:36 +0100 Subject: [PATCH 0494/1511] [PR #8998/875f23d5 backport][3.11] Fix resource reuse with regex paths (#9017) **This is a backport of PR #8998 as merged into master (875f23d516220a1cf849eb4a6d758edb8bff1f55).** Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8998.bugfix.rst | 1 + aiohttp/web_urldispatcher.py | 3 ++- tests/test_web_urldispatcher.py | 5 +++-- 3 files changed, 6 insertions(+), 3 deletions(-) create mode 100644 CHANGES/8998.bugfix.rst diff --git a/CHANGES/8998.bugfix.rst b/CHANGES/8998.bugfix.rst new file mode 100644 index 00000000000..1b6b189e7ea --- /dev/null +++ b/CHANGES/8998.bugfix.rst @@ -0,0 +1 @@ +Fixed an error when trying to add a route for multiple methods with a path containing a regex pattern -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 765f8500c0e..7d74dd7ab16 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -456,6 +456,7 @@ class DynamicResource(Resource): def __init__(self, path: str, *, name: Optional[str] = None) -> None: super().__init__(name=name) + self._orig_path = path pattern = "" formatter = "" for part in ROUTE_RE.split(path): @@ -508,7 +509,7 @@ def _match(self, path: str) -> Optional[Dict[str, str]]: } def raw_match(self, path: str) -> bool: - return self._formatter == path + return self._orig_path == path def get_info(self) -> _InfoDict: return {"formatter": self._formatter, "pattern": self._pattern} diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 8a97acf504d..7991cfe821e 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -703,10 +703,11 @@ async def handler(request: web.Request) -> web.Response: @pytest.mark.parametrize( "path", - [ + ( "/a", "/{a}", - ], + "/{a:.*}", + ), ) def test_reuse_last_added_resource(path: str) -> None: # Test that adding a route with the same name and path of the last added From f5bf86ac8429c4731f5d0783ea495e259c1eb19c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 4 Sep 2024 14:03:49 +0100 Subject: [PATCH 0495/1511] [PR #8998/875f23d5 backport][3.10] Fix resource reuse with regex paths (#9016) **This is a backport of PR #8998 as merged into master (875f23d516220a1cf849eb4a6d758edb8bff1f55).** Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8998.bugfix.rst | 1 + aiohttp/web_urldispatcher.py | 3 ++- tests/test_web_urldispatcher.py | 5 +++-- 3 files changed, 6 insertions(+), 3 deletions(-) create mode 100644 CHANGES/8998.bugfix.rst diff --git a/CHANGES/8998.bugfix.rst b/CHANGES/8998.bugfix.rst new file mode 100644 index 00000000000..1b6b189e7ea --- /dev/null +++ b/CHANGES/8998.bugfix.rst @@ -0,0 +1 @@ +Fixed 
an error when trying to add a route for multiple methods with a path containing a regex pattern -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index a1df64b8e61..0b300e84da1 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -456,6 +456,7 @@ class DynamicResource(Resource): def __init__(self, path: str, *, name: Optional[str] = None) -> None: super().__init__(name=name) + self._orig_path = path pattern = "" formatter = "" for part in ROUTE_RE.split(path): @@ -508,7 +509,7 @@ def _match(self, path: str) -> Optional[Dict[str, str]]: } def raw_match(self, path: str) -> bool: - return self._formatter == path + return self._orig_path == path def get_info(self) -> _InfoDict: return {"formatter": self._formatter, "pattern": self._pattern} diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 8a97acf504d..7991cfe821e 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -703,10 +703,11 @@ async def handler(request: web.Request) -> web.Response: @pytest.mark.parametrize( "path", - [ + ( "/a", "/{a}", - ], + "/{a:.*}", + ), ) def test_reuse_last_added_resource(path: str) -> None: # Test that adding a route with the same name and path of the last added From e62cc28161cc782065cb8434da93f647dbe13d41 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 5 Sep 2024 11:21:16 +0000 Subject: [PATCH 0496/1511] Bump cffi from 1.17.0 to 1.17.1 (#9024) Bumps [cffi](https://github.com/python-cffi/cffi) from 1.17.0 to 1.17.1. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/python-cffi/cffi/releases">cffi's releases</a>.</em></p> <blockquote> <h2>v1.17.1</h2> <ul> <li>Fix failing <code>distutils.msvc9compiler</code> imports under Windows (<a href="https://redirect.github.com/python-cffi/cffi/issues/118">#118</a>).</li> <li><code>ffibuilder.emit_python_code()</code> and <code>ffibuiler.emit_c_code()</code> accept file-like objects (<a href="https://redirect.github.com/python-cffi/cffi/issues/115">#115</a>).</li> <li><code>ffiplatform</code> calls are bypassed by <code>ffibuilder.emit_python_code()</code> and <code>ffibuilder.emit_c_code()</code> (<a href="https://redirect.github.com/python-cffi/cffi/issues/81">#81</a>).</li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/python-cffi/cffi/compare/v1.17.0...v1.17.1">https://github.com/python-cffi/cffi/compare/v1.17.0...v1.17.1</a></p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/python-cffi/cffi/commit/38bd6be6b94a65182f4bffb45c78e230e9290f51"><code>38bd6be</code></a> release 1.17.1</li> <li><a href="https://github.com/python-cffi/cffi/commit/ba101807be0f1b6a9a4e19b2cb036007591cbf10"><code>ba10180</code></a> update whatsnew.rst for 1.17.1 (<a href="https://redirect.github.com/python-cffi/cffi/issues/121">#121</a>)</li> <li><a href="https://github.com/python-cffi/cffi/commit/61deb5f4fec16a2763204655e9719308b645a95e"><code>61deb5f</code></a> add yet another flag to recompile() to avoid calling ffiplatform (<a href="https://redirect.github.com/python-cffi/cffi/issues/81">#81</a>)</li> <li><a href="https://github.com/python-cffi/cffi/commit/1c292c12d5e6595b6576ae0aa4935ea1e39552bf"><code>1c292c1</code></a> Handle distutils without distutils.msvc9compiler.MSVCCompiler class (<a href="https://redirect.github.com/python-cffi/cffi/issues/118">#118</a>)</li> <li><a 
href="https://github.com/python-cffi/cffi/commit/182ffc444e21b797ef8c2175a041dbc8ae92eb57"><code>182ffc4</code></a> Allow writing generated code to a file-like object. (<a href="https://redirect.github.com/python-cffi/cffi/issues/115">#115</a>)</li> <li>See full diff in <a href="https://github.com/python-cffi/cffi/compare/v1.17.0...v1.17.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=cffi&package-manager=pip&previous-version=1.17.0&new-version=1.17.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 5876b5881a7..5c629050db9 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -16,7 +16,7 @@ attrs==24.2.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in -cffi==1.17.0 +cffi==1.17.1 # via pycares frozenlist==1.4.1 # via diff --git a/requirements/constraints.txt b/requirements/constraints.txt index ce4a3459572..7969d0936f0 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -36,7 +36,7 @@ build==1.2.1 # via pip-tools certifi==2024.8.30 # via requests -cffi==1.17.0 +cffi==1.17.1 # via # cryptography # pycares diff --git a/requirements/dev.txt b/requirements/dev.txt index 5d8116c9d2f..da9c650ff25 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -36,7 +36,7 @@ build==1.2.1 # via pip-tools 
certifi==2024.8.30 # via requests -cffi==1.17.0 +cffi==1.17.1 # via # cryptography # pycares diff --git a/requirements/lint.txt b/requirements/lint.txt index 00015ec3b96..38347013947 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -14,7 +14,7 @@ async-timeout==4.0.3 # via aioredis certifi==2024.8.30 # via requests -cffi==1.17.0 +cffi==1.17.1 # via # cryptography # pycares diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 00927852825..be37aa14544 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -16,7 +16,7 @@ attrs==24.2.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in -cffi==1.17.0 +cffi==1.17.1 # via pycares frozenlist==1.4.1 # via diff --git a/requirements/test.txt b/requirements/test.txt index 6f34e0b9bf3..db0bd943d21 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -20,7 +20,7 @@ brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in certifi==2024.8.30 # via requests -cffi==1.17.0 +cffi==1.17.1 # via # cryptography # pycares From c6fa90ace2032a72990b0ab227802f46be07112b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 5 Sep 2024 11:28:07 +0000 Subject: [PATCH 0497/1511] Bump setuptools from 74.1.1 to 74.1.2 (#9025) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [setuptools](https://github.com/pypa/setuptools) from 74.1.1 to 74.1.2. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/setuptools/blob/main/NEWS.rst">setuptools's changelog</a>.</em></p> <blockquote> <h1>v74.1.2</h1> <h2>Bugfixes</h2> <ul> <li>Fixed TypeError in sdist filelist processing by adding support for pathlib Paths for the build_base. 
(<a href="https://redirect.github.com/pypa/setuptools/issues/4615">#4615</a>)</li> <li>Removed degraded and deprecated <code>test_integration</code> (easy_install) from the test suite. (<a href="https://redirect.github.com/pypa/setuptools/issues/4632">#4632</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/setuptools/commit/3b2ef1e72a704d7a2528408c190b39ed71560341"><code>3b2ef1e</code></a> Removed test_integration tests (for easy_install).</li> <li><a href="https://github.com/pypa/setuptools/commit/c484f9edb936049cf5958a7ce004b18a74158c13"><code>c484f9e</code></a> Bump version: 74.1.1 → 74.1.2</li> <li><a href="https://github.com/pypa/setuptools/commit/d8933c5be225137916cf459e0f34a148aa69fd84"><code>d8933c5</code></a> Merge pull request <a href="https://redirect.github.com/pypa/setuptools/issues/4630">#4630</a> from pypa/bugfix/4615</li> <li><a href="https://github.com/pypa/setuptools/commit/a07de2b9364d5aa618c78c3ad60312963abfa7ba"><code>a07de2b</code></a> Skip test on stdlib distutils</li> <li><a href="https://github.com/pypa/setuptools/commit/6bf20d96aaeb3bce0d24a45198be4c9bf286a6b4"><code>6bf20d9</code></a> Add news fragment.</li> <li><a href="https://github.com/pypa/setuptools/commit/9d4b288a2643df4872036d06d6b14f933db8cebc"><code>9d4b288</code></a> Enable the test</li> <li><a href="https://github.com/pypa/setuptools/commit/40ee2212124595a4c036c995705f4e56d9345fca"><code>40ee221</code></a> Merge <a href="https://github.com/pypa/distutils">https://github.com/pypa/distutils</a> into bugfix/4615</li> <li><a href="https://github.com/pypa/setuptools/commit/d901698dc01e18b4ebdb04e9a65df98883f7108b"><code>d901698</code></a> Add test capturing missed expectation.</li> <li><a href="https://github.com/pypa/setuptools/commit/91bc99ac821731fc8b594d38c0b5500f8da0819f"><code>91bc99a</code></a> In sdist.prune_file_list, support build.build_base as a pathlib.Path.</li> <li><a 
href="https://github.com/pypa/setuptools/commit/971074d4a2f6e222cdbc43ce881586412c7ab8a1"><code>971074d</code></a> Disable TRY400 so it doesn't cause problems in other branches. Disable RUF100...</li> <li>Additional commits viewable in <a href="https://github.com/pypa/setuptools/compare/v74.1.1...v74.1.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=setuptools&package-manager=pip&previous-version=74.1.1&new-version=74.1.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 7969d0936f0..256c65bcb4d 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -296,7 +296,7 @@ zipp==3.20.1 # The following packages are considered to be unsafe in a requirements file: pip==24.2 # via pip-tools -setuptools==74.1.1 +setuptools==74.1.2 # via # blockdiag # incremental diff --git a/requirements/dev.txt b/requirements/dev.txt index da9c650ff25..e066e86e3a4 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -288,7 +288,7 @@ zipp==3.20.1 # The following packages are considered to be unsafe in a requirements file: pip==24.2 # via pip-tools -setuptools==74.1.1 +setuptools==74.1.2 # via # blockdiag # incremental diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 086fadafc7a..a54c0f9224e 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -96,7 +96,7 @@ zipp==3.20.1 # 
importlib-resources # The following packages are considered to be unsafe in a requirements file: -setuptools==74.1.1 +setuptools==74.1.2 # via # blockdiag # incremental diff --git a/requirements/doc.txt b/requirements/doc.txt index 82dd26a5e5b..fd36d67bc1a 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -91,7 +91,7 @@ zipp==3.20.1 # importlib-resources # The following packages are considered to be unsafe in a requirements file: -setuptools==74.1.1 +setuptools==74.1.2 # via # blockdiag # incremental From 5eaf8cae369883d17052b1705b185c74911bf5a3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 6 Sep 2024 11:15:26 +0000 Subject: [PATCH 0498/1511] Bump python-on-whales from 0.72.0 to 0.73.0 (#9038) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [python-on-whales](https://github.com/gabrieldemarmiesse/python-on-whales) from 0.72.0 to 0.73.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/gabrieldemarmiesse/python-on-whales/releases">python-on-whales's releases</a>.</em></p> <blockquote> <h2>v0.73.0</h2> <h2>What's Changed</h2> <ul> <li>:bug: Fix wrong links in <code>docker_client.md</code> by <a href="https://github.com/gabrieldemarmiesse"><code>@​gabrieldemarmiesse</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/613">gabrieldemarmiesse/python-on-whales#613</a></li> <li>Add bootstrap cli flag for create/inspect by <a href="https://github.com/fizzgig1888"><code>@​fizzgig1888</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/600">gabrieldemarmiesse/python-on-whales#600</a></li> <li>Add support for 'podman container init' by <a href="https://github.com/LewisGaul"><code>@​LewisGaul</code></a> in <a 
href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/615">gabrieldemarmiesse/python-on-whales#615</a></li> <li>Replace isort, black and flake8 with ruff by <a href="https://github.com/einarwar"><code>@​einarwar</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/617">gabrieldemarmiesse/python-on-whales#617</a></li> <li>Add Tuple[Union[str, int]] to ValidPortMapping by <a href="https://github.com/eclark0426"><code>@​eclark0426</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/619">gabrieldemarmiesse/python-on-whales#619</a></li> <li>Fix handling of 'podman pod create --share=...' by <a href="https://github.com/LewisGaul"><code>@​LewisGaul</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/620">gabrieldemarmiesse/python-on-whales#620</a></li> <li>Allow for multiple env-files by <a href="https://github.com/einarwar"><code>@​einarwar</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/616">gabrieldemarmiesse/python-on-whales#616</a></li> <li>Fix "Render docs" CI job by <a href="https://github.com/LewisGaul"><code>@​LewisGaul</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/628">gabrieldemarmiesse/python-on-whales#628</a></li> <li>Drop support for pydantic v1 by <a href="https://github.com/LewisGaul"><code>@​LewisGaul</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/630">gabrieldemarmiesse/python-on-whales#630</a></li> <li>add auto/light/dark mode for docs by <a href="https://github.com/s-light"><code>@​s-light</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/625">gabrieldemarmiesse/python-on-whales#625</a></li> <li>Update pod component to use generic Iterable and Mapping types by <a href="https://github.com/LewisGaul"><code>@​LewisGaul</code></a> in <a 
href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/599">gabrieldemarmiesse/python-on-whales#599</a></li> <li>Update container component to accept args as Iterable and Mapping by <a href="https://github.com/LewisGaul"><code>@​LewisGaul</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/627">gabrieldemarmiesse/python-on-whales#627</a></li> <li>added missing docstrings for docker.network.inspect(), docker.network.list(), docker.network.prune() by <a href="https://github.com/stock90975"><code>@​stock90975</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/631">gabrieldemarmiesse/python-on-whales#631</a></li> <li>Add "network exists" command by <a href="https://github.com/eclark0426"><code>@​eclark0426</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/632">gabrieldemarmiesse/python-on-whales#632</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/fizzgig1888"><code>@​fizzgig1888</code></a> made their first contribution in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/600">gabrieldemarmiesse/python-on-whales#600</a></li> <li><a href="https://github.com/eclark0426"><code>@​eclark0426</code></a> made their first contribution in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/619">gabrieldemarmiesse/python-on-whales#619</a></li> <li><a href="https://github.com/s-light"><code>@​s-light</code></a> made their first contribution in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/625">gabrieldemarmiesse/python-on-whales#625</a></li> <li><a href="https://github.com/stock90975"><code>@​stock90975</code></a> made their first contribution in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/631">gabrieldemarmiesse/python-on-whales#631</a></li> </ul> <p><strong>Full Changelog</strong>: <a 
href="https://github.com/gabrieldemarmiesse/python-on-whales/compare/v0.72.0...v0.73.0">https://github.com/gabrieldemarmiesse/python-on-whales/compare/v0.72.0...v0.73.0</a></p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/86a5c8d1987de3514ceabc5b6ba3b894940581c6"><code>86a5c8d</code></a> Bump version to 0.73.0</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/4a518a5719d4d2e979538a6d232f42cce141ecef"><code>4a518a5</code></a> Add "network exists" command (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/632">#632</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/f58af9e0e43f3127b2f519b70a55ad0850024284"><code>f58af9e</code></a> Added missing docstrings for network functions (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/631">#631</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/6ac7b6f34a5dddee531eaadf9c11addd5dd45146"><code>6ac7b6f</code></a> Update container component to accept args as Iterable and Mapping (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/627">#627</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/7bd00cbca75ff11c99b32c64df893b13189717e5"><code>7bd00cb</code></a> Update pod component to use generic Iterable and Mapping types (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/599">#599</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/cfd6920692807bf5babc533b55a7de8f3e379f93"><code>cfd6920</code></a> add auto/light/dark mode for docs (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/625">#625</a>)</li> <li><a 
href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/1a7f3eaa55e7446d472aaee1c05362a2132eafd3"><code>1a7f3ea</code></a> Drop support for pydantic v1 (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/630">#630</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/9da040b7fa7bd76831c5817d411c55f53acd8ee2"><code>9da040b</code></a> Fix "Render docs" CI job (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/628">#628</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/69b2ac30146d56598bc42b22a8d015e327c2f234"><code>69b2ac3</code></a> Allow for multiple env-files (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/616">#616</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/338feb5738264e7247298fffbf2c3bf81fa7f4f0"><code>338feb5</code></a> Fix handling of 'podman pod create --share=...' (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/620">#620</a>)</li> <li>Additional commits viewable in <a href="https://github.com/gabrieldemarmiesse/python-on-whales/compare/v0.72.0...v0.73.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=python-on-whales&package-manager=pip&previous-version=0.72.0&new-version=0.73.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 256c65bcb4d..a4708392bd5 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -184,7 +184,7 @@ pytest-mock==3.14.0 # -r requirements/test.in 
python-dateutil==2.9.0.post0 # via freezegun -python-on-whales==0.72.0 +python-on-whales==0.73.0 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index e066e86e3a4..197af31a45d 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -179,7 +179,7 @@ pytest-mock==3.14.0 # -r requirements/test.in python-dateutil==2.9.0.post0 # via freezegun -python-on-whales==0.72.0 +python-on-whales==0.73.0 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 38347013947..d3b5b35892d 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -80,7 +80,7 @@ pytest-mock==3.14.0 # via -r requirements/lint.in python-dateutil==2.9.0.post0 # via freezegun -python-on-whales==0.72.0 +python-on-whales==0.73.0 # via -r requirements/lint.in pyyaml==6.0.2 # via pre-commit diff --git a/requirements/test.txt b/requirements/test.txt index db0bd943d21..2e9a0e4abb1 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -94,7 +94,7 @@ pytest-mock==3.14.0 # via -r requirements/test.in python-dateutil==2.9.0.post0 # via freezegun -python-on-whales==0.72.0 +python-on-whales==0.73.0 # via -r requirements/test.in re-assert==1.1.0 # via -r requirements/test.in From aca99bc3c73eb6b2ae1eccd7ef76bbe1df96e3f5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 6 Sep 2024 11:46:11 +0000 Subject: [PATCH 0499/1511] Bump pydantic from 2.8.2 to 2.9.0 (#9041) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pydantic](https://github.com/pydantic/pydantic) from 2.8.2 to 2.9.0. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pydantic/pydantic/releases">pydantic's releases</a>.</em></p> <blockquote> <h2>v2.9.0 (2024-09-05)</h2> <p>The code released in v2.9.0 is practically identical to that of v2.9.0b2.</p> <p>Check out our <a href="https://pydantic.dev/articles/pydantic-v2-9-release">blog post</a> to learn more about the release highlights!</p> <h2>What's Changed</h2> <h3>Packaging</h3> <ul> <li>Bump <code>ruff</code> to <code>v0.5.0</code> and <code>pyright</code> to <code>v1.1.369</code> by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9801">#9801</a></li> <li>Bump <code>pydantic-extra-types</code> to <code>v2.9.0</code> by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9832">#9832</a></li> <li>Support compatibility with <code>pdm v2.18.1</code> by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10138">#10138</a></li> <li>Bump <code>v1</code> version stub to <code>v1.10.18</code> by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10214">#10214</a></li> <li>Bump <code>pydantic-core</code> to <code>v2.23.2</code> by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10311">#10311</a></li> </ul> <h3>New Features</h3> <ul> <li>Add support for <code>ZoneInfo</code> by <a href="https://github.com/Youssefares"><code>@​Youssefares</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9896">#9896</a></li> <li>Add <code>Config.val_json_bytes</code> by <a href="https://github.com/josh-newman"><code>@​josh-newman</code></a> in <a 
href="https://redirect.github.com/pydantic/pydantic/pull/9770">#9770</a></li> <li>Add DSN for Snowflake by <a href="https://github.com/aditkumar72"><code>@​aditkumar72</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10128">#10128</a></li> <li>Support <code>complex</code> number by <a href="https://github.com/changhc"><code>@​changhc</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9654">#9654</a></li> <li>Add support for <code>annotated_types.Not</code> by <a href="https://github.com/aditkumar72"><code>@​aditkumar72</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10210">#10210</a></li> <li>Allow <code>WithJsonSchema</code> to inject <code>$ref</code>s w/ <code>http</code> or <code>https</code> links by <a href="https://github.com/dAIsySHEng1"><code>@​dAIsySHEng1</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9863">#9863</a></li> <li>Allow validators to customize validation JSON schema by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10094">#10094</a></li> <li>Support parametrized <code>PathLike</code> types by <a href="https://github.com/nix010"><code>@​nix010</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9764">#9764</a></li> <li>Add tagged union serializer that attempts to use <code>str</code> or <code>callable</code> discriminators to select the correct serializer by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in in <a href="https://redirect.github.com/pydantic/pydantic-core/pull/1397">pydantic/pydantic-core#1397</a></li> </ul> <h3>Changes</h3> <ul> <li>Breaking Change: Merge <code>dict</code> type <code>json_schema_extra</code> by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9792">#9792</a> <ul> <li>For more info (how to replicate 
old behavior) on this change, see <a href="https://docs.pydantic.dev/dev/concepts/json_schema/#merging-json_schema_extra">here</a></li> </ul> </li> <li>Refactor annotation injection for known (often generic) types by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9979">#9979</a></li> <li>Move annotation compatibility errors to validation phase by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9999">#9999</a></li> <li>Improve runtime errors for string constraints like <code>pattern</code> for incompatible types by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10158">#10158</a></li> <li>Remove <code>'allOf'</code> JSON schema workarounds by <a href="https://github.com/dpeachey"><code>@​dpeachey</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10029">#10029</a></li> <li>Remove <code>typed_dict_cls</code> data from <code>CoreMetadata</code> by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10180">#10180</a></li> <li>Deprecate passing a dict to the <code>Examples</code> class by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10181">#10181</a></li> <li>Remove <code>initial_metadata</code> from internal metadata construct by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10194">#10194</a></li> <li>Use <code>re.Pattern.search</code> instead of <code>re.Pattern.match</code> for consistency with <code>rust</code> behavior by <a href="https://github.com/tinez"><code>@​tinez</code></a> in <a 
href="https://redirect.github.com/pydantic/pydantic-core/pull/1368">pydantic/pydantic-core#1368</a></li> <li>Show value of wrongly typed data in <code>pydantic-core</code> serialization warning by <a href="https://github.com/BoxyUwU"><code>@​BoxyUwU</code></a> in <a href="https://redirect.github.com/pydantic/pydantic-core/pull/1377">pydantic/pydantic-core#1377</a></li> <li>Breaking Change: in <code>pydantic-core</code>, change <code>metadata</code> type hint in core schemas from <code>Any</code> -> <code>Dict[str, Any] | None</code> by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic-core/pull/1411">pydantic/pydantic-core#1411</a></li> <li>Raise helpful warning when <code>self</code> isn't returned from model validator by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10255">#10255</a></li> </ul> <h3>Performance</h3> <ul> <li>Initial start at improving import times for modules, using caching primarily by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10009">#10009</a></li> <li>Using cached internal import for <code>BaseModel</code> by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10013">#10013</a></li> <li>Simplify internal generics logic - remove generator overhead by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10059">#10059</a></li> <li>Remove default module globals from types namespace by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10123">#10123</a></li> <li>Performance boost: skip caching parent namespaces in most cases 
by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10113">#10113</a></li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pydantic/pydantic/blob/main/HISTORY.md">pydantic's changelog</a>.</em></p> <blockquote> <h2>v2.9.0 (2024-09-05)</h2> <p><a href="https://github.com/pydantic/pydantic/releases/tag/v2.9.0">GitHub release</a></p> <p>The code released in v2.9.0 is practically identical to that of v2.9.0b2.</p> <h3>What's Changed</h3> <h4>Packaging</h4> <ul> <li>Bump <code>ruff</code> to <code>v0.5.0</code> and <code>pyright</code> to <code>v1.1.369</code> by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9801">#9801</a></li> <li>Bump <code>pydantic-extra-types</code> to <code>v2.9.0</code> by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9832">#9832</a></li> <li>Support compatibility with <code>pdm v2.18.1</code> by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10138">#10138</a></li> <li>Bump <code>v1</code> version stub to <code>v1.10.18</code> by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10214">#10214</a></li> <li>Bump <code>pydantic-core</code> to <code>v2.23.2</code> by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10311">#10311</a></li> </ul> <h4>New Features</h4> <ul> <li>Add support for <code>ZoneInfo</code> by <a href="https://github.com/Youssefares"><code>@​Youssefares</code></a> in <a 
href="https://redirect.github.com/pydantic/pydantic/pull/9896">#9896</a></li> <li>Add <code>Config.val_json_bytes</code> by <a href="https://github.com/josh-newman"><code>@​josh-newman</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9770">#9770</a></li> <li>Add DSN for Snowflake by <a href="https://github.com/aditkumar72"><code>@​aditkumar72</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10128">#10128</a></li> <li>Support <code>complex</code> number by <a href="https://github.com/changhc"><code>@​changhc</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9654">#9654</a></li> <li>Add support for <code>annotated_types.Not</code> by <a href="https://github.com/aditkumar72"><code>@​aditkumar72</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10210">#10210</a></li> <li>Allow <code>WithJsonSchema</code> to inject <code>$ref</code>s w/ <code>http</code> or <code>https</code> links by <a href="https://github.com/dAIsySHEng1"><code>@​dAIsySHEng1</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9863">#9863</a></li> <li>Allow validators to customize validation JSON schema by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10094">#10094</a></li> <li>Support parametrized <code>PathLike</code> types by <a href="https://github.com/nix010"><code>@​nix010</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9764">#9764</a></li> <li>Add tagged union serializer that attempts to use <code>str</code> or <code>callable</code> discriminators to select the correct serializer by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in in <a href="https://redirect.github.com/pydantic/pydantic-core/pull/1397">pydantic/pydantic-core#1397</a></li> </ul> <h4>Changes</h4> <ul> <li>Breaking Change: Merge <code>dict</code> type 
<code>json_schema_extra</code> by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9792">#9792</a> <ul> <li>For more info (how to replicate old behavior) on this change, see <a href="https://docs.pydantic.dev/dev/concepts/json_schema/#merging-json_schema_extra">here</a></li> </ul> </li> <li>Refactor annotation injection for known (often generic) types by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9979">#9979</a></li> <li>Move annotation compatibility errors to validation phase by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/9999">#9999</a></li> <li>Improve runtime errors for string constraints like <code>pattern</code> for incompatible types by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10158">#10158</a></li> <li>Remove <code>'allOf'</code> JSON schema workarounds by <a href="https://github.com/dpeachey"><code>@​dpeachey</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10029">#10029</a></li> <li>Remove <code>typed_dict_cls</code> data from <code>CoreMetadata</code> by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10180">#10180</a></li> <li>Deprecate passing a dict to the <code>Examples</code> class by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10181">#10181</a></li> <li>Remove <code>initial_metadata</code> from internal metadata construct by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10194">#10194</a></li> 
<li>Use <code>re.Pattern.search</code> instead of <code>re.Pattern.match</code> for consistency with <code>rust</code> behavior by <a href="https://github.com/tinez"><code>@​tinez</code></a> in <a href="https://redirect.github.com/pydantic/pydantic-core/pull/1368">pydantic/pydantic-core#1368</a></li> <li>Show value of wrongly typed data in <code>pydantic-core</code> serialization warning by <a href="https://github.com/BoxyUwU"><code>@​BoxyUwU</code></a> in <a href="https://redirect.github.com/pydantic/pydantic-core/pull/1377">pydantic/pydantic-core#1377</a></li> <li>Breaking Change: in <code>pydantic-core</code>, change <code>metadata</code> type hint in core schemas from <code>Any</code> -> <code>Dict[str, Any] | None</code> by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic-core/pull/1411">pydantic/pydantic-core#1411</a></li> <li>Raise helpful warning when <code>self</code> isn't returned from model validator by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10255">#10255</a></li> </ul> <h4>Performance</h4> <ul> <li>Initial start at improving import times for modules, using caching primarily by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10009">#10009</a></li> <li>Using cached internal import for <code>BaseModel</code> by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10013">#10013</a></li> <li>Simplify internal generics logic - remove generator overhead by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10059">#10059</a></li> <li>Remove default module globals from types namespace by <a 
href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10123">#10123</a></li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pydantic/pydantic/commit/ed92d0a921d3464f08c5aa67dcbd262bf67110b1"><code>ed92d0a</code></a> Prep for v2.9 release (<a href="https://redirect.github.com/pydantic/pydantic/issues/10311">#10311</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/eecde7b7f73c1ecd4fc12ee867b8b53d2f86b73f"><code>eecde7b</code></a> Fix lifecycle docs formatting (<a href="https://redirect.github.com/pydantic/pydantic/issues/10309">#10309</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/447879b44ab8a9871193d6aef1b0846288929495"><code>447879b</code></a> Add schema generation benchmarks for models with custom serializers (<a href="https://redirect.github.com/pydantic/pydantic/issues/10290">#10290</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/91a4993283ec1f0f056629a4cadec378931efa84"><code>91a4993</code></a> Add link for complex docs (<a href="https://redirect.github.com/pydantic/pydantic/issues/10301">#10301</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/8a01cc8f96ea3ca4f46b4ae3cbf36084081383ac"><code>8a01cc8</code></a> Respect <code>schema_generator</code> config value in <code>TypeAdapter</code> (<a href="https://redirect.github.com/pydantic/pydantic/issues/10300">#10300</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/5c24abb3948008f62a545c59752fae97f111de76"><code>5c24abb</code></a> Ensure <code>__pydantic_complete__</code> is set when rebuilding dataclasses (<a href="https://redirect.github.com/pydantic/pydantic/issues/10291">#10291</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/0a78494236e3c121febe58cb556407c273f430de"><code>0a78494</code></a> Try fix for 
<code>coverage</code> with hidden files (<a href="https://redirect.github.com/pydantic/pydantic/issues/10298">#10298</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/d7e812571b2a947b0dfba71a08d9d2c7a57e7f31"><code>d7e8125</code></a> Ensure coverage data artifact is correctly uploaded (<a href="https://redirect.github.com/pydantic/pydantic/issues/10292">#10292</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/c109563de80843d97b86c2fe6c5cb853d8ff963e"><code>c109563</code></a> Support signature for wrap validators without <code>info</code> (<a href="https://redirect.github.com/pydantic/pydantic/issues/10277">#10277</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/0115f7b5bd4b3f471079b9a07ebcae19ec241512"><code>0115f7b</code></a> Reformat + add some benchmarks for annotated validators (<a href="https://redirect.github.com/pydantic/pydantic/issues/10276">#10276</a>)</li> <li>Additional commits viewable in <a href="https://github.com/pydantic/pydantic/compare/v2.8.2...v2.9.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pydantic&package-manager=pip&previous-version=2.8.2&new-version=2.9.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 4 ++-- requirements/dev.txt | 4 ++-- requirements/lint.txt | 4 ++-- requirements/test.txt | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index a4708392bd5..420d46bbf6c 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -152,9 +152,9 @@ pycares==4.4.0 # via aiodns 
pycparser==2.22 # via cffi -pydantic==2.8.2 +pydantic==2.9.0 # via python-on-whales -pydantic-core==2.20.1 +pydantic-core==2.23.2 # via pydantic pyenchant==3.2.2 # via sphinxcontrib-spelling diff --git a/requirements/dev.txt b/requirements/dev.txt index 197af31a45d..e5ed6030ae1 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -149,9 +149,9 @@ pycares==4.4.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.8.2 +pydantic==2.9.0 # via python-on-whales -pydantic-core==2.20.1 +pydantic-core==2.23.2 # via pydantic pygments==2.18.0 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index d3b5b35892d..374f5762d44 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -66,9 +66,9 @@ pycares==4.4.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.8.2 +pydantic==2.9.0 # via python-on-whales -pydantic-core==2.20.1 +pydantic-core==2.23.2 # via pydantic pygments==2.18.0 # via rich diff --git a/requirements/test.txt b/requirements/test.txt index 2e9a0e4abb1..82dd51bcf50 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -77,9 +77,9 @@ pycares==4.4.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.8.2 +pydantic==2.9.0 # via python-on-whales -pydantic-core==2.20.1 +pydantic-core==2.23.2 # via pydantic pygments==2.18.0 # via rich From 446ed9ef7112c269edf728c01ea7dbfc0df96a50 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Fri, 6 Sep 2024 15:11:00 -0500 Subject: [PATCH 0500/1511] [PR #9029/466448c backport][3.11] Fix SSLContext creation in the TCPConnector with multiple loops (#9043) --- aiohttp/connector.py | 100 +++++++++++++++--------------------- tests/test_connector.py | 109 +++++++++++++++++++++------------------- tests/test_proxy.py | 3 +- 3 files changed, 100 insertions(+), 112 deletions(-) diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 360eabc7bb2..bbdb140187d 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -50,14 +50,7 @@ ) from .client_proto import ResponseHandler from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params -from .helpers import ( - ceil_timeout, - is_ip_address, - noop, - sentinel, - set_exception, - set_result, -) +from .helpers import ceil_timeout, is_ip_address, noop, sentinel from .locks import EventResultOrError from .resolver import DefaultResolver @@ -748,6 +741,35 @@ def expired(self, key: Tuple[str, int]) -> bool: return self._timestamps[key] + self._ttl < monotonic() +def _make_ssl_context(verified: bool) -> SSLContext: + """Create SSL context. + + This method is not async-friendly and should be called from a thread + because it will load certificates from disk and do other blocking I/O. + """ + if ssl is None: + # No ssl support + return None + if verified: + return ssl.create_default_context() + sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) + sslcontext.options |= ssl.OP_NO_SSLv2 + sslcontext.options |= ssl.OP_NO_SSLv3 + sslcontext.check_hostname = False + sslcontext.verify_mode = ssl.CERT_NONE + sslcontext.options |= ssl.OP_NO_COMPRESSION + sslcontext.set_default_verify_paths() + return sslcontext + + +# The default SSLContext objects are created at import time +# since they do blocking I/O to load certificates from disk, +# and imports should always be done before the event loop starts +# or in a thread. 
+_SSL_CONTEXT_VERIFIED = _make_ssl_context(True) +_SSL_CONTEXT_UNVERIFIED = _make_ssl_context(False) + + class TCPConnector(BaseConnector): """TCP connector. @@ -778,7 +800,6 @@ class TCPConnector(BaseConnector): """ allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"tcp"}) - _made_ssl_context: Dict[bool, "asyncio.Future[SSLContext]"] = {} def __init__( self, @@ -982,25 +1003,7 @@ async def _create_connection( return proto - @staticmethod - def _make_ssl_context(verified: bool) -> SSLContext: - """Create SSL context. - - This method is not async-friendly and should be called from a thread - because it will load certificates from disk and do other blocking I/O. - """ - if verified: - return ssl.create_default_context() - sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) - sslcontext.options |= ssl.OP_NO_SSLv2 - sslcontext.options |= ssl.OP_NO_SSLv3 - sslcontext.check_hostname = False - sslcontext.verify_mode = ssl.CERT_NONE - sslcontext.options |= ssl.OP_NO_COMPRESSION - sslcontext.set_default_verify_paths() - return sslcontext - - async def _get_ssl_context(self, req: ClientRequest) -> Optional[SSLContext]: + def _get_ssl_context(self, req: ClientRequest) -> Optional[SSLContext]: """Logic to get the correct SSL context 0. 
if req.ssl is false, return None @@ -1024,35 +1027,14 @@ async def _get_ssl_context(self, req: ClientRequest) -> Optional[SSLContext]: return sslcontext if sslcontext is not True: # not verified or fingerprinted - return await self._make_or_get_ssl_context(False) + return _SSL_CONTEXT_UNVERIFIED sslcontext = self._ssl if isinstance(sslcontext, ssl.SSLContext): return sslcontext if sslcontext is not True: # not verified or fingerprinted - return await self._make_or_get_ssl_context(False) - return await self._make_or_get_ssl_context(True) - - async def _make_or_get_ssl_context(self, verified: bool) -> SSLContext: - """Create or get cached SSL context.""" - try: - return await self._made_ssl_context[verified] - except KeyError: - loop = self._loop - future = loop.create_future() - self._made_ssl_context[verified] = future - try: - result = await loop.run_in_executor( - None, self._make_ssl_context, verified - ) - # BaseException is used since we might get CancelledError - except BaseException as ex: - del self._made_ssl_context[verified] - set_exception(future, ex) - raise - else: - set_result(future, result) - return result + return _SSL_CONTEXT_UNVERIFIED + return _SSL_CONTEXT_VERIFIED def _get_fingerprint(self, req: ClientRequest) -> Optional["Fingerprint"]: ret = req.ssl @@ -1204,13 +1186,11 @@ async def _start_tls_connection( ) -> Tuple[asyncio.BaseTransport, ResponseHandler]: """Wrap the raw TCP transport with TLS.""" tls_proto = self._factory() # Create a brand new proto for TLS - - # Safety of the `cast()` call here is based on the fact that - # internally `_get_ssl_context()` only returns `None` when - # `req.is_ssl()` evaluates to `False` which is never gonna happen - # in this code path. Of course, it's rather fragile - # maintainability-wise but this is to be solved separately. 
- sslcontext = cast(ssl.SSLContext, await self._get_ssl_context(req)) + sslcontext = self._get_ssl_context(req) + if TYPE_CHECKING: + # _start_tls_connection is unreachable in the current code path + # if sslcontext is None. + assert sslcontext is not None try: async with ceil_timeout( @@ -1288,7 +1268,7 @@ async def _create_direct_connection( *, client_error: Type[Exception] = ClientConnectorError, ) -> Tuple[asyncio.Transport, ResponseHandler]: - sslcontext = await self._get_ssl_context(req) + sslcontext = self._get_ssl_context(req) fingerprint = self._get_fingerprint(req) host = req.url.raw_host diff --git a/tests/test_connector.py b/tests/test_connector.py index 0129f0cc330..bbe77f2a705 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -1,5 +1,4 @@ # Tests of http client with custom Connector - import asyncio import gc import hashlib @@ -9,8 +8,9 @@ import sys import uuid from collections import deque +from concurrent import futures from contextlib import closing, suppress -from typing import Any, List, Optional, Type +from typing import Any, List, Literal, Optional from unittest import mock import pytest @@ -18,10 +18,16 @@ from yarl import URL import aiohttp -from aiohttp import client, web +from aiohttp import client, connector as connector_module, web from aiohttp.client import ClientRequest, ClientTimeout from aiohttp.client_reqrep import ConnectionKey -from aiohttp.connector import Connection, TCPConnector, _DNSCacheTable +from aiohttp.connector import ( + _SSL_CONTEXT_UNVERIFIED, + _SSL_CONTEXT_VERIFIED, + Connection, + TCPConnector, + _DNSCacheTable, +) from aiohttp.locks import EventResultOrError from aiohttp.test_utils import make_mocked_coro, unused_port from aiohttp.tracing import Trace @@ -1540,23 +1546,11 @@ async def test_tcp_connector_clear_dns_cache_bad_args(loop) -> None: conn.clear_dns_cache("localhost") -async def test_dont_recreate_ssl_context() -> None: - conn = aiohttp.TCPConnector() - ctx = await 
conn._make_or_get_ssl_context(True) - assert ctx is await conn._make_or_get_ssl_context(True) - - -async def test_dont_recreate_ssl_context2() -> None: - conn = aiohttp.TCPConnector() - ctx = await conn._make_or_get_ssl_context(False) - assert ctx is await conn._make_or_get_ssl_context(False) - - async def test___get_ssl_context1() -> None: conn = aiohttp.TCPConnector() req = mock.Mock() req.is_ssl.return_value = False - assert await conn._get_ssl_context(req) is None + assert conn._get_ssl_context(req) is None async def test___get_ssl_context2(loop) -> None: @@ -1565,7 +1559,7 @@ async def test___get_ssl_context2(loop) -> None: req = mock.Mock() req.is_ssl.return_value = True req.ssl = ctx - assert await conn._get_ssl_context(req) is ctx + assert conn._get_ssl_context(req) is ctx async def test___get_ssl_context3(loop) -> None: @@ -1574,7 +1568,7 @@ async def test___get_ssl_context3(loop) -> None: req = mock.Mock() req.is_ssl.return_value = True req.ssl = True - assert await conn._get_ssl_context(req) is ctx + assert conn._get_ssl_context(req) is ctx async def test___get_ssl_context4(loop) -> None: @@ -1583,9 +1577,7 @@ async def test___get_ssl_context4(loop) -> None: req = mock.Mock() req.is_ssl.return_value = True req.ssl = False - assert await conn._get_ssl_context(req) is await conn._make_or_get_ssl_context( - False - ) + assert conn._get_ssl_context(req) is _SSL_CONTEXT_UNVERIFIED async def test___get_ssl_context5(loop) -> None: @@ -1594,9 +1586,7 @@ async def test___get_ssl_context5(loop) -> None: req = mock.Mock() req.is_ssl.return_value = True req.ssl = aiohttp.Fingerprint(hashlib.sha256(b"1").digest()) - assert await conn._get_ssl_context(req) is await conn._make_or_get_ssl_context( - False - ) + assert conn._get_ssl_context(req) is _SSL_CONTEXT_UNVERIFIED async def test___get_ssl_context6() -> None: @@ -1604,7 +1594,7 @@ async def test___get_ssl_context6() -> None: req = mock.Mock() req.is_ssl.return_value = True req.ssl = True - assert await 
conn._get_ssl_context(req) is await conn._make_or_get_ssl_context(True) + assert conn._get_ssl_context(req) is _SSL_CONTEXT_VERIFIED async def test_ssl_context_once() -> None: @@ -1616,31 +1606,9 @@ async def test_ssl_context_once() -> None: req = mock.Mock() req.is_ssl.return_value = True req.ssl = True - assert await conn1._get_ssl_context(req) is await conn1._make_or_get_ssl_context( - True - ) - assert await conn2._get_ssl_context(req) is await conn1._make_or_get_ssl_context( - True - ) - assert await conn3._get_ssl_context(req) is await conn1._make_or_get_ssl_context( - True - ) - assert conn1._made_ssl_context is conn2._made_ssl_context is conn3._made_ssl_context - assert True in conn1._made_ssl_context - - -@pytest.mark.parametrize("exception", [OSError, ssl.SSLError, asyncio.CancelledError]) -async def test_ssl_context_creation_raises(exception: Type[BaseException]) -> None: - """Test that we try again if SSLContext creation fails the first time.""" - conn = aiohttp.TCPConnector() - conn._made_ssl_context.clear() - - with mock.patch.object( - conn, "_make_ssl_context", side_effect=exception - ), pytest.raises(exception): - await conn._make_or_get_ssl_context(True) - - assert isinstance(await conn._make_or_get_ssl_context(True), ssl.SSLContext) + assert conn1._get_ssl_context(req) is _SSL_CONTEXT_VERIFIED + assert conn2._get_ssl_context(req) is _SSL_CONTEXT_VERIFIED + assert conn3._get_ssl_context(req) is _SSL_CONTEXT_VERIFIED async def test_close_twice(loop) -> None: @@ -2717,3 +2685,42 @@ async def allow_connection_and_add_dummy_waiter(): ) await connector.close() + + +def test_connector_multiple_event_loop() -> None: + """Test the connector with multiple event loops.""" + + async def async_connect() -> Literal[True]: + conn = aiohttp.TCPConnector() + loop = asyncio.get_running_loop() + req = ClientRequest("GET", URL("https://127.0.0.1"), loop=loop) + with suppress(aiohttp.ClientConnectorError): + with mock.patch.object( + conn._loop, + 
"create_connection", + autospec=True, + spec_set=True, + side_effect=ssl.CertificateError, + ): + await conn.connect(req, [], ClientTimeout()) + return True + + def test_connect() -> Literal[True]: + loop = asyncio.new_event_loop() + try: + return loop.run_until_complete(async_connect()) + finally: + loop.close() + + with futures.ThreadPoolExecutor() as executor: + res_list = [executor.submit(test_connect) for _ in range(2)] + raw_response_list = [res.result() for res in futures.as_completed(res_list)] + + assert raw_response_list == [True, True] + + +def test_default_ssl_context_creation_without_ssl() -> None: + """Verify _make_ssl_context does not raise when ssl is not available.""" + with mock.patch.object(connector_module, "ssl", None): + assert connector_module._make_ssl_context(False) is None + assert connector_module._make_ssl_context(True) is None diff --git a/tests/test_proxy.py b/tests/test_proxy.py index c5e98deb8a5..4fa5e932098 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -12,6 +12,7 @@ import aiohttp from aiohttp.client_reqrep import ClientRequest, ClientResponse +from aiohttp.connector import _SSL_CONTEXT_VERIFIED from aiohttp.helpers import TimerNoop from aiohttp.test_utils import make_mocked_coro @@ -817,7 +818,7 @@ async def make_conn(): self.loop.start_tls.assert_called_with( mock.ANY, mock.ANY, - self.loop.run_until_complete(connector._make_or_get_ssl_context(True)), + _SSL_CONTEXT_VERIFIED, server_hostname="www.python.org", ssl_handshake_timeout=mock.ANY, ) From 9c34a8cbf3908634fd6da2eab23adaf240879588 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 6 Sep 2024 20:36:12 +0000 Subject: [PATCH 0501/1511] [PR #9031/9f0ae074 backport][3.10] Avoid tracing overhead in http_writer when there are no traces (#9045) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/9031.misc.rst | 1 + aiohttp/client_reqrep.py | 12 ++++++++---- 2 files changed, 9 insertions(+), 4 deletions(-) create mode 100644 CHANGES/9031.misc.rst diff --git a/CHANGES/9031.misc.rst b/CHANGES/9031.misc.rst new file mode 100644 index 00000000000..1deab5230f7 --- /dev/null +++ b/CHANGES/9031.misc.rst @@ -0,0 +1 @@ +Tracing overhead is avoided in the http writer when there are no active traces -- by user:`bdraco`. diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index d2c5f16df2b..c261af0421e 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -696,11 +696,15 @@ async def send(self, conn: "Connection") -> "ClientResponse": writer = StreamWriter( protocol, self.loop, - on_chunk_sent=functools.partial( - self._on_chunk_request_sent, self.method, self.url + on_chunk_sent=( + functools.partial(self._on_chunk_request_sent, self.method, self.url) + if self._traces + else None ), - on_headers_sent=functools.partial( - self._on_headers_request_sent, self.method, self.url + on_headers_sent=( + functools.partial(self._on_headers_request_sent, self.method, self.url) + if self._traces + else None ), ) From 4e1070e1523fd55409534c617d691aec4fdbe7c6 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 6 Sep 2024 21:02:33 +0000 Subject: [PATCH 0502/1511] [PR #9032/c693a816 backport][3.11] Fix Link-Local IPv6 Flags in the Resolver (#9047) Co-authored-by: GitNMLee <89409038+GitNMLee@users.noreply.github.com> Fixes #9028 Fixes #123'). 
--> --- CHANGES/9032.bugfix.rst | 3 +++ aiohttp/resolver.py | 5 +++-- tests/test_resolver.py | 27 ++++++++++++++++++++++----- 3 files changed, 28 insertions(+), 7 deletions(-) create mode 100644 CHANGES/9032.bugfix.rst diff --git a/CHANGES/9032.bugfix.rst b/CHANGES/9032.bugfix.rst new file mode 100644 index 00000000000..8c8d81f6319 --- /dev/null +++ b/CHANGES/9032.bugfix.rst @@ -0,0 +1,3 @@ +Fixed the incorrect use of flags for ``getnameinfo()`` in the Resolver --by :user:`GitNMLee` + +Link-Local IPv6 addresses can now be handled by the Resolver correctly. diff --git a/aiohttp/resolver.py b/aiohttp/resolver.py index c8fce5b5706..4f15e84071d 100644 --- a/aiohttp/resolver.py +++ b/aiohttp/resolver.py @@ -17,6 +17,7 @@ _NUMERIC_SOCKET_FLAGS = socket.AI_NUMERICHOST | socket.AI_NUMERICSERV +_NAME_SOCKET_FLAGS = socket.NI_NUMERICHOST | socket.NI_NUMERICSERV class ThreadedResolver(AbstractResolver): @@ -52,7 +53,7 @@ async def resolve( # LL IPv6 is a VERY rare case. Strictly speaking, we should use # getnameinfo() unconditionally, but performance makes sense. resolved_host, _port = await self._loop.getnameinfo( - address, _NUMERIC_SOCKET_FLAGS + address, _NAME_SOCKET_FLAGS ) port = int(_port) else: @@ -120,7 +121,7 @@ async def resolve( # getnameinfo() unconditionally, but performance makes sense. 
result = await self._resolver.getnameinfo( (address[0].decode("ascii"), *address[1:]), - _NUMERIC_SOCKET_FLAGS, + _NAME_SOCKET_FLAGS, ) resolved_host = result.node else: diff --git a/tests/test_resolver.py b/tests/test_resolver.py index 825db81e41b..e0e843f4782 100644 --- a/tests/test_resolver.py +++ b/tests/test_resolver.py @@ -3,12 +3,12 @@ import socket from ipaddress import ip_address from typing import Any, Awaitable, Callable, Collection, List, NamedTuple, Tuple, Union -from unittest.mock import Mock, patch +from unittest.mock import Mock, create_autospec, patch import pytest from aiohttp.resolver import ( - _NUMERIC_SOCKET_FLAGS, + _NAME_SOCKET_FLAGS, AsyncResolver, DefaultResolver, ThreadedResolver, @@ -153,9 +153,7 @@ async def test_async_resolver_positive_link_local_ipv6_lookup(loop: Any) -> None port=0, type=socket.SOCK_STREAM, ) - mock().getnameinfo.assert_called_with( - ("fe80::1", 0, 0, 3), _NUMERIC_SOCKET_FLAGS - ) + mock().getnameinfo.assert_called_with(("fe80::1", 0, 0, 3), _NAME_SOCKET_FLAGS) @pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") @@ -211,12 +209,31 @@ async def test_threaded_resolver_positive_ipv6_link_local_lookup() -> None: loop = Mock() loop.getaddrinfo = fake_ipv6_addrinfo(["fe80::1"]) loop.getnameinfo = fake_ipv6_nameinfo("fe80::1%eth0") + + # Mock the fake function that was returned by helper functions + loop.getaddrinfo = create_autospec(loop.getaddrinfo) + loop.getnameinfo = create_autospec(loop.getnameinfo) + + # Set the correct return values for mock functions + loop.getaddrinfo.return_value = await fake_ipv6_addrinfo(["fe80::1"])() + loop.getnameinfo.return_value = await fake_ipv6_nameinfo("fe80::1%eth0")() + resolver = ThreadedResolver() resolver._loop = loop real = await resolver.resolve("www.python.org") assert real[0]["hostname"] == "www.python.org" ipaddress.ip_address(real[0]["host"]) + loop.getaddrinfo.assert_called_with( + "www.python.org", + 0, + type=socket.SOCK_STREAM, + family=socket.AF_INET, 
+ flags=socket.AI_ADDRCONFIG, + ) + + loop.getnameinfo.assert_called_with(("fe80::1", 0, 0, 3), _NAME_SOCKET_FLAGS) + async def test_threaded_resolver_multiple_replies() -> None: loop = Mock() From 53d3f595dac05565e4ca8657fa0c4be8c68de4f8 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 6 Sep 2024 21:10:13 +0000 Subject: [PATCH 0503/1511] [PR #9044/684b7922 backport][3.10] Add missing changelog entry for #9029 (#9049) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/9029.bugfix.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 CHANGES/9029.bugfix.rst diff --git a/CHANGES/9029.bugfix.rst b/CHANGES/9029.bugfix.rst new file mode 100644 index 00000000000..7ca956e3832 --- /dev/null +++ b/CHANGES/9029.bugfix.rst @@ -0,0 +1 @@ +Fixed creation of ``SSLContext`` inside of :py:class:`aiohttp.TCPConnector` with multiple event loops in different threads -- by :user:`bdraco`. From a35a73b453948f1f3b6d5b2a3ad6ffbfd83ccb78 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 6 Sep 2024 21:16:43 +0000 Subject: [PATCH 0504/1511] [PR #9044/684b7922 backport][3.11] Add missing changelog entry for #9029 (#9050) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/9029.bugfix.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 CHANGES/9029.bugfix.rst diff --git a/CHANGES/9029.bugfix.rst b/CHANGES/9029.bugfix.rst new file mode 100644 index 00000000000..7ca956e3832 --- /dev/null +++ b/CHANGES/9029.bugfix.rst @@ -0,0 +1 @@ +Fixed creation of ``SSLContext`` inside of :py:class:`aiohttp.TCPConnector` with multiple event loops in different threads -- by :user:`bdraco`. From b48ebc1e9f9c03ed39b52ab95486d1d957995d30 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Fri, 6 Sep 2024 18:23:48 -0500 Subject: [PATCH 0505/1511] [PR #9032/c693a816 backport][3.10] Fix Link-Local IPv6 Flags in the Resolver (#9048) Co-authored-by: Nathan Lee <nathan.lee@garmin.com> Co-authored-by: pre-commit-ci[bot] Co-authored-by: Sam Bull <git@sambull.org> Co-authored-by: J. Nick Koston <nick@koston.org> Co-authored-by: GitNMLee <89409038+GitNMLee@users.noreply.github.com> --- CHANGES/9032.bugfix.rst | 3 +++ aiohttp/resolver.py | 5 +++-- tests/test_resolver.py | 27 ++++++++++++++++++++++----- 3 files changed, 28 insertions(+), 7 deletions(-) create mode 100644 CHANGES/9032.bugfix.rst diff --git a/CHANGES/9032.bugfix.rst b/CHANGES/9032.bugfix.rst new file mode 100644 index 00000000000..8c8d81f6319 --- /dev/null +++ b/CHANGES/9032.bugfix.rst @@ -0,0 +1,3 @@ +Fixed the incorrect use of flags for ``getnameinfo()`` in the Resolver --by :user:`GitNMLee` + +Link-Local IPv6 addresses can now be handled by the Resolver correctly. diff --git a/aiohttp/resolver.py b/aiohttp/resolver.py index 10e36266abe..6283ec2b8d5 100644 --- a/aiohttp/resolver.py +++ b/aiohttp/resolver.py @@ -18,6 +18,7 @@ _NUMERIC_SOCKET_FLAGS = socket.AI_NUMERICHOST | socket.AI_NUMERICSERV +_NAME_SOCKET_FLAGS = socket.NI_NUMERICHOST | socket.NI_NUMERICSERV _SUPPORTS_SCOPE_ID = sys.version_info >= (3, 9, 0) @@ -54,7 +55,7 @@ async def resolve( # LL IPv6 is a VERY rare case. Strictly speaking, we should use # getnameinfo() unconditionally, but performance makes sense. resolved_host, _port = await self._loop.getnameinfo( - address, _NUMERIC_SOCKET_FLAGS + address, _NAME_SOCKET_FLAGS ) port = int(_port) else: @@ -122,7 +123,7 @@ async def resolve( # getnameinfo() unconditionally, but performance makes sense. 
result = await self._resolver.getnameinfo( (address[0].decode("ascii"), *address[1:]), - _NUMERIC_SOCKET_FLAGS, + _NAME_SOCKET_FLAGS, ) resolved_host = result.node else: diff --git a/tests/test_resolver.py b/tests/test_resolver.py index f51506a6999..8b2ea620037 100644 --- a/tests/test_resolver.py +++ b/tests/test_resolver.py @@ -3,12 +3,12 @@ import socket from ipaddress import ip_address from typing import Any, Awaitable, Callable, Collection, List, NamedTuple, Tuple, Union -from unittest.mock import Mock, patch +from unittest.mock import Mock, create_autospec, patch import pytest from aiohttp.resolver import ( - _NUMERIC_SOCKET_FLAGS, + _NAME_SOCKET_FLAGS, _SUPPORTS_SCOPE_ID, AsyncResolver, DefaultResolver, @@ -157,9 +157,7 @@ async def test_async_resolver_positive_link_local_ipv6_lookup(loop: Any) -> None port=0, type=socket.SOCK_STREAM, ) - mock().getnameinfo.assert_called_with( - ("fe80::1", 0, 0, 3), _NUMERIC_SOCKET_FLAGS - ) + mock().getnameinfo.assert_called_with(("fe80::1", 0, 0, 3), _NAME_SOCKET_FLAGS) @pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") @@ -218,12 +216,31 @@ async def test_threaded_resolver_positive_ipv6_link_local_lookup() -> None: loop = Mock() loop.getaddrinfo = fake_ipv6_addrinfo(["fe80::1"]) loop.getnameinfo = fake_ipv6_nameinfo("fe80::1%eth0") + + # Mock the fake function that was returned by helper functions + loop.getaddrinfo = create_autospec(loop.getaddrinfo) + loop.getnameinfo = create_autospec(loop.getnameinfo) + + # Set the correct return values for mock functions + loop.getaddrinfo.return_value = await fake_ipv6_addrinfo(["fe80::1"])() + loop.getnameinfo.return_value = await fake_ipv6_nameinfo("fe80::1%eth0")() + resolver = ThreadedResolver() resolver._loop = loop real = await resolver.resolve("www.python.org") assert real[0]["hostname"] == "www.python.org" ipaddress.ip_address(real[0]["host"]) + loop.getaddrinfo.assert_called_with( + "www.python.org", + 0, + type=socket.SOCK_STREAM, + 
family=socket.AF_INET, + flags=socket.AI_ADDRCONFIG, + ) + + loop.getnameinfo.assert_called_with(("fe80::1", 0, 0, 3), _NAME_SOCKET_FLAGS) + async def test_threaded_resolver_multiple_replies() -> None: loop = Mock() From 4d022e4e68eb93c01ac53fad88efab70df0a193d Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Fri, 6 Sep 2024 18:23:58 -0500 Subject: [PATCH 0506/1511] [PR #9029/466448c backport][3.10] Fix SSLContext creation in the TCPConnector with multiple loops (#9042) --- aiohttp/connector.py | 100 ++++++++++++++-------------------- tests/test_connector.py | 116 ++++++++++++++++++++++------------------ tests/test_proxy.py | 3 +- 3 files changed, 106 insertions(+), 113 deletions(-) diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 7c6e747695e..c25f184bbbe 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -50,14 +50,7 @@ ) from .client_proto import ResponseHandler from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params -from .helpers import ( - ceil_timeout, - is_ip_address, - noop, - sentinel, - set_exception, - set_result, -) +from .helpers import ceil_timeout, is_ip_address, noop, sentinel from .locks import EventResultOrError from .resolver import DefaultResolver @@ -748,6 +741,35 @@ def expired(self, key: Tuple[str, int]) -> bool: return self._timestamps[key] + self._ttl < monotonic() +def _make_ssl_context(verified: bool) -> SSLContext: + """Create SSL context. + + This method is not async-friendly and should be called from a thread + because it will load certificates from disk and do other blocking I/O. 
+ """ + if ssl is None: + # No ssl support + return None + if verified: + return ssl.create_default_context() + sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) + sslcontext.options |= ssl.OP_NO_SSLv2 + sslcontext.options |= ssl.OP_NO_SSLv3 + sslcontext.check_hostname = False + sslcontext.verify_mode = ssl.CERT_NONE + sslcontext.options |= ssl.OP_NO_COMPRESSION + sslcontext.set_default_verify_paths() + return sslcontext + + +# The default SSLContext objects are created at import time +# since they do blocking I/O to load certificates from disk, +# and imports should always be done before the event loop starts +# or in a thread. +_SSL_CONTEXT_VERIFIED = _make_ssl_context(True) +_SSL_CONTEXT_UNVERIFIED = _make_ssl_context(False) + + class TCPConnector(BaseConnector): """TCP connector. @@ -778,7 +800,6 @@ class TCPConnector(BaseConnector): """ allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"tcp"}) - _made_ssl_context: Dict[bool, "asyncio.Future[SSLContext]"] = {} def __init__( self, @@ -982,25 +1003,7 @@ async def _create_connection( return proto - @staticmethod - def _make_ssl_context(verified: bool) -> SSLContext: - """Create SSL context. - - This method is not async-friendly and should be called from a thread - because it will load certificates from disk and do other blocking I/O. - """ - if verified: - return ssl.create_default_context() - sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) - sslcontext.options |= ssl.OP_NO_SSLv2 - sslcontext.options |= ssl.OP_NO_SSLv3 - sslcontext.check_hostname = False - sslcontext.verify_mode = ssl.CERT_NONE - sslcontext.options |= ssl.OP_NO_COMPRESSION - sslcontext.set_default_verify_paths() - return sslcontext - - async def _get_ssl_context(self, req: ClientRequest) -> Optional[SSLContext]: + def _get_ssl_context(self, req: ClientRequest) -> Optional[SSLContext]: """Logic to get the correct SSL context 0. 
if req.ssl is false, return None @@ -1024,35 +1027,14 @@ async def _get_ssl_context(self, req: ClientRequest) -> Optional[SSLContext]: return sslcontext if sslcontext is not True: # not verified or fingerprinted - return await self._make_or_get_ssl_context(False) + return _SSL_CONTEXT_UNVERIFIED sslcontext = self._ssl if isinstance(sslcontext, ssl.SSLContext): return sslcontext if sslcontext is not True: # not verified or fingerprinted - return await self._make_or_get_ssl_context(False) - return await self._make_or_get_ssl_context(True) - - async def _make_or_get_ssl_context(self, verified: bool) -> SSLContext: - """Create or get cached SSL context.""" - try: - return await self._made_ssl_context[verified] - except KeyError: - loop = self._loop - future = loop.create_future() - self._made_ssl_context[verified] = future - try: - result = await loop.run_in_executor( - None, self._make_ssl_context, verified - ) - # BaseException is used since we might get CancelledError - except BaseException as ex: - del self._made_ssl_context[verified] - set_exception(future, ex) - raise - else: - set_result(future, result) - return result + return _SSL_CONTEXT_UNVERIFIED + return _SSL_CONTEXT_VERIFIED def _get_fingerprint(self, req: ClientRequest) -> Optional["Fingerprint"]: ret = req.ssl @@ -1204,13 +1186,11 @@ async def _start_tls_connection( ) -> Tuple[asyncio.BaseTransport, ResponseHandler]: """Wrap the raw TCP transport with TLS.""" tls_proto = self._factory() # Create a brand new proto for TLS - - # Safety of the `cast()` call here is based on the fact that - # internally `_get_ssl_context()` only returns `None` when - # `req.is_ssl()` evaluates to `False` which is never gonna happen - # in this code path. Of course, it's rather fragile - # maintainability-wise but this is to be solved separately. 
- sslcontext = cast(ssl.SSLContext, await self._get_ssl_context(req)) + sslcontext = self._get_ssl_context(req) + if TYPE_CHECKING: + # _start_tls_connection is unreachable in the current code path + # if sslcontext is None. + assert sslcontext is not None try: async with ceil_timeout( @@ -1288,7 +1268,7 @@ async def _create_direct_connection( *, client_error: Type[Exception] = ClientConnectorError, ) -> Tuple[asyncio.Transport, ResponseHandler]: - sslcontext = await self._get_ssl_context(req) + sslcontext = self._get_ssl_context(req) fingerprint = self._get_fingerprint(req) host = req.url.raw_host diff --git a/tests/test_connector.py b/tests/test_connector.py index 0129f0cc330..9f9dbe66c28 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -1,5 +1,4 @@ # Tests of http client with custom Connector - import asyncio import gc import hashlib @@ -9,8 +8,9 @@ import sys import uuid from collections import deque +from concurrent import futures from contextlib import closing, suppress -from typing import Any, List, Optional, Type +from typing import Any, List, Literal, Optional from unittest import mock import pytest @@ -18,10 +18,21 @@ from yarl import URL import aiohttp -from aiohttp import client, web -from aiohttp.client import ClientRequest, ClientTimeout +from aiohttp import ( + ClientRequest, + ClientTimeout, + client, + connector as connector_module, + web, +) from aiohttp.client_reqrep import ConnectionKey -from aiohttp.connector import Connection, TCPConnector, _DNSCacheTable +from aiohttp.connector import ( + _SSL_CONTEXT_UNVERIFIED, + _SSL_CONTEXT_VERIFIED, + Connection, + TCPConnector, + _DNSCacheTable, +) from aiohttp.locks import EventResultOrError from aiohttp.test_utils import make_mocked_coro, unused_port from aiohttp.tracing import Trace @@ -1540,23 +1551,11 @@ async def test_tcp_connector_clear_dns_cache_bad_args(loop) -> None: conn.clear_dns_cache("localhost") -async def test_dont_recreate_ssl_context() -> None: - conn = 
aiohttp.TCPConnector() - ctx = await conn._make_or_get_ssl_context(True) - assert ctx is await conn._make_or_get_ssl_context(True) - - -async def test_dont_recreate_ssl_context2() -> None: - conn = aiohttp.TCPConnector() - ctx = await conn._make_or_get_ssl_context(False) - assert ctx is await conn._make_or_get_ssl_context(False) - - async def test___get_ssl_context1() -> None: conn = aiohttp.TCPConnector() req = mock.Mock() req.is_ssl.return_value = False - assert await conn._get_ssl_context(req) is None + assert conn._get_ssl_context(req) is None async def test___get_ssl_context2(loop) -> None: @@ -1565,7 +1564,7 @@ async def test___get_ssl_context2(loop) -> None: req = mock.Mock() req.is_ssl.return_value = True req.ssl = ctx - assert await conn._get_ssl_context(req) is ctx + assert conn._get_ssl_context(req) is ctx async def test___get_ssl_context3(loop) -> None: @@ -1574,7 +1573,7 @@ async def test___get_ssl_context3(loop) -> None: req = mock.Mock() req.is_ssl.return_value = True req.ssl = True - assert await conn._get_ssl_context(req) is ctx + assert conn._get_ssl_context(req) is ctx async def test___get_ssl_context4(loop) -> None: @@ -1583,9 +1582,7 @@ async def test___get_ssl_context4(loop) -> None: req = mock.Mock() req.is_ssl.return_value = True req.ssl = False - assert await conn._get_ssl_context(req) is await conn._make_or_get_ssl_context( - False - ) + assert conn._get_ssl_context(req) is _SSL_CONTEXT_UNVERIFIED async def test___get_ssl_context5(loop) -> None: @@ -1594,9 +1591,7 @@ async def test___get_ssl_context5(loop) -> None: req = mock.Mock() req.is_ssl.return_value = True req.ssl = aiohttp.Fingerprint(hashlib.sha256(b"1").digest()) - assert await conn._get_ssl_context(req) is await conn._make_or_get_ssl_context( - False - ) + assert conn._get_ssl_context(req) is _SSL_CONTEXT_UNVERIFIED async def test___get_ssl_context6() -> None: @@ -1604,7 +1599,7 @@ async def test___get_ssl_context6() -> None: req = mock.Mock() req.is_ssl.return_value = True 
req.ssl = True - assert await conn._get_ssl_context(req) is await conn._make_or_get_ssl_context(True) + assert conn._get_ssl_context(req) is _SSL_CONTEXT_VERIFIED async def test_ssl_context_once() -> None: @@ -1616,31 +1611,9 @@ async def test_ssl_context_once() -> None: req = mock.Mock() req.is_ssl.return_value = True req.ssl = True - assert await conn1._get_ssl_context(req) is await conn1._make_or_get_ssl_context( - True - ) - assert await conn2._get_ssl_context(req) is await conn1._make_or_get_ssl_context( - True - ) - assert await conn3._get_ssl_context(req) is await conn1._make_or_get_ssl_context( - True - ) - assert conn1._made_ssl_context is conn2._made_ssl_context is conn3._made_ssl_context - assert True in conn1._made_ssl_context - - -@pytest.mark.parametrize("exception", [OSError, ssl.SSLError, asyncio.CancelledError]) -async def test_ssl_context_creation_raises(exception: Type[BaseException]) -> None: - """Test that we try again if SSLContext creation fails the first time.""" - conn = aiohttp.TCPConnector() - conn._made_ssl_context.clear() - - with mock.patch.object( - conn, "_make_ssl_context", side_effect=exception - ), pytest.raises(exception): - await conn._make_or_get_ssl_context(True) - - assert isinstance(await conn._make_or_get_ssl_context(True), ssl.SSLContext) + assert conn1._get_ssl_context(req) is _SSL_CONTEXT_VERIFIED + assert conn2._get_ssl_context(req) is _SSL_CONTEXT_VERIFIED + assert conn3._get_ssl_context(req) is _SSL_CONTEXT_VERIFIED async def test_close_twice(loop) -> None: @@ -2717,3 +2690,42 @@ async def allow_connection_and_add_dummy_waiter(): ) await connector.close() + + +def test_connector_multiple_event_loop() -> None: + """Test the connector with multiple event loops.""" + + async def async_connect() -> Literal[True]: + conn = aiohttp.TCPConnector() + loop = asyncio.get_running_loop() + req = ClientRequest("GET", URL("https://127.0.0.1"), loop=loop) + with suppress(aiohttp.ClientConnectorError): + with mock.patch.object( + 
conn._loop, + "create_connection", + autospec=True, + spec_set=True, + side_effect=ssl.CertificateError, + ): + await conn.connect(req, [], ClientTimeout()) + return True + + def test_connect() -> Literal[True]: + loop = asyncio.new_event_loop() + try: + return loop.run_until_complete(async_connect()) + finally: + loop.close() + + with futures.ThreadPoolExecutor() as executor: + res_list = [executor.submit(test_connect) for _ in range(2)] + raw_response_list = [res.result() for res in futures.as_completed(res_list)] + + assert raw_response_list == [True, True] + + +def test_default_ssl_context_creation_without_ssl() -> None: + """Verify _make_ssl_context does not raise when ssl is not available.""" + with mock.patch.object(connector_module, "ssl", None): + assert connector_module._make_ssl_context(False) is None + assert connector_module._make_ssl_context(True) is None diff --git a/tests/test_proxy.py b/tests/test_proxy.py index c5e98deb8a5..4fa5e932098 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -12,6 +12,7 @@ import aiohttp from aiohttp.client_reqrep import ClientRequest, ClientResponse +from aiohttp.connector import _SSL_CONTEXT_VERIFIED from aiohttp.helpers import TimerNoop from aiohttp.test_utils import make_mocked_coro @@ -817,7 +818,7 @@ async def make_conn(): self.loop.start_tls.assert_called_with( mock.ANY, mock.ANY, - self.loop.run_until_complete(connector._make_or_get_ssl_context(True)), + _SSL_CONTEXT_VERIFIED, server_hostname="www.python.org", ssl_handshake_timeout=mock.ANY, ) From 8f30c91788196c774b31c1c23311d27a7ca04fc9 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 6 Sep 2024 23:51:00 +0000 Subject: [PATCH 0507/1511] [PR #9031/9f0ae074 backport][3.11] Avoid tracing overhead in http_writer when there are no traces (#9046) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/9031.misc.rst | 1 + aiohttp/client_reqrep.py | 12 ++++++++---- 2 files changed, 9 insertions(+), 4 deletions(-) create mode 100644 CHANGES/9031.misc.rst diff --git a/CHANGES/9031.misc.rst b/CHANGES/9031.misc.rst new file mode 100644 index 00000000000..1deab5230f7 --- /dev/null +++ b/CHANGES/9031.misc.rst @@ -0,0 +1 @@ +Tracing overhead is avoided in the http writer when there are no active traces -- by user:`bdraco`. diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index d7d5f63ec18..4ea1070a0fd 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -694,11 +694,15 @@ async def send(self, conn: "Connection") -> "ClientResponse": writer = StreamWriter( protocol, self.loop, - on_chunk_sent=functools.partial( - self._on_chunk_request_sent, self.method, self.url + on_chunk_sent=( + functools.partial(self._on_chunk_request_sent, self.method, self.url) + if self._traces + else None ), - on_headers_sent=functools.partial( - self._on_headers_request_sent, self.method, self.url + on_headers_sent=( + functools.partial(self._on_headers_request_sent, self.method, self.url) + if self._traces + else None ), ) From 5e99c4f1c74fa07426541f0b4ab8a0ea3a36518c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 7 Sep 2024 21:18:43 +0000 Subject: [PATCH 0508/1511] Bump yarl from 1.9.7 to 1.10.0 (#9056) Bumps [yarl](https://github.com/aio-libs/yarl) from 1.9.7 to 1.10.0. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/yarl/releases">yarl's releases</a>.</em></p> <blockquote> <h2>1.10.0</h2> <h2>Bug fixes</h2> <ul> <li> <p>Fixed joining a path when the existing path was empty -- by :user:<code>bdraco</code>.</p> <p>A regression in :meth:<code>URL.join() <yarl.URL.join></code> was introduced in <a href="https://redirect.github.com/aio-libs/yarl/issues/1082">#1082</a>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/yarl/issues/1118">#1118</a>.</p> </li> </ul> <h2>Features</h2> <ul> <li> <p>Added :meth:<code>URL.without_query_params() <yarl.URL.without_query_params></code> method, to drop some parameters from query string -- by :user:<code>hongquan</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/yarl/issues/774">#774</a>, <a href="https://redirect.github.com/aio-libs/yarl/issues/898">#898</a>, <a href="https://redirect.github.com/aio-libs/yarl/issues/1010">#1010</a>.</p> </li> <li> <p>The previously protected types <code>_SimpleQuery</code>, <code>_QueryVariable</code>, and <code>_Query</code> are now available for use externally as <code>SimpleQuery</code>, <code>QueryVariable</code>, and <code>Query</code> -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/yarl/issues/1050">#1050</a>, <a href="https://redirect.github.com/aio-libs/yarl/issues/1113">#1113</a>.</p> </li> </ul> <h2>Contributor-facing changes</h2> <ul> <li> <p>Replaced all :class:<code>~typing.Optional</code> with :class:<code>~typing.Union</code> -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/yarl/issues/1095">#1095</a>.</p> </li> </ul> <h2>Miscellaneous internal changes</h2> <ul> <li> <p>Significantly improved 
performance of parsing the network location -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/yarl/issues/1112">#1112</a>.</p> </li> <li> <p>Added internal types to the cache to prevent future refactoring errors -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/yarl/issues/1117">#1117</a>.</p> </li> </ul> <hr /> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/yarl/blob/master/CHANGES.rst">yarl's changelog</a>.</em></p> <blockquote> <h1>1.10.0</h1> <p><em>(2024-09-06)</em></p> <h2>Bug fixes</h2> <ul> <li> <p>Fixed joining a path when the existing path was empty -- by :user:<code>bdraco</code>.</p> <p>A regression in :meth:<code>URL.join() <yarl.URL.join></code> was introduced in :issue:<code>1082</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1118</code>.</p> </li> </ul> <h2>Features</h2> <ul> <li> <p>Added :meth:<code>URL.without_query_params() <yarl.URL.without_query_params></code> method, to drop some parameters from query string -- by :user:<code>hongquan</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>774</code>, :issue:<code>898</code>, :issue:<code>1010</code>.</p> </li> <li> <p>The previously protected types <code>_SimpleQuery</code>, <code>_QueryVariable</code>, and <code>_Query</code> are now available for use externally as <code>SimpleQuery</code>, <code>QueryVariable</code>, and <code>Query</code> -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1050</code>, :issue:<code>1113</code>.</p> </li> </ul> <h2>Contributor-facing changes</h2> <ul> <li> <p>Replaced all :class:<code>~typing.Optional</code> with :class:<code>~typing.Union</code> -- 
by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1095</code>.</p> </li> </ul> <h2>Miscellaneous internal changes</h2> <ul> <li> <p>Significantly improved performance of parsing the network location -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1112</code>.</p> </li> <li> <p>Added internal types to the cache to prevent future refactoring errors -- by :user:<code>bdraco</code>.</p> </li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/yarl/commit/67c2c02243d4e2417fc22f23151764ff32d9c5a0"><code>67c2c02</code></a> Release 1.10.0</li> <li><a href="https://github.com/aio-libs/yarl/commit/ad8a264a863f567c6d9a51255c9e94ebeae8c80e"><code>ad8a264</code></a> Adjust formatting of changelog message for <a href="https://redirect.github.com/aio-libs/yarl/issues/1095">#1095</a> (<a href="https://redirect.github.com/aio-libs/yarl/issues/1121">#1121</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/ac4b58b1768df88d1d5a3a543c69542fad0d0c05"><code>ac4b58b</code></a> Fix merging of wheels during release process (<a href="https://redirect.github.com/aio-libs/yarl/issues/1120">#1120</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/0f01077432e7f0bf0065e3060920a1052a0fd41c"><code>0f01077</code></a> Fix joining a path when the existing path is empty (<a href="https://redirect.github.com/aio-libs/yarl/issues/1118">#1118</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/c3e419758ec7693e2c87c1cfcd06065984c36b15"><code>c3e4197</code></a> Add types to the internal cache (<a href="https://redirect.github.com/aio-libs/yarl/issues/1117">#1117</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/15c29ad00696e8ac8e28b0e7b0e362ef2f1ec0a1"><code>15c29ad</code></a> Reclassify <a 
href="https://redirect.github.com/aio-libs/yarl/issues/1113">#1113</a> as a feature (<a href="https://redirect.github.com/aio-libs/yarl/issues/1115">#1115</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/bb585b0f9c2c29a22887893482c18c6c6c7b5a1c"><code>bb585b0</code></a> Significantly speed up parsing netloc in URL objects (<a href="https://redirect.github.com/aio-libs/yarl/issues/1112">#1112</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/926b9dc4712975be3e06e7199c361edaa3e9f711"><code>926b9dc</code></a> Make <code>SimpleQuery</code>, <code>QueryVariable</code>, and <code>Query</code> types available (<a href="https://redirect.github.com/aio-libs/yarl/issues/1113">#1113</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/2c531c7caefb1de7d24ed1ba01550d6dea250718"><code>2c531c7</code></a> Add new method <code>without_query_params</code> to drop query keys (<a href="https://redirect.github.com/aio-libs/yarl/issues/898">#898</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/d080a2e37e0254384cf4c38c43680622e55aacc1"><code>d080a2e</code></a> Increment version to 1.10.0.dev0 (<a href="https://redirect.github.com/aio-libs/yarl/issues/1114">#1114</a>)</li> <li>Additional commits viewable in <a href="https://github.com/aio-libs/yarl/compare/v1.9.7...v1.10.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=yarl&package-manager=pip&previous-version=1.9.7&new-version=1.10.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/web_urldispatcher.py | 2 +- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 7d74dd7ab16..07c8f6e6ff3 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -39,7 +39,7 @@ cast, ) -from yarl import URL, __version__ as yarl_version # type: ignore[attr-defined] +from yarl import URL, __version__ as yarl_version from . import hdrs from .abc import AbstractMatchInfo, AbstractRouter, AbstractView diff --git a/requirements/base.txt b/requirements/base.txt index 5c629050db9..e828e202e26 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -38,5 +38,5 @@ pycparser==2.22 # via cffi uvloop==0.20.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in -yarl==1.9.7 +yarl==1.10.0 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 420d46bbf6c..b7368d5d3a0 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -286,7 +286,7 @@ webcolors==24.8.0 # via blockdiag wheel==0.44.0 # via pip-tools -yarl==1.9.7 +yarl==1.10.0 # via -r requirements/runtime-deps.in zipp==3.20.1 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index e5ed6030ae1..475eb077b8c 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -278,7 +278,7 @@ webcolors==24.8.0 # via blockdiag wheel==0.44.0 # via pip-tools -yarl==1.9.7 +yarl==1.10.0 # via -r requirements/runtime-deps.in zipp==3.20.1 # via diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index be37aa14544..29d2f84db32 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -32,5 +32,5 @@ pycares==4.4.0 # via aiodns pycparser==2.22 # via cffi 
-yarl==1.9.7 +yarl==1.10.0 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 82dd51bcf50..b5b839c2a48 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -136,5 +136,5 @@ uvloop==0.20.0 ; platform_system != "Windows" and implementation_name == "cpytho # via -r requirements/base.in wait-for-it==2.2.2 # via -r requirements/test.in -yarl==1.9.7 +yarl==1.10.0 # via -r requirements/runtime-deps.in From f631015d44e1e452f405c7b6057ee8794abaed56 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 7 Sep 2024 21:18:50 +0000 Subject: [PATCH 0509/1511] [PR #9054/c3da10cd backport][3.10] Sync reify Cython implementation with yarl (#9057) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/9054.misc.rst | 1 + aiohttp/_helpers.pyx | 22 +++++++++++----------- 2 files changed, 12 insertions(+), 11 deletions(-) create mode 100644 CHANGES/9054.misc.rst diff --git a/CHANGES/9054.misc.rst b/CHANGES/9054.misc.rst new file mode 100644 index 00000000000..ddc71f453e5 --- /dev/null +++ b/CHANGES/9054.misc.rst @@ -0,0 +1 @@ +Improved performance of reify Cython implementation -- by :user:`bdraco`. diff --git a/aiohttp/_helpers.pyx b/aiohttp/_helpers.pyx index 665f367c5de..5f089225dc8 100644 --- a/aiohttp/_helpers.pyx +++ b/aiohttp/_helpers.pyx @@ -1,3 +1,6 @@ + +cdef _sentinel = object() + cdef class reify: """Use as a class method decorator. 
It operates almost exactly like the Python `@property` decorator, but it puts the result of the @@ -19,17 +22,14 @@ cdef class reify: return self.wrapped.__doc__ def __get__(self, inst, owner): - try: - try: - return inst._cache[self.name] - except KeyError: - val = self.wrapped(inst) - inst._cache[self.name] = val - return val - except AttributeError: - if inst is None: - return self - raise + if inst is None: + return self + cdef dict cache = inst._cache + val = cache.get(self.name, _sentinel) + if val is _sentinel: + val = self.wrapped(inst) + cache[self.name] = val + return val def __set__(self, inst, value): raise AttributeError("reified property is read-only") From 2f30ac47c48d3973383673ef6d3a85aca87a9e69 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 7 Sep 2024 21:18:58 +0000 Subject: [PATCH 0510/1511] [PR #9054/c3da10cd backport][3.11] Sync reify Cython implementation with yarl (#9058) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/9054.misc.rst | 1 + aiohttp/_helpers.pyx | 22 +++++++++++----------- 2 files changed, 12 insertions(+), 11 deletions(-) create mode 100644 CHANGES/9054.misc.rst diff --git a/CHANGES/9054.misc.rst b/CHANGES/9054.misc.rst new file mode 100644 index 00000000000..ddc71f453e5 --- /dev/null +++ b/CHANGES/9054.misc.rst @@ -0,0 +1 @@ +Improved performance of reify Cython implementation -- by :user:`bdraco`. diff --git a/aiohttp/_helpers.pyx b/aiohttp/_helpers.pyx index 665f367c5de..5f089225dc8 100644 --- a/aiohttp/_helpers.pyx +++ b/aiohttp/_helpers.pyx @@ -1,3 +1,6 @@ + +cdef _sentinel = object() + cdef class reify: """Use as a class method decorator. 
It operates almost exactly like the Python `@property` decorator, but it puts the result of the @@ -19,17 +22,14 @@ cdef class reify: return self.wrapped.__doc__ def __get__(self, inst, owner): - try: - try: - return inst._cache[self.name] - except KeyError: - val = self.wrapped(inst) - inst._cache[self.name] = val - return val - except AttributeError: - if inst is None: - return self - raise + if inst is None: + return self + cdef dict cache = inst._cache + val = cache.get(self.name, _sentinel) + if val is _sentinel: + val = self.wrapped(inst) + cache[self.name] = val + return val def __set__(self, inst, value): raise AttributeError("reified property is read-only") From d049f4d1321ad1e1f5ba18063e47b723f3175f06 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 7 Sep 2024 21:56:11 +0000 Subject: [PATCH 0511/1511] [PR #9055/11a96fcc backport][3.10] Add xfail test for issue #5180 (#9059) Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_web_websocket_functional.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index 2be54486ee9..5ddcdbce7d0 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -1119,3 +1119,23 @@ async def on_shutdown(app: web.Application) -> None: assert reply.extra == "Server shutdown" assert websocket.closed is True + + +@pytest.mark.xfail(reason="close never reaches client per issue #5180") +async def test_ws_close_return_code(aiohttp_client: AiohttpClient) -> None: + """Test that the close code is returned when the server closes the connection.""" + + async def handler(request: web.Request) -> web.WebSocketResponse: + ws = web.WebSocketResponse() + await ws.prepare(request) + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + resp = await 
client.ws_connect("/") + await resp.send_str("some data") + await asyncio.sleep(0.1) + await resp.receive() + assert resp.close_code is WSCloseCode.OK From 2d6898a0d368b48c9039244a819fcd1a246288bc Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 7 Sep 2024 22:05:43 +0000 Subject: [PATCH 0512/1511] [PR #9055/11a96fcc backport][3.11] Add xfail test for issue #5180 (#9060) Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_web_websocket_functional.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index 0ebd41db502..dfe3f59c5fd 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -1119,3 +1119,23 @@ async def on_shutdown(app: web.Application) -> None: assert reply.extra == "Server shutdown" assert websocket.closed is True + + +@pytest.mark.xfail(reason="close never reaches client per issue #5180") +async def test_ws_close_return_code(aiohttp_client: AiohttpClient) -> None: + """Test that the close code is returned when the server closes the connection.""" + + async def handler(request: web.Request) -> web.WebSocketResponse: + ws = web.WebSocketResponse() + await ws.prepare(request) + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + resp = await client.ws_connect("/") + await resp.send_str("some data") + await asyncio.sleep(0.1) + await resp.receive() + assert resp.close_code is WSCloseCode.OK From 69ed00f069df6a753b0e8792d3b68dc209af9813 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sun, 8 Sep 2024 12:59:10 -0500 Subject: [PATCH 0513/1511] [PR #9065/b6196e7 backport][3.10] Add coverage for combining an existing query string with params (#9066) --- tests/test_client_functional.py | 37 +++++++++++++++++++++++++++++++-- 1 file changed, 35 insertions(+), 2 deletions(-) diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 7de195264ac..58add05f577 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -672,8 +672,41 @@ async def handler(request): assert 200 == resp.status -async def test_drop_params_on_redirect(aiohttp_client) -> None: - async def handler_redirect(request): +async def test_params_and_query_string(aiohttp_client: AiohttpClient) -> None: + """Test combining params with an existing query_string.""" + + async def handler(request: web.Request) -> web.Response: + assert request.rel_url.query_string == "q=abc&q=test&d=dog" + return web.Response() + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + + async with client.get("/?q=abc", params="q=test&d=dog") as resp: + assert resp.status == 200 + + +@pytest.mark.parametrize("params", [None, "", {}, MultiDict()]) +async def test_empty_params_and_query_string( + aiohttp_client: AiohttpClient, params: Any +) -> None: + """Test combining empty params with an existing query_string.""" + + async def handler(request: web.Request) -> web.Response: + assert request.rel_url.query_string == "q=abc" + return web.Response() + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + + async with client.get("/?q=abc", params=params) as resp: + assert resp.status == 200 + + +async def test_drop_params_on_redirect(aiohttp_client: AiohttpClient) -> None: + async def handler_redirect(request: web.Request) -> web.Response: return web.Response(status=301, headers={"Location": "/ok?a=redirect"}) async def 
handler_ok(request): From e34f91bdee226f81467101eae895d3a01b1e1ab9 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 8 Sep 2024 13:04:49 -0500 Subject: [PATCH 0514/1511] [PR #9065/b6196e7 backport][3.11] Add coverage for combining an existing query string with params (#9067) --- tests/test_client_functional.py | 37 +++++++++++++++++++++++++++++++-- 1 file changed, 35 insertions(+), 2 deletions(-) diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 74c4d99765e..a350171dacf 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -672,8 +672,41 @@ async def handler(request): assert 200 == resp.status -async def test_drop_params_on_redirect(aiohttp_client) -> None: - async def handler_redirect(request): +async def test_params_and_query_string(aiohttp_client: AiohttpClient) -> None: + """Test combining params with an existing query_string.""" + + async def handler(request: web.Request) -> web.Response: + assert request.rel_url.query_string == "q=abc&q=test&d=dog" + return web.Response() + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + + async with client.get("/?q=abc", params="q=test&d=dog") as resp: + assert resp.status == 200 + + +@pytest.mark.parametrize("params", [None, "", {}, MultiDict()]) +async def test_empty_params_and_query_string( + aiohttp_client: AiohttpClient, params: Any +) -> None: + """Test combining empty params with an existing query_string.""" + + async def handler(request: web.Request) -> web.Response: + assert request.rel_url.query_string == "q=abc" + return web.Response() + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + + async with client.get("/?q=abc", params=params) as resp: + assert resp.status == 200 + + +async def test_drop_params_on_redirect(aiohttp_client: AiohttpClient) -> None: + async def handler_redirect(request: web.Request) 
-> web.Response: return web.Response(status=301, headers={"Location": "/ok?a=redirect"}) async def handler_ok(request): From 2bc41815ac71bb1c5a759b1e0452ddd76ee3f083 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Mon, 9 Sep 2024 00:11:07 +0100 Subject: [PATCH 0515/1511] [3.11] Drop async-timeout from dependencies (#9069) --- README.rst | 1 - docs/index.rst | 1 - 2 files changed, 2 deletions(-) diff --git a/README.rst b/README.rst index 45b647437e3..470ced9b29c 100644 --- a/README.rst +++ b/README.rst @@ -157,7 +157,6 @@ Please add *aiohttp* tag to your question there. Requirements ============ -- async-timeout_ - attrs_ - multidict_ - yarl_ diff --git a/docs/index.rst b/docs/index.rst index 9692152cb99..4ce20aca643 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -147,7 +147,6 @@ or have some suggestion in order to improve the library. Dependencies ============ -- *async_timeout* - *attrs* - *multidict* - *yarl* From d36317b472dfb72d57815448a6356235dc3cca0c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 8 Sep 2024 23:36:31 +0000 Subject: [PATCH 0516/1511] [PR #9069/2bc41815 backport][3.10] [3.11] Drop async-timeout from dependencies (#9070) **This is a backport of PR #9069 as merged into 3.11 (2bc41815ac71bb1c5a759b1e0452ddd76ee3f083).** Co-authored-by: Sam Bull <git@sambull.org> --- README.rst | 1 - docs/index.rst | 1 - 2 files changed, 2 deletions(-) diff --git a/README.rst b/README.rst index 45b647437e3..470ced9b29c 100644 --- a/README.rst +++ b/README.rst @@ -157,7 +157,6 @@ Please add *aiohttp* tag to your question there. Requirements ============ -- async-timeout_ - attrs_ - multidict_ - yarl_ diff --git a/docs/index.rst b/docs/index.rst index 9692152cb99..4ce20aca643 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -147,7 +147,6 @@ or have some suggestion in order to improve the library. 
Dependencies ============ -- *async_timeout* - *attrs* - *multidict* - *yarl* From 3794391a39f5314615e2f687caa9ff7361a45a06 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 8 Sep 2024 19:29:07 -0500 Subject: [PATCH 0517/1511] [PR #9072/7fb1631 backport][3.11] Bump yarl to 1.11.0 (#9074) --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index e828e202e26..87300dd8515 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -38,5 +38,5 @@ pycparser==2.22 # via cffi uvloop==0.20.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in -yarl==1.10.0 +yarl==1.11.0 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index b7368d5d3a0..2b04b656eb6 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -286,7 +286,7 @@ webcolors==24.8.0 # via blockdiag wheel==0.44.0 # via pip-tools -yarl==1.10.0 +yarl==1.11.0 # via -r requirements/runtime-deps.in zipp==3.20.1 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 475eb077b8c..a8e08879fd3 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -278,7 +278,7 @@ webcolors==24.8.0 # via blockdiag wheel==0.44.0 # via pip-tools -yarl==1.10.0 +yarl==1.11.0 # via -r requirements/runtime-deps.in zipp==3.20.1 # via diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 29d2f84db32..89e30717677 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -32,5 +32,5 @@ pycares==4.4.0 # via aiodns pycparser==2.22 # via cffi -yarl==1.10.0 +yarl==1.11.0 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index b5b839c2a48..a065d607643 100644 --- 
a/requirements/test.txt +++ b/requirements/test.txt @@ -136,5 +136,5 @@ uvloop==0.20.0 ; platform_system != "Windows" and implementation_name == "cpytho # via -r requirements/base.in wait-for-it==2.2.2 # via -r requirements/test.in -yarl==1.10.0 +yarl==1.11.0 # via -r requirements/runtime-deps.in From 965aac1ebbd90bbec92a1ba11094949c698d4165 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 8 Sep 2024 19:47:15 -0500 Subject: [PATCH 0518/1511] [PR #9072/7fb1631 backport][3.10] Bump yarl to 1.11.0 (#9073) --- aiohttp/web_urldispatcher.py | 2 +- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 0b300e84da1..c302351500b 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -39,7 +39,7 @@ cast, ) -from yarl import URL, __version__ as yarl_version # type: ignore[attr-defined] +from yarl import URL, __version__ as yarl_version from . 
import hdrs from .abc import AbstractMatchInfo, AbstractRouter, AbstractView diff --git a/requirements/base.txt b/requirements/base.txt index 0ab66407cde..6604aa0f6c5 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -38,5 +38,5 @@ pycparser==2.21 # via cffi uvloop==0.19.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in -yarl==1.9.6 +yarl==1.11.0 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 45a2ce5dea1..f3109af5fb7 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -274,7 +274,7 @@ webcolors==1.11.1 # via blockdiag wheel==0.37.0 # via pip-tools -yarl==1.9.6 +yarl==1.11.0 # via -r requirements/runtime-deps.in zipp==3.17.0 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index d7b0fbc81c7..54c0157b01a 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -261,7 +261,7 @@ webcolors==1.13 # via blockdiag wheel==0.41.0 # via pip-tools -yarl==1.9.6 +yarl==1.11.0 # via -r requirements/runtime-deps.in zipp==3.17.0 # via diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 3a0b956d3de..279a9525fc5 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -32,5 +32,5 @@ pycares==4.3.0 # via aiodns pycparser==2.21 # via cffi -yarl==1.9.6 +yarl==1.11.0 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index daae1ddb6ef..ad9ec0ace39 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -125,5 +125,5 @@ uvloop==0.19.0 ; platform_system != "Windows" and implementation_name == "cpytho # via -r requirements/base.in wait-for-it==2.2.2 # via -r requirements/test.in -yarl==1.9.6 +yarl==1.11.0 # via -r requirements/runtime-deps.in From 5b2b77dda5d52468e826252df3d736f2de8c4965 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 9 Sep 
2024 01:18:13 +0000 Subject: [PATCH 0519/1511] [PR #9071/a33270bc backport][3.10] Add tests for websocket close (#9075) Co-authored-by: J. Nick Koston <nick@koston.org> closes #5180 --- tests/test_web_websocket_functional.py | 28 +++++++++++++++++++++++--- 1 file changed, 25 insertions(+), 3 deletions(-) diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index 5ddcdbce7d0..42faff8e517 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -1121,10 +1121,31 @@ async def on_shutdown(app: web.Application) -> None: assert websocket.closed is True -@pytest.mark.xfail(reason="close never reaches client per issue #5180") async def test_ws_close_return_code(aiohttp_client: AiohttpClient) -> None: """Test that the close code is returned when the server closes the connection.""" + async def handler(request: web.Request) -> web.WebSocketResponse: + ws = web.WebSocketResponse() + await ws.prepare(request) + await ws.receive() + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + resp = await client.ws_connect("/") + await resp.send_str("some data") + msg = await resp.receive() + assert msg.type is aiohttp.WSMsgType.CLOSE + assert resp.close_code == WSCloseCode.OK + + +async def test_abnormal_closure_when_server_does_not_receive( + aiohttp_client: AiohttpClient, +) -> None: + """Test abnormal closure when the server closes and a message is pending.""" + async def handler(request: web.Request) -> web.WebSocketResponse: ws = web.WebSocketResponse() await ws.prepare(request) @@ -1137,5 +1158,6 @@ async def handler(request: web.Request) -> web.WebSocketResponse: resp = await client.ws_connect("/") await resp.send_str("some data") await asyncio.sleep(0.1) - await resp.receive() - assert resp.close_code is WSCloseCode.OK + msg = await resp.receive() + assert msg.type is aiohttp.WSMsgType.CLOSE + assert 
resp.close_code == WSCloseCode.ABNORMAL_CLOSURE From 9587c45452b0ec2eb656aa502d176152a2eb4fe2 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 9 Sep 2024 01:36:56 +0000 Subject: [PATCH 0520/1511] [PR #9071/a33270bc backport][3.11] Add tests for websocket close (#9076) Co-authored-by: J. Nick Koston <nick@koston.org> closes #5180 --- tests/test_web_websocket_functional.py | 28 +++++++++++++++++++++++--- 1 file changed, 25 insertions(+), 3 deletions(-) diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index dfe3f59c5fd..5770eee87dc 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -1121,10 +1121,31 @@ async def on_shutdown(app: web.Application) -> None: assert websocket.closed is True -@pytest.mark.xfail(reason="close never reaches client per issue #5180") async def test_ws_close_return_code(aiohttp_client: AiohttpClient) -> None: """Test that the close code is returned when the server closes the connection.""" + async def handler(request: web.Request) -> web.WebSocketResponse: + ws = web.WebSocketResponse() + await ws.prepare(request) + await ws.receive() + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + resp = await client.ws_connect("/") + await resp.send_str("some data") + msg = await resp.receive() + assert msg.type is aiohttp.WSMsgType.CLOSE + assert resp.close_code == WSCloseCode.OK + + +async def test_abnormal_closure_when_server_does_not_receive( + aiohttp_client: AiohttpClient, +) -> None: + """Test abnormal closure when the server closes and a message is pending.""" + async def handler(request: web.Request) -> web.WebSocketResponse: ws = web.WebSocketResponse() await ws.prepare(request) @@ -1137,5 +1158,6 @@ async def handler(request: web.Request) -> web.WebSocketResponse: resp = await client.ws_connect("/") await 
resp.send_str("some data") await asyncio.sleep(0.1) - await resp.receive() - assert resp.close_code is WSCloseCode.OK + msg = await resp.receive() + assert msg.type is aiohttp.WSMsgType.CLOSE + assert resp.close_code == WSCloseCode.ABNORMAL_CLOSURE From 7fd5e73eb44d3f6a5df807d2393d46481a8c1c56 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 8 Sep 2024 20:57:40 -0500 Subject: [PATCH 0521/1511] [PR #9068/841d00e backport][3.11] Use URL.extend_query to add params in `ClientRequest` (#9078) --- CHANGES/9068.misc.rst | 3 +++ aiohttp/client_reqrep.py | 14 +++++++++----- tests/test_client_functional.py | 26 +++++++++++++++++++------- 3 files changed, 31 insertions(+), 12 deletions(-) create mode 100644 CHANGES/9068.misc.rst diff --git a/CHANGES/9068.misc.rst b/CHANGES/9068.misc.rst new file mode 100644 index 00000000000..7ce5ec5c839 --- /dev/null +++ b/CHANGES/9068.misc.rst @@ -0,0 +1,3 @@ +Use :meth:`URL.extend_query() <yarl.URL.extend_query>` to extend query params (requires yarl 1.11.0+) -- by :user:`bdraco`. + +If yarl is older than 1.11.0, the previous slower hand rolled version will be used. diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 4ea1070a0fd..e0232a40c4c 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -27,7 +27,7 @@ import attr from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy -from yarl import URL +from yarl import URL, __version__ as yarl_version from . 
import hdrs, helpers, http, multipart, payload from .abc import AbstractStreamWriter @@ -88,6 +88,7 @@ _CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]") +_YARL_SUPPORTS_EXTEND_QUERY = tuple(map(int, yarl_version.split(".")[:2])) >= (1, 11) json_re = re.compile(r"^application/(?:[\w.+-]+?\+)?json") @@ -299,10 +300,13 @@ def __init__( # assert session is not None self._session = cast("ClientSession", session) if params: - q = MultiDict(url.query) - url2 = url.with_query(params) - q.extend(url2.query) - url = url.with_query(q) + if _YARL_SUPPORTS_EXTEND_QUERY: + url = url.extend_query(params) + else: + q = MultiDict(url.query) + url2 = url.with_query(params) + q.extend(url2.query) + url = url.with_query(q) self.original_url = url self.url = url.with_fragment(None) self.method = method.upper() diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index a350171dacf..4edbfa2cfeb 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -20,7 +20,7 @@ from yarl import URL import aiohttp -from aiohttp import Fingerprint, ServerFingerprintMismatch, hdrs, web +from aiohttp import Fingerprint, ServerFingerprintMismatch, client_reqrep, hdrs, web from aiohttp.abc import AbstractResolver from aiohttp.client_exceptions import ( InvalidURL, @@ -672,7 +672,10 @@ async def handler(request): assert 200 == resp.status -async def test_params_and_query_string(aiohttp_client: AiohttpClient) -> None: +@pytest.mark.parametrize("yarl_supports_extend_query", [True, False]) +async def test_params_and_query_string( + aiohttp_client: AiohttpClient, yarl_supports_extend_query: bool +) -> None: """Test combining params with an existing query_string.""" async def handler(request: web.Request) -> web.Response: @@ -683,13 +686,18 @@ async def handler(request: web.Request) -> web.Response: app.router.add_route("GET", "/", handler) client = await aiohttp_client(app) - async with client.get("/?q=abc", params="q=test&d=dog") as 
resp: - assert resp.status == 200 + # Ensure the old path is tested for old yarl versions + with mock.patch.object( + client_reqrep, "_YARL_SUPPORTS_EXTEND_QUERY", yarl_supports_extend_query + ): + async with client.get("/?q=abc", params="q=test&d=dog") as resp: + assert resp.status == 200 @pytest.mark.parametrize("params", [None, "", {}, MultiDict()]) +@pytest.mark.parametrize("yarl_supports_extend_query", [True, False]) async def test_empty_params_and_query_string( - aiohttp_client: AiohttpClient, params: Any + aiohttp_client: AiohttpClient, params: Any, yarl_supports_extend_query: bool ) -> None: """Test combining empty params with an existing query_string.""" @@ -701,8 +709,12 @@ async def handler(request: web.Request) -> web.Response: app.router.add_route("GET", "/", handler) client = await aiohttp_client(app) - async with client.get("/?q=abc", params=params) as resp: - assert resp.status == 200 + # Ensure the old path is tested for old yarl versions + with mock.patch.object( + client_reqrep, "_YARL_SUPPORTS_EXTEND_QUERY", yarl_supports_extend_query + ): + async with client.get("/?q=abc", params=params) as resp: + assert resp.status == 200 async def test_drop_params_on_redirect(aiohttp_client: AiohttpClient) -> None: From 524bd16663338a535f1002b44e91bfa9206337fb Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sun, 8 Sep 2024 20:57:48 -0500 Subject: [PATCH 0522/1511] [PR #9068/841d00e backport][3.10] Use URL.extend_query to add params in `ClientRequest` (#9077) --- CHANGES/9068.misc.rst | 3 +++ aiohttp/client_reqrep.py | 14 +++++++++----- tests/test_client_functional.py | 26 +++++++++++++++++++------- 3 files changed, 31 insertions(+), 12 deletions(-) create mode 100644 CHANGES/9068.misc.rst diff --git a/CHANGES/9068.misc.rst b/CHANGES/9068.misc.rst new file mode 100644 index 00000000000..7ce5ec5c839 --- /dev/null +++ b/CHANGES/9068.misc.rst @@ -0,0 +1,3 @@ +Use :meth:`URL.extend_query() <yarl.URL.extend_query>` to extend query params (requires yarl 1.11.0+) -- by :user:`bdraco`. + +If yarl is older than 1.11.0, the previous slower hand rolled version will be used. diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index c261af0421e..60d70724b3d 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -27,7 +27,7 @@ import attr from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy -from yarl import URL +from yarl import URL, __version__ as yarl_version from . 
import hdrs, helpers, http, multipart, payload from .abc import AbstractStreamWriter @@ -88,6 +88,7 @@ _CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]") +_YARL_SUPPORTS_EXTEND_QUERY = tuple(map(int, yarl_version.split(".")[:2])) >= (1, 11) json_re = re.compile(r"^application/(?:[\w.+-]+?\+)?json") @@ -301,10 +302,13 @@ def __init__( # assert session is not None self._session = cast("ClientSession", session) if params: - q = MultiDict(url.query) - url2 = url.with_query(params) - q.extend(url2.query) - url = url.with_query(q) + if _YARL_SUPPORTS_EXTEND_QUERY: + url = url.extend_query(params) + else: + q = MultiDict(url.query) + url2 = url.with_query(params) + q.extend(url2.query) + url = url.with_query(q) self.original_url = url self.url = url.with_fragment(None) self.method = method.upper() diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 58add05f577..9325cc17e48 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -20,7 +20,7 @@ from yarl import URL import aiohttp -from aiohttp import Fingerprint, ServerFingerprintMismatch, hdrs, web +from aiohttp import Fingerprint, ServerFingerprintMismatch, client_reqrep, hdrs, web from aiohttp.abc import AbstractResolver from aiohttp.client_exceptions import ( InvalidURL, @@ -672,7 +672,10 @@ async def handler(request): assert 200 == resp.status -async def test_params_and_query_string(aiohttp_client: AiohttpClient) -> None: +@pytest.mark.parametrize("yarl_supports_extend_query", [True, False]) +async def test_params_and_query_string( + aiohttp_client: AiohttpClient, yarl_supports_extend_query: bool +) -> None: """Test combining params with an existing query_string.""" async def handler(request: web.Request) -> web.Response: @@ -683,13 +686,18 @@ async def handler(request: web.Request) -> web.Response: app.router.add_route("GET", "/", handler) client = await aiohttp_client(app) - async with client.get("/?q=abc", params="q=test&d=dog") as 
resp: - assert resp.status == 200 + # Ensure the old path is tested for old yarl versions + with mock.patch.object( + client_reqrep, "_YARL_SUPPORTS_EXTEND_QUERY", yarl_supports_extend_query + ): + async with client.get("/?q=abc", params="q=test&d=dog") as resp: + assert resp.status == 200 @pytest.mark.parametrize("params", [None, "", {}, MultiDict()]) +@pytest.mark.parametrize("yarl_supports_extend_query", [True, False]) async def test_empty_params_and_query_string( - aiohttp_client: AiohttpClient, params: Any + aiohttp_client: AiohttpClient, params: Any, yarl_supports_extend_query: bool ) -> None: """Test combining empty params with an existing query_string.""" @@ -701,8 +709,12 @@ async def handler(request: web.Request) -> web.Response: app.router.add_route("GET", "/", handler) client = await aiohttp_client(app) - async with client.get("/?q=abc", params=params) as resp: - assert resp.status == 200 + # Ensure the old path is tested for old yarl versions + with mock.patch.object( + client_reqrep, "_YARL_SUPPORTS_EXTEND_QUERY", yarl_supports_extend_query + ): + async with client.get("/?q=abc", params=params) as resp: + assert resp.status == 200 async def test_drop_params_on_redirect(aiohttp_client: AiohttpClient) -> None: From 19049fd97e4d4a5ad326d1d066c2259559868cc3 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 9 Sep 2024 02:52:07 +0000 Subject: [PATCH 0523/1511] [PR #8564/b543677e backport][3.10] Use Query typedef from yarl for params (#9080) --- CHANGES/8564.feature.rst | 1 + aiohttp/client.py | 12 ++++++------ aiohttp/client_reqrep.py | 3 ++- aiohttp/typedefs.py | 13 +++++++++++++ 4 files changed, 22 insertions(+), 7 deletions(-) create mode 100644 CHANGES/8564.feature.rst diff --git a/CHANGES/8564.feature.rst b/CHANGES/8564.feature.rst new file mode 100644 index 00000000000..1eac9d12217 --- /dev/null +++ b/CHANGES/8564.feature.rst @@ -0,0 +1 @@ +Improved type on ``params`` to match the underlying 
type allowed by ``yarl`` -- by :user:`lpetre`. diff --git a/aiohttp/client.py b/aiohttp/client.py index 8edd14d01ff..2814edc31ee 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -97,7 +97,7 @@ from .http_websocket import WSHandshakeError, WSMessage, ws_ext_gen, ws_ext_parse from .streams import FlowControlDataQueue from .tracing import Trace, TraceConfig -from .typedefs import JSONEncoder, LooseCookies, LooseHeaders, StrOrURL +from .typedefs import JSONEncoder, LooseCookies, LooseHeaders, Query, StrOrURL __all__ = ( # client_exceptions @@ -156,7 +156,7 @@ class _RequestOptions(TypedDict, total=False): - params: Union[Mapping[str, Union[str, int]], str, None] + params: Query data: Any json: Any cookies: Union[LooseCookies, None] @@ -450,7 +450,7 @@ async def _request( method: str, str_or_url: StrOrURL, *, - params: Optional[Mapping[str, str]] = None, + params: Query = None, data: Any = None, json: Any = None, cookies: Optional[LooseCookies] = None, @@ -827,7 +827,7 @@ def ws_connect( heartbeat: Optional[float] = None, auth: Optional[BasicAuth] = None, origin: Optional[str] = None, - params: Optional[Mapping[str, str]] = None, + params: Query = None, headers: Optional[LooseHeaders] = None, proxy: Optional[StrOrURL] = None, proxy_auth: Optional[BasicAuth] = None, @@ -879,7 +879,7 @@ async def _ws_connect( heartbeat: Optional[float] = None, auth: Optional[BasicAuth] = None, origin: Optional[str] = None, - params: Optional[Mapping[str, str]] = None, + params: Query = None, headers: Optional[LooseHeaders] = None, proxy: Optional[StrOrURL] = None, proxy_auth: Optional[BasicAuth] = None, @@ -1421,7 +1421,7 @@ def request( method: str, url: StrOrURL, *, - params: Optional[Mapping[str, str]] = None, + params: Query = None, data: Any = None, json: Any = None, headers: Optional[LooseHeaders] = None, diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 60d70724b3d..2df43d112cd 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ 
-67,6 +67,7 @@ JSONDecoder, LooseCookies, LooseHeaders, + Query, RawHeaders, ) @@ -264,7 +265,7 @@ def __init__( method: str, url: URL, *, - params: Optional[Mapping[str, str]] = None, + params: Query = None, headers: Optional[LooseHeaders] = None, skip_auto_headers: Optional[Iterable[str]] = None, data: Any = None, diff --git a/aiohttp/typedefs.py b/aiohttp/typedefs.py index 9fb21c15f83..2e285fa2561 100644 --- a/aiohttp/typedefs.py +++ b/aiohttp/typedefs.py @@ -8,6 +8,7 @@ Iterable, Mapping, Protocol, + Sequence, Tuple, Union, ) @@ -15,6 +16,18 @@ from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr from yarl import URL +try: + # Available in yarl>=1.10.0 + from yarl import Query as _Query +except ImportError: # pragma: no cover + SimpleQuery = Union[str, int, float] # pragma: no cover + QueryVariable = Union[SimpleQuery, "Sequence[SimpleQuery]"] # pragma: no cover + _Query = Union[ # type: ignore[misc] # pragma: no cover + None, str, "Mapping[str, QueryVariable]", "Sequence[Tuple[str, QueryVariable]]" + ] + +Query = _Query + DEFAULT_JSON_ENCODER = json.dumps DEFAULT_JSON_DECODER = json.loads From 3bd4baa53a8b171823f13873d3b2b9207164c40c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 9 Sep 2024 03:00:36 +0000 Subject: [PATCH 0524/1511] [PR #8564/b543677e backport][3.11] Use Query typedef from yarl for params (#9081) --- CHANGES/8564.feature.rst | 1 + aiohttp/client.py | 12 ++++++------ aiohttp/client_reqrep.py | 3 ++- aiohttp/typedefs.py | 13 +++++++++++++ 4 files changed, 22 insertions(+), 7 deletions(-) create mode 100644 CHANGES/8564.feature.rst diff --git a/CHANGES/8564.feature.rst b/CHANGES/8564.feature.rst new file mode 100644 index 00000000000..1eac9d12217 --- /dev/null +++ b/CHANGES/8564.feature.rst @@ -0,0 +1 @@ +Improved type on ``params`` to match the underlying type allowed by ``yarl`` -- by :user:`lpetre`. 
diff --git a/aiohttp/client.py b/aiohttp/client.py index 3c4a0f97c04..1e5c1448ce5 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -101,7 +101,7 @@ from .http_websocket import WSHandshakeError, WSMessage, ws_ext_gen, ws_ext_parse from .streams import FlowControlDataQueue from .tracing import Trace, TraceConfig -from .typedefs import JSONEncoder, LooseCookies, LooseHeaders, StrOrURL +from .typedefs import JSONEncoder, LooseCookies, LooseHeaders, Query, StrOrURL __all__ = ( # client_exceptions @@ -161,7 +161,7 @@ class _RequestOptions(TypedDict, total=False): - params: Union[Mapping[str, Union[str, int]], str, None] + params: Query data: Any json: Any cookies: Union[LooseCookies, None] @@ -455,7 +455,7 @@ async def _request( method: str, str_or_url: StrOrURL, *, - params: Optional[Mapping[str, str]] = None, + params: Query = None, data: Any = None, json: Any = None, cookies: Optional[LooseCookies] = None, @@ -835,7 +835,7 @@ def ws_connect( heartbeat: Optional[float] = None, auth: Optional[BasicAuth] = None, origin: Optional[str] = None, - params: Optional[Mapping[str, str]] = None, + params: Query = None, headers: Optional[LooseHeaders] = None, proxy: Optional[StrOrURL] = None, proxy_auth: Optional[BasicAuth] = None, @@ -887,7 +887,7 @@ async def _ws_connect( heartbeat: Optional[float] = None, auth: Optional[BasicAuth] = None, origin: Optional[str] = None, - params: Optional[Mapping[str, str]] = None, + params: Query = None, headers: Optional[LooseHeaders] = None, proxy: Optional[StrOrURL] = None, proxy_auth: Optional[BasicAuth] = None, @@ -1452,7 +1452,7 @@ def request( method: str, url: StrOrURL, *, - params: Optional[Mapping[str, str]] = None, + params: Query = None, data: Any = None, json: Any = None, headers: Optional[LooseHeaders] = None, diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index e0232a40c4c..79073cb895b 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -67,6 +67,7 @@ JSONDecoder, LooseCookies, 
LooseHeaders, + Query, RawHeaders, ) @@ -262,7 +263,7 @@ def __init__( method: str, url: URL, *, - params: Optional[Mapping[str, str]] = None, + params: Query = None, headers: Optional[LooseHeaders] = None, skip_auto_headers: Optional[Iterable[str]] = None, data: Any = None, diff --git a/aiohttp/typedefs.py b/aiohttp/typedefs.py index 9fb21c15f83..2e285fa2561 100644 --- a/aiohttp/typedefs.py +++ b/aiohttp/typedefs.py @@ -8,6 +8,7 @@ Iterable, Mapping, Protocol, + Sequence, Tuple, Union, ) @@ -15,6 +16,18 @@ from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr from yarl import URL +try: + # Available in yarl>=1.10.0 + from yarl import Query as _Query +except ImportError: # pragma: no cover + SimpleQuery = Union[str, int, float] # pragma: no cover + QueryVariable = Union[SimpleQuery, "Sequence[SimpleQuery]"] # pragma: no cover + _Query = Union[ # type: ignore[misc] # pragma: no cover + None, str, "Mapping[str, QueryVariable]", "Sequence[Tuple[str, QueryVariable]]" + ] + +Query = _Query + DEFAULT_JSON_ENCODER = json.dumps DEFAULT_JSON_DECODER = json.loads From 424af54dd3aecb0f41a262fdd968e397b5167172 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 8 Sep 2024 22:20:48 -0500 Subject: [PATCH 0525/1511] [PR #9079/7404afc backport][3.11] Bump yarl requirement to >=1.11.0 (#9082) --- CHANGES/9079.misc.rst | 1 + requirements/runtime-deps.in | 2 +- setup.cfg | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 CHANGES/9079.misc.rst diff --git a/CHANGES/9079.misc.rst b/CHANGES/9079.misc.rst new file mode 100644 index 00000000000..db20492c9f8 --- /dev/null +++ b/CHANGES/9079.misc.rst @@ -0,0 +1 @@ +Increase minimum yarl version to 1.11.0 -- by :user:`bdraco`. 
diff --git a/requirements/runtime-deps.in b/requirements/runtime-deps.in index 2299584a463..1b440bc7c68 100644 --- a/requirements/runtime-deps.in +++ b/requirements/runtime-deps.in @@ -9,4 +9,4 @@ Brotli; platform_python_implementation == 'CPython' brotlicffi; platform_python_implementation != 'CPython' frozenlist >= 1.1.1 multidict >=4.5, < 7.0 -yarl >= 1.0, < 2.0 +yarl >= 1.11.0, < 2.0 diff --git a/setup.cfg b/setup.cfg index cd1602880e6..c5258115f11 100644 --- a/setup.cfg +++ b/setup.cfg @@ -54,7 +54,7 @@ install_requires = attrs >= 17.3.0 frozenlist >= 1.1.1 multidict >=4.5, < 7.0 - yarl >= 1.0, < 2.0 + yarl >= 1.11.0, < 2.0 [options.exclude_package_data] * = From 02b89114b26bf2f2665d5135884ea1bc01f64968 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 8 Sep 2024 23:08:27 -0500 Subject: [PATCH 0526/1511] Remove unused backwards compatibility code for old yarl versions (#9083) (#9085) --- aiohttp/client_reqrep.py | 11 ++--------- aiohttp/typedefs.py | 13 +------------ tests/test_client_functional.py | 26 +++++++------------------- 3 files changed, 10 insertions(+), 40 deletions(-) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 79073cb895b..75bcd3ecf5e 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -27,7 +27,7 @@ import attr from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy -from yarl import URL, __version__ as yarl_version +from yarl import URL from . 
import hdrs, helpers, http, multipart, payload from .abc import AbstractStreamWriter @@ -89,7 +89,6 @@ _CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]") -_YARL_SUPPORTS_EXTEND_QUERY = tuple(map(int, yarl_version.split(".")[:2])) >= (1, 11) json_re = re.compile(r"^application/(?:[\w.+-]+?\+)?json") @@ -301,13 +300,7 @@ def __init__( # assert session is not None self._session = cast("ClientSession", session) if params: - if _YARL_SUPPORTS_EXTEND_QUERY: - url = url.extend_query(params) - else: - q = MultiDict(url.query) - url2 = url.with_query(params) - q.extend(url2.query) - url = url.with_query(q) + url = url.extend_query(params) self.original_url = url self.url = url.with_fragment(None) self.method = method.upper() diff --git a/aiohttp/typedefs.py b/aiohttp/typedefs.py index 2e285fa2561..cc8c0825b4e 100644 --- a/aiohttp/typedefs.py +++ b/aiohttp/typedefs.py @@ -8,23 +8,12 @@ Iterable, Mapping, Protocol, - Sequence, Tuple, Union, ) from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr -from yarl import URL - -try: - # Available in yarl>=1.10.0 - from yarl import Query as _Query -except ImportError: # pragma: no cover - SimpleQuery = Union[str, int, float] # pragma: no cover - QueryVariable = Union[SimpleQuery, "Sequence[SimpleQuery]"] # pragma: no cover - _Query = Union[ # type: ignore[misc] # pragma: no cover - None, str, "Mapping[str, QueryVariable]", "Sequence[Tuple[str, QueryVariable]]" - ] +from yarl import URL, Query as _Query Query = _Query diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 4edbfa2cfeb..a350171dacf 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -20,7 +20,7 @@ from yarl import URL import aiohttp -from aiohttp import Fingerprint, ServerFingerprintMismatch, client_reqrep, hdrs, web +from aiohttp import Fingerprint, ServerFingerprintMismatch, hdrs, web from aiohttp.abc import AbstractResolver from aiohttp.client_exceptions 
import ( InvalidURL, @@ -672,10 +672,7 @@ async def handler(request): assert 200 == resp.status -@pytest.mark.parametrize("yarl_supports_extend_query", [True, False]) -async def test_params_and_query_string( - aiohttp_client: AiohttpClient, yarl_supports_extend_query: bool -) -> None: +async def test_params_and_query_string(aiohttp_client: AiohttpClient) -> None: """Test combining params with an existing query_string.""" async def handler(request: web.Request) -> web.Response: @@ -686,18 +683,13 @@ async def handler(request: web.Request) -> web.Response: app.router.add_route("GET", "/", handler) client = await aiohttp_client(app) - # Ensure the old path is tested for old yarl versions - with mock.patch.object( - client_reqrep, "_YARL_SUPPORTS_EXTEND_QUERY", yarl_supports_extend_query - ): - async with client.get("/?q=abc", params="q=test&d=dog") as resp: - assert resp.status == 200 + async with client.get("/?q=abc", params="q=test&d=dog") as resp: + assert resp.status == 200 @pytest.mark.parametrize("params", [None, "", {}, MultiDict()]) -@pytest.mark.parametrize("yarl_supports_extend_query", [True, False]) async def test_empty_params_and_query_string( - aiohttp_client: AiohttpClient, params: Any, yarl_supports_extend_query: bool + aiohttp_client: AiohttpClient, params: Any ) -> None: """Test combining empty params with an existing query_string.""" @@ -709,12 +701,8 @@ async def handler(request: web.Request) -> web.Response: app.router.add_route("GET", "/", handler) client = await aiohttp_client(app) - # Ensure the old path is tested for old yarl versions - with mock.patch.object( - client_reqrep, "_YARL_SUPPORTS_EXTEND_QUERY", yarl_supports_extend_query - ): - async with client.get("/?q=abc", params=params) as resp: - assert resp.status == 200 + async with client.get("/?q=abc", params=params) as resp: + assert resp.status == 200 async def test_drop_params_on_redirect(aiohttp_client: AiohttpClient) -> None: From 20f6858b10e27d7e049c993239c0487819833dbe Mon Sep 17 
00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 9 Sep 2024 11:23:57 +0000 Subject: [PATCH 0527/1511] Bump build from 1.2.1 to 1.2.2 (#9091) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [build](https://github.com/pypa/build) from 1.2.1 to 1.2.2. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pypa/build/releases">build's releases</a>.</em></p> <blockquote> <h2>Version 1.2.2</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>Add editable to <code>builder.get_requries_for_build</code>'s static types (PR <a href="https://redirect.github.com/pypa/build/issues/764">#764</a>, fixes issue <a href="https://redirect.github.com/pypa/build/issues/763">#763</a>)</li> <li>Include artifact attestations in our release (PR <a href="https://redirect.github.com/pypa/build/issues/782">#782</a>)</li> <li>Fix typing compatibility with typed <code>pyproject-hooks</code> (PR <a href="https://redirect.github.com/pypa/build/issues/788">#788</a>)</li> <li>Mark more tests with <code>network</code> (PR <a href="https://redirect.github.com/pypa/build/issues/808">#808</a>)</li> <li>Add more intersphinx links to docs (PR <a href="https://redirect.github.com/pypa/build/issues/804">#804</a>)</li> <li>Make <code>uv</code> optional for tests (PR <a href="https://redirect.github.com/pypa/build/issues/807">#807</a> and <a href="https://redirect.github.com/pypa/build/issues/813">#813</a>)</li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/carlwgeorge"><code>@​carlwgeorge</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/build/pull/808">pypa/build#808</a></li> <li><a href="https://github.com/edgarrmondragon"><code>@​edgarrmondragon</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/build/pull/804">pypa/build#804</a></li> </ul> <p><strong>Full 
Changelog</strong>: <a href="https://github.com/pypa/build/compare/1.2.1...1.2.2">https://github.com/pypa/build/compare/1.2.1...1.2.2</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/build/blob/main/CHANGELOG.rst">build's changelog</a>.</em></p> <blockquote> <h1>1.2.2 (2024-09-06)</h1> <ul> <li>Add editable to <code>builder.get_requries_for_build</code>'s static types (PR :pr:<code>764</code>, fixes issue :issue:<code>763</code>)</li> <li>Include artifact attestations in our release (PR :pr:<code>782</code>)</li> <li>Fix typing compatibility with typed <code>pyproject-hooks</code> (PR :pr:<code>788</code>)</li> <li>Mark more tests with <code>network</code> (PR :pr:<code>808</code>)</li> <li>Add more intersphinx links to docs (PR :pr:<code>804</code>)</li> <li>Make <code>uv</code> optional for tests (PR :pr:<code>807</code> and :pr:<code>813</code>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/build/commit/3b0b5d07077473f5da3f038cf7b74cd2b65d2a98"><code>3b0b5d0</code></a> docs: changelog for 1.2.2 (<a href="https://redirect.github.com/pypa/build/issues/812">#812</a>)</li> <li><a href="https://github.com/pypa/build/commit/b44a886bacb2cbb886ce8be92c84235ce556916c"><code>b44a886</code></a> docs: more info in README</li> <li><a href="https://github.com/pypa/build/commit/8e19948e0d39103b3ca6c967c2d98a8a15f09357"><code>8e19948</code></a> build(deps): bump actions/attest-build-provenance in the actions group (<a href="https://redirect.github.com/pypa/build/issues/814">#814</a>)</li> <li><a href="https://github.com/pypa/build/commit/b90956ce785dd86e1815ba3e2ddc6f71af27c5aa"><code>b90956c</code></a> tests: add module case to uv detection (<a href="https://redirect.github.com/pypa/build/issues/813">#813</a>)</li> <li><a href="https://github.com/pypa/build/commit/e79f1b38c241965c87aff1a21d5073775d67b224"><code>e79f1b3</code></a> ci: 
remove bot comments from generated release notes (<a href="https://redirect.github.com/pypa/build/issues/810">#810</a>)</li> <li><a href="https://github.com/pypa/build/commit/f6da25ac20a29165a26a25707d056ff928c9332f"><code>f6da25a</code></a> pre-commit: bump repositories (<a href="https://redirect.github.com/pypa/build/issues/801">#801</a>)</li> <li><a href="https://github.com/pypa/build/commit/9a52c5010d7c1a853aa7709ab32775a5159f436f"><code>9a52c50</code></a> tests: optional uv (<a href="https://redirect.github.com/pypa/build/issues/807">#807</a>)</li> <li><a href="https://github.com/pypa/build/commit/553b700ebe00cb0f72be3251cb866e52ca78f4ca"><code>553b700</code></a> docs: Add a few intersphinx links to the Python Packaging User Guide (<a href="https://redirect.github.com/pypa/build/issues/804">#804</a>)</li> <li><a href="https://github.com/pypa/build/commit/336efcb2b7d488ac4d656a9925d167f415715a9b"><code>336efcb</code></a> build(deps): bump actions/attest-build-provenance in the actions group (<a href="https://redirect.github.com/pypa/build/issues/802">#802</a>)</li> <li><a href="https://github.com/pypa/build/commit/73b721346a37fabafdffc86a0d396b7a255fbde3"><code>73b7213</code></a> tests: mark more network tests (<a href="https://redirect.github.com/pypa/build/issues/808">#808</a>)</li> <li>Additional commits viewable in <a href="https://github.com/pypa/build/compare/1.2.1...1.2.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=build&package-manager=pip&previous-version=1.2.1&new-version=1.2.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 2b04b656eb6..d3e54ce459f 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -32,7 +32,7 @@ blockdiag==3.0.0 # via sphinxcontrib-blockdiag brotli==1.1.0 ; platform_python_implementation == "CPython" # 
via -r requirements/runtime-deps.in -build==1.2.1 +build==1.2.2 # via pip-tools certifi==2024.8.30 # via requests diff --git a/requirements/dev.txt b/requirements/dev.txt index a8e08879fd3..cb037d59243 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -32,7 +32,7 @@ blockdiag==3.0.0 # via sphinxcontrib-blockdiag brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in -build==1.2.1 +build==1.2.2 # via pip-tools certifi==2024.8.30 # via requests From 9498f6a597c0a94f21825fb8dac6c051381dc42f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 9 Sep 2024 11:30:22 +0000 Subject: [PATCH 0528/1511] Bump virtualenv from 20.26.3 to 20.26.4 (#9092) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [virtualenv](https://github.com/pypa/virtualenv) from 20.26.3 to 20.26.4. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pypa/virtualenv/releases">virtualenv's releases</a>.</em></p> <blockquote> <h2>20.26.4</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>release 20.26.3 by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2742">pypa/virtualenv#2742</a></li> <li>Fix whitespace around backticks in changelog by <a href="https://github.com/edmorley"><code>@​edmorley</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2751">pypa/virtualenv#2751</a></li> <li>Test latest Python 3.13 by <a href="https://github.com/hugovk"><code>@​hugovk</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2752">pypa/virtualenv#2752</a></li> <li>Fix typo in Nushell activation script by <a href="https://github.com/edmorley"><code>@​edmorley</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2754">pypa/virtualenv#2754</a></li> <li>GitHub Actions: 
Replace deprecated macos-12 with macos-13 by <a href="https://github.com/hugovk"><code>@​hugovk</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2756">pypa/virtualenv#2756</a></li> <li>Fix <a href="https://redirect.github.com/pypa/virtualenv/issues/2728">#2728</a>: Activating venv create unwanted console output by <a href="https://github.com/ShootGan"><code>@​ShootGan</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2748">pypa/virtualenv#2748</a></li> <li>Upgrade bundled wheels by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2760">pypa/virtualenv#2760</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/ShootGan"><code>@​ShootGan</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/virtualenv/pull/2748">pypa/virtualenv#2748</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pypa/virtualenv/compare/20.26.3...20.26.4">https://github.com/pypa/virtualenv/compare/20.26.3...20.26.4</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/virtualenv/blob/main/docs/changelog.rst">virtualenv's changelog</a>.</em></p> <blockquote> <h2>v20.26.4 (2024-09-07)</h2> <p>Bugfixes - 20.26.4</p> <pre><code>- no longer create `()` output in console during activation of a virtualenv by .bat file. 
(:issue:`2728`) - Upgrade embedded wheels: <ul> <li>wheel to <code>0.44.0</code> from <code>0.43.0</code></li> <li>pip to <code>24.2</code> from <code>24.1</code></li> <li>setuptools to <code>74.1.2</code> from <code>70.1.0</code> (:issue:<code>2760</code>) </code></pre></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/virtualenv/commit/dc9416acbb822a797e2b6b12efd5072160899540"><code>dc9416a</code></a> release 20.26.4</li> <li><a href="https://github.com/pypa/virtualenv/commit/d8f1b147f2fed19cffa53ac91ec1d5bd468c5bdb"><code>d8f1b14</code></a> Upgrade bundled wheels (<a href="https://redirect.github.com/pypa/virtualenv/issues/2760">#2760</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/374660b653c71a2ff81b4e7cbf3d938ac95f1f07"><code>374660b</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/pypa/virtualenv/issues/2753">#2753</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/61e774fcd38cde063c4804c020d77267c3e8e11e"><code>61e774f</code></a> Fix <a href="https://redirect.github.com/pypa/virtualenv/issues/2728">#2728</a>: Activating venv create unwanted console output (<a href="https://redirect.github.com/pypa/virtualenv/issues/2748">#2748</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/38e02c8447421dd881cf55a00516ab9dcd82f695"><code>38e02c8</code></a> Bump pypa/gh-action-pypi-publish from 1.9.0 to 1.10.1 (<a href="https://redirect.github.com/pypa/virtualenv/issues/2757">#2757</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/77df0a0b0c54972ea4f41642771de2b6d1815dce"><code>77df0a0</code></a> GitHub Actions: Replace deprecated macos-12 with macos-13 (<a href="https://redirect.github.com/pypa/virtualenv/issues/2756">#2756</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/265011a187100fec999420cd9ddcda75d35bfbff"><code>265011a</code></a> Fix typo in Nushell activation script (<a 
href="https://redirect.github.com/pypa/virtualenv/issues/2754">#2754</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/95c5eed96c225faff01ac2f108368e0c4303989b"><code>95c5eed</code></a> Test latest Python 3.13 (<a href="https://redirect.github.com/pypa/virtualenv/issues/2752">#2752</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/805365be6d73cc9e18ffda62092938c6f6ea0387"><code>805365b</code></a> Fix whitespace around backticks in changelog (<a href="https://redirect.github.com/pypa/virtualenv/issues/2751">#2751</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/b6c052bdac73138251dc46abda6154f3a0bf8a14"><code>b6c052b</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/pypa/virtualenv/issues/2743">#2743</a>)</li> <li>Additional commits viewable in <a href="https://github.com/pypa/virtualenv/compare/20.26.3...20.26.4">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=virtualenv&package-manager=pip&previous-version=20.26.3&new-version=20.26.4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index d3e54ce459f..97effef1d9a 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -278,7 +278,7 @@ uvloop==0.20.0 ; platform_system != "Windows" # via # -r requirements/base.in 
# -r requirements/lint.in -virtualenv==20.26.3 +virtualenv==20.26.4 # via pre-commit wait-for-it==2.2.2 # via -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index cb037d59243..b55d4e7da02 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -270,7 +270,7 @@ uvloop==0.20.0 ; platform_system != "Windows" and implementation_name == "cpytho # via # -r requirements/base.in # -r requirements/lint.in -virtualenv==20.26.3 +virtualenv==20.26.4 # via pre-commit wait-for-it==2.2.2 # via -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 374f5762d44..2b85f545b72 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -119,5 +119,5 @@ urllib3==2.2.2 # via requests uvloop==0.20.0 ; platform_system != "Windows" # via -r requirements/lint.in -virtualenv==20.26.3 +virtualenv==20.26.4 # via pre-commit From 5d81fa8dc28c01d5f074761e4df04a77b8b08b1c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 9 Sep 2024 11:46:59 +0000 Subject: [PATCH 0529/1511] Bump platformdirs from 4.2.2 to 4.3.2 (#9094) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [platformdirs](https://github.com/platformdirs/platformdirs) from 4.2.2 to 4.3.2. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/platformdirs/platformdirs/releases">platformdirs's releases</a>.</em></p> <blockquote> <h2>4.3.2</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>Use uv as installer by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/tox-dev/platformdirs/pull/300">tox-dev/platformdirs#300</a></li> <li>Fix multi-path returned from <code>_path</code> methods on MacOS by <a href="https://github.com/matthewhughes934"><code>@​matthewhughes934</code></a> in <a href="https://redirect.github.com/tox-dev/platformdirs/pull/299">tox-dev/platformdirs#299</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/matthewhughes934"><code>@​matthewhughes934</code></a> made their first contribution in <a href="https://redirect.github.com/tox-dev/platformdirs/pull/299">tox-dev/platformdirs#299</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/tox-dev/platformdirs/compare/4.3.1...4.3.2">https://github.com/tox-dev/platformdirs/compare/4.3.1...4.3.2</a></p> <h2>4.3.1</h2> <!-- raw HTML omitted --> <p><strong>Full Changelog</strong>: <a href="https://github.com/tox-dev/platformdirs/compare/4.3.0...4.3.1">https://github.com/tox-dev/platformdirs/compare/4.3.0...4.3.1</a></p> <h2>4.3.0</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>Speed up Hatch installation by <a href="https://github.com/ofek"><code>@​ofek</code></a> in <a href="https://redirect.github.com/tox-dev/platformdirs/pull/282">tox-dev/platformdirs#282</a></li> <li>Test with Python 3.13 by <a href="https://github.com/edgarrmondragon"><code>@​edgarrmondragon</code></a> in <a href="https://redirect.github.com/tox-dev/platformdirs/pull/289">tox-dev/platformdirs#289</a></li> <li>Test with latest PyPy by <a href="https://github.com/edgarrmondragon"><code>@​edgarrmondragon</code></a> in <a 
href="https://redirect.github.com/tox-dev/platformdirs/pull/290">tox-dev/platformdirs#290</a></li> <li>Use <code>include-hidden-files: true</code> to upload coverage artifacts by <a href="https://github.com/edgarrmondragon"><code>@​edgarrmondragon</code></a> in <a href="https://redirect.github.com/tox-dev/platformdirs/pull/298">tox-dev/platformdirs#298</a></li> <li>Ensure PlatformDirs is valid superclass type for mypy AND not an abstract class for other checkers by <a href="https://github.com/Avasam"><code>@​Avasam</code></a> in <a href="https://redirect.github.com/tox-dev/platformdirs/pull/295">tox-dev/platformdirs#295</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/edgarrmondragon"><code>@​edgarrmondragon</code></a> made their first contribution in <a href="https://redirect.github.com/tox-dev/platformdirs/pull/289">tox-dev/platformdirs#289</a></li> <li><a href="https://github.com/Avasam"><code>@​Avasam</code></a> made their first contribution in <a href="https://redirect.github.com/tox-dev/platformdirs/pull/295">tox-dev/platformdirs#295</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/tox-dev/platformdirs/compare/4.2.2...4.3.0">https://github.com/tox-dev/platformdirs/compare/4.2.2...4.3.0</a></p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/tox-dev/platformdirs/commit/c596271d168161caf14f24abd951e0e0d96f24f5"><code>c596271</code></a> Fix multi-path returned from <code>_path</code> methods on MacOS (<a href="https://redirect.github.com/platformdirs/platformdirs/issues/299">#299</a>)</li> <li><a href="https://github.com/tox-dev/platformdirs/commit/a420284fdeb471e8914ea3602902642ba3ddbfb7"><code>a420284</code></a> Use uv as installer (<a href="https://redirect.github.com/platformdirs/platformdirs/issues/300">#300</a>)</li> <li><a href="https://github.com/tox-dev/platformdirs/commit/49a89efaa912351008dc8f71c17b4ba609e70d79"><code>49a89ef</code></a> Update 
README.rst</li> <li><a href="https://github.com/tox-dev/platformdirs/commit/48515323e95cd1d6060cff1b58f37cd4329c0d2b"><code>4851532</code></a> Update README.rst</li> <li><a href="https://github.com/tox-dev/platformdirs/commit/330b2722b4e2db35b522a3389ed3e1f5daea35ac"><code>330b272</code></a> Ensure PlatformDirs is valid superclass type for mypy AND not an abstract cla...</li> <li><a href="https://github.com/tox-dev/platformdirs/commit/1ca85924b99f4694dedbbf862875401892f3cfd7"><code>1ca8592</code></a> Bump pypa/gh-action-pypi-publish from 1.9.0 to 1.10.1 (<a href="https://redirect.github.com/platformdirs/platformdirs/issues/297">#297</a>)</li> <li><a href="https://github.com/tox-dev/platformdirs/commit/6ac03f5f7ae8d70185afdb2f9d4e6d6b14320604"><code>6ac03f5</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/platformdirs/platformdirs/issues/293">#293</a>)</li> <li><a href="https://github.com/tox-dev/platformdirs/commit/9e539d786a0d0038cdd0e9a31cccd2b9ff59accb"><code>9e539d7</code></a> Use <code>include-hidden-files: true</code> to upload coverage artifacts (<a href="https://redirect.github.com/platformdirs/platformdirs/issues/298">#298</a>)</li> <li><a href="https://github.com/tox-dev/platformdirs/commit/6a0ff603755b2b5af1f6e272054894867acfaed1"><code>6a0ff60</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/platformdirs/platformdirs/issues/288">#288</a>)</li> <li><a href="https://github.com/tox-dev/platformdirs/commit/8f59e91005b0d34aa1023bbfe661b9df3230aa8e"><code>8f59e91</code></a> Test with latest PyPy (<a href="https://redirect.github.com/platformdirs/platformdirs/issues/290">#290</a>)</li> <li>Additional commits viewable in <a href="https://github.com/platformdirs/platformdirs/compare/4.2.2...4.3.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=platformdirs&package-manager=pip&previous-version=4.2.2&new-version=4.3.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 97effef1d9a..261195122e2 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -140,7 +140,7 @@ pillow==9.5.0 # blockdiag pip-tools==7.4.1 # via -r requirements/dev.in -platformdirs==4.2.2 +platformdirs==4.3.2 # via virtualenv pluggy==1.5.0 # via pytest diff --git a/requirements/dev.txt b/requirements/dev.txt index b55d4e7da02..0ce3efdbe82 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -137,7 +137,7 @@ pillow==9.5.0 # blockdiag pip-tools==7.4.1 # via -r requirements/dev.in -platformdirs==4.2.2 +platformdirs==4.3.2 # via virtualenv pluggy==1.5.0 # via pytest diff --git a/requirements/lint.txt b/requirements/lint.txt index 2b85f545b72..6cf64101a64 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -56,7 +56,7 @@ nodeenv==1.9.1 # via pre-commit packaging==24.1 # via pytest -platformdirs==4.2.2 +platformdirs==4.3.2 # via virtualenv pluggy==1.5.0 # via pytest From 
4b2ac73e01cdec1f07d290a0a716d3de6c0f8eca Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 9 Sep 2024 20:02:07 +0000 Subject: [PATCH 0530/1511] [PR #9095/ffcf9dc4 backport][3.10] Reduce overhead to check if a host is an IP Address (#9096) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/9095.misc.rst | 1 + aiohttp/helpers.py | 66 +++++++++++++++++++++++-------------------- tests/test_helpers.py | 28 ++++++++---------- 3 files changed, 49 insertions(+), 46 deletions(-) create mode 100644 CHANGES/9095.misc.rst diff --git a/CHANGES/9095.misc.rst b/CHANGES/9095.misc.rst new file mode 100644 index 00000000000..f4a06cb09d6 --- /dev/null +++ b/CHANGES/9095.misc.rst @@ -0,0 +1 @@ +Improved performance of checking if a host is an IP Address -- by :user:`bdraco`. diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index 0327d31d961..6abbe74d8cf 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -34,7 +34,6 @@ List, Mapping, Optional, - Pattern, Protocol, Tuple, Type, @@ -471,44 +470,51 @@ def __set__(self, inst: _TSelf[_T], value: _T) -> None: except ImportError: pass -_ipv4_pattern = ( - r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}" - r"(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$" -) -_ipv6_pattern = ( - r"^(?:(?:(?:[A-F0-9]{1,4}:){6}|(?=(?:[A-F0-9]{0,4}:){0,6}" - r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}$)(([0-9A-F]{1,4}:){0,5}|:)" - r"((:[0-9A-F]{1,4}){1,5}:|:)|::(?:[A-F0-9]{1,4}:){5})" - r"(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}" - r"(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])|(?:[A-F0-9]{1,4}:){7}" - r"[A-F0-9]{1,4}|(?=(?:[A-F0-9]{0,4}:){0,7}[A-F0-9]{0,4}$)" - r"(([0-9A-F]{1,4}:){1,7}|:)((:[0-9A-F]{1,4}){1,7}|:)|(?:[A-F0-9]{1,4}:){7}" - r":|:(:[A-F0-9]{1,4}){7})$" -) -_ipv4_regex = re.compile(_ipv4_pattern) -_ipv6_regex = re.compile(_ipv6_pattern, flags=re.IGNORECASE) -_ipv4_regexb = re.compile(_ipv4_pattern.encode("ascii")) -_ipv6_regexb = 
re.compile(_ipv6_pattern.encode("ascii"), flags=re.IGNORECASE) +def is_ipv4_address(host: Optional[Union[str, bytes]]) -> bool: + """Check if host looks like an IPv4 address. + + This function does not validate that the format is correct, only that + the host is a str or bytes, and its all numeric. -def _is_ip_address( - regex: Pattern[str], regexb: Pattern[bytes], host: Optional[Union[str, bytes]] -) -> bool: - if host is None: + This check is only meant as a heuristic to ensure that + a host is not a domain name. + """ + if not host: return False + # For a host to be an ipv4 address, it must be all numeric. if isinstance(host, str): - return bool(regex.match(host)) - elif isinstance(host, (bytes, bytearray, memoryview)): - return bool(regexb.match(host)) - else: - raise TypeError(f"{host} [{type(host)}] is not a str or bytes") + return host.replace(".", "").isdigit() + if isinstance(host, (bytes, bytearray, memoryview)): + return host.decode("ascii").replace(".", "").isdigit() + raise TypeError(f"{host} [{type(host)}] is not a str or bytes") + +def is_ipv6_address(host: Optional[Union[str, bytes]]) -> bool: + """Check if host looks like an IPv6 address. -is_ipv4_address = functools.partial(_is_ip_address, _ipv4_regex, _ipv4_regexb) -is_ipv6_address = functools.partial(_is_ip_address, _ipv6_regex, _ipv6_regexb) + This function does not validate that the format is correct, only that + the host contains a colon and that it is a str or bytes. + + This check is only meant as a heuristic to ensure that + a host is not a domain name. + """ + if not host: + return False + # The host must contain a colon to be an IPv6 address. + if isinstance(host, str): + return ":" in host + if isinstance(host, (bytes, bytearray, memoryview)): + return b":" in host + raise TypeError(f"{host} [{type(host)}] is not a str or bytes") def is_ip_address(host: Optional[Union[str, bytes, bytearray, memoryview]]) -> bool: + """Check if host looks like an IP Address. 
+ + This check is only meant as a heuristic to ensure that + a host is not a domain name. + """ return is_ipv4_address(host) or is_ipv6_address(host) diff --git a/tests/test_helpers.py b/tests/test_helpers.py index 827a417c299..2d6e098aae5 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -267,14 +267,6 @@ def test_is_ip_address() -> None: assert not helpers.is_ip_address("localhost") assert not helpers.is_ip_address("www.example.com") - # Out of range - assert not helpers.is_ip_address("999.999.999.999") - # Contain a port - assert not helpers.is_ip_address("127.0.0.1:80") - assert not helpers.is_ip_address("[2001:db8:0:1]:80") - # Too many "::" - assert not helpers.is_ip_address("1200::AB00:1234::2552:7777:1313") - def test_is_ip_address_bytes() -> None: assert helpers.is_ip_address(b"127.0.0.1") @@ -285,14 +277,6 @@ def test_is_ip_address_bytes() -> None: assert not helpers.is_ip_address(b"localhost") assert not helpers.is_ip_address(b"www.example.com") - # Out of range - assert not helpers.is_ip_address(b"999.999.999.999") - # Contain a port - assert not helpers.is_ip_address(b"127.0.0.1:80") - assert not helpers.is_ip_address(b"[2001:db8:0:1]:80") - # Too many "::" - assert not helpers.is_ip_address(b"1200::AB00:1234::2552:7777:1313") - def test_ipv4_addresses() -> None: ip_addresses = [ @@ -340,6 +324,18 @@ def test_is_ip_address_invalid_type() -> None: with pytest.raises(TypeError): helpers.is_ip_address(object()) + with pytest.raises(TypeError): + helpers.is_ipv4_address(123) # type: ignore[arg-type] + + with pytest.raises(TypeError): + helpers.is_ipv4_address(object()) # type: ignore[arg-type] + + with pytest.raises(TypeError): + helpers.is_ipv6_address(123) # type: ignore[arg-type] + + with pytest.raises(TypeError): + helpers.is_ipv6_address(object()) # type: ignore[arg-type] + # ----------------------------------- TimeoutHandle ------------------- From 7af8416233d57c40efbc0d8132435e348167cd04 Mon Sep 17 00:00:00 2001 From: 
"patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 9 Sep 2024 20:10:24 +0000 Subject: [PATCH 0531/1511] [PR #9095/ffcf9dc4 backport][3.11] Reduce overhead to check if a host is an IP Address (#9097) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/9095.misc.rst | 1 + aiohttp/helpers.py | 66 +++++++++++++++++++++++-------------------- tests/test_helpers.py | 28 ++++++++---------- 3 files changed, 49 insertions(+), 46 deletions(-) create mode 100644 CHANGES/9095.misc.rst diff --git a/CHANGES/9095.misc.rst b/CHANGES/9095.misc.rst new file mode 100644 index 00000000000..f4a06cb09d6 --- /dev/null +++ b/CHANGES/9095.misc.rst @@ -0,0 +1 @@ +Improved performance of checking if a host is an IP Address -- by :user:`bdraco`. diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index bf9e135bb3c..88fc7412ea8 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -34,7 +34,6 @@ List, Mapping, Optional, - Pattern, Protocol, Tuple, Type, @@ -469,44 +468,51 @@ def __set__(self, inst: _TSelf[_T], value: _T) -> None: except ImportError: pass -_ipv4_pattern = ( - r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}" - r"(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$" -) -_ipv6_pattern = ( - r"^(?:(?:(?:[A-F0-9]{1,4}:){6}|(?=(?:[A-F0-9]{0,4}:){0,6}" - r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}$)(([0-9A-F]{1,4}:){0,5}|:)" - r"((:[0-9A-F]{1,4}){1,5}:|:)|::(?:[A-F0-9]{1,4}:){5})" - r"(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}" - r"(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])|(?:[A-F0-9]{1,4}:){7}" - r"[A-F0-9]{1,4}|(?=(?:[A-F0-9]{0,4}:){0,7}[A-F0-9]{0,4}$)" - r"(([0-9A-F]{1,4}:){1,7}|:)((:[0-9A-F]{1,4}){1,7}|:)|(?:[A-F0-9]{1,4}:){7}" - r":|:(:[A-F0-9]{1,4}){7})$" -) -_ipv4_regex = re.compile(_ipv4_pattern) -_ipv6_regex = re.compile(_ipv6_pattern, flags=re.IGNORECASE) -_ipv4_regexb = re.compile(_ipv4_pattern.encode("ascii")) -_ipv6_regexb = re.compile(_ipv6_pattern.encode("ascii"), flags=re.IGNORECASE) +def is_ipv4_address(host: 
Optional[Union[str, bytes]]) -> bool: + """Check if host looks like an IPv4 address. + + This function does not validate that the format is correct, only that + the host is a str or bytes, and its all numeric. -def _is_ip_address( - regex: Pattern[str], regexb: Pattern[bytes], host: Optional[Union[str, bytes]] -) -> bool: - if host is None: + This check is only meant as a heuristic to ensure that + a host is not a domain name. + """ + if not host: return False + # For a host to be an ipv4 address, it must be all numeric. if isinstance(host, str): - return bool(regex.match(host)) - elif isinstance(host, (bytes, bytearray, memoryview)): - return bool(regexb.match(host)) - else: - raise TypeError(f"{host} [{type(host)}] is not a str or bytes") + return host.replace(".", "").isdigit() + if isinstance(host, (bytes, bytearray, memoryview)): + return host.decode("ascii").replace(".", "").isdigit() + raise TypeError(f"{host} [{type(host)}] is not a str or bytes") + +def is_ipv6_address(host: Optional[Union[str, bytes]]) -> bool: + """Check if host looks like an IPv6 address. -is_ipv4_address = functools.partial(_is_ip_address, _ipv4_regex, _ipv4_regexb) -is_ipv6_address = functools.partial(_is_ip_address, _ipv6_regex, _ipv6_regexb) + This function does not validate that the format is correct, only that + the host contains a colon and that it is a str or bytes. + + This check is only meant as a heuristic to ensure that + a host is not a domain name. + """ + if not host: + return False + # The host must contain a colon to be an IPv6 address. + if isinstance(host, str): + return ":" in host + if isinstance(host, (bytes, bytearray, memoryview)): + return b":" in host + raise TypeError(f"{host} [{type(host)}] is not a str or bytes") def is_ip_address(host: Optional[Union[str, bytes, bytearray, memoryview]]) -> bool: + """Check if host looks like an IP Address. + + This check is only meant as a heuristic to ensure that + a host is not a domain name. 
+ """ return is_ipv4_address(host) or is_ipv6_address(host) diff --git a/tests/test_helpers.py b/tests/test_helpers.py index 656364f43aa..13d73a312fc 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -267,14 +267,6 @@ def test_is_ip_address() -> None: assert not helpers.is_ip_address("localhost") assert not helpers.is_ip_address("www.example.com") - # Out of range - assert not helpers.is_ip_address("999.999.999.999") - # Contain a port - assert not helpers.is_ip_address("127.0.0.1:80") - assert not helpers.is_ip_address("[2001:db8:0:1]:80") - # Too many "::" - assert not helpers.is_ip_address("1200::AB00:1234::2552:7777:1313") - def test_is_ip_address_bytes() -> None: assert helpers.is_ip_address(b"127.0.0.1") @@ -285,14 +277,6 @@ def test_is_ip_address_bytes() -> None: assert not helpers.is_ip_address(b"localhost") assert not helpers.is_ip_address(b"www.example.com") - # Out of range - assert not helpers.is_ip_address(b"999.999.999.999") - # Contain a port - assert not helpers.is_ip_address(b"127.0.0.1:80") - assert not helpers.is_ip_address(b"[2001:db8:0:1]:80") - # Too many "::" - assert not helpers.is_ip_address(b"1200::AB00:1234::2552:7777:1313") - def test_ipv4_addresses() -> None: ip_addresses = [ @@ -341,6 +325,18 @@ def test_is_ip_address_invalid_type() -> None: with pytest.raises(TypeError): helpers.is_ip_address(object()) + with pytest.raises(TypeError): + helpers.is_ipv4_address(123) # type: ignore[arg-type] + + with pytest.raises(TypeError): + helpers.is_ipv4_address(object()) # type: ignore[arg-type] + + with pytest.raises(TypeError): + helpers.is_ipv6_address(123) # type: ignore[arg-type] + + with pytest.raises(TypeError): + helpers.is_ipv6_address(object()) # type: ignore[arg-type] + # ----------------------------------- TimeoutHandle ------------------- From c835d747b5f762d3af00335f9fa09ba716f97122 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 9 Sep 2024 21:23:25 
+0000 Subject: [PATCH 0532/1511] Bump filelock from 3.15.4 to 3.16.0 (#9093) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [filelock](https://github.com/tox-dev/py-filelock) from 3.15.4 to 3.16.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/tox-dev/py-filelock/releases">filelock's releases</a>.</em></p> <blockquote> <h2>3.16.0</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>Test Python 3.13 by <a href="https://github.com/hugovk"><code>@​hugovk</code></a> in <a href="https://redirect.github.com/tox-dev/filelock/pull/352">tox-dev/filelock#352</a></li> <li>Add 3.13 to CI by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/tox-dev/filelock/pull/359">tox-dev/filelock#359</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/tox-dev/filelock/compare/3.15.4...3.16.0">https://github.com/tox-dev/filelock/compare/3.15.4...3.16.0</a></p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/tox-dev/filelock/commit/0acea4f63dc11522441b89a6a699253818c0269d"><code>0acea4f</code></a> Add 3.13 to CI (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/359">#359</a>)</li> <li><a href="https://github.com/tox-dev/filelock/commit/4dc2a8f5f3678f8b0fc9939b9fc09e1ca733e382"><code>4dc2a8f</code></a> Bump pypa/gh-action-pypi-publish from 1.9.0 to 1.10.1 (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/356">#356</a>)</li> <li><a href="https://github.com/tox-dev/filelock/commit/7397dd7a17678354b3a732faec98d1b50c71c0be"><code>7397dd7</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/354">#354</a>)</li> <li><a href="https://github.com/tox-dev/filelock/commit/067b65169525f3358808b8dad0afd728c7750ea8"><code>067b651</code></a> Bump actions/download-artifact from 3 
to 4.1.7 in /.github/workflows (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/357">#357</a>)</li> <li><a href="https://github.com/tox-dev/filelock/commit/85760d7531cfaa6fe9382d950aadf2ebd21364a0"><code>85760d7</code></a> Test Python 3.13 (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/352">#352</a>)</li> <li><a href="https://github.com/tox-dev/filelock/commit/4104f3e0e6e809d3ce7ca3105cbb0f5599d45ddb"><code>4104f3e</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/348">#348</a>)</li> <li>See full diff in <a href="https://github.com/tox-dev/py-filelock/compare/3.15.4...3.16.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=filelock&package-manager=pip&previous-version=3.15.4&new-version=3.16.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 261195122e2..d40abf6df52 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -70,7 +70,7 @@ docutils==0.20.1 # via sphinx exceptiongroup==1.2.2 # via pytest -filelock==3.15.4 +filelock==3.16.0 # via virtualenv freezegun==1.5.1 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 0ce3efdbe82..700e3dc1208 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -68,7 +68,7 @@ docutils==0.20.1 # via sphinx exceptiongroup==1.2.2 # via pytest -filelock==3.15.4 +filelock==3.16.0 # via virtualenv freezegun==1.5.1 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index 6cf64101a64..d0a5781d1b8 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -32,7 +32,7 @@ distlib==0.3.8 # via virtualenv exceptiongroup==1.2.2 # via pytest -filelock==3.15.4 +filelock==3.16.0 # via virtualenv freezegun==1.5.1 # via -r requirements/lint.in From 
3d5db00174ac54733b67e93c163c14d11033eded Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 10 Sep 2024 12:39:18 +0000 Subject: [PATCH 0533/1511] Bump pytest from 8.3.2 to 8.3.3 (#9104) Bumps [pytest](https://github.com/pytest-dev/pytest) from 8.3.2 to 8.3.3. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pytest-dev/pytest/releases">pytest's releases</a>.</em></p> <blockquote> <h2>8.3.3</h2> <h1>pytest 8.3.3 (2024-09-09)</h1> <h2>Bug fixes</h2> <ul> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/12446">#12446</a>: Avoid calling <code>@property</code> (and other instance descriptors) during fixture discovery -- by <code>asottile</code>{.interpreted-text role="user"}</p> </li> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/12659">#12659</a>: Fixed the issue of not displaying assertion failure differences when using the parameter <code>--import-mode=importlib</code> in pytest>=8.1.</p> </li> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/12667">#12667</a>: Fixed a regression where type change in [ExceptionInfo.errisinstance]{.title-ref} caused [mypy]{.title-ref} to fail.</p> </li> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/12744">#12744</a>: Fixed typing compatibility with Python 3.9 or less -- replaced [typing.Self]{.title-ref} with [typing_extensions.Self]{.title-ref} -- by <code>Avasam</code>{.interpreted-text role="user"}</p> </li> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/12745">#12745</a>: Fixed an issue with backslashes being incorrectly converted in nodeid paths on Windows, ensuring consistent path handling across environments.</p> </li> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/6682">#6682</a>: Fixed bug where the verbosity levels where not being respected when printing the "msg" part of failed assertion 
(as in <code>assert condition, msg</code>).</p> </li> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/9422">#9422</a>: Fix bug where disabling the terminal plugin via <code>-p no:terminal</code> would cause crashes related to missing the <code>verbose</code> option.</p> <p>-- by <code>GTowers1</code>{.interpreted-text role="user"}</p> </li> </ul> <h2>Improved documentation</h2> <ul> <li><a href="https://redirect.github.com/pytest-dev/pytest/issues/12663">#12663</a>: Clarify that the [pytest_deselected]{.title-ref} hook should be called from [pytest_collection_modifyitems]{.title-ref} hook implementations when items are deselected.</li> <li><a href="https://redirect.github.com/pytest-dev/pytest/issues/12678">#12678</a>: Remove erroneous quotes from [tmp_path_retention_policy]{.title-ref} example in docs.</li> </ul> <h2>Miscellaneous internal changes</h2> <ul> <li><a href="https://redirect.github.com/pytest-dev/pytest/issues/12769">#12769</a>: Fix typos discovered by codespell and add codespell to pre-commit hooks.</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pytest-dev/pytest/commit/d0f136fe64f9374f18a04562305b178fb380d1ec"><code>d0f136f</code></a> build(deps): Bump pypa/gh-action-pypi-publish from 1.10.0 to 1.10.1 (<a href="https://redirect.github.com/pytest-dev/pytest/issues/12790">#12790</a>)</li> <li><a href="https://github.com/pytest-dev/pytest/commit/972f307c7861ae498e705d3d12e003fa4b035ac0"><code>972f307</code></a> Prepare release version 8.3.3</li> <li><a href="https://github.com/pytest-dev/pytest/commit/0dabdcfe4de99147a07bd577804b60818ea25bc4"><code>0dabdcf</code></a> Include co-authors in release announcement (<a href="https://redirect.github.com/pytest-dev/pytest/issues/12795">#12795</a>) (<a href="https://redirect.github.com/pytest-dev/pytest/issues/12797">#12797</a>)</li> <li><a 
href="https://github.com/pytest-dev/pytest/commit/a9910a413a691e1b216e2235a9cbec0921117702"><code>a9910a4</code></a> Do not discover properties when iterating fixtures (<a href="https://redirect.github.com/pytest-dev/pytest/issues/12781">#12781</a>) (<a href="https://redirect.github.com/pytest-dev/pytest/issues/12788">#12788</a>)</li> <li><a href="https://github.com/pytest-dev/pytest/commit/0f10b6b0d8138d3539de75cb7b2e33167b6fc882"><code>0f10b6b</code></a> Fix issue with slashes being turned into backslashes on Windows (<a href="https://redirect.github.com/pytest-dev/pytest/issues/12760">#12760</a>) (<a href="https://redirect.github.com/pytest-dev/pytest/issues/12">#12</a>...</li> <li><a href="https://github.com/pytest-dev/pytest/commit/300d13d2231db85186729c2091ea33480cb39c1a"><code>300d13d</code></a> Merge pull request <a href="https://redirect.github.com/pytest-dev/pytest/issues/12785">#12785</a> from pytest-dev/patchback/backports/8.3.x/57cccf7f4...</li> <li><a href="https://github.com/pytest-dev/pytest/commit/e5d32c73abcf4fa1362b15aaf660074de8f710d4"><code>e5d32c7</code></a> Merge pull request <a href="https://redirect.github.com/pytest-dev/pytest/issues/12784">#12784</a> from svenevs/fix/docs-example-parametrize-minor-typo</li> <li><a href="https://github.com/pytest-dev/pytest/commit/bc913d194ec009699194b016ca619d5ae7f22c91"><code>bc913d1</code></a> Streamline checks for verbose option (<a href="https://redirect.github.com/pytest-dev/pytest/issues/12706">#12706</a>) (<a href="https://redirect.github.com/pytest-dev/pytest/issues/12778">#12778</a>)</li> <li><a href="https://github.com/pytest-dev/pytest/commit/01cfcc9f2dda817b25511772593012fd93e092d0"><code>01cfcc9</code></a> Fix typos and introduce codespell pre-commit hook (<a href="https://redirect.github.com/pytest-dev/pytest/issues/12769">#12769</a>) (<a href="https://redirect.github.com/pytest-dev/pytest/issues/12774">#12774</a>)</li> <li><a 
href="https://github.com/pytest-dev/pytest/commit/4873394d53635ef62d1915d23972ed4281a784eb"><code>4873394</code></a> doc: Remove past training (<a href="https://redirect.github.com/pytest-dev/pytest/issues/12772">#12772</a>) (<a href="https://redirect.github.com/pytest-dev/pytest/issues/12773">#12773</a>)</li> <li>Additional commits viewable in <a href="https://github.com/pytest-dev/pytest/compare/8.3.2...8.3.3">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pytest&package-manager=pip&previous-version=8.3.2&new-version=8.3.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index d40abf6df52..ecb06752a2e 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -170,7 +170,7 @@ pyproject-hooks==1.1.0 # via # build # pip-tools -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 700e3dc1208..e67753f3597 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -165,7 +165,7 @@ pyproject-hooks==1.1.0 # via # build # pip-tools -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index d0a5781d1b8..c75880dc0d8 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -72,7 +72,7 @@ pydantic-core==2.23.2 # via pydantic pygments==2.18.0 # via rich -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements/lint.in # pytest-mock diff --git 
a/requirements/test.txt b/requirements/test.txt index a065d607643..8db0bb18584 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -83,7 +83,7 @@ pydantic-core==2.23.2 # via pydantic pygments==2.18.0 # via rich -pytest==8.3.2 +pytest==8.3.3 # via # -r requirements/test.in # pytest-cov From 277813ca5a7d370ecc1240e46e606d7bea400aa3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 10 Sep 2024 12:46:19 +0000 Subject: [PATCH 0534/1511] Bump yarl from 1.11.0 to 1.11.1 (#9105) Bumps [yarl](https://github.com/aio-libs/yarl) from 1.11.0 to 1.11.1. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/yarl/releases">yarl's releases</a>.</em></p> <blockquote> <h2>1.11.1</h2> <h2>Bug fixes</h2> <ul> <li> <p>Allowed scheme replacement for relative URLs if the scheme does not require a host -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/yarl/issues/280">#280</a>, <a href="https://redirect.github.com/aio-libs/yarl/issues/1138">#1138</a>.</p> </li> <li> <p>Allowed empty host for URL schemes other than the special schemes listed in the WHATWG URL spec -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/yarl/issues/1136">#1136</a>.</p> </li> </ul> <h2>Features</h2> <ul> <li> <p>Loosened restriction on integers as query string values to allow classes that implement <code>__int__</code> -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/yarl/issues/1139">#1139</a>.</p> </li> </ul> <h2>Miscellaneous internal changes</h2> <ul> <li> <p>Improved performance of normalizing paths -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a 
href="https://redirect.github.com/aio-libs/yarl/issues/1137">#1137</a>.</p> </li> </ul> <hr /> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/yarl/blob/master/CHANGES.rst">yarl's changelog</a>.</em></p> <blockquote> <h1>1.11.1</h1> <p><em>(2024-09-09)</em></p> <h2>Bug fixes</h2> <ul> <li> <p>Allowed scheme replacement for relative URLs if the scheme does not require a host -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>280</code>, :issue:<code>1138</code>.</p> </li> <li> <p>Allowed empty host for URL schemes other than the special schemes listed in the WHATWG URL spec -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1136</code>.</p> </li> </ul> <h2>Features</h2> <ul> <li> <p>Loosened restriction on integers as query string values to allow classes that implement <code>__int__</code> -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1139</code>.</p> </li> </ul> <h2>Miscellaneous internal changes</h2> <ul> <li> <p>Improved performance of normalizing paths -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1137</code>.</p> </li> </ul> <hr /> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/yarl/commit/134d4cd82147c29488ca0d798042498d95725814"><code>134d4cd</code></a> Release 1.11.1</li> <li><a href="https://github.com/aio-libs/yarl/commit/fa321e5f79ffa9f587d2d3103130de81bd5845d4"><code>fa321e5</code></a> Cleanup tense of changelog messages (<a href="https://redirect.github.com/aio-libs/yarl/issues/1140">#1140</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/8048180848a2bbfffc3a0493c40663a67ef60af5"><code>8048180</code></a> Allow scheme replacement for relative URLs if the target scheme does not 
requ...</li> <li><a href="https://github.com/aio-libs/yarl/commit/2340c7264f302c163319595418a236b781367e51"><code>2340c72</code></a> Accept objects that support <strong>int</strong> for query vars (<a href="https://redirect.github.com/aio-libs/yarl/issues/1139">#1139</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/0ee0104ec400e578e53a138498dbad076660645c"><code>0ee0104</code></a> Allow empty hosts for schemes that do not require them (<a href="https://redirect.github.com/aio-libs/yarl/issues/1136">#1136</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/1e969abb9cef055c5d4350d60532ee63775a1708"><code>1e969ab</code></a> Small speed up to normalizing the path (<a href="https://redirect.github.com/aio-libs/yarl/issues/1137">#1137</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/07c4e037c77e6b77fbbe9d0b393072e10a53cb66"><code>07c4e03</code></a> Increment version to 1.11.1.dev0 (<a href="https://redirect.github.com/aio-libs/yarl/issues/1135">#1135</a>)</li> <li>See full diff in <a href="https://github.com/aio-libs/yarl/compare/v1.11.0...v1.11.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=yarl&package-manager=pip&previous-version=1.11.0&new-version=1.11.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 87300dd8515..b8f70307c91 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -38,5 +38,5 @@ pycparser==2.22 # via cffi uvloop==0.20.0 
; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in -yarl==1.11.0 +yarl==1.11.1 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index ecb06752a2e..14e784e5eb6 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -286,7 +286,7 @@ webcolors==24.8.0 # via blockdiag wheel==0.44.0 # via pip-tools -yarl==1.11.0 +yarl==1.11.1 # via -r requirements/runtime-deps.in zipp==3.20.1 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index e67753f3597..bf8d7d71098 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -278,7 +278,7 @@ webcolors==24.8.0 # via blockdiag wheel==0.44.0 # via pip-tools -yarl==1.11.0 +yarl==1.11.1 # via -r requirements/runtime-deps.in zipp==3.20.1 # via diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 89e30717677..988d7b275c9 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -32,5 +32,5 @@ pycares==4.4.0 # via aiodns pycparser==2.22 # via cffi -yarl==1.11.0 +yarl==1.11.1 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 8db0bb18584..96f3f7c3ee5 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -136,5 +136,5 @@ uvloop==0.20.0 ; platform_system != "Windows" and implementation_name == "cpytho # via -r requirements/base.in wait-for-it==2.2.2 # via -r requirements/test.in -yarl==1.11.0 +yarl==1.11.1 # via -r requirements/runtime-deps.in From 47c08987cd54f5bc5b67eccff3b45c64ad7fe8b3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 10 Sep 2024 13:01:44 +0000 Subject: [PATCH 0535/1511] Bump pydantic from 2.9.0 to 2.9.1 (#9107) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pydantic](https://github.com/pydantic/pydantic) from 2.9.0 to 2.9.1. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pydantic/pydantic/releases">pydantic's releases</a>.</em></p> <blockquote> <h2>v2.9.1 (2024-09-09)</h2> <h2>What's Changed</h2> <h3>Fixes</h3> <ul> <li>Fix Predicate issue in v2.9.0 by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10321">#10321</a></li> <li>Fixing <code>annotated-types</code> bound to <code>>=0.6.0</code> by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10327">#10327</a></li> <li>Turn <code>tzdata</code> install requirement into optional <code>timezone</code> dependency by <a href="https://github.com/jakob-keller"><code>@​jakob-keller</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10331">#10331</a></li> <li>Fix <code>IncExc</code> type alias definition by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10339">#10339</a></li> <li>Use correct types namespace when building namedtuple core schemas by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10337">#10337</a></li> <li>Fix evaluation of stringified annotations during namespace inspection by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10347">#10347</a></li> <li>Fix tagged union serialization with alias generators by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic-core/pull/1442">pydantic/pydantic-core#1442</a></li> </ul> <p><strong>Full Changelog</strong>: <a 
href="https://github.com/pydantic/pydantic/compare/v2.9.0...v2.9.1">https://github.com/pydantic/pydantic/compare/v2.9.0...v2.9.1</a></p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pydantic/pydantic/commit/ecc5275d01e3d8de15c3641d35eb5151f5778833"><code>ecc5275</code></a> bump</li> <li><a href="https://github.com/pydantic/pydantic/commit/2c61bfda43e67b8308f86c77ae4121f447f134dd"><code>2c61bfd</code></a> Fix evaluation of stringified annotations during namespace inspection (<a href="https://redirect.github.com/pydantic/pydantic/issues/10347">#10347</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/3d364cbf994bc6676b8419b8ad588d4d49ab2f29"><code>3d364cb</code></a> Use correct types namespace when building namedtuple core schemas (<a href="https://redirect.github.com/pydantic/pydantic/issues/10337">#10337</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/2746ccba230b47d279ed5aa4e4831bbdba60ad70"><code>2746ccb</code></a> Fix <code>IncEx</code> type alias definition (<a href="https://redirect.github.com/pydantic/pydantic/issues/10339">#10339</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/b32d4109675316912b99a7f4fc56dcbf2c73840c"><code>b32d410</code></a> Turn <code>tzdata</code> install requirement into optional <code>timezone</code> dependency (<a href="https://redirect.github.com/pydantic/pydantic/issues/10331">#10331</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/7d857eb89c4f3c0389f8e12d83f14c89fab75f37"><code>7d857eb</code></a> Fixing <code>annotated-types</code> bound (<a href="https://redirect.github.com/pydantic/pydantic/issues/10327">#10327</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/07cbe50fa0a7d217d8382f79c43d02201d25a4fe"><code>07cbe50</code></a> Fix <code>Predicate</code> issue in <code>v2.9.0</code> (<a href="https://redirect.github.com/pydantic/pydantic/issues/10321">#10321</a>)</li> <li>See full diff in <a 
href="https://github.com/pydantic/pydantic/compare/v2.9.0...v2.9.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pydantic&package-manager=pip&previous-version=2.9.0&new-version=2.9.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 4 ++-- requirements/dev.txt | 4 ++-- requirements/lint.txt | 4 ++-- requirements/test.txt | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 14e784e5eb6..24511763e32 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -152,9 +152,9 @@ pycares==4.4.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.9.0 +pydantic==2.9.1 # via python-on-whales -pydantic-core==2.23.2 +pydantic-core==2.23.3 # via pydantic pyenchant==3.2.2 # via sphinxcontrib-spelling diff --git a/requirements/dev.txt b/requirements/dev.txt index bf8d7d71098..7ff9de0c37a 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -149,9 +149,9 @@ pycares==4.4.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.9.0 +pydantic==2.9.1 # via python-on-whales -pydantic-core==2.23.2 +pydantic-core==2.23.3 # via pydantic pygments==2.18.0 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index c75880dc0d8..159b481e30e 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -66,9 +66,9 @@ pycares==4.4.0 
# via aiodns pycparser==2.22 # via cffi -pydantic==2.9.0 +pydantic==2.9.1 # via python-on-whales -pydantic-core==2.23.2 +pydantic-core==2.23.3 # via pydantic pygments==2.18.0 # via rich diff --git a/requirements/test.txt b/requirements/test.txt index 96f3f7c3ee5..03e9f77a961 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -77,9 +77,9 @@ pycares==4.4.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.9.0 +pydantic==2.9.1 # via python-on-whales -pydantic-core==2.23.2 +pydantic-core==2.23.3 # via pydantic pygments==2.18.0 # via rich From bea443a85d407df08fe6ff9798def5f57f7f3968 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 10 Sep 2024 13:08:46 +0000 Subject: [PATCH 0536/1511] Bump multidict from 6.0.5 to 6.1.0 (#9106) Bumps [multidict](https://github.com/aio-libs/multidict) from 6.0.5 to 6.1.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/multidict/releases">multidict's releases</a>.</em></p> <blockquote> <h2>6.1.0</h2> <h2>Bug fixes</h2> <ul> <li> <p>Covered the unreachable code path in <code>multidict._multidict_base._abc_itemsview_register()</code> with typing -- by :user:<code>skinnyBat</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/multidict/issues/928">#928</a>.</p> </li> </ul> <h2>Features</h2> <ul> <li> <p>Added support for Python 3.13 -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/multidict/issues/1002">#1002</a>.</p> </li> </ul> <h2>Removals and backward incompatible breaking changes</h2> <ul> <li> <p>Removed Python 3.7 support -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/multidict/issues/997">#997</a>.</p> </li> </ul> <h2>Contributor-facing changes</h2> <ul> <li> 
<p>Added tests to have full code coverage of the <code>multidict._multidict_base._viewbaseset_richcmp()</code> function -- by :user:<code>skinnyBat</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/multidict/issues/928">#928</a>.</p> </li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/multidict/blob/master/CHANGES.rst">multidict's changelog</a>.</em></p> <blockquote> <h1>6.1.0 (2024-09-09)</h1> <h2>Bug fixes</h2> <ul> <li> <p>Covered the unreachable code path in <code>multidict._multidict_base._abc_itemsview_register()</code> with typing -- by :user:<code>skinnyBat</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>928</code>.</p> </li> </ul> <h2>Features</h2> <ul> <li> <p>Added support for Python 3.13 -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1002</code>.</p> </li> </ul> <h2>Removals and backward incompatible breaking changes</h2> <ul> <li> <p>Removed Python 3.7 support -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>997</code>.</p> </li> </ul> <h2>Contributor-facing changes</h2> <ul> <li> <p>Added tests to have full code coverage of the <code>multidict._multidict_base._viewbaseset_richcmp()</code> function -- by :user:<code>skinnyBat</code>.</p> <p><em>Related issues and pull requests on GitHub:</em></p> </li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/multidict/commit/4140e63780dc6dd600a1837cb9b4c5198c3dcd68"><code>4140e63</code></a> Release 6.1.0</li> <li><a href="https://github.com/aio-libs/multidict/commit/f3876fde179a4e5ada220d9e52208bb97b96d7eb"><code>f3876fd</code></a> Python 3.13 support (<a href="https://redirect.github.com/aio-libs/multidict/issues/1002">#1002</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/62ab55e9a5226895786710d1f22887f2174820d0"><code>62ab55e</code></a> Bump test-summary/action from 2.3 to 2.4 (<a href="https://redirect.github.com/aio-libs/multidict/issues/983">#983</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/039c298c2eb3e7861d422a76f05cdc7b1c031513"><code>039c298</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/aio-libs/multidict/issues/964">#964</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/a27f0574dee506fc420f74311390df2b72f363ef"><code>a27f057</code></a> Bump black from 24.4.0 to 24.8.0 (<a href="https://redirect.github.com/aio-libs/multidict/issues/1001">#1001</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/ac1025313cd3d1ecaab7f4e54e7c96a36b3e2c3f"><code>ac10253</code></a> Bump pytest-cov from 4.1.0 to 5.0.0 (<a href="https://redirect.github.com/aio-libs/multidict/issues/963">#963</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/794c6b2a24ad20084b9cc159cefaeb222a5d2c0b"><code>794c6b2</code></a> Bump pre-commit from 3.7.0 to 3.8.0 (<a href="https://redirect.github.com/aio-libs/multidict/issues/1000">#1000</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/234d44874d4e5d8bfb1a00cb8e6767827e2f5d01"><code>234d448</code></a> Bump dependabot/fetch-metadata from 2.0.0 to 2.2.0 (<a href="https://redirect.github.com/aio-libs/multidict/issues/985">#985</a>)</li> <li><a 
href="https://github.com/aio-libs/multidict/commit/056c7de0cda3c91a83c7d5623e1062781e0f11db"><code>056c7de</code></a> Bump sigstore/gh-action-sigstore-python from 2.1.1 to 3.0.0 (<a href="https://redirect.github.com/aio-libs/multidict/issues/986">#986</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/f176071bc07fc139ef77f47ed58a1527eb685a55"><code>f176071</code></a> Bump pypa/cibuildwheel from 2.17.0 to 2.20.0 (<a href="https://redirect.github.com/aio-libs/multidict/issues/993">#993</a>)</li> <li>Additional commits viewable in <a href="https://github.com/aio-libs/multidict/compare/v6.0.5...v6.1.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=multidict&package-manager=pip&previous-version=6.0.5&new-version=6.1.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 4 +++- requirements/constraints.txt | 3 ++- requirements/cython.txt | 4 +++- requirements/dev.txt | 3 ++- requirements/multidict.txt | 4 +++- requirements/runtime-deps.txt | 4 +++- requirements/test.txt | 3 ++- 7 files changed, 18 insertions(+), 7 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index b8f70307c91..d947f437f98 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -26,7 +26,7 @@ gunicorn==23.0.0 # via -r requirements/base.in idna==3.4 # via yarl -multidict==6.0.5 +multidict==6.1.0 # via # -r requirements/runtime-deps.in # yarl @@ -36,6 +36,8 @@ pycares==4.4.0 # via aiodns pycparser==2.22 # via cffi +typing-extensions==4.12.2 + # via multidict uvloop==0.20.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in yarl==1.11.1 diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 24511763e32..e54f64ccf31 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -115,7 +115,7 @@ markupsafe==2.1.5 # via jinja2 mdurl==0.1.2 # via markdown-it-py -multidict==6.0.5 
+multidict==6.1.0 # via # -r requirements/multidict.in # -r requirements/runtime-deps.in @@ -264,6 +264,7 @@ typing-extensions==4.12.2 # via # aioredis # annotated-types + # multidict # mypy # pydantic # pydantic-core diff --git a/requirements/cython.txt b/requirements/cython.txt index ae232fdd2ee..f67cc903a0b 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -6,5 +6,7 @@ # cython==3.0.11 # via -r requirements/cython.in -multidict==6.0.5 +multidict==6.1.0 # via -r requirements/multidict.in +typing-extensions==4.12.2 + # via multidict diff --git a/requirements/dev.txt b/requirements/dev.txt index 7ff9de0c37a..90fffae5826 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -113,7 +113,7 @@ markupsafe==2.1.5 # via jinja2 mdurl==0.1.2 # via markdown-it-py -multidict==6.0.5 +multidict==6.1.0 # via # -r requirements/runtime-deps.in # yarl @@ -256,6 +256,7 @@ typing-extensions==4.12.2 # via # aioredis # annotated-types + # multidict # mypy # pydantic # pydantic-core diff --git a/requirements/multidict.txt b/requirements/multidict.txt index 915f9c24dcc..b8b44428920 100644 --- a/requirements/multidict.txt +++ b/requirements/multidict.txt @@ -4,5 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/multidict.txt --resolver=backtracking --strip-extras requirements/multidict.in # -multidict==6.0.5 +multidict==6.1.0 # via -r requirements/multidict.in +typing-extensions==4.12.2 + # via multidict diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 988d7b275c9..eea3d44a539 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -24,7 +24,7 @@ frozenlist==1.4.1 # aiosignal idna==3.4 # via yarl -multidict==6.0.5 +multidict==6.1.0 # via # -r requirements/runtime-deps.in # yarl @@ -32,5 +32,7 @@ pycares==4.4.0 # via aiodns pycparser==2.22 # via cffi +typing-extensions==4.12.2 + # via multidict yarl==1.11.1 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt 
b/requirements/test.txt index 03e9f77a961..56d9a6c9ceb 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -57,7 +57,7 @@ markdown-it-py==3.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -multidict==6.0.5 +multidict==6.1.0 # via # -r requirements/runtime-deps.in # yarl @@ -124,6 +124,7 @@ typer==0.12.5 typing-extensions==4.12.2 # via # annotated-types + # multidict # mypy # pydantic # pydantic-core From b90dd1e41f77e3296edf8932cc0ec59ab1343463 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Tue, 10 Sep 2024 16:08:14 +0100 Subject: [PATCH 0537/1511] Avoid compressing empty body (#9108) (#9110) (cherry picked from commit 1d112418a05dcdcabd38590351e78bec8f4a45bc) --- CHANGES/9108.bugfix.rst | 1 + aiohttp/client.py | 4 +-- aiohttp/client_reqrep.py | 8 +++--- tests/test_client_functional.py | 45 ++++++++++++++++++++++++++++++--- 4 files changed, 50 insertions(+), 8 deletions(-) create mode 100644 CHANGES/9108.bugfix.rst diff --git a/CHANGES/9108.bugfix.rst b/CHANGES/9108.bugfix.rst new file mode 100644 index 00000000000..8be000575e8 --- /dev/null +++ b/CHANGES/9108.bugfix.rst @@ -0,0 +1 @@ +Fixed compressed requests failing when no body was provided -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/client.py b/aiohttp/client.py index 1e5c1448ce5..edf4090832f 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -170,7 +170,7 @@ class _RequestOptions(TypedDict, total=False): auth: Union[BasicAuth, None] allow_redirects: bool max_redirects: int - compress: Union[str, None] + compress: Union[str, bool, None] chunked: Union[bool, None] expect100: bool raise_for_status: Union[None, bool, Callable[[ClientResponse], Awaitable[None]]] @@ -464,7 +464,7 @@ async def _request( auth: Optional[BasicAuth] = None, allow_redirects: bool = True, max_redirects: int = 10, - compress: Optional[str] = None, + compress: Union[str, bool, None] = None, chunked: Optional[bool] = None, expect100: bool = False, raise_for_status: Union[ diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 75bcd3ecf5e..7d4467dbdbb 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -269,7 +269,7 @@ def __init__( cookies: Optional[LooseCookies] = None, auth: Optional[BasicAuth] = None, version: http.HttpVersion = http.HttpVersion11, - compress: Optional[str] = None, + compress: Union[str, bool, None] = None, chunked: Optional[bool] = None, expect100: bool = False, loop: Optional[asyncio.AbstractEventLoop] = None, @@ -494,7 +494,9 @@ def update_cookies(self, cookies: Optional[LooseCookies]) -> None: def update_content_encoding(self, data: Any) -> None: """Set request content encoding.""" - if data is None: + if not data: + # Don't compress an empty body. 
+ self.compress = None return enc = self.headers.get(hdrs.CONTENT_ENCODING, "").lower() @@ -705,7 +707,7 @@ async def send(self, conn: "Connection") -> "ClientResponse": ) if self.compress: - writer.enable_compression(self.compress) + writer.enable_compression(self.compress) # type: ignore[arg-type] if self.chunked is not None: writer.enable_chunking() diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index a350171dacf..70c5bf16096 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -12,7 +12,7 @@ import tarfile import time import zipfile -from typing import Any, AsyncIterator, Type +from typing import Any, AsyncIterator, Optional, Type from unittest import mock import pytest @@ -31,6 +31,9 @@ SocketTimeoutError, TooManyRedirects, ) +from aiohttp.client_reqrep import ClientRequest +from aiohttp.connector import Connection +from aiohttp.http_writer import StreamWriter from aiohttp.pytest_plugin import AiohttpClient, AiohttpServer, TestClient from aiohttp.test_utils import unused_port @@ -1498,8 +1501,44 @@ async def handler(request): assert 200 == resp.status -async def test_POST_DATA_DEFLATE(aiohttp_client) -> None: - async def handler(request): +@pytest.mark.parametrize("data", (None, b"")) +async def test_GET_DEFLATE( + aiohttp_client: AiohttpClient, data: Optional[bytes] +) -> None: + async def handler(request: web.Request) -> web.Response: + return web.json_response({"ok": True}) + + write_mock = None + original_write_bytes = ClientRequest.write_bytes + + async def write_bytes( + self: ClientRequest, writer: StreamWriter, conn: Connection + ) -> None: + nonlocal write_mock + original_write = writer._write + + with mock.patch.object( + writer, "_write", autospec=True, spec_set=True, side_effect=original_write + ) as write_mock: + await original_write_bytes(self, writer, conn) + + with mock.patch.object(ClientRequest, "write_bytes", write_bytes): + app = web.Application() + app.router.add_get("/", 
handler) + client = await aiohttp_client(app) + + async with client.get("/", data=data, compress=True) as resp: + assert resp.status == 200 + content = await resp.json() + assert content == {"ok": True} + + assert write_mock is not None + # No chunks should have been sent for an empty body. + write_mock.assert_not_called() + + +async def test_POST_DATA_DEFLATE(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: data = await request.post() return web.json_response(dict(data)) From 89951ecf658a49cfe477d1a389336f90a043ff2d Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 10 Sep 2024 19:04:02 +0100 Subject: [PATCH 0538/1511] [PR #9110/b90dd1e4 backport][3.10] Avoid compressing empty body (#9108) (#9111) **This is a backport of PR #9110 as merged into 3.11 (b90dd1e41f77e3296edf8932cc0ec59ab1343463).** (cherry picked from commit 1d112418a05dcdcabd38590351e78bec8f4a45bc) Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/9108.bugfix.rst | 1 + aiohttp/client.py | 4 +-- aiohttp/client_reqrep.py | 8 +++--- tests/test_client_functional.py | 45 ++++++++++++++++++++++++++++++--- 4 files changed, 50 insertions(+), 8 deletions(-) create mode 100644 CHANGES/9108.bugfix.rst diff --git a/CHANGES/9108.bugfix.rst b/CHANGES/9108.bugfix.rst new file mode 100644 index 00000000000..8be000575e8 --- /dev/null +++ b/CHANGES/9108.bugfix.rst @@ -0,0 +1 @@ +Fixed compressed requests failing when no body was provided -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/client.py b/aiohttp/client.py index 2814edc31ee..5f9e95f4706 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -165,7 +165,7 @@ class _RequestOptions(TypedDict, total=False): auth: Union[BasicAuth, None] allow_redirects: bool max_redirects: int - compress: Union[str, None] + compress: Union[str, bool, None] chunked: Union[bool, None] expect100: bool raise_for_status: Union[None, bool, Callable[[ClientResponse], Awaitable[None]]] @@ -459,7 +459,7 @@ async def _request( auth: Optional[BasicAuth] = None, allow_redirects: bool = True, max_redirects: int = 10, - compress: Optional[str] = None, + compress: Union[str, bool, None] = None, chunked: Optional[bool] = None, expect100: bool = False, raise_for_status: Union[ diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 2df43d112cd..93e7b59a8a1 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -272,7 +272,7 @@ def __init__( cookies: Optional[LooseCookies] = None, auth: Optional[BasicAuth] = None, version: http.HttpVersion = http.HttpVersion11, - compress: Optional[str] = None, + compress: Union[str, bool, None] = None, chunked: Optional[bool] = None, expect100: bool = False, loop: Optional[asyncio.AbstractEventLoop] = None, @@ -503,7 +503,9 @@ def update_cookies(self, cookies: Optional[LooseCookies]) -> None: def update_content_encoding(self, data: Any) -> None: """Set request content encoding.""" - if data is None: + if not data: + # Don't compress an empty body. 
+ self.compress = None return enc = self.headers.get(hdrs.CONTENT_ENCODING, "").lower() @@ -714,7 +716,7 @@ async def send(self, conn: "Connection") -> "ClientResponse": ) if self.compress: - writer.enable_compression(self.compress) + writer.enable_compression(self.compress) # type: ignore[arg-type] if self.chunked is not None: writer.enable_chunking() diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 9325cc17e48..082db6f3e9a 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -12,7 +12,7 @@ import tarfile import time import zipfile -from typing import Any, AsyncIterator, Type +from typing import Any, AsyncIterator, Optional, Type from unittest import mock import pytest @@ -31,6 +31,9 @@ SocketTimeoutError, TooManyRedirects, ) +from aiohttp.client_reqrep import ClientRequest +from aiohttp.connector import Connection +from aiohttp.http_writer import StreamWriter from aiohttp.pytest_plugin import AiohttpClient, AiohttpServer, TestClient from aiohttp.test_utils import unused_port @@ -1510,8 +1513,44 @@ async def handler(request): assert 200 == resp.status -async def test_POST_DATA_DEFLATE(aiohttp_client) -> None: - async def handler(request): +@pytest.mark.parametrize("data", (None, b"")) +async def test_GET_DEFLATE( + aiohttp_client: AiohttpClient, data: Optional[bytes] +) -> None: + async def handler(request: web.Request) -> web.Response: + return web.json_response({"ok": True}) + + write_mock = None + original_write_bytes = ClientRequest.write_bytes + + async def write_bytes( + self: ClientRequest, writer: StreamWriter, conn: Connection + ) -> None: + nonlocal write_mock + original_write = writer._write + + with mock.patch.object( + writer, "_write", autospec=True, spec_set=True, side_effect=original_write + ) as write_mock: + await original_write_bytes(self, writer, conn) + + with mock.patch.object(ClientRequest, "write_bytes", write_bytes): + app = web.Application() + app.router.add_get("/", 
handler) + client = await aiohttp_client(app) + + async with client.get("/", data=data, compress=True) as resp: + assert resp.status == 200 + content = await resp.json() + assert content == {"ok": True} + + assert write_mock is not None + # No chunks should have been sent for an empty body. + write_mock.assert_not_called() + + +async def test_POST_DATA_DEFLATE(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: data = await request.post() return web.json_response(dict(data)) From 398eef29d571b01faffc1c85c6a9918daff83574 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Tue, 10 Sep 2024 23:41:27 +0100 Subject: [PATCH 0539/1511] =?UTF-8?q?Add=20`strategy`=20argument=20to=20`S?= =?UTF-8?q?treamResponse.enable=5Fcompression()`=20meth=E2=80=A6=20(#9114)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit …od (#6257) (cherry picked from commit b8eca2754de8b30148c7bc2233f019d8045bce36) Co-authored-by: Konstantin Shootkin <1991konstantin@gmail.com> --- CHANGES/6257.feature | 4 ++++ CONTRIBUTORS.txt | 1 + aiohttp/abc.py | 5 ++++- aiohttp/web_response.py | 11 +++++++++-- docs/web_reference.rst | 5 ++++- tests/test_web_response.py | 11 ++++++----- 6 files changed, 28 insertions(+), 9 deletions(-) create mode 100644 CHANGES/6257.feature diff --git a/CHANGES/6257.feature b/CHANGES/6257.feature new file mode 100644 index 00000000000..51fc6bf9bb7 --- /dev/null +++ b/CHANGES/6257.feature @@ -0,0 +1,4 @@ +Added ``strategy`` parameter to :meth:`aiohttp.web.StreamResponse.enable_compression` +The value of this parameter is passed to the :func:`zlib.compressobj` function, allowing people +to use a more sufficient compression algorithm for their data served by :mod:`aiohttp.web` +-- by :user:`shootkin` diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index e7214dfedd4..cf22583989f 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -201,6 +201,7 @@ Kevin Samuel Kimmo 
Parviainen-Jalanko Kirill Klenov Kirill Malovitsa +Konstantin Shutkin Konstantin Valetov Krzysztof Blazewicz Kyrylo Perevozchikov diff --git a/aiohttp/abc.py b/aiohttp/abc.py index 3fb024048a4..59a7976ec06 100644 --- a/aiohttp/abc.py +++ b/aiohttp/abc.py @@ -1,6 +1,7 @@ import asyncio import logging import socket +import zlib from abc import ABC, abstractmethod from collections.abc import Sized from http.cookies import BaseCookie, Morsel @@ -208,7 +209,9 @@ async def drain(self) -> None: """Flush the write buffer.""" @abstractmethod - def enable_compression(self, encoding: str = "deflate") -> None: + def enable_compression( + self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY + ) -> None: """Enable HTTP body compression""" @abstractmethod diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 24ea9f5b46b..d4f18271a83 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -6,6 +6,7 @@ import math import time import warnings +import zlib from concurrent.futures import Executor from http import HTTPStatus from http.cookies import SimpleCookie @@ -85,6 +86,7 @@ def __init__( self._keep_alive: Optional[bool] = None self._chunked = False self._compression = False + self._compression_strategy: int = zlib.Z_DEFAULT_STRATEGY self._compression_force: Optional[ContentCoding] = None self._cookies = SimpleCookie() @@ -174,7 +176,9 @@ def enable_chunked_encoding(self, chunk_size: Optional[int] = None) -> None: warnings.warn("Chunk size is deprecated #1615", DeprecationWarning) def enable_compression( - self, force: Optional[Union[bool, ContentCoding]] = None + self, + force: Optional[Union[bool, ContentCoding]] = None, + strategy: int = zlib.Z_DEFAULT_STRATEGY, ) -> None: """Enables response compression encoding.""" # Backwards compatibility for when force was a bool <0.17. 
@@ -190,6 +194,7 @@ def enable_compression( self._compression = True self._compression_force = force + self._compression_strategy = strategy @property def headers(self) -> "CIMultiDict[str]": @@ -404,7 +409,9 @@ async def _do_start_compression(self, coding: ContentCoding) -> None: if coding != ContentCoding.identity: assert self._payload_writer is not None self._headers[hdrs.CONTENT_ENCODING] = coding.value - self._payload_writer.enable_compression(coding.value) + self._payload_writer.enable_compression( + coding.value, self._compression_strategy + ) # Compressed payload may have different content length, # remove the header self._headers.popall(hdrs.CONTENT_LENGTH, None) diff --git a/docs/web_reference.rst b/docs/web_reference.rst index bb22cfd6369..39b503de248 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -669,7 +669,7 @@ and :ref:`aiohttp-web-signals` handlers:: .. seealso:: :meth:`enable_compression` - .. method:: enable_compression(force=None) + .. method:: enable_compression(force=None, strategy=zlib.Z_DEFAULT_STRATEGY) Enable compression. @@ -679,6 +679,9 @@ and :ref:`aiohttp-web-signals` handlers:: *Accept-Encoding* is not checked if *force* is set to a :class:`ContentCoding`. + *strategy* accepts a :mod:`zlib` compression strategy. + See :func:`zlib.compressobj` for possible values. + .. seealso:: :attr:`compression` .. 
attribute:: chunked diff --git a/tests/test_web_response.py b/tests/test_web_response.py index 2e1e332e0a5..b71730868e4 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -3,6 +3,7 @@ import gzip import io import json +import zlib from concurrent.futures import ThreadPoolExecutor from typing import AsyncIterator, Optional from unittest import mock @@ -461,7 +462,7 @@ async def test_compression_default_coding() -> None: msg = await resp.prepare(req) - msg.enable_compression.assert_called_with("deflate") + msg.enable_compression.assert_called_with("deflate", zlib.Z_DEFAULT_STRATEGY) assert "deflate" == resp.headers.get(hdrs.CONTENT_ENCODING) assert msg.filter is not None @@ -476,7 +477,7 @@ async def test_force_compression_deflate() -> None: assert resp.compression msg = await resp.prepare(req) - msg.enable_compression.assert_called_with("deflate") + msg.enable_compression.assert_called_with("deflate", zlib.Z_DEFAULT_STRATEGY) assert "deflate" == resp.headers.get(hdrs.CONTENT_ENCODING) @@ -488,7 +489,7 @@ async def test_force_compression_no_accept_deflate() -> None: assert resp.compression msg = await resp.prepare(req) - msg.enable_compression.assert_called_with("deflate") + msg.enable_compression.assert_called_with("deflate", zlib.Z_DEFAULT_STRATEGY) assert "deflate" == resp.headers.get(hdrs.CONTENT_ENCODING) @@ -502,7 +503,7 @@ async def test_force_compression_gzip() -> None: assert resp.compression msg = await resp.prepare(req) - msg.enable_compression.assert_called_with("gzip") + msg.enable_compression.assert_called_with("gzip", zlib.Z_DEFAULT_STRATEGY) assert "gzip" == resp.headers.get(hdrs.CONTENT_ENCODING) @@ -514,7 +515,7 @@ async def test_force_compression_no_accept_gzip() -> None: assert resp.compression msg = await resp.prepare(req) - msg.enable_compression.assert_called_with("gzip") + msg.enable_compression.assert_called_with("gzip", zlib.Z_DEFAULT_STRATEGY) assert "gzip" == resp.headers.get(hdrs.CONTENT_ENCODING) From 
1607be98825c6cebeafb3e19c9473184c85f4ca6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 11 Sep 2024 10:33:50 +0000 Subject: [PATCH 0540/1511] Bump pytz from 2024.1 to 2024.2 (#9115) Bumps [pytz](https://github.com/stub42/pytz) from 2024.1 to 2024.2. <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/stub42/pytz/commit/3944f75a65268002562b9fd4c912d8bf566e0a66"><code>3944f75</code></a> Bump version numbers to 2024.2 / 2024b</li> <li><a href="https://github.com/stub42/pytz/commit/640c9bd426a3e62f12e7d5424d936b91dc442d93"><code>640c9bd</code></a> IANA 2024b</li> <li><a href="https://github.com/stub42/pytz/commit/382ca0c9e9e5efc8c56ba8f25513ef9287f5281d"><code>382ca0c</code></a> Squashed 'tz/' changes from 380c07cef..923e54bae</li> <li><a href="https://github.com/stub42/pytz/commit/96a1e880f84bed98b54d64de25f08d7b50639b55"><code>96a1e88</code></a> Stop testing unavailable and EOL Python 3.5</li> <li><a href="https://github.com/stub42/pytz/commit/68186b6da89c1772678d5585df6740349369600f"><code>68186b6</code></a> Add support for Python 3.13</li> <li><a href="https://github.com/stub42/pytz/commit/e994058eb38f4c2dd8798e0de3720ac207c7e6f6"><code>e994058</code></a> Run other jobs if one fails</li> <li><a href="https://github.com/stub42/pytz/commit/2326f9f5a93e46eb58563c9c51e4506959e2f500"><code>2326f9f</code></a> Bump GitHub Actions</li> <li>See full diff in <a href="https://github.com/stub42/pytz/compare/release_2024.1...release_2024.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pytz&package-manager=pip&previous-version=2024.1&new-version=2024.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. 
You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index e54f64ccf31..fe76d5b3d07 100644 --- a/requirements/constraints.txt +++ 
b/requirements/constraints.txt @@ -188,7 +188,7 @@ python-on-whales==0.73.0 # via # -r requirements/lint.in # -r requirements/test.in -pytz==2024.1 +pytz==2024.2 # via babel pyyaml==6.0.2 # via pre-commit diff --git a/requirements/dev.txt b/requirements/dev.txt index 90fffae5826..9e6ed748fa6 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -183,7 +183,7 @@ python-on-whales==0.73.0 # via # -r requirements/lint.in # -r requirements/test.in -pytz==2024.1 +pytz==2024.2 # via babel pyyaml==6.0.2 # via pre-commit diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index a54c0f9224e..7abb4b04e16 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -48,7 +48,7 @@ pyenchant==3.2.2 # via sphinxcontrib-spelling pygments==2.18.0 # via sphinx -pytz==2024.1 +pytz==2024.2 # via babel requests==2.32.3 # via sphinx diff --git a/requirements/doc.txt b/requirements/doc.txt index fd36d67bc1a..324b5b87c93 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -46,7 +46,7 @@ pillow==9.5.0 # blockdiag pygments==2.18.0 # via sphinx -pytz==2024.1 +pytz==2024.2 # via babel requests==2.32.3 # via sphinx From 68980affc69a9f9b8ca3bf9c5e8d07bdb1494bb1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 11 Sep 2024 10:49:05 +0000 Subject: [PATCH 0541/1511] Bump rich from 13.8.0 to 13.8.1 (#9116) Bumps [rich](https://github.com/Textualize/rich) from 13.8.0 to 13.8.1. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/Textualize/rich/releases">rich's releases</a>.</em></p> <blockquote> <h2>The Python 3.13 release</h2> <h2>[13.8.1] - 2024-09-10</h2> <h3>Fixed</h3> <ul> <li>Added support for Python 3.13 <a href="https://redirect.github.com/Textualize/rich/pull/3481">Textualize/rich#3481</a></li> <li>Fixed infinite loop when appending Text to same instance <a href="https://redirect.github.com/Textualize/rich/pull/3480">Textualize/rich#3480</a></li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/Textualize/rich/blob/master/CHANGELOG.md">rich's changelog</a>.</em></p> <blockquote> <h2>[13.8.1] - 2024-09-10</h2> <h3>Fixed</h3> <ul> <li>Added support for Python 3.13 <a href="https://redirect.github.com/Textualize/rich/pull/3481">Textualize/rich#3481</a></li> <li>Fixed infinite loop when appending Text to same instance <a href="https://redirect.github.com/Textualize/rich/pull/3480">Textualize/rich#3480</a></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/Textualize/rich/commit/260b35fd99c6b6ee9e28be8e3789e74bc3dd0cef"><code>260b35f</code></a> Merge pull request <a href="https://redirect.github.com/Textualize/rich/issues/3485">#3485</a> from Textualize/bump1381</li> <li><a href="https://github.com/Textualize/rich/commit/574038d7310c38ef2600a915be5b616f4ab596ca"><code>574038d</code></a> bump</li> <li><a href="https://github.com/Textualize/rich/commit/1f131d13b81e928d71d346461e54941ad5f3141e"><code>1f131d1</code></a> Merge pull request <a href="https://redirect.github.com/Textualize/rich/issues/3481">#3481</a> from hugovk/add-3.13</li> <li><a href="https://github.com/Textualize/rich/commit/c8abbb3bd2632655d4dce61e7a8bffc49d981cc8"><code>c8abbb3</code></a> Fix test for Python 3.13</li> <li><a 
href="https://github.com/Textualize/rich/commit/815596ef0cb1f257a0fa4471b4bc1d4a6677e13c"><code>815596e</code></a> Add support for Python 3.13</li> <li><a href="https://github.com/Textualize/rich/commit/22c2cffd8e88181ad1162ca9098d190ec28c6996"><code>22c2cff</code></a> Merge pull request <a href="https://redirect.github.com/Textualize/rich/issues/3480">#3480</a> from Textualize/fix-infinite-append</li> <li><a href="https://github.com/Textualize/rich/commit/f44e8bd743aaea1301899c8b0ca1e6c6dd456c70"><code>f44e8bd</code></a> changelog</li> <li><a href="https://github.com/Textualize/rich/commit/f2ee29531bd01354a5acf26f7c98dfadf70e6ab1"><code>f2ee295</code></a> fix infinite loop in append</li> <li>See full diff in <a href="https://github.com/Textualize/rich/compare/v13.8.0...v13.8.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=rich&package-manager=pip&previous-version=13.8.0&new-version=13.8.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index fe76d5b3d07..2d6e2171c3b 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -201,7 +201,7 @@ requests==2.32.3 # cherry-picker # 
python-on-whales # sphinx -rich==13.8.0 +rich==13.8.1 # via typer setuptools-git==1.2 # via -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 9e6ed748fa6..1995cf3bbe0 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -196,7 +196,7 @@ requests==2.32.3 # cherry-picker # python-on-whales # sphinx -rich==13.8.0 +rich==13.8.1 # via typer setuptools-git==1.2 # via -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 159b481e30e..43925da0796 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -86,7 +86,7 @@ pyyaml==6.0.2 # via pre-commit requests==2.32.3 # via python-on-whales -rich==13.8.0 +rich==13.8.1 # via typer shellingham==1.5.4 # via typer diff --git a/requirements/test.txt b/requirements/test.txt index 56d9a6c9ceb..7114023eaaf 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -102,7 +102,7 @@ regex==2024.7.24 # via re-assert requests==2.32.3 # via python-on-whales -rich==13.8.0 +rich==13.8.1 # via typer setuptools-git==1.2 # via -r requirements/test.in From 01e89211657d95c5ff53b2fe6d3c93b184e3d5fb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 11 Sep 2024 10:56:06 +0000 Subject: [PATCH 0542/1511] Bump importlib-resources from 6.4.4 to 6.4.5 (#9117) Bumps [importlib-resources](https://github.com/python/importlib_resources) from 6.4.4 to 6.4.5. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/python/importlib_resources/blob/main/NEWS.rst">importlib-resources's changelog</a>.</em></p> <blockquote> <h1>v6.4.5</h1> <h2>Bugfixes</h2> <ul> <li>Omit sentinel values from a namespace path. 
(<a href="https://redirect.github.com/python/importlib_resources/issues/311">#311</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/python/importlib_resources/commit/284148b005b57031a354402c446473f53cab2c49"><code>284148b</code></a> Finalize</li> <li><a href="https://github.com/python/importlib_resources/commit/63a7bcba42e6222971edd91f49e7efffe1972f35"><code>63a7bcb</code></a> Merge pull request <a href="https://redirect.github.com/python/importlib_resources/issues/315">#315</a> from python/bugfix/311-non-path-namespace-paths</li> <li><a href="https://github.com/python/importlib_resources/commit/2c145c5b1ff95290794b2cb63e5c924e1847456d"><code>2c145c5</code></a> Omit sentinel values from a namespace path.</li> <li><a href="https://github.com/python/importlib_resources/commit/47d73b1e7787cd66ee57be676f2385d2183f78ac"><code>47d73b1</code></a> Add test capturing failure when resolving the MultiplexedPath for a namespace...</li> <li><a href="https://github.com/python/importlib_resources/commit/4875bc5179938324d157d9917b3a0bfb5fca8dd1"><code>4875bc5</code></a> Add type annotations for _candidate_paths</li> <li><a href="https://github.com/python/importlib_resources/commit/d84ca376316016420297fbc310ba181ca7d2864d"><code>d84ca37</code></a> Fix typo in _temp_path comment.</li> <li><a href="https://github.com/python/importlib_resources/commit/1a6fef299da3e0d6f74cb26e1d41fc0f13ed63ad"><code>1a6fef2</code></a> Merge <a href="https://github.com/jaraco/skeleton">https://github.com/jaraco/skeleton</a></li> <li><a href="https://github.com/python/importlib_resources/commit/790fa6e6feb9a93d39135494819b12e9df8a7bba"><code>790fa6e</code></a> Include the trailing slash in disable_error_code(overload-overlap), also requ...</li> <li><a href="https://github.com/python/importlib_resources/commit/2beb8b0c9d0f7046370e7c58c4e6baaf35154a16"><code>2beb8b0</code></a> Add support for linking usernames.</li> <li><a 
href="https://github.com/python/importlib_resources/commit/0c326f3f77b2420163f73d97f8fbd090fa49147d"><code>0c326f3</code></a> Add a degenerate nitpick_ignore for downstream consumers. Add a 'local' comme...</li> <li>Additional commits viewable in <a href="https://github.com/python/importlib_resources/compare/v6.4.4...v6.4.5">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=importlib-resources&package-manager=pip&previous-version=6.4.4&new-version=6.4.5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 2d6e2171c3b..bc9196fe9ee 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -99,7 +99,7 @@ importlib-metadata==8.4.0 # via # build # sphinx -importlib-resources==6.4.4 +importlib-resources==6.4.5 # via towncrier incremental==24.7.2 # via towncrier diff --git a/requirements/dev.txt b/requirements/dev.txt index 1995cf3bbe0..5d742ac7407 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -97,7 +97,7 @@ importlib-metadata==8.4.0 # via # build # sphinx -importlib-resources==6.4.4 +importlib-resources==6.4.5 # via towncrier incremental==24.7.2 # via towncrier diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 7abb4b04e16..41bd5bc0886 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -28,7 +28,7 @@ imagesize==1.4.1 # via sphinx importlib-metadata==8.4.0 # via sphinx 
-importlib-resources==6.4.4 +importlib-resources==6.4.5 # via towncrier incremental==24.7.2 # via towncrier diff --git a/requirements/doc.txt b/requirements/doc.txt index 324b5b87c93..48a67c0643f 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -28,7 +28,7 @@ imagesize==1.4.1 # via sphinx importlib-metadata==8.4.0 # via sphinx -importlib-resources==6.4.4 +importlib-resources==6.4.5 # via towncrier incremental==24.7.2 # via towncrier From fb6726feeee841a480a1b08f4d02a8a73fd8229d Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 11 Sep 2024 15:09:04 +0100 Subject: [PATCH 0543/1511] [PR #7043/bee613d0 backport][3.10] Add clarification about `GracefulExit` when using `handle_signals=True` (#9122) **This is a backport of PR #7043 as merged into master (bee613d090cab3b7c00a83604668181961b562ff).** Co-authored-by: Daste <stefankar1000@gmail.com> --- CHANGES/4414.doc | 1 + docs/web_reference.rst | 16 ++++++++++++++-- 2 files changed, 15 insertions(+), 2 deletions(-) create mode 100644 CHANGES/4414.doc diff --git a/CHANGES/4414.doc b/CHANGES/4414.doc new file mode 100644 index 00000000000..b4be46afee8 --- /dev/null +++ b/CHANGES/4414.doc @@ -0,0 +1 @@ +Clarified that ``GracefulExit`` needs to be handled in ``AppRunner`` and ``ServerRunner`` when using ``handle_signals=True``. -- by :user:`Daste745` diff --git a/docs/web_reference.rst b/docs/web_reference.rst index bb22cfd6369..cdfe5a050e9 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -2734,7 +2734,8 @@ application on specific TCP or Unix socket, e.g.:: :param bool handle_signals: add signal handlers for :data:`signal.SIGINT` and :data:`signal.SIGTERM` (``False`` by - default). + default). These handlers will raise + :exc:`GracefulExit`. :param kwargs: named parameters to pass into web protocol. 
@@ -2807,7 +2808,8 @@ application on specific TCP or Unix socket, e.g.:: :param bool handle_signals: add signal handlers for :data:`signal.SIGINT` and :data:`signal.SIGTERM` (``False`` by - default). + default). These handlers will raise + :exc:`GracefulExit`. :param kwargs: named parameters to pass into web protocol. @@ -2938,6 +2940,16 @@ application on specific TCP or Unix socket, e.g.:: ``128`` by default. +.. exception:: GracefulExit + + Raised by signal handlers for :data:`signal.SIGINT` and :data:`signal.SIGTERM` + defined in :class:`AppRunner` and :class:`ServerRunner` + when ``handle_signals`` is set to ``True``. + + Inherited from :exc:`SystemExit`, + which exits with error code ``1`` if not handled. + + Utilities --------- From 8596dc2651a0da7ed6ea641517f4a96af31f3466 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 11 Sep 2024 16:18:34 +0100 Subject: [PATCH 0544/1511] [PR #7043/bee613d0 backport][3.11] Add clarification about `GracefulExit` when using `handle_signals=True` (#9123) **This is a backport of PR #7043 as merged into master (bee613d090cab3b7c00a83604668181961b562ff).** Co-authored-by: Daste <stefankar1000@gmail.com> --- CHANGES/4414.doc | 1 + docs/web_reference.rst | 16 ++++++++++++++-- 2 files changed, 15 insertions(+), 2 deletions(-) create mode 100644 CHANGES/4414.doc diff --git a/CHANGES/4414.doc b/CHANGES/4414.doc new file mode 100644 index 00000000000..b4be46afee8 --- /dev/null +++ b/CHANGES/4414.doc @@ -0,0 +1 @@ +Clarified that ``GracefulExit`` needs to be handled in ``AppRunner`` and ``ServerRunner`` when using ``handle_signals=True``. 
-- by :user:`Daste745` diff --git a/docs/web_reference.rst b/docs/web_reference.rst index 39b503de248..f2f5361ca43 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -2737,7 +2737,8 @@ application on specific TCP or Unix socket, e.g.:: :param bool handle_signals: add signal handlers for :data:`signal.SIGINT` and :data:`signal.SIGTERM` (``False`` by - default). + default). These handlers will raise + :exc:`GracefulExit`. :param kwargs: named parameters to pass into web protocol. @@ -2810,7 +2811,8 @@ application on specific TCP or Unix socket, e.g.:: :param bool handle_signals: add signal handlers for :data:`signal.SIGINT` and :data:`signal.SIGTERM` (``False`` by - default). + default). These handlers will raise + :exc:`GracefulExit`. :param kwargs: named parameters to pass into web protocol. @@ -2941,6 +2943,16 @@ application on specific TCP or Unix socket, e.g.:: ``128`` by default. +.. exception:: GracefulExit + + Raised by signal handlers for :data:`signal.SIGINT` and :data:`signal.SIGTERM` + defined in :class:`AppRunner` and :class:`ServerRunner` + when ``handle_signals`` is set to ``True``. + + Inherited from :exc:`SystemExit`, + which exits with error code ``1`` if not handled. 
+ + Utilities --------- From 72739222fd42c77765077d118e35e849f1532661 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Wed, 11 Sep 2024 16:38:32 +0100 Subject: [PATCH 0545/1511] Add repr() test (#9121) --- tests/test_streams.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_streams.py b/tests/test_streams.py index 115371c806d..fcf13a91eb3 100644 --- a/tests/test_streams.py +++ b/tests/test_streams.py @@ -1126,6 +1126,7 @@ async def test_unread_empty(self) -> None: async def test_empty_stream_reader() -> None: s = streams.EmptyStreamReader() assert str(s) is not None + assert repr(s) == "<EmptyStreamReader>" assert s.set_exception(ValueError()) is None assert s.exception() is None assert s.feed_eof() is None From 333a7a8df200a166b4c24925573903cb4eee1d9c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 11 Sep 2024 17:01:47 +0100 Subject: [PATCH 0546/1511] [PR #9121/72739222 backport][3.10] Add repr() test (#9125) **This is a backport of PR #9121 as merged into 3.11 (72739222fd42c77765077d118e35e849f1532661).** Co-authored-by: Sam Bull <git@sambull.org> --- tests/test_streams.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_streams.py b/tests/test_streams.py index 115371c806d..fcf13a91eb3 100644 --- a/tests/test_streams.py +++ b/tests/test_streams.py @@ -1126,6 +1126,7 @@ async def test_unread_empty(self) -> None: async def test_empty_stream_reader() -> None: s = streams.EmptyStreamReader() assert str(s) is not None + assert repr(s) == "<EmptyStreamReader>" assert s.set_exception(ValueError()) is None assert s.exception() is None assert s.feed_eof() is None From 9d529deb2b28f5fef411016d89ca8ed64cca49a9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 12 Sep 2024 10:40:22 +0000 Subject: [PATCH 0547/1511] Bump regex from 2024.7.24 to 2024.9.11 (#9127) Bumps 
[regex](https://github.com/mrabarnett/mrab-regex) from 2024.7.24 to 2024.9.11. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/mrabarnett/mrab-regex/blob/hg/changelog.txt">regex's changelog</a>.</em></p> <blockquote> <p>Version: 2024.9.14</p> <pre><code>Reverted to actions/download-artifact@v3 and actions/upload-artifact@v3 in main.yml because GitHub Actions failed when using them. </code></pre> <p>Version: 2024.9.13</p> <pre><code>Updated to actions/upload-artifact@v4 in main.yml. </code></pre> <p>Version: 2024.9.12</p> <pre><code>Updated to actions/download-artifact@v4 in main.yml. </code></pre> <p>Version: 2024.9.11</p> <pre><code>Updated to Unicode 16.0.0. </code></pre> <p>Version: 2024.7.24</p> <pre><code>Git issue 539: Bug: Partial matching fails on a simple example </code></pre> <p>Version: 2024.6.22</p> <pre><code>Git issue 535: Regex fails Unicode 15.1 GraphemeBreakTest due to missing new GB9c rule implementation </code></pre> <p>Version: 2024.5.15</p> <pre><code>Git issue 530: hangs with fuzzy and optionals <p>It's not hanging, it'll finish eventually. It's just an example of catastrophic backtracking.</p> <p>The error printed when Ctrl+C is pressed does show a bug, though, which is now fixed.<br /> </code></pre></p> <p>Version: 2024.5.10</p> <pre><code>Updated for Python 3.13. <p><time.h> now needs to be included explicitly because Python.h no longer includes it.<br /> </code></pre></p> <p>Version: 2024.4.28</p> <pre><code>Git issue 527: `VERBOSE`/`X` flag breaks `\N` escapes </code></pre> <p>Version: 2024.4.16</p> <pre><code>Git issue 525: segfault when fuzzy matching empty list </code></pre> <p>Version: 2023.12.25</p> <pre><code>Cannot get release notification action in main.yml to work. Commenting it out for now. </code></pre> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/mrabarnett/mrab-regex/commit/d3510fea2af432e67c9f9dce3b612fab18cdf375"><code>d3510fe</code></a> Updated to Unicode 16.0.0.</li> <li>See full diff in <a href="https://github.com/mrabarnett/mrab-regex/compare/2024.7.24...2024.9.11">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=regex&package-manager=pip&previous-version=2024.7.24&new-version=2024.9.11)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index bc9196fe9ee..62936f4aba2 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -194,7 +194,7 @@ pyyaml==6.0.2 # via pre-commit re-assert==1.1.0 # via -r requirements/test.in -regex==2024.7.24 +regex==2024.9.11 # via re-assert requests==2.32.3 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 5d742ac7407..1d288e81f3f 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -189,7 +189,7 @@ pyyaml==6.0.2 # via pre-commit re-assert==1.1.0 # via -r requirements/test.in -regex==2024.7.24 +regex==2024.9.11 # via re-assert requests==2.32.3 # via diff --git a/requirements/test.txt b/requirements/test.txt index 7114023eaaf..86ba7331b84 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -98,7 +98,7 @@ python-on-whales==0.73.0 # via -r requirements/test.in re-assert==1.1.0 # via -r requirements/test.in -regex==2024.7.24 +regex==2024.9.11 # via re-assert 
requests==2.32.3 # via python-on-whales From 295ed82d261bf6e14605214cd47fc7a8d40d49d3 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 12 Sep 2024 14:11:35 +0100 Subject: [PATCH 0548/1511] [PR #7567/55a2af19 backport][3.11] Minor fixes to docs (#9132) **This is a backport of PR #7567 as merged into master (55a2af19d6549c8ddebd15edc25fda3c4b5094a9).** Co-authored-by: Sam Bull <git@sambull.org> --- docs/client_advanced.rst | 4 ++-- docs/client_reference.rst | 8 ++++---- docs/streams.rst | 4 ++-- docs/web_reference.rst | 6 +++--- 4 files changed, 11 insertions(+), 11 deletions(-) diff --git a/docs/client_advanced.rst b/docs/client_advanced.rst index 958e31dcc7c..26594a21b1c 100644 --- a/docs/client_advanced.rst +++ b/docs/client_advanced.rst @@ -618,7 +618,7 @@ Graceful Shutdown ----------------- When :class:`ClientSession` closes at the end of an ``async with`` -block (or through a direct :meth:`ClientSession.close()` call), the +block (or through a direct :meth:`ClientSession.close` call), the underlying connection remains open due to asyncio internal details. In practice, the underlying connection will close after a short while. However, if the event loop is stopped before the underlying @@ -658,7 +658,7 @@ on this. Character Set Detection ----------------------- -If you encounter a :exc:`UnicodeDecodeError` when using :meth:`ClientResponse.text()` +If you encounter a :exc:`UnicodeDecodeError` when using :meth:`ClientResponse.text` this may be because the response does not include the charset needed to decode the body. diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 77230a755c6..a16443f275e 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -134,7 +134,7 @@ The client session supports the context manager protocol for self closing. 
:param bool raise_for_status: - Automatically call :meth:`ClientResponse.raise_for_status()` for + Automatically call :meth:`ClientResponse.raise_for_status` for each response, ``False`` by default. This parameter can be overridden when making a request, e.g.:: @@ -325,7 +325,7 @@ The client session supports the context manager protocol for self closing. .. attribute:: raise_for_status - Should :meth:`ClientResponse.raise_for_status()` be called for each response + Should :meth:`ClientResponse.raise_for_status` be called for each response Either :class:`bool` or :class:`collections.abc.Callable` @@ -454,7 +454,7 @@ The client session supports the context manager protocol for self closing. :param bool expect100: Expect 100-continue response from server. ``False`` by default (optional). - :param bool raise_for_status: Automatically call :meth:`ClientResponse.raise_for_status()` for + :param bool raise_for_status: Automatically call :meth:`ClientResponse.raise_for_status` for response if set to ``True``. If set to ``None`` value from ``ClientSession`` will be used. ``None`` by default (optional). @@ -876,7 +876,7 @@ certification chaining. ``False`` by default (optional). :param bool raise_for_status: Automatically call - :meth:`ClientResponse.raise_for_status()` + :meth:`ClientResponse.raise_for_status` for response if set to ``True``. If set to ``None`` value from ``ClientSession`` will be used. diff --git a/docs/streams.rst b/docs/streams.rst index 9d49a80f1b6..8e4be9d5343 100644 --- a/docs/streams.rst +++ b/docs/streams.rst @@ -182,7 +182,7 @@ Helpers .. seealso:: - :meth:`StreamReader.at_eof()` + :meth:`StreamReader.at_eof` .. method:: StreamReader.at_eof() @@ -208,7 +208,7 @@ Helpers .. warning:: The method does not wake up waiters. - E.g. :meth:`~StreamReader.read()` will not be resumed. + E.g. :meth:`~StreamReader.read` will not be resumed. .. 
method:: wait_eof() diff --git a/docs/web_reference.rst b/docs/web_reference.rst index f2f5361ca43..f0da3237bd0 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -965,8 +965,8 @@ and :ref:`aiohttp-web-signals` handlers:: :meth:`receive` and others. To enable back-pressure from slow websocket clients treat methods - :meth:`ping()`, :meth:`pong()`, :meth:`send_str()`, - :meth:`send_bytes()`, :meth:`send_json()` as coroutines. By + :meth:`ping`, :meth:`pong`, :meth:`send_str`, + :meth:`send_bytes`, :meth:`send_json` as coroutines. By default write buffer size is set to 64k. :param bool autoping: Automatically send @@ -1652,7 +1652,7 @@ Application and Router :async: A :ref:`coroutine<coroutine>` that should be called on - server stopping but before :meth:`cleanup()`. + server stopping but before :meth:`cleanup`. The purpose of the method is calling :attr:`on_shutdown` signal handlers. From 828811a549df4dc55c52c8fa071b129c13bd06eb Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 12 Sep 2024 14:11:45 +0100 Subject: [PATCH 0549/1511] [PR #7567/55a2af19 backport][3.10] Minor fixes to docs (#9131) **This is a backport of PR #7567 as merged into master (55a2af19d6549c8ddebd15edc25fda3c4b5094a9).** Co-authored-by: Sam Bull <git@sambull.org> --- docs/client_advanced.rst | 4 ++-- docs/client_reference.rst | 8 ++++---- docs/streams.rst | 4 ++-- docs/web_reference.rst | 6 +++--- 4 files changed, 11 insertions(+), 11 deletions(-) diff --git a/docs/client_advanced.rst b/docs/client_advanced.rst index 958e31dcc7c..26594a21b1c 100644 --- a/docs/client_advanced.rst +++ b/docs/client_advanced.rst @@ -618,7 +618,7 @@ Graceful Shutdown ----------------- When :class:`ClientSession` closes at the end of an ``async with`` -block (or through a direct :meth:`ClientSession.close()` call), the +block (or through a direct :meth:`ClientSession.close` call), the underlying connection remains open due to asyncio 
internal details. In practice, the underlying connection will close after a short while. However, if the event loop is stopped before the underlying @@ -658,7 +658,7 @@ on this. Character Set Detection ----------------------- -If you encounter a :exc:`UnicodeDecodeError` when using :meth:`ClientResponse.text()` +If you encounter a :exc:`UnicodeDecodeError` when using :meth:`ClientResponse.text` this may be because the response does not include the charset needed to decode the body. diff --git a/docs/client_reference.rst b/docs/client_reference.rst index bcd2108c1eb..1686aa7c113 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -132,7 +132,7 @@ The client session supports the context manager protocol for self closing. :param bool raise_for_status: - Automatically call :meth:`ClientResponse.raise_for_status()` for + Automatically call :meth:`ClientResponse.raise_for_status` for each response, ``False`` by default. This parameter can be overridden when making a request, e.g.:: @@ -323,7 +323,7 @@ The client session supports the context manager protocol for self closing. .. attribute:: raise_for_status - Should :meth:`ClientResponse.raise_for_status()` be called for each response + Should :meth:`ClientResponse.raise_for_status` be called for each response Either :class:`bool` or :class:`collections.abc.Callable` @@ -452,7 +452,7 @@ The client session supports the context manager protocol for self closing. :param bool expect100: Expect 100-continue response from server. ``False`` by default (optional). - :param bool raise_for_status: Automatically call :meth:`ClientResponse.raise_for_status()` for + :param bool raise_for_status: Automatically call :meth:`ClientResponse.raise_for_status` for response if set to ``True``. If set to ``None`` value from ``ClientSession`` will be used. ``None`` by default (optional). @@ -875,7 +875,7 @@ certification chaining. ``False`` by default (optional). 
:param bool raise_for_status: Automatically call - :meth:`ClientResponse.raise_for_status()` + :meth:`ClientResponse.raise_for_status` for response if set to ``True``. If set to ``None`` value from ``ClientSession`` will be used. diff --git a/docs/streams.rst b/docs/streams.rst index 9d49a80f1b6..8e4be9d5343 100644 --- a/docs/streams.rst +++ b/docs/streams.rst @@ -182,7 +182,7 @@ Helpers .. seealso:: - :meth:`StreamReader.at_eof()` + :meth:`StreamReader.at_eof` .. method:: StreamReader.at_eof() @@ -208,7 +208,7 @@ Helpers .. warning:: The method does not wake up waiters. - E.g. :meth:`~StreamReader.read()` will not be resumed. + E.g. :meth:`~StreamReader.read` will not be resumed. .. method:: wait_eof() diff --git a/docs/web_reference.rst b/docs/web_reference.rst index cdfe5a050e9..4efba726fa9 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -962,8 +962,8 @@ and :ref:`aiohttp-web-signals` handlers:: :meth:`receive` and others. To enable back-pressure from slow websocket clients treat methods - :meth:`ping()`, :meth:`pong()`, :meth:`send_str()`, - :meth:`send_bytes()`, :meth:`send_json()` as coroutines. By + :meth:`ping`, :meth:`pong`, :meth:`send_str`, + :meth:`send_bytes`, :meth:`send_json` as coroutines. By default write buffer size is set to 64k. :param bool autoping: Automatically send @@ -1649,7 +1649,7 @@ Application and Router :async: A :ref:`coroutine<coroutine>` that should be called on - server stopping but before :meth:`cleanup()`. + server stopping but before :meth:`cleanup`. The purpose of the method is calling :attr:`on_shutdown` signal handlers. 
From 5ad9ed25884879b45e1137bcb6cc6f6eac2f1427 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 12 Sep 2024 17:34:27 +0100 Subject: [PATCH 0550/1511] [PR #9098/94685fb6 backport][3.11] Remove extra ``MultiDictProxy`` wrapper from ``BaseRequest.query`` (#9113) **This is a backport of PR #9098 as merged into master (94685fb672296629c93b41e1528268c2a667c806).** Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/web_request.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index 2465e6655ad..eca5063e30e 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -486,7 +486,7 @@ def raw_path(self) -> str: @reify def query(self) -> "MultiMapping[str]": """A multidict with all the variables in the query string.""" - return MultiDictProxy(self._rel_url.query) + return self._rel_url.query @reify def query_string(self) -> str: From 501f50310f989473c9edcd8b91cf514447a07685 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 12 Sep 2024 17:34:49 +0100 Subject: [PATCH 0551/1511] [PR #9098/94685fb6 backport][3.10] Remove extra ``MultiDictProxy`` wrapper from ``BaseRequest.query`` (#9112) **This is a backport of PR #9098 as merged into master (94685fb672296629c93b41e1528268c2a667c806).** Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/web_request.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index 1d94c576794..f233afbbd44 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -486,7 +486,7 @@ def raw_path(self) -> str: @reify def query(self) -> "MultiMapping[str]": """A multidict with all the variables in the query string.""" - return MultiDictProxy(self._rel_url.query) + return self._rel_url.query @reify def query_string(self) -> str: From a81f1296d1fbefd5713d04231e9749f6a298b5e7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= <Mic92@users.noreply.github.com> Date: Thu, 12 Sep 2024 23:02:02 +0200 Subject: [PATCH 0552/1511] Implement binding to IPv6 addresses in the pytest server fixture (#9124) Co-authored-by: tan01 <jonnytan@google.com> Co-authored-by: Sviatoslav Sydorenko <wk.cvs.github@sydorenko.org.ua> --- CHANGES/4650.bugfix | 1 + aiohttp/test_utils.py | 5 ++++- tests/test_test_utils.py | 12 ++++++++++++ 3 files changed, 17 insertions(+), 1 deletion(-) create mode 100644 CHANGES/4650.bugfix diff --git a/CHANGES/4650.bugfix b/CHANGES/4650.bugfix new file mode 100644 index 00000000000..5c9fc17ff60 --- /dev/null +++ b/CHANGES/4650.bugfix @@ -0,0 +1 @@ +Implement binding to IPv6 addresses in the pytest server fixture. 
diff --git a/aiohttp/test_utils.py b/aiohttp/test_utils.py index 13b6f4d9c50..08ce5bff9e1 100644 --- a/aiohttp/test_utils.py +++ b/aiohttp/test_utils.py @@ -119,10 +119,13 @@ async def start_server( await self.runner.setup() if not self.port: self.port = 0 + absolute_host = self.host try: version = ipaddress.ip_address(self.host).version except ValueError: version = 4 + if version == 6: + absolute_host = f"[{self.host}]" family = socket.AF_INET6 if version == 6 else socket.AF_INET _sock = self.socket_factory(self.host, self.port, family) self.host, self.port = _sock.getsockname()[:2] @@ -135,7 +138,7 @@ async def start_server( self.port = sockets[0].getsockname()[1] if not self.scheme: self.scheme = "https" if self._ssl else "http" - self._root = URL(f"{self.scheme}://{self.host}:{self.port}") + self._root = URL(f"{self.scheme}://{absolute_host}:{self.port}") @abstractmethod # pragma: no cover async def _make_runner(self, **kwargs: Any) -> BaseRunner: diff --git a/tests/test_test_utils.py b/tests/test_test_utils.py index 77349246616..a9c5179aedc 100644 --- a/tests/test_test_utils.py +++ b/tests/test_test_utils.py @@ -371,3 +371,15 @@ def factory(*args, **kwargs) -> socket: pass assert factory_called + + +@pytest.mark.parametrize( + ("hostname", "expected_host"), + [("127.0.0.1", "127.0.0.1"), ("localhost", "127.0.0.1"), ("::1", "::1")], +) +async def test_test_server_hostnames(hostname, expected_host, loop) -> None: + app = _create_example_app() + server = _TestServer(app, host=hostname, loop=loop) + async with server: + pass + assert server.host == expected_host From c2df9cb899066aaa7e6c05231e5257ed53e1d1e1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 12 Sep 2024 21:25:02 +0000 Subject: [PATCH 0553/1511] Bump importlib-metadata from 8.4.0 to 8.5.0 (#9128) Bumps [importlib-metadata](https://github.com/python/importlib_metadata) from 8.4.0 to 8.5.0. 
<details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/python/importlib_metadata/blob/main/NEWS.rst">importlib-metadata's changelog</a>.</em></p> <blockquote> <h1>v8.5.0</h1> <h2>Features</h2> <ul> <li>Deferred import of zipfile.Path (<a href="https://redirect.github.com/python/importlib_metadata/issues/502">#502</a>)</li> <li>Deferred import of json (<a href="https://redirect.github.com/python/importlib_metadata/issues/503">#503</a>)</li> <li>Rely on zipp overlay for zipfile.Path.</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/python/importlib_metadata/commit/b34810b1e0665580a91ea19b6317a1890ecd42c1"><code>b34810b</code></a> Finalize</li> <li><a href="https://github.com/python/importlib_metadata/commit/8c1d1fa4f4e2160fef17b0bb7fef2ca276b53a99"><code>8c1d1fa</code></a> Merge pull request <a href="https://redirect.github.com/python/importlib_metadata/issues/501">#501</a> from Avasam/Pass-mypy-and-link-issues</li> <li><a href="https://github.com/python/importlib_metadata/commit/afa39e8e08b48fbedd3b8ac94cf58de39ff09c35"><code>afa39e8</code></a> Back out changes to tests._path</li> <li><a href="https://github.com/python/importlib_metadata/commit/8b909f9b1cdfc056eff3acd38337661ab24ef8b3"><code>8b909f9</code></a> Merge pull request <a href="https://redirect.github.com/python/importlib_metadata/issues/503">#503</a> from danielhollas/defer-json</li> <li><a href="https://github.com/python/importlib_metadata/commit/2a3f50d8bbd41fc831676e7dc89d84c605c85760"><code>2a3f50d</code></a> Add news fragment.</li> <li><a href="https://github.com/python/importlib_metadata/commit/3f78dc17786e0e0290db450e843ac494af0158e9"><code>3f78dc1</code></a> Add comment to protect the deferred import.</li> <li><a href="https://github.com/python/importlib_metadata/commit/18eb2da0ee267394c1735bec5b1d9f2b0fa77dd9"><code>18eb2da</code></a> Revert "Defer platform import"</li> <li><a 
href="https://github.com/python/importlib_metadata/commit/58832f234ba1aea5906c4d56f4b2c4a21d640608"><code>58832f2</code></a> Merge pull request <a href="https://redirect.github.com/python/importlib_metadata/issues/502">#502</a> from danielhollas/defer-zipp</li> <li><a href="https://github.com/python/importlib_metadata/commit/e3ce33b45e572824b482049570cac13da543999b"><code>e3ce33b</code></a> Add news fragment.</li> <li><a href="https://github.com/python/importlib_metadata/commit/d11b67fed9f21503ca369e33c917a8038994ce0b"><code>d11b67f</code></a> Add comment to protect the deferred import.</li> <li>Additional commits viewable in <a href="https://github.com/python/importlib_metadata/compare/v8.4.0...v8.5.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=importlib-metadata&package-manager=pip&previous-version=8.4.0&new-version=8.5.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 62936f4aba2..c6b76c6564a 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -95,7 +95,7 @@ idna==3.3 # yarl imagesize==1.4.1 # via sphinx -importlib-metadata==8.4.0 +importlib-metadata==8.5.0 # via # build # sphinx diff --git a/requirements/dev.txt b/requirements/dev.txt index 1d288e81f3f..6d5b0e200be 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -93,7 +93,7 @@ idna==3.4 # yarl imagesize==1.4.1 # via sphinx -importlib-metadata==8.4.0 +importlib-metadata==8.5.0 # via # build # sphinx diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 41bd5bc0886..17c44816fc9 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -26,7 +26,7 @@ idna==3.4 # via requests imagesize==1.4.1 # via sphinx -importlib-metadata==8.4.0 +importlib-metadata==8.5.0 # via sphinx importlib-resources==6.4.5 # via 
towncrier diff --git a/requirements/doc.txt b/requirements/doc.txt index 48a67c0643f..dba8517376f 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -26,7 +26,7 @@ idna==3.4 # via requests imagesize==1.4.1 # via sphinx -importlib-metadata==8.4.0 +importlib-metadata==8.5.0 # via sphinx importlib-resources==6.4.5 # via towncrier From 839d06079d81513ea09a9714b5e6c3fd7742673f Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 12 Sep 2024 22:47:32 +0100 Subject: [PATCH 0554/1511] [PR #9124/a81f1296 backport][3.10] Implement binding to IPv6 addresses in the pytest server fixture (#9135) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **This is a backport of PR #9124 as merged into 3.11 (a81f1296d1fbefd5713d04231e9749f6a298b5e7).** Co-authored-by: Jörg Thalheim <Mic92@users.noreply.github.com> --- CHANGES/4650.bugfix | 1 + aiohttp/test_utils.py | 5 ++++- tests/test_test_utils.py | 12 ++++++++++++ 3 files changed, 17 insertions(+), 1 deletion(-) create mode 100644 CHANGES/4650.bugfix diff --git a/CHANGES/4650.bugfix b/CHANGES/4650.bugfix new file mode 100644 index 00000000000..5c9fc17ff60 --- /dev/null +++ b/CHANGES/4650.bugfix @@ -0,0 +1 @@ +Implement binding to IPv6 addresses in the pytest server fixture. 
diff --git a/aiohttp/test_utils.py b/aiohttp/test_utils.py index 328561fb6a7..01496b6711a 100644 --- a/aiohttp/test_utils.py +++ b/aiohttp/test_utils.py @@ -119,10 +119,13 @@ async def start_server( await self.runner.setup() if not self.port: self.port = 0 + absolute_host = self.host try: version = ipaddress.ip_address(self.host).version except ValueError: version = 4 + if version == 6: + absolute_host = f"[{self.host}]" family = socket.AF_INET6 if version == 6 else socket.AF_INET _sock = self.socket_factory(self.host, self.port, family) self.host, self.port = _sock.getsockname()[:2] @@ -135,7 +138,7 @@ async def start_server( self.port = sockets[0].getsockname()[1] if not self.scheme: self.scheme = "https" if self._ssl else "http" - self._root = URL(f"{self.scheme}://{self.host}:{self.port}") + self._root = URL(f"{self.scheme}://{absolute_host}:{self.port}") @abstractmethod # pragma: no cover async def _make_runner(self, **kwargs: Any) -> BaseRunner: diff --git a/tests/test_test_utils.py b/tests/test_test_utils.py index 77349246616..a9c5179aedc 100644 --- a/tests/test_test_utils.py +++ b/tests/test_test_utils.py @@ -371,3 +371,15 @@ def factory(*args, **kwargs) -> socket: pass assert factory_called + + +@pytest.mark.parametrize( + ("hostname", "expected_host"), + [("127.0.0.1", "127.0.0.1"), ("localhost", "127.0.0.1"), ("::1", "::1")], +) +async def test_test_server_hostnames(hostname, expected_host, loop) -> None: + app = _create_example_app() + server = _TestServer(app, host=hostname, loop=loop) + async with server: + pass + assert server.host == expected_host From 306b4d0c4cbbf5e6257179f281254153517a5fc4 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Fri, 13 Sep 2024 22:18:55 +0100 Subject: [PATCH 0555/1511] Fix keepalive race condition (#9140) (#9142) (cherry picked from commit 37e3aa4639193c6423562a4a4dfbf3310772b7a6) --- CHANGES/9140.bugfix.rst | 1 + aiohttp/web_protocol.py | 2 +- tests/test_web_functional.py | 47 
++++++++++++++++++++++ 3 files changed, 49 insertions(+), 1 deletion(-) create mode 100644 CHANGES/9140.bugfix.rst diff --git a/CHANGES/9140.bugfix.rst b/CHANGES/9140.bugfix.rst new file mode 100644 index 00000000000..c9b8f7bf4ea --- /dev/null +++ b/CHANGES/9140.bugfix.rst @@ -0,0 +1 @@ +Fixed race condition that could cause server to close connection incorrectly at keepalive timeout -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index a2f159c3b7c..a7f7b546903 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -437,7 +437,7 @@ def _process_keepalive(self) -> None: return # handler in idle state - if self._waiter: + if self._waiter and not self._waiter.done(): self.force_close() async def _handle_request( diff --git a/tests/test_web_functional.py b/tests/test_web_functional.py index ad9e7c288fc..5b2e5fe9353 100644 --- a/tests/test_web_functional.py +++ b/tests/test_web_functional.py @@ -24,6 +24,7 @@ from aiohttp.hdrs import CONTENT_LENGTH, CONTENT_TYPE, TRANSFER_ENCODING from aiohttp.test_utils import make_mocked_coro from aiohttp.typedefs import Handler +from aiohttp.web_protocol import RequestHandler try: import brotlicffi as brotli @@ -2242,3 +2243,49 @@ async def handler(_): assert TRANSFER_ENCODING not in resp.headers await resp.read() == b"" await resp.release() + + +async def test_keepalive_race_condition(aiohttp_client: Any) -> None: + protocol = None + orig_data_received = RequestHandler.data_received + + def delay_received(self, data: bytes) -> None: + """Emulate race condition. + + The keepalive callback needs to be called between data_received() and + when start() resumes from the waiter set within data_received(). + """ + data = orig_data_received(self, data) + if protocol is None: # First request creating the keepalive connection. 
+ return data + + assert self is protocol + assert protocol._keepalive_handle is not None + # Cancel existing callback that would run at some point in future. + protocol._keepalive_handle.cancel() + protocol._keepalive_handle = None + + # Set next run time into the past and run callback manually. + protocol._next_keepalive_close_time = asyncio.get_running_loop().time() - 1 + protocol._process_keepalive() + + return data + + async def handler(request: web.Request) -> web.Response: + nonlocal protocol + protocol = request.protocol + return web.Response() + + target = "aiohttp.web_protocol.RequestHandler.data_received" + with mock.patch(target, delay_received): + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + # Open connection, so we have a keepalive connection and reference to protocol. + async with client.get("/") as resp: + assert resp.status == 200 + assert protocol is not None + # Make 2nd request which will hit the race condition. + async with client.get("/") as resp: + assert resp.status == 200 From 7531deefa3458aa87dcfa61c379fc5542621b67e Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Fri, 13 Sep 2024 22:25:32 +0100 Subject: [PATCH 0556/1511] Fix keepalive race condition (#9140) (#9143) (cherry picked from commit 37e3aa4639193c6423562a4a4dfbf3310772b7a6) --- CHANGES/9140.bugfix.rst | 1 + aiohttp/web_protocol.py | 2 +- tests/test_web_functional.py | 47 ++++++++++++++++++++++++++++++++++++ 3 files changed, 49 insertions(+), 1 deletion(-) create mode 100644 CHANGES/9140.bugfix.rst diff --git a/CHANGES/9140.bugfix.rst b/CHANGES/9140.bugfix.rst new file mode 100644 index 00000000000..c9b8f7bf4ea --- /dev/null +++ b/CHANGES/9140.bugfix.rst @@ -0,0 +1 @@ +Fixed race condition that could cause server to close connection incorrectly at keepalive timeout -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index a2f159c3b7c..a7f7b546903 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -437,7 +437,7 @@ def _process_keepalive(self) -> None: return # handler in idle state - if self._waiter: + if self._waiter and not self._waiter.done(): self.force_close() async def _handle_request( diff --git a/tests/test_web_functional.py b/tests/test_web_functional.py index 6f612ffc011..969153b1603 100644 --- a/tests/test_web_functional.py +++ b/tests/test_web_functional.py @@ -24,6 +24,7 @@ from aiohttp.hdrs import CONTENT_LENGTH, CONTENT_TYPE, TRANSFER_ENCODING from aiohttp.test_utils import make_mocked_coro from aiohttp.typedefs import Handler +from aiohttp.web_protocol import RequestHandler try: import brotlicffi as brotli @@ -2246,3 +2247,49 @@ async def handler(_): assert TRANSFER_ENCODING not in resp.headers await resp.read() == b"" await resp.release() + + +async def test_keepalive_race_condition(aiohttp_client: Any) -> None: + protocol = None + orig_data_received = RequestHandler.data_received + + def delay_received(self, data: bytes) -> None: + """Emulate race condition. + + The keepalive callback needs to be called between data_received() and + when start() resumes from the waiter set within data_received(). + """ + data = orig_data_received(self, data) + if protocol is None: # First request creating the keepalive connection. + return data + + assert self is protocol + assert protocol._keepalive_handle is not None + # Cancel existing callback that would run at some point in future. + protocol._keepalive_handle.cancel() + protocol._keepalive_handle = None + + # Set next run time into the past and run callback manually. 
+ protocol._next_keepalive_close_time = asyncio.get_running_loop().time() - 1 + protocol._process_keepalive() + + return data + + async def handler(request: web.Request) -> web.Response: + nonlocal protocol + protocol = request.protocol + return web.Response() + + target = "aiohttp.web_protocol.RequestHandler.data_received" + with mock.patch(target, delay_received): + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + # Open connection, so we have a keepalive connection and reference to protocol. + async with client.get("/") as resp: + assert resp.status == 200 + assert protocol is not None + # Make 2nd request which will hit the race condition. + async with client.get("/") as resp: + assert resp.status == 200 From 73bb415d4b7b4a6a6bf4663018ac94f33d51f5a5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 16 Sep 2024 11:31:49 +0000 Subject: [PATCH 0557/1511] Bump zipp from 3.20.1 to 3.20.2 (#9151) Bumps [zipp](https://github.com/jaraco/zipp) from 3.20.1 to 3.20.2. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/jaraco/zipp/blob/main/NEWS.rst">zipp's changelog</a>.</em></p> <blockquote> <h1>v3.20.2</h1> <h2>Bugfixes</h2> <ul> <li>Make zipp.compat.overlay.zipfile hashable. (<a href="https://redirect.github.com/jaraco/zipp/issues/126">#126</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/jaraco/zipp/commit/a575660e5c76daca0924f6e9520fd7109e05f424"><code>a575660</code></a> Make no assertions about the number. 
It could be negative.</li> <li><a href="https://github.com/jaraco/zipp/commit/0b3a1b9ddbc8d9f646d51810b082711bf03261c7"><code>0b3a1b9</code></a> Finalize</li> <li><a href="https://github.com/jaraco/zipp/commit/a4c79614a52f34d2999246d807538d46e5986feb"><code>a4c7961</code></a> Make zipp.compat.overlay.zipfile hashable.</li> <li><a href="https://github.com/jaraco/zipp/commit/d66007a66b7dbd88e69eaf59faae8b614cba256d"><code>d66007a</code></a> Merge <a href="https://github.com/jaraco/skeleton">https://github.com/jaraco/skeleton</a></li> <li><a href="https://github.com/jaraco/zipp/commit/3fe8c5ba792fd58a5a24eef4e8a845f3b5dd6c2c"><code>3fe8c5b</code></a><code>jaraco/skeleton#146</code></li> <li><a href="https://github.com/jaraco/zipp/commit/81b766c06cc83679c4a04c2bfa6d2c8cc559bf33"><code>81b766c</code></a> Fix an incompatibility (and source of merge conflicts) with projects using Ru...</li> <li><a href="https://github.com/jaraco/zipp/commit/b8a63ca4b77d28eb808c457ec781ed3f8ba50671"><code>b8a63ca</code></a> Merge pull request <a href="https://redirect.github.com/jaraco/zipp/issues/125">#125</a> from saschanaz/patch-1</li> <li><a href="https://github.com/jaraco/zipp/commit/0b95ec706308342782231a9be2acd98be7ccf996"><code>0b95ec7</code></a> Suppress F821</li> <li><a href="https://github.com/jaraco/zipp/commit/5d2fa666ffae2e89a6e4ccbc5ed9b3a5b8d64fc0"><code>5d2fa66</code></a> Merge <a href="https://github.com/jaraco/skeleton">https://github.com/jaraco/skeleton</a></li> <li><a href="https://github.com/jaraco/zipp/commit/a675458e1a7d6ae81d0d441338a74dc98ffc5a61"><code>a675458</code></a> Allow the workflow to be triggered manually.</li> <li>Additional commits viewable in <a href="https://github.com/jaraco/zipp/compare/v3.20.1...v3.20.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=zipp&package-manager=pip&previous-version=3.20.1&new-version=3.20.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index c6b76c6564a..6a62b419c33 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -289,7 +289,7 @@ wheel==0.44.0 # via pip-tools yarl==1.11.1 # via -r requirements/runtime-deps.in -zipp==3.20.1 +zipp==3.20.2 # via # importlib-metadata # importlib-resources diff --git a/requirements/dev.txt b/requirements/dev.txt index 6d5b0e200be..51d5161a1f3 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -281,7 +281,7 @@ wheel==0.44.0 # via pip-tools yarl==1.11.1 # via -r requirements/runtime-deps.in -zipp==3.20.1 +zipp==3.20.2 # via # importlib-metadata # importlib-resources diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 17c44816fc9..5ec2afcb9a6 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -90,7 +90,7 @@ urllib3==2.2.2 # via requests webcolors==24.8.0 # via blockdiag -zipp==3.20.1 
+zipp==3.20.2 # via # importlib-metadata # importlib-resources diff --git a/requirements/doc.txt b/requirements/doc.txt index dba8517376f..5236e1c23d8 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -85,7 +85,7 @@ urllib3==2.2.2 # via requests webcolors==24.8.0 # via blockdiag -zipp==3.20.1 +zipp==3.20.2 # via # importlib-metadata # importlib-resources From de998dd3e8f51d37b1c5a045718fe1ca53fb679e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 16 Sep 2024 11:47:50 +0000 Subject: [PATCH 0558/1511] Bump setuptools from 74.1.2 to 75.0.0 (#9153) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [setuptools](https://github.com/pypa/setuptools) from 74.1.2 to 75.0.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/setuptools/blob/main/NEWS.rst">setuptools's changelog</a>.</em></p> <blockquote> <h1>v75.0.0</h1> <h2>Features</h2> <ul> <li>Declare also the dependencies used by distutils (adds jaraco.collections).</li> </ul> <h2>Deprecations and Removals</h2> <ul> <li>Removed upload_docs command. 
(<a href="https://redirect.github.com/pypa/setuptools/issues/2971">#2971</a>)</li> <li><code>pypa/distutils#294</code><a href="https://redirect.github.com/pypa/setuptools/issues/4649">#4649</a>)</li> </ul> <h1>v74.1.3</h1> <h2>Bugfixes</h2> <ul> <li>Fix cross-platform compilation using <code>distutils._msvccompiler.MSVCCompiler</code> -- by :user:<code>saschanaz</code> and :user:<code>Avasam</code> (<a href="https://redirect.github.com/pypa/setuptools/issues/4648">#4648</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/setuptools/commit/5e27b2a6e324e70bd82a045aef8f75c84a3d3b28"><code>5e27b2a</code></a> Bump version: 74.1.3 → 75.0.0</li> <li><a href="https://github.com/pypa/setuptools/commit/af9e245b57ef78fce03c7e28a0189388d8e4de18"><code>af9e245</code></a> Merge pull request <a href="https://redirect.github.com/pypa/setuptools/issues/4649">#4649</a> from pypa/feature/distutils-7283751</li> <li><a href="https://github.com/pypa/setuptools/commit/f15861e3ae2fb8a74efc528fa25754c329090611"><code>f15861e</code></a> Add news fragment.</li> <li><a href="https://github.com/pypa/setuptools/commit/ce01828b6894c4597609da54fd343ea1aabfec84"><code>ce01828</code></a> Merge <a href="https://github.com/pypa/distutils">https://github.com/pypa/distutils</a> into feature/distutils-7283751</li> <li><a href="https://github.com/pypa/setuptools/commit/378984e02edae91d5f49425da8436f8dd9152b8a"><code>378984e</code></a> Remove news fragments, not useful here.</li> <li><a href="https://github.com/pypa/setuptools/commit/ffdf0bd160ebf6acc1a0790ec1cb72d7da8968c4"><code>ffdf0bd</code></a> Merge tag 'v74.1.3'</li> <li><a href="https://github.com/pypa/setuptools/commit/4c274911c59dd0161303d6cb991ec2a621ce1fb9"><code>4c27491</code></a> Bump version: 74.1.2 → 74.1.3</li> <li><a href="https://github.com/pypa/setuptools/commit/a56a8f9cb798eec1c023242db15a7b3e27c72911"><code>a56a8f9</code></a> Fix cross-platform compilation using 
<code>distutils._msvccompiler.MSVCCompiler</code>.</li> <li><a href="https://github.com/pypa/setuptools/commit/72837514c2b67081401db556be9aaaa43debe44f"><code>7283751</code></a> cygwinccompiler: Get the compilers from sysconfig</li> <li><a href="https://github.com/pypa/setuptools/commit/47353c5c699024429d353b7ae47af5406cac46c2"><code>47353c5</code></a> Fix cross-platform compilation using <code>distutils._msvccompiler.MSVCCompiler</code></li> <li>Additional commits viewable in <a href="https://github.com/pypa/setuptools/compare/v74.1.2...v75.0.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=setuptools&package-manager=pip&previous-version=74.1.2&new-version=75.0.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 6a62b419c33..3f28b098138 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -297,7 +297,7 @@ zipp==3.20.2 # The following packages are considered to be unsafe in a requirements file: pip==24.2 # via pip-tools -setuptools==74.1.2 +setuptools==75.0.0 # via # blockdiag # incremental diff --git a/requirements/dev.txt b/requirements/dev.txt index 51d5161a1f3..f7df4925022 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -289,7 +289,7 @@ zipp==3.20.2 # The following packages are considered to be unsafe in a requirements file: pip==24.2 # via pip-tools -setuptools==74.1.2 +setuptools==75.0.0 # via # blockdiag # incremental diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 5ec2afcb9a6..056e7455347 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -96,7 +96,7 @@ zipp==3.20.2 # 
importlib-resources # The following packages are considered to be unsafe in a requirements file: -setuptools==74.1.2 +setuptools==75.0.0 # via # blockdiag # incremental diff --git a/requirements/doc.txt b/requirements/doc.txt index 5236e1c23d8..4bf88490f35 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -91,7 +91,7 @@ zipp==3.20.2 # importlib-resources # The following packages are considered to be unsafe in a requirements file: -setuptools==74.1.2 +setuptools==75.0.0 # via # blockdiag # incremental From 2b7ca81708f90d9223e1fe597a28993a0a03e617 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 16 Sep 2024 11:54:23 +0000 Subject: [PATCH 0559/1511] Bump urllib3 from 2.2.2 to 2.2.3 (#9154) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [urllib3](https://github.com/urllib3/urllib3) from 2.2.2 to 2.2.3. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/urllib3/urllib3/releases">urllib3's releases</a>.</em></p> <blockquote> <h2>2.2.3</h2> <h2>🚀 urllib3 is fundraising for HTTP/2 support</h2> <p><a href="https://sethmlarson.dev/urllib3-is-fundraising-for-http2-support">urllib3 is raising ~$40,000 USD</a> to release HTTP/2 support and ensure long-term sustainable maintenance of the project after a sharp decline in financial support for 2023. If your company or organization uses Python and would benefit from HTTP/2 support in Requests, pip, cloud SDKs, and thousands of other projects <a href="https://opencollective.com/urllib3">please consider contributing financially</a> to ensure HTTP/2 support is developed sustainably and maintained for the long-haul.</p> <p>Thank you for your support.</p> <h2>Features</h2> <ul> <li>Added support for Python 3.13. 
(<a href="https://redirect.github.com/urllib3/urllib3/issues/3473">#3473</a>)</li> </ul> <h2>Bugfixes</h2> <ul> <li>Fixed the default encoding of chunked request bodies to be UTF-8 instead of ISO-8859-1. All other methods of supplying a request body already use UTF-8 starting in urllib3 v2.0. (<a href="https://redirect.github.com/urllib3/urllib3/issues/3053">#3053</a>)</li> <li>Fixed ResourceWarning on CONNECT with Python < 3.11.4 by backporting <a href="https://redirect.github.com/python/cpython/issues/103472">python/cpython#103472</a>. (`<a href="https://redirect.github.com/urllib3/urllib3/issues/3252">#3252</a>)</li> <li>Adjust tolerance for floating-point comparison on Windows to avoid flakiness in CI (<a href="https://redirect.github.com/urllib3/urllib3/issues/3413">#3413</a>)</li> <li>Fixed a crash where certain standard library hash functions were absent in restricted environments. (<a href="https://redirect.github.com/urllib3/urllib3/issues/3432">#3432</a>)</li> <li>Fixed mypy error when adding to <code>HTTPConnection.default_socket_options</code>. (<a href="https://redirect.github.com/urllib3/urllib3/issues/3448">#3448</a>)</li> </ul> <h2>HTTP/2 (experimental)</h2> <p>HTTP/2 support is still in early development.</p> <ul> <li>Excluded Transfer-Encoding: chunked from HTTP/2 request body (<a href="https://redirect.github.com/urllib3/urllib3/issues/3425">#3425</a>)</li> <li>Added version checking for <code>h2</code> (<a href="https://pypi.org/project/h2/">https://pypi.org/project/h2/</a>) usage. Now only accepting supported h2 major version 4.x.x. (<a href="https://redirect.github.com/urllib3/urllib3/issues/3290">#3290</a>)</li> <li>Added a probing mechanism for determining whether a given target origin supports HTTP/2 via ALPN. 
(<a href="https://redirect.github.com/urllib3/urllib3/issues/3301">#3301</a>)</li> <li>Add support for sending a request body with HTTP/2 (<a href="https://redirect.github.com/urllib3/urllib3/issues/3302">#3302</a>)</li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/urllib3/urllib3/compare/2.2.2...2.2.3">https://github.com/urllib3/urllib3/compare/2.2.2...2.2.3</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/urllib3/urllib3/blob/main/CHANGES.rst">urllib3's changelog</a>.</em></p> <blockquote> <h1>2.2.3 (2024-09-12)</h1> <h2>Features</h2> <ul> <li>Added support for Python 3.13. (<code>[#3473](https://github.com/urllib3/urllib3/issues/3473) <https://github.com/urllib3/urllib3/issues/3473></code>__)</li> </ul> <h2>Bugfixes</h2> <ul> <li>Fixed the default encoding of chunked request bodies to be UTF-8 instead of ISO-8859-1. All other methods of supplying a request body already use UTF-8 starting in urllib3 v2.0. (<code>[#3053](https://github.com/urllib3/urllib3/issues/3053) <https://github.com/urllib3/urllib3/issues/3053></code>__)</li> <li>Fixed ResourceWarning on CONNECT with Python <!-- raw HTML omitted -->`__)</li> <li>Adjust tolerance for floating-point comparison on Windows to avoid flakiness in CI (<code>[#3413](https://github.com/urllib3/urllib3/issues/3413) <https://github.com/urllib3/urllib3/issues/3413></code>__)</li> <li>Fixed a crash where certain standard library hash functions were absent in restricted environments. (<code>[#3432](https://github.com/urllib3/urllib3/issues/3432) <https://github.com/urllib3/urllib3/issues/3432></code>__)</li> <li>Fixed mypy error when adding to <code>HTTPConnection.default_socket_options</code>. 
(<code>[#3448](https://github.com/urllib3/urllib3/issues/3448) <https://github.com/urllib3/urllib3/issues/3448></code>__)</li> </ul> <h2>HTTP/2 (experimental)</h2> <p>HTTP/2 support is still in early development.</p> <ul> <li> <p>Excluded Transfer-Encoding: chunked from HTTP/2 request body (<code>[#3425](https://github.com/urllib3/urllib3/issues/3425) <https://github.com/urllib3/urllib3/issues/3425></code>__)</p> </li> <li> <p>Added version checking for <code>h2</code> (<a href="https://pypi.org/project/h2/">https://pypi.org/project/h2/</a>) usage.</p> <p>Now only accepting supported h2 major version 4.x.x. (<code>[#3290](https://github.com/urllib3/urllib3/issues/3290) <https://github.com/urllib3/urllib3/issues/3290></code>__)</p> </li> <li> <p>Added a probing mechanism for determining whether a given target origin supports HTTP/2 via ALPN. (<code>[#3301](https://github.com/urllib3/urllib3/issues/3301) <https://github.com/urllib3/urllib3/issues/3301></code>__)</p> </li> <li> <p>Add support for sending a request body with HTTP/2 (<code>[#3302](https://github.com/urllib3/urllib3/issues/3302) <https://github.com/urllib3/urllib3/issues/3302></code>__)</p> </li> </ul> <h2>Deprecations and Removals</h2> <ul> <li>Note for downstream distributors: the <code>_version.py</code> file has been removed and is now created at build time by hatch-vcs. (<code>[#3412](https://github.com/urllib3/urllib3/issues/3412) <https://github.com/urllib3/urllib3/issues/3412></code>__)</li> <li>Drop support for end-of-life PyPy3.8 and PyPy3.9. 
(<code>[#3475](https://github.com/urllib3/urllib3/issues/3475) <https://github.com/urllib3/urllib3/issues/3475></code>__)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/urllib3/urllib3/commit/2458bfcd3dacdf6c196e98d077fc6bb02a5fc1df"><code>2458bfc</code></a> Release 2.2.3</li> <li><a href="https://github.com/urllib3/urllib3/commit/9b25db6d00e43858d49303ae55c43bc4a9832668"><code>9b25db6</code></a> Only attempt to publish for upstream</li> <li><a href="https://github.com/urllib3/urllib3/commit/b9adeef8501180cd7d04cc3fb90bed4bbc34b1bb"><code>b9adeef</code></a> Drop support for EOL PyPy3.8 and PyPy3.9</li> <li><a href="https://github.com/urllib3/urllib3/commit/b1d4649d43375f11a3072b4d9b5d33425d123bae"><code>b1d4649</code></a> Add explicit support for Python 3.13</li> <li><a href="https://github.com/urllib3/urllib3/commit/cc42860721836febf3fb6ebb485ed27d7f80122d"><code>cc42860</code></a> Bump cryptography from 42.0.4 to 43.0.1 (<a href="https://redirect.github.com/urllib3/urllib3/issues/3470">#3470</a>)</li> <li><a href="https://github.com/urllib3/urllib3/commit/3dae2e9b30d2e39bf20daea2353aa7ef055640cf"><code>3dae2e9</code></a> Bump pypa/gh-action-pypi-publish from 1.9.0 to 1.10.1 (<a href="https://redirect.github.com/urllib3/urllib3/issues/3469">#3469</a>)</li> <li><a href="https://github.com/urllib3/urllib3/commit/1e94feb2a671bf28721114dfea1105a2c1f91788"><code>1e94feb</code></a> Revert "Add TLS settings for HTTP/2 (<a href="https://redirect.github.com/urllib3/urllib3/issues/3456">#3456</a>)" (<a href="https://redirect.github.com/urllib3/urllib3/issues/3466">#3466</a>)</li> <li><a href="https://github.com/urllib3/urllib3/commit/aa73abc7b22a4a67e0ee957f5a3031109f73d3d9"><code>aa73abc</code></a> Bump actions/setup-python from 5.1.0 to 5.2.0 (<a href="https://redirect.github.com/urllib3/urllib3/issues/3468">#3468</a>)</li> <li><a 
href="https://github.com/urllib3/urllib3/commit/abbfbcb1dd274fc54b4f0a7785fd04d59b634195"><code>abbfbcb</code></a> Add 1.26.20 to changelog and make the publish workflow the same (<a href="https://redirect.github.com/urllib3/urllib3/issues/3464">#3464</a>)</li> <li><a href="https://github.com/urllib3/urllib3/commit/d48061505e72271116c5a33b04dbca6273f2a737"><code>d480615</code></a> Add TLS settings for HTTP/2 (<a href="https://redirect.github.com/urllib3/urllib3/issues/3456">#3456</a>)</li> <li>Additional commits viewable in <a href="https://github.com/urllib3/urllib3/compare/2.2.2...2.2.3">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=urllib3&package-manager=pip&previous-version=2.2.2&new-version=2.2.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 3f28b098138..6be5b505474 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -273,7 +273,7 @@ typing-extensions==4.12.2 # typer uritemplate==4.1.1 # via gidgethub -urllib3==2.2.2 +urllib3==2.2.3 # via requests uvloop==0.20.0 ; platform_system != "Windows" # via diff --git a/requirements/dev.txt b/requirements/dev.txt index f7df4925022..e44e850e71e 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -265,7 +265,7 @@ typing-extensions==4.12.2 # typer uritemplate==4.1.1 # via gidgethub -urllib3==2.2.2 +urllib3==2.2.3 # via requests uvloop==0.20.0 ; platform_system != "Windows" and implementation_name == "cpython" # via diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 056e7455347..3045e447bca 100644 --- a/requirements/doc-spelling.txt +++ 
b/requirements/doc-spelling.txt @@ -86,7 +86,7 @@ towncrier==23.11.0 # via # -r requirements/doc.in # sphinxcontrib-towncrier -urllib3==2.2.2 +urllib3==2.2.3 # via requests webcolors==24.8.0 # via blockdiag diff --git a/requirements/doc.txt b/requirements/doc.txt index 4bf88490f35..32c5882a4ff 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -81,7 +81,7 @@ towncrier==23.11.0 # via # -r requirements/doc.in # sphinxcontrib-towncrier -urllib3==2.2.2 +urllib3==2.2.3 # via requests webcolors==24.8.0 # via blockdiag diff --git a/requirements/lint.txt b/requirements/lint.txt index 43925da0796..713418389b3 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -115,7 +115,7 @@ typing-extensions==4.12.2 # python-on-whales # rich # typer -urllib3==2.2.2 +urllib3==2.2.3 # via requests uvloop==0.20.0 ; platform_system != "Windows" # via -r requirements/lint.in diff --git a/requirements/test.txt b/requirements/test.txt index 86ba7331b84..d1d78bb00e1 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -131,7 +131,7 @@ typing-extensions==4.12.2 # python-on-whales # rich # typer -urllib3==2.2.2 +urllib3==2.2.3 # via requests uvloop==0.20.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in From 78187d8aaaa21374924ca2fc5c84df9a2b030aea Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 16 Sep 2024 11:58:19 +0000 Subject: [PATCH 0560/1511] Bump platformdirs from 4.3.2 to 4.3.3 (#9155) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [platformdirs](https://github.com/tox-dev/platformdirs) from 4.3.2 to 4.3.3. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/tox-dev/platformdirs/releases">platformdirs's releases</a>.</em></p> <blockquote> <h2>4.3.3</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>Update check.yml by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/tox-dev/platformdirs/pull/302">tox-dev/platformdirs#302</a></li> <li>don't include outdated changelog in docs by <a href="https://github.com/cbm755"><code>@​cbm755</code></a> in <a href="https://redirect.github.com/tox-dev/platformdirs/pull/301">tox-dev/platformdirs#301</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/cbm755"><code>@​cbm755</code></a> made their first contribution in <a href="https://redirect.github.com/tox-dev/platformdirs/pull/301">tox-dev/platformdirs#301</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/tox-dev/platformdirs/compare/4.3.2...4.3.3">https://github.com/tox-dev/platformdirs/compare/4.3.2...4.3.3</a></p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/tox-dev/platformdirs/commit/85feea9d67e6d87b2640f20572862b79f4c6792e"><code>85feea9</code></a> don't include outdated changelog in docs (<a href="https://redirect.github.com/tox-dev/platformdirs/issues/301">#301</a>)</li> <li><a href="https://github.com/tox-dev/platformdirs/commit/580dffd03281bba560dfbeb80518d9b2f2b4a107"><code>580dffd</code></a> Update check.yml (<a href="https://redirect.github.com/tox-dev/platformdirs/issues/302">#302</a>)</li> <li>See full diff in <a href="https://github.com/tox-dev/platformdirs/compare/4.3.2...4.3.3">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=platformdirs&package-manager=pip&previous-version=4.3.2&new-version=4.3.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 6be5b505474..9f985a4fe86 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -140,7 +140,7 @@ pillow==9.5.0 # blockdiag pip-tools==7.4.1 # via -r requirements/dev.in -platformdirs==4.3.2 +platformdirs==4.3.3 # via virtualenv pluggy==1.5.0 # via pytest diff --git a/requirements/dev.txt b/requirements/dev.txt index e44e850e71e..1592975b89c 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -137,7 +137,7 @@ pillow==9.5.0 # blockdiag pip-tools==7.4.1 # via -r requirements/dev.in -platformdirs==4.3.2 +platformdirs==4.3.3 # via virtualenv pluggy==1.5.0 # via pytest diff --git a/requirements/lint.txt b/requirements/lint.txt index 713418389b3..5e8e8a0ec51 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -56,7 +56,7 @@ nodeenv==1.9.1 # via pre-commit packaging==24.1 # via pytest -platformdirs==4.3.2 +platformdirs==4.3.3 # via virtualenv pluggy==1.5.0 # via pytest From 
c27fe0d1f2460c5c91674c4421e496d24173cdee Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 16 Sep 2024 12:04:49 +0000 Subject: [PATCH 0561/1511] Bump identify from 2.6.0 to 2.6.1 (#9156) Bumps [identify](https://github.com/pre-commit/identify) from 2.6.0 to 2.6.1. <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pre-commit/identify/commit/d1032c90d01cb08038ba0b5cd19a880463763b8b"><code>d1032c9</code></a> v2.6.1</li> <li><a href="https://github.com/pre-commit/identify/commit/b980f11fba15398621990a8360f5f9863b75c723"><code>b980f11</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/identify/issues/476">#476</a> from AleksaC/astro</li> <li><a href="https://github.com/pre-commit/identify/commit/52ba50e2a234147d85320b6e1cff065b30377020"><code>52ba50e</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/identify/issues/477">#477</a> from pre-commit/pre-commit-ci-update-config</li> <li><a href="https://github.com/pre-commit/identify/commit/f4ca44e83d5d7816f228edac8c560aec3eaa647d"><code>f4ca44e</code></a> [pre-commit.ci] pre-commit autoupdate</li> <li><a href="https://github.com/pre-commit/identify/commit/257394116f98b4d8c8b65a49831e279a3a948fa9"><code>2573941</code></a> add astro extension</li> <li><a href="https://github.com/pre-commit/identify/commit/eca58ebf9073d5ea29fcb98d284120bc52aa785b"><code>eca58eb</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/identify/issues/475">#475</a> from pre-commit/pre-commit-ci-update-config</li> <li><a href="https://github.com/pre-commit/identify/commit/4cbbd37d09aafc061580cf7cc2222951e0e4fa2f"><code>4cbbd37</code></a> [pre-commit.ci] pre-commit autoupdate</li> <li><a href="https://github.com/pre-commit/identify/commit/bcde20e5461c2216656d3c582c31530184cbdbb4"><code>bcde20e</code></a> Merge pull request <a 
href="https://redirect.github.com/pre-commit/identify/issues/472">#472</a> from pre-commit/pre-commit-ci-update-config</li> <li><a href="https://github.com/pre-commit/identify/commit/53d2329179a8e002a0b1faeba18766bb918f7d7e"><code>53d2329</code></a> [pre-commit.ci] pre-commit autoupdate</li> <li><a href="https://github.com/pre-commit/identify/commit/1d481776e5081b3a628b09334f44ebc6fcd34923"><code>1d48177</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/identify/issues/471">#471</a> from pre-commit/pre-commit-ci-update-config</li> <li>Additional commits viewable in <a href="https://github.com/pre-commit/identify/compare/v2.6.0...v2.6.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=identify&package-manager=pip&previous-version=2.6.0&new-version=2.6.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 9f985a4fe86..7868f665797 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -86,7 +86,7 @@ gidgethub==5.3.0 # via cherry-picker gunicorn==23.0.0 # via -r requirements/base.in -identify==2.6.0 +identify==2.6.1 # via pre-commit idna==3.3 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 1592975b89c..12b6c9e3491 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -84,7 +84,7 @@ gidgethub==5.3.0 # via cherry-picker gunicorn==23.0.0 # via -r requirements/base.in -identify==2.6.0 +identify==2.6.1 # via pre-commit idna==3.4 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index 5e8e8a0ec51..dc451770b10 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -36,7 +36,7 @@ filelock==3.16.0 # via virtualenv freezegun==1.5.1 # via -r requirements/lint.in -identify==2.6.0 +identify==2.6.1 # via pre-commit idna==3.7 # via From 
478d27dd43f57436176c3c4d5d69d6d0b522845c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 17 Sep 2024 11:13:48 +0000 Subject: [PATCH 0562/1511] Bump pypa/cibuildwheel from 2.20.0 to 2.21.1 (#9163) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.20.0 to 2.21.1. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pypa/cibuildwheel/releases">pypa/cibuildwheel's releases</a>.</em></p> <blockquote> <h2>Version 2.21.1</h2> <ul> <li>🐛 Fix a bug in the Linux build, where files copied to the container would have invalid ownership permissions (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2007">#2007</a>)</li> <li>🐛 Fix a bug on Windows where cibuildwheel would call upon <code>uv</code> to install dependencies for versions of CPython that it does not support (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2005">#2005</a>)</li> <li>🐛 Fix a bug where <code>uv 0.4.10</code> would not use the right Python when testing on Linux. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2008">#2008</a>)</li> <li>🛠 Bump our documentation pins, fixes an issue with a missing package (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2011">#2011</a>)</li> </ul> <h2>Version 2.21.0</h2> <ul> <li>⚠️ Update CPython 3.12 to 3.12.6, which changes the macOS minimum deployment target on CPython 3.12 from macOS 10.9 to macOS 10.13 (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1998">#1998</a>)</li> <li>🛠 Changes the behaviour when inheriting <code>config-settings</code> in TOML overrides - rather than extending each key, which is rarely useful, individual keys will override previously set values. 
(<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1803">#1803</a>)</li> <li>🛠 Update CPython 3.13 to 3.13.0rc2 (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1998">#1998</a>)</li> <li>✨ Adds support for multiarch OCI images (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1961">#1961</a>)</li> <li>🐛 Fixes some bugs building Linux wheels on macOS. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1961">#1961</a>)</li> <li>⚠️ Changes the minimum version of Docker/Podman to Docker API version 1.43, Podman API version 3. The only mainstream runner this should affect is Travis Graviton2 runners - if so you can <a href="https://redirect.github.com/pypa/cibuildwheel/pull/1961#issuecomment-2304060019">upgrade your version of Docker</a>. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1961">#1961</a>)</li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md">pypa/cibuildwheel's changelog</a>.</em></p> <blockquote> <h3>v2.21.1</h3> <p><em>16 September 2024</em></p> <ul> <li>🐛 Fix a bug in the Linux build, where files copied to the container would have invalid ownership permissions (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2007">#2007</a>)</li> <li>🐛 Fix a bug on Windows where cibuildwheel would call upon <code>uv</code> to install dependencies for versions of CPython that it does not support (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2005">#2005</a>)</li> <li>🐛 Fix a bug where <code>uv 0.4.10</code> would not use the right Python when testing on Linux. 
(<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2008">#2008</a>)</li> <li>🛠 Bump our documentation pins, fixes an issue with a missing package (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2011">#2011</a>)</li> </ul> <h3>v2.21.0</h3> <p><em>13 September 2024</em></p> <ul> <li>⚠️ Update CPython 3.12 to 3.12.6, which changes the macOS minimum deployment target on CPython 3.12 from macOS 10.9 to macOS 10.13 (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1998">#1998</a>)</li> <li>🛠 Changes the behaviour when inheriting <code>config-settings</code> in TOML overrides - rather than extending each key, which is rarely useful, individual keys will override previously set values. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1803">#1803</a>)</li> <li>🛠 Update CPython 3.13 to 3.13.0rc2 (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1998">#1998</a>)</li> <li>✨ Adds support for multiarch OCI images (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1961">#1961</a>)</li> <li>🐛 Fixes some bugs building Linux wheels on macOS. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1961">#1961</a>)</li> <li>⚠️ Changes the minimum version of Docker/Podman to Docker API version 1.43, Podman API version 3. The only mainstream runner this should affect is Travis Graviton2 runners - if so you can <a href="https://redirect.github.com/pypa/cibuildwheel/pull/1961#issuecomment-2304060019">upgrade your version of Docker</a>. 
(<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1961">#1961</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/cibuildwheel/commit/d4a2945fcc8d13f20a1b99d461b8e844d5fc6e23"><code>d4a2945</code></a> Bump version: v2.21.1</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/9913c031283a2cd9681e3c01f823be18c3a9bebb"><code>9913c03</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2013">#2013</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/c0e28d3401163fd478cb77cdfc17bed5ad3a0080"><code>c0e28d3</code></a> fix: support uv 0.4.10+ on Linux and update dependencies (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2008">#2008</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/8c42e7980c87df9edb3742a6cb8e33c7676f940d"><code>8c42e79</code></a> fix: file ownership of files copied into the container (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2007">#2007</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/01ecd4e01124e228be68723746e18f286a3c8ddd"><code>01ecd4e</code></a> docs: bump pinned versions (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2011">#2011</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/33da1f7d1434c62e8bff8017551ce6e19d59accf"><code>33da1f7</code></a> fix: do not use <code>uv</code> to setup python on windows when conditions are not met (...</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/79b0dd328794e1180a7268444d46cdf12e1abd01"><code>79b0dd3</code></a> Bump version: v2.21.0</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/0787a44d997310003e5fe4ae52d30517c73606c6"><code>0787a44</code></a> fix: enforce minimum version of docker/podman (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1961">#1961</a>)</li> <li><a 
href="https://github.com/pypa/cibuildwheel/commit/fd11286290a46d09f484ded0a3636655279a5f31"><code>fd11286</code></a> [Bot] Update dependencies (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1998">#1998</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/22dc864d60d0f7ea8d768ffbe0eded598f49452f"><code>22dc864</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2000">#2000</a>)</li> <li>Additional commits viewable in <a href="https://github.com/pypa/cibuildwheel/compare/v2.20.0...v2.21.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pypa/cibuildwheel&package-manager=github_actions&previous-version=2.20.0&new-version=2.21.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index df27a9108d6..54ceb6b74cd 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -354,7 +354,7 @@ jobs: run: | make cythonize - name: Build wheels - uses: pypa/cibuildwheel@v2.20.0 + uses: pypa/cibuildwheel@v2.21.1 env: CIBW_ARCHS_MACOS: x86_64 arm64 universal2 - uses: actions/upload-artifact@v3 From 22885ae872a7acb09c701f907714fe987dca92bb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 17 Sep 2024 11:51:02 +0000 Subject: [PATCH 0563/1511] Bump setuptools from 75.0.0 to 75.1.0 (#9164) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [setuptools](https://github.com/pypa/setuptools) from 75.0.0 to 75.1.0. 
<details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/setuptools/blob/main/NEWS.rst">setuptools's changelog</a>.</em></p> <blockquote> <h1>v75.1.0</h1> <h2>Features</h2> <ul> <li>Deprecated <code>bdist_wheel.universal</code> configuration. (<a href="https://redirect.github.com/pypa/setuptools/issues/4617">#4617</a>)</li> </ul> <h2>Bugfixes</h2> <ul> <li>Removed reference to upload_docs module in entry points. (<a href="https://redirect.github.com/pypa/setuptools/issues/4650">#4650</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/setuptools/commit/3106af0512fe67464a8b5e7524c07fddf7717660"><code>3106af0</code></a> Bump version: 75.0.0 → 75.1.0</li> <li><a href="https://github.com/pypa/setuptools/commit/37c3d27f0b1757b7273da4eda837890b8e5dd015"><code>37c3d27</code></a> Removed reference to upload_docs module in entry points.</li> <li><a href="https://github.com/pypa/setuptools/commit/9fb53fd7bdb15b046d720cb979a308e93f0b57ea"><code>9fb53fd</code></a> Merge pull request <a href="https://redirect.github.com/pypa/setuptools/issues/4617">#4617</a> from abravalheri/issue-4612</li> <li><a href="https://github.com/pypa/setuptools/commit/cd3ba7d7d68aa5503f782c3ccc3464f7cd878655"><code>cd3ba7d</code></a> Merge pull request <a href="https://redirect.github.com/pypa/setuptools/issues/4644">#4644</a> from DimitriPapadopoulos/codespell</li> <li><a href="https://github.com/pypa/setuptools/commit/8513d29bc8e291948164736b9ee4381d76628e32"><code>8513d29</code></a> Fix a couple typos found by codespell</li> <li>See full diff in <a href="https://github.com/pypa/setuptools/compare/v75.0.0...v75.1.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=setuptools&package-manager=pip&previous-version=75.0.0&new-version=75.1.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 7868f665797..18ab4a97475 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -297,7 +297,7 @@ zipp==3.20.2 # The following packages are considered to be unsafe in a requirements file: pip==24.2 # via pip-tools -setuptools==75.0.0 +setuptools==75.1.0 # via # blockdiag # incremental diff --git a/requirements/dev.txt b/requirements/dev.txt index 12b6c9e3491..4e9506ffbe4 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -289,7 +289,7 @@ zipp==3.20.2 # The following packages are considered to be unsafe in a requirements file: pip==24.2 # via pip-tools -setuptools==75.0.0 +setuptools==75.1.0 # via # blockdiag # incremental diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 3045e447bca..57cfa253fe7 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -96,7 +96,7 @@ zipp==3.20.2 # 
importlib-resources # The following packages are considered to be unsafe in a requirements file: -setuptools==75.0.0 +setuptools==75.1.0 # via # blockdiag # incremental diff --git a/requirements/doc.txt b/requirements/doc.txt index 32c5882a4ff..49c8f3864ac 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -91,7 +91,7 @@ zipp==3.20.2 # importlib-resources # The following packages are considered to be unsafe in a requirements file: -setuptools==75.0.0 +setuptools==75.1.0 # via # blockdiag # incremental From d664327d2cb954eb65bbd006482c815f44467986 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 17 Sep 2024 17:45:33 +0200 Subject: [PATCH 0564/1511] [PR #9158/bf022b3 backport][3.10] Cache construction of middleware handlers (#9165) --- CHANGES/9158.misc.rst | 3 +++ aiohttp/web_app.py | 42 +++++++++++++++++++++++++++--------- tests/test_web_middleware.py | 22 ++++++++++++------- 3 files changed, 49 insertions(+), 18 deletions(-) create mode 100644 CHANGES/9158.misc.rst diff --git a/CHANGES/9158.misc.rst b/CHANGES/9158.misc.rst new file mode 100644 index 00000000000..8d87623c056 --- /dev/null +++ b/CHANGES/9158.misc.rst @@ -0,0 +1,3 @@ +Significantly improved performance of middlewares -- by :user:`bdraco`. + +The construction of the middleware wrappers is now cached and is built once per handler instead of on every request. 
diff --git a/aiohttp/web_app.py b/aiohttp/web_app.py index 3510bffda60..b8768064507 100644 --- a/aiohttp/web_app.py +++ b/aiohttp/web_app.py @@ -1,7 +1,7 @@ import asyncio import logging import warnings -from functools import partial, update_wrapper +from functools import lru_cache, partial, update_wrapper from typing import ( TYPE_CHECKING, Any, @@ -38,7 +38,7 @@ from .http_parser import RawRequestMessage from .log import web_logger from .streams import StreamReader -from .typedefs import Middleware +from .typedefs import Handler, Middleware from .web_exceptions import NotAppKeyWarning from .web_log import AccessLogger from .web_middlewares import _fix_request_current_app @@ -79,6 +79,17 @@ _Resource = TypeVar("_Resource", bound=AbstractResource) +@lru_cache(None) +def _build_middlewares( + handler: Handler, apps: Tuple["Application", ...] +) -> Callable[[Request], Awaitable[StreamResponse]]: + """Apply middlewares to handler.""" + for app in apps: + for m, _ in app._middlewares_handlers: # type: ignore[union-attr] + handler = update_wrapper(partial(m, handler=handler), handler) # type: ignore[misc] + return handler + + class Application(MutableMapping[Union[str, AppKey[Any]], Any]): ATTRS = frozenset( [ @@ -89,6 +100,7 @@ class Application(MutableMapping[Union[str, AppKey[Any]], Any]): "_handler_args", "_middlewares", "_middlewares_handlers", + "_has_legacy_middlewares", "_run_middlewares", "_state", "_frozen", @@ -143,6 +155,7 @@ def __init__( self._middlewares_handlers: _MiddlewaresHandlers = None # initialized on freezing self._run_middlewares: Optional[bool] = None + self._has_legacy_middlewares: bool = True self._state: Dict[Union[AppKey[Any], str], object] = {} self._frozen = False @@ -228,6 +241,9 @@ def __len__(self) -> int: def __iter__(self) -> Iterator[Union[str, AppKey[Any]]]: return iter(self._state) + def __hash__(self) -> int: + return id(self) + @overload # type: ignore[override] def get(self, key: AppKey[_T], default: None = ...) 
-> Optional[_T]: ... @@ -284,6 +300,9 @@ def pre_freeze(self) -> None: self._on_shutdown.freeze() self._on_cleanup.freeze() self._middlewares_handlers = tuple(self._prepare_middleware()) + self._has_legacy_middlewares = any( + not new_style for _, new_style in self._middlewares_handlers + ) # If current app and any subapp do not have middlewares avoid run all # of the code footprint that it implies, which have a middleware @@ -525,14 +544,17 @@ async def _handle(self, request: Request) -> StreamResponse: handler = match_info.handler if self._run_middlewares: - for app in match_info.apps[::-1]: - for m, new_style in app._middlewares_handlers: # type: ignore[union-attr] - if new_style: - handler = update_wrapper( - partial(m, handler=handler), handler # type: ignore[misc] - ) - else: - handler = await m(app, handler) # type: ignore[arg-type,assignment] + if not self._has_legacy_middlewares: + handler = _build_middlewares(handler, match_info.apps[::-1]) + else: + for app in match_info.apps[::-1]: + for m, new_style in app._middlewares_handlers: # type: ignore[union-attr] + if new_style: + handler = update_wrapper( + partial(m, handler=handler), handler # type: ignore[misc] + ) + else: + handler = await m(app, handler) # type: ignore[arg-type,assignment] resp = await handler(request) diff --git a/tests/test_web_middleware.py b/tests/test_web_middleware.py index dbe23e02035..9c4462be409 100644 --- a/tests/test_web_middleware.py +++ b/tests/test_web_middleware.py @@ -24,10 +24,13 @@ async def middleware(request, handler: Handler): app.middlewares.append(middleware) app.router.add_route("GET", "/", handler) client = await aiohttp_client(app) - resp = await client.get("/") - assert 201 == resp.status - txt = await resp.text() - assert "OK[MIDDLEWARE]" == txt + + # Call twice to verify cache works + for _ in range(2): + resp = await client.get("/") + assert 201 == resp.status + txt = await resp.text() + assert "OK[MIDDLEWARE]" == txt async def 
test_middleware_handles_exception(loop, aiohttp_client) -> None: @@ -44,10 +47,13 @@ async def middleware(request, handler: Handler): app.middlewares.append(middleware) app.router.add_route("GET", "/", handler) client = await aiohttp_client(app) - resp = await client.get("/") - assert 501 == resp.status - txt = await resp.text() - assert "Error text[MIDDLEWARE]" == txt + + # Call twice to verify cache works + for _ in range(2): + resp = await client.get("/") + assert 501 == resp.status + txt = await resp.text() + assert "Error text[MIDDLEWARE]" == txt async def test_middleware_chain(loop, aiohttp_client) -> None: From 94ecbaaf6f361542bdc7db7bb393bae0258e7125 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 17 Sep 2024 17:52:20 +0200 Subject: [PATCH 0565/1511] [PR #9158/bf022b3 backport][3.11] Cache construction of middleware handlers (#9166) --- CHANGES/9158.misc.rst | 3 +++ aiohttp/web_app.py | 42 +++++++++++++++++++++++++++--------- tests/test_web_middleware.py | 22 ++++++++++++------- 3 files changed, 49 insertions(+), 18 deletions(-) create mode 100644 CHANGES/9158.misc.rst diff --git a/CHANGES/9158.misc.rst b/CHANGES/9158.misc.rst new file mode 100644 index 00000000000..8d87623c056 --- /dev/null +++ b/CHANGES/9158.misc.rst @@ -0,0 +1,3 @@ +Significantly improved performance of middlewares -- by :user:`bdraco`. + +The construction of the middleware wrappers is now cached and is built once per handler instead of on every request. 
diff --git a/aiohttp/web_app.py b/aiohttp/web_app.py index 8403bbbc826..8d109f793ca 100644 --- a/aiohttp/web_app.py +++ b/aiohttp/web_app.py @@ -1,7 +1,7 @@ import asyncio import logging import warnings -from functools import partial, update_wrapper +from functools import lru_cache, partial, update_wrapper from typing import ( TYPE_CHECKING, Any, @@ -38,7 +38,7 @@ from .http_parser import RawRequestMessage from .log import web_logger from .streams import StreamReader -from .typedefs import Middleware +from .typedefs import Handler, Middleware from .web_exceptions import NotAppKeyWarning from .web_log import AccessLogger from .web_middlewares import _fix_request_current_app @@ -79,6 +79,17 @@ _Resource = TypeVar("_Resource", bound=AbstractResource) +@lru_cache(None) +def _build_middlewares( + handler: Handler, apps: Tuple["Application", ...] +) -> Callable[[Request], Awaitable[StreamResponse]]: + """Apply middlewares to handler.""" + for app in apps: + for m, _ in app._middlewares_handlers: # type: ignore[union-attr] + handler = update_wrapper(partial(m, handler=handler), handler) # type: ignore[misc] + return handler + + class Application(MutableMapping[Union[str, AppKey[Any]], Any]): ATTRS = frozenset( [ @@ -89,6 +100,7 @@ class Application(MutableMapping[Union[str, AppKey[Any]], Any]): "_handler_args", "_middlewares", "_middlewares_handlers", + "_has_legacy_middlewares", "_run_middlewares", "_state", "_frozen", @@ -143,6 +155,7 @@ def __init__( self._middlewares_handlers: _MiddlewaresHandlers = None # initialized on freezing self._run_middlewares: Optional[bool] = None + self._has_legacy_middlewares: bool = True self._state: Dict[Union[AppKey[Any], str], object] = {} self._frozen = False @@ -228,6 +241,9 @@ def __len__(self) -> int: def __iter__(self) -> Iterator[Union[str, AppKey[Any]]]: return iter(self._state) + def __hash__(self) -> int: + return id(self) + @overload # type: ignore[override] def get(self, key: AppKey[_T], default: None = ...) 
-> Optional[_T]: ... @@ -284,6 +300,9 @@ def pre_freeze(self) -> None: self._on_shutdown.freeze() self._on_cleanup.freeze() self._middlewares_handlers = tuple(self._prepare_middleware()) + self._has_legacy_middlewares = any( + not new_style for _, new_style in self._middlewares_handlers + ) # If current app and any subapp do not have middlewares avoid run all # of the code footprint that it implies, which have a middleware @@ -525,14 +544,17 @@ async def _handle(self, request: Request) -> StreamResponse: handler = match_info.handler if self._run_middlewares: - for app in match_info.apps[::-1]: - for m, new_style in app._middlewares_handlers: # type: ignore[union-attr] - if new_style: - handler = update_wrapper( - partial(m, handler=handler), handler # type: ignore[misc] - ) - else: - handler = await m(app, handler) # type: ignore[arg-type,assignment] + if not self._has_legacy_middlewares: + handler = _build_middlewares(handler, match_info.apps[::-1]) + else: + for app in match_info.apps[::-1]: + for m, new_style in app._middlewares_handlers: # type: ignore[union-attr] + if new_style: + handler = update_wrapper( + partial(m, handler=handler), handler # type: ignore[misc] + ) + else: + handler = await m(app, handler) # type: ignore[arg-type,assignment] resp = await handler(request) diff --git a/tests/test_web_middleware.py b/tests/test_web_middleware.py index dbe23e02035..9c4462be409 100644 --- a/tests/test_web_middleware.py +++ b/tests/test_web_middleware.py @@ -24,10 +24,13 @@ async def middleware(request, handler: Handler): app.middlewares.append(middleware) app.router.add_route("GET", "/", handler) client = await aiohttp_client(app) - resp = await client.get("/") - assert 201 == resp.status - txt = await resp.text() - assert "OK[MIDDLEWARE]" == txt + + # Call twice to verify cache works + for _ in range(2): + resp = await client.get("/") + assert 201 == resp.status + txt = await resp.text() + assert "OK[MIDDLEWARE]" == txt async def 
test_middleware_handles_exception(loop, aiohttp_client) -> None: @@ -44,10 +47,13 @@ async def middleware(request, handler: Handler): app.middlewares.append(middleware) app.router.add_route("GET", "/", handler) client = await aiohttp_client(app) - resp = await client.get("/") - assert 501 == resp.status - txt = await resp.text() - assert "Error text[MIDDLEWARE]" == txt + + # Call twice to verify cache works + for _ in range(2): + resp = await client.get("/") + assert 501 == resp.status + txt = await resp.text() + assert "Error text[MIDDLEWARE]" == txt async def test_middleware_chain(loop, aiohttp_client) -> None: From 9ad236dfe0894829a05a72c3b568ec5d0c9430ac Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 18 Sep 2024 11:06:45 +0000 Subject: [PATCH 0566/1511] Bump platformdirs from 4.3.3 to 4.3.6 (#9176) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [platformdirs](https://github.com/tox-dev/platformdirs) from 4.3.3 to 4.3.6. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/tox-dev/platformdirs/releases">platformdirs's releases</a>.</em></p> <blockquote> <h2>4.3.6</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>Fix readme download target by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/tox-dev/platformdirs/pull/307">tox-dev/platformdirs#307</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/tox-dev/platformdirs/compare/4.3.5...4.3.6">https://github.com/tox-dev/platformdirs/compare/4.3.5...4.3.6</a></p> <h2>4.3.5</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>Split build and publish for release by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/tox-dev/platformdirs/pull/306">tox-dev/platformdirs#306</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/tox-dev/platformdirs/compare/4.3.4...4.3.5">https://github.com/tox-dev/platformdirs/compare/4.3.4...4.3.5</a></p> <h2>4.3.4</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>Use upstream setup-uv with uv python by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/tox-dev/platformdirs/pull/305">tox-dev/platformdirs#305</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/tox-dev/platformdirs/compare/4.3.3...4.3.4">https://github.com/tox-dev/platformdirs/compare/4.3.3...4.3.4</a></p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/tox-dev/platformdirs/commit/bc0405cb9c9439e6923b2dc090f91ad5daaf7dec"><code>bc0405c</code></a> Fix readme download target (<a href="https://redirect.github.com/tox-dev/platformdirs/issues/307">#307</a>)</li> <li><a 
href="https://github.com/tox-dev/platformdirs/commit/c076da9e8ca199826bb5a9ba42b21632ac617869"><code>c076da9</code></a> Split build and publish for release (<a href="https://redirect.github.com/tox-dev/platformdirs/issues/306">#306</a>)</li> <li><a href="https://github.com/tox-dev/platformdirs/commit/30199ecfcaceb72c1361410ed0d3b2aa9c319a60"><code>30199ec</code></a> Use upstream setup-uv with uv python (<a href="https://redirect.github.com/tox-dev/platformdirs/issues/305">#305</a>)</li> <li><a href="https://github.com/tox-dev/platformdirs/commit/3ce64d0c873263556156f21bb9cf94a00b7c241d"><code>3ce64d0</code></a> Bump astral-sh/setup-uv from 2 to 3 (<a href="https://redirect.github.com/tox-dev/platformdirs/issues/304">#304</a>)</li> <li><a href="https://github.com/tox-dev/platformdirs/commit/67691e23c98134a7d7c94ad25e2b7c647aa3e7aa"><code>67691e2</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/platformdirs/issues/303">#303</a>)</li> <li>See full diff in <a href="https://github.com/tox-dev/platformdirs/compare/4.3.3...4.3.6">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=platformdirs&package-manager=pip&previous-version=4.3.3&new-version=4.3.6)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 18ab4a97475..4aad0716b8c 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -140,7 +140,7 @@ pillow==9.5.0 # blockdiag pip-tools==7.4.1 # via -r requirements/dev.in 
-platformdirs==4.3.3 +platformdirs==4.3.6 # via virtualenv pluggy==1.5.0 # via pytest diff --git a/requirements/dev.txt b/requirements/dev.txt index 4e9506ffbe4..98c40875498 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -137,7 +137,7 @@ pillow==9.5.0 # blockdiag pip-tools==7.4.1 # via -r requirements/dev.in -platformdirs==4.3.3 +platformdirs==4.3.6 # via virtualenv pluggy==1.5.0 # via pytest diff --git a/requirements/lint.txt b/requirements/lint.txt index dc451770b10..ab0282a1b68 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -56,7 +56,7 @@ nodeenv==1.9.1 # via pre-commit packaging==24.1 # via pytest -platformdirs==4.3.3 +platformdirs==4.3.6 # via virtualenv pluggy==1.5.0 # via pytest From 930f42d30f590f179727ee77df52d041f8bbc024 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 18 Sep 2024 11:26:15 +0000 Subject: [PATCH 0567/1511] Bump filelock from 3.16.0 to 3.16.1 (#9177) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [filelock](https://github.com/tox-dev/py-filelock) from 3.16.0 to 3.16.1. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/tox-dev/py-filelock/releases">filelock's releases</a>.</em></p> <blockquote> <h2>3.16.1</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>CI improvements by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/tox-dev/filelock/pull/362">tox-dev/filelock#362</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/tox-dev/filelock/compare/3.16.0...3.16.1">https://github.com/tox-dev/filelock/compare/3.16.0...3.16.1</a></p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/tox-dev/filelock/commit/c2c43e456b4369ecac8c932115e41b3addc5c3d6"><code>c2c43e4</code></a> CI improvements (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/362">#362</a>)</li> <li><a href="https://github.com/tox-dev/filelock/commit/4e9873196502700775d51560be5267e3fb408dc0"><code>4e98731</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/361">#361</a>)</li> <li><a href="https://github.com/tox-dev/filelock/commit/e6865be6475b968d80245822d694114e2b38cdaf"><code>e6865be</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/360">#360</a>)</li> <li>See full diff in <a href="https://github.com/tox-dev/py-filelock/compare/3.16.0...3.16.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=filelock&package-manager=pip&previous-version=3.16.0&new-version=3.16.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. 
You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 4aad0716b8c..15b8eac1c91 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -70,7 +70,7 @@ 
docutils==0.20.1 # via sphinx exceptiongroup==1.2.2 # via pytest -filelock==3.16.0 +filelock==3.16.1 # via virtualenv freezegun==1.5.1 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 98c40875498..d9ea10584ef 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -68,7 +68,7 @@ docutils==0.20.1 # via sphinx exceptiongroup==1.2.2 # via pytest -filelock==3.16.0 +filelock==3.16.1 # via virtualenv freezegun==1.5.1 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index ab0282a1b68..29695178a5a 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -32,7 +32,7 @@ distlib==0.3.8 # via virtualenv exceptiongroup==1.2.2 # via pytest -filelock==3.16.0 +filelock==3.16.1 # via virtualenv freezegun==1.5.1 # via -r requirements/lint.in From 1ec476bab7aab9d5d43dcb790387ad690d2647a4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 18 Sep 2024 12:09:12 +0000 Subject: [PATCH 0568/1511] Bump pydantic from 2.9.1 to 2.9.2 (#9184) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pydantic](https://github.com/pydantic/pydantic) from 2.9.1 to 2.9.2. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pydantic/pydantic/releases">pydantic's releases</a>.</em></p> <blockquote> <h2>v2.9.2 (2024-09-17)</h2> <h2>What's Changed</h2> <h3>Fixes</h3> <ul> <li>Do not error when trying to evaluate annotations of private attributes by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10358">#10358</a></li> <li>Adding notes on designing sound <code>Callable</code> discriminators by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10400">#10400</a></li> <li>Fix serialization schema generation when using <code>PlainValidator</code> by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10427">#10427</a></li> <li>Fix <code>Union</code> serialization warnings by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic-core/pull/1449">pydantic/pydantic-core#1449</a></li> <li>Fix variance issue in <code>_IncEx</code> type alias, only allow <code>True</code> by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10414">#10414</a></li> <li>Fix <code>ZoneInfo</code> validation with various invalid types by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10408">#10408</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pydantic/pydantic/compare/v2.9.1...v2.9.2">https://github.com/pydantic/pydantic/compare/v2.9.1...v2.9.2</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pydantic/pydantic/blob/main/HISTORY.md">pydantic's 
changelog</a>.</em></p> <blockquote> <h2>v2.9.2 (2024-09-17)</h2> <p><a href="https://github.com/pydantic/pydantic/releases/tag/v2.9.2">GitHub release</a></p> <h3>What's Changed</h3> <h4>Fixes</h4> <ul> <li>Do not error when trying to evaluate annotations of private attributes by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10358">#10358</a></li> <li>Adding notes on designing sound <code>Callable</code> discriminators by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10400">#10400</a></li> <li>Fix serialization schema generation when using <code>PlainValidator</code> by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10427">#10427</a></li> <li>Fix <code>Union</code> serialization warnings by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic-core/pull/1449">pydantic/pydantic-core#1449</a></li> <li>Fix variance issue in <code>_IncEx</code> type alias, only allow <code>True</code> by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10414">#10414</a></li> <li>Fix <code>ZoneInfo</code> validation with various invalid types by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10408">#10408</a></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pydantic/pydantic/commit/7cedbfb03df82ac55c844c97e6f975359cb51bb9"><code>7cedbfb</code></a> history updates</li> <li><a href="https://github.com/pydantic/pydantic/commit/7eab2b8f7565077410ee6b5f59efc2a8245a7e34"><code>7eab2b8</code></a> v bump</li> <li><a 
href="https://github.com/pydantic/pydantic/commit/c0a288f1457734c0ae7ea8d3ae2f5e458327c4cd"><code>c0a288f</code></a> Fix <code>ZoneInfo</code> with various invalid types (<a href="https://redirect.github.com/pydantic/pydantic/issues/10408">#10408</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/ea6115de0f36461b8fa9638c49249ebd4b9fd806"><code>ea6115d</code></a> Fix variance issue in <code>_IncEx</code> type alias, only allow <code>True</code> (<a href="https://redirect.github.com/pydantic/pydantic/issues/10414">#10414</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/fbfe25a1195c1055034581e1a48ff6308231d70c"><code>fbfe25a</code></a> Fix serialization schema generation when using <code>PlainValidator</code> (<a href="https://redirect.github.com/pydantic/pydantic/issues/10427">#10427</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/26cff3ccf65f29fd503c1357280a9f4d87f41fd6"><code>26cff3c</code></a> Adding notes on designing callable discriminators (<a href="https://redirect.github.com/pydantic/pydantic/issues/10400">#10400</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/8a0e7adf6ac6d31056818f9bf8ce5a9dab6c9a6e"><code>8a0e7ad</code></a> Do not error when trying to evaluate annotations of private attributes (<a href="https://redirect.github.com/pydantic/pydantic/issues/10358">#10358</a>)</li> <li>See full diff in <a href="https://github.com/pydantic/pydantic/compare/v2.9.1...v2.9.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pydantic&package-manager=pip&previous-version=2.9.1&new-version=2.9.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. 
You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 4 ++-- requirements/dev.txt | 4 ++-- requirements/lint.txt | 4 ++-- requirements/test.txt | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 15b8eac1c91..f9ce299d906 100644 --- a/requirements/constraints.txt +++ 
b/requirements/constraints.txt @@ -152,9 +152,9 @@ pycares==4.4.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.9.1 +pydantic==2.9.2 # via python-on-whales -pydantic-core==2.23.3 +pydantic-core==2.23.4 # via pydantic pyenchant==3.2.2 # via sphinxcontrib-spelling diff --git a/requirements/dev.txt b/requirements/dev.txt index d9ea10584ef..634a1085ecd 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -149,9 +149,9 @@ pycares==4.4.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.9.1 +pydantic==2.9.2 # via python-on-whales -pydantic-core==2.23.3 +pydantic-core==2.23.4 # via pydantic pygments==2.18.0 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index 29695178a5a..0e51b0ac55c 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -66,9 +66,9 @@ pycares==4.4.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.9.1 +pydantic==2.9.2 # via python-on-whales -pydantic-core==2.23.3 +pydantic-core==2.23.4 # via pydantic pygments==2.18.0 # via rich diff --git a/requirements/test.txt b/requirements/test.txt index d1d78bb00e1..bf1be8a9e33 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -77,9 +77,9 @@ pycares==4.4.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.9.1 +pydantic==2.9.2 # via python-on-whales -pydantic-core==2.23.3 +pydantic-core==2.23.4 # via pydantic pygments==2.18.0 # via rich From 5e43625bab8f23f443d4d5999084b7de1ec18660 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 18 Sep 2024 12:13:33 +0000 Subject: [PATCH 0569/1511] Bump virtualenv from 20.26.4 to 20.26.5 (#9179) Bumps [virtualenv](https://github.com/pypa/virtualenv) from 20.26.4 to 20.26.5. 
<details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/virtualenv/blob/main/docs/changelog.rst">virtualenv's changelog</a>.</em></p> <blockquote> <h2>v20.26.5 (2024-09-17)</h2> <p>Bugfixes - 20.26.5</p> <pre><code>- Upgrade embedded wheels: setuptools to ``75.1.0`` from ``74.1.2`` - by :user:`gaborbernat`. (:issue:`2765`) </code></pre> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/virtualenv/commit/f3172b4da576b88275a14d2e7bbeb98b8f958a05"><code>f3172b4</code></a> release 20.26.5</li> <li><a href="https://github.com/pypa/virtualenv/commit/22b9795eb6bed0c17d0415c5513eca099a0a11ad"><code>22b9795</code></a> Use uv over pip (<a href="https://redirect.github.com/pypa/virtualenv/issues/2765">#2765</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/35d8269aba12a1e3c60183a2082b2c4d0cc1192f"><code>35d8269</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/pypa/virtualenv/issues/2764">#2764</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/ee77feb77ccb3c5deefa318630c59315bcfda521"><code>ee77feb</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/pypa/virtualenv/issues/2763">#2763</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/c5160566293ed098ca30e0856dbf44588dd5c3a3"><code>c516056</code></a> Update README.md</li> <li><a href="https://github.com/pypa/virtualenv/commit/f925a3a4d071cab549f776a05be39b65212ec38c"><code>f925a3a</code></a> Merge pull request <a href="https://redirect.github.com/pypa/virtualenv/issues/2761">#2761</a> from pypa/release-20.26.4</li> <li>See full diff in <a href="https://github.com/pypa/virtualenv/compare/20.26.4...20.26.5">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=virtualenv&package-manager=pip&previous-version=20.26.4&new-version=20.26.5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index f9ce299d906..d1069b4f590 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -279,7 +279,7 @@ uvloop==0.20.0 ; platform_system != "Windows" # via # -r requirements/base.in # -r requirements/lint.in -virtualenv==20.26.4 +virtualenv==20.26.5 # via pre-commit wait-for-it==2.2.2 # via -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 634a1085ecd..46be3754db8 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -271,7 +271,7 @@ uvloop==0.20.0 ; platform_system != "Windows" and implementation_name == "cpytho # via # -r requirements/base.in # -r requirements/lint.in -virtualenv==20.26.4 +virtualenv==20.26.5 # via pre-commit wait-for-it==2.2.2 # via -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 0e51b0ac55c..f5bdccfd695 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -119,5 +119,5 @@ 
urllib3==2.2.3 # via requests uvloop==0.20.0 ; platform_system != "Windows" # via -r requirements/lint.in -virtualenv==20.26.4 +virtualenv==20.26.5 # via pre-commit From a24d690a46f70fecb8b35ae4dda1fc41f982e226 Mon Sep 17 00:00:00 2001 From: Pierre-Louis Peeters <PLPeeters@users.noreply.github.com> Date: Wed, 18 Sep 2024 14:22:41 +0200 Subject: [PATCH 0570/1511] [PR #9160 backport][3.11] Fix badly encoded charset crashing instead of falling back to detector (#9182) Backport of #9160 to 3.11 --- CHANGES/9160.bugfix | 1 + CONTRIBUTORS.txt | 1 + aiohttp/client_reqrep.py | 2 +- tests/test_client_response.py | 32 +++++++++++++++++++++++++++++++- 4 files changed, 34 insertions(+), 2 deletions(-) create mode 100644 CHANGES/9160.bugfix diff --git a/CHANGES/9160.bugfix b/CHANGES/9160.bugfix new file mode 100644 index 00000000000..253cfd07d50 --- /dev/null +++ b/CHANGES/9160.bugfix @@ -0,0 +1 @@ +Fixed badly encoded charset crashing when getting response text instead of falling back to charset detector. diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index cf22583989f..92e1666fbc6 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -272,6 +272,7 @@ Pawel Kowalski Pawel Miech Pepe Osca Philipp A. 
+Pierre-Louis Peeters Pieter van Beek Qiao Han Rafael Viotti diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 7d4467dbdbb..3fe34e21968 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -1159,7 +1159,7 @@ def get_encoding(self) -> str: encoding = mimetype.parameters.get("charset") if encoding: - with contextlib.suppress(LookupError): + with contextlib.suppress(LookupError, ValueError): return codecs.lookup(encoding).name if mimetype.type == "application" and ( diff --git a/tests/test_client_response.py b/tests/test_client_response.py index 628e3d71b92..ede3950a755 100644 --- a/tests/test_client_response.py +++ b/tests/test_client_response.py @@ -6,7 +6,7 @@ from unittest import mock import pytest -from multidict import CIMultiDict +from multidict import CIMultiDict, CIMultiDictProxy from yarl import URL import aiohttp @@ -423,6 +423,36 @@ def side_effect(*args, **kwargs): assert response._connection is None +async def test_text_badly_encoded_encoding_header(loop, session) -> None: + session._resolve_charset = lambda *_: "utf-8" + response = ClientResponse( + "get", + URL("http://def-cl-resp.org"), + request_info=mock.Mock(), + writer=WriterMock(), + continue100=None, + timer=TimerNoop(), + traces=[], + loop=loop, + session=session, + ) + + def side_effect(*args: object, **kwargs: object): + fut = loop.create_future() + fut.set_result(b"foo") + return fut + + h = {"Content-Type": "text/html; charset=\udc81gutf-8\udc81\udc8d"} + response._headers = CIMultiDictProxy(CIMultiDict(h)) + content = response.content = mock.Mock() + content.read.side_effect = side_effect + + await response.read() + encoding = response.get_encoding() + + assert encoding == "utf-8" + + async def test_text_custom_encoding(loop, session) -> None: response = ClientResponse( "get", From b15c89558f1a52eb22ca400ff8624def096123d0 Mon Sep 17 00:00:00 2001 From: Pierre-Louis Peeters <PLPeeters@users.noreply.github.com> Date: Wed, 18 Sep 2024 14:23:09 +0200 
Subject: [PATCH 0571/1511] [PR #9160 backport][3.10] Fix badly encoded charset crashing instead of falling back to detector (#9181) Backport of #9160 to 3.10 --- CHANGES/9160.bugfix | 1 + CONTRIBUTORS.txt | 1 + aiohttp/client_reqrep.py | 2 +- tests/test_client_response.py | 32 +++++++++++++++++++++++++++++++- 4 files changed, 34 insertions(+), 2 deletions(-) create mode 100644 CHANGES/9160.bugfix diff --git a/CHANGES/9160.bugfix b/CHANGES/9160.bugfix new file mode 100644 index 00000000000..253cfd07d50 --- /dev/null +++ b/CHANGES/9160.bugfix @@ -0,0 +1 @@ +Fixed badly encoded charset crashing when getting response text instead of falling back to charset detector. diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index e7214dfedd4..c318f7cc669 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -271,6 +271,7 @@ Pawel Kowalski Pawel Miech Pepe Osca Philipp A. +Pierre-Louis Peeters Pieter van Beek Qiao Han Rafael Viotti diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 93e7b59a8a1..57f3323a60c 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -1168,7 +1168,7 @@ def get_encoding(self) -> str: encoding = mimetype.parameters.get("charset") if encoding: - with contextlib.suppress(LookupError): + with contextlib.suppress(LookupError, ValueError): return codecs.lookup(encoding).name if mimetype.type == "application" and ( diff --git a/tests/test_client_response.py b/tests/test_client_response.py index 628e3d71b92..ede3950a755 100644 --- a/tests/test_client_response.py +++ b/tests/test_client_response.py @@ -6,7 +6,7 @@ from unittest import mock import pytest -from multidict import CIMultiDict +from multidict import CIMultiDict, CIMultiDictProxy from yarl import URL import aiohttp @@ -423,6 +423,36 @@ def side_effect(*args, **kwargs): assert response._connection is None +async def test_text_badly_encoded_encoding_header(loop, session) -> None: + session._resolve_charset = lambda *_: "utf-8" + response = ClientResponse( + "get", + 
URL("http://def-cl-resp.org"), + request_info=mock.Mock(), + writer=WriterMock(), + continue100=None, + timer=TimerNoop(), + traces=[], + loop=loop, + session=session, + ) + + def side_effect(*args: object, **kwargs: object): + fut = loop.create_future() + fut.set_result(b"foo") + return fut + + h = {"Content-Type": "text/html; charset=\udc81gutf-8\udc81\udc8d"} + response._headers = CIMultiDictProxy(CIMultiDict(h)) + content = response.content = mock.Mock() + content.read.side_effect = side_effect + + await response.read() + encoding = response.get_encoding() + + assert encoding == "utf-8" + + async def test_text_custom_encoding(loop, session) -> None: response = ClientResponse( "get", From 71c47e45ce808366ad9d49efb6c6456b31a0db6d Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 18 Sep 2024 13:41:57 +0000 Subject: [PATCH 0572/1511] [PR #9171/0462ae6b backport][3.11] Switch to using `yarl.URL.absolute` over `yarl.URL.is_absolute()` (#9185) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/9171.misc.rst | 3 +++ aiohttp/client.py | 2 +- aiohttp/test_utils.py | 2 +- aiohttp/web_request.py | 2 +- 4 files changed, 6 insertions(+), 3 deletions(-) create mode 100644 CHANGES/9171.misc.rst diff --git a/CHANGES/9171.misc.rst b/CHANGES/9171.misc.rst new file mode 100644 index 00000000000..c6742edd891 --- /dev/null +++ b/CHANGES/9171.misc.rst @@ -0,0 +1,3 @@ +Improved performance of determining if a URL is absolute -- by :user:`bdraco`. + +The property :attr:`~yarl.URL.absolute` is more performant than the method ``URL.is_absolute()`` and preferred when newer versions of yarl are used. 
diff --git a/aiohttp/client.py b/aiohttp/client.py index edf4090832f..d59d03fa5ec 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -447,7 +447,7 @@ def _build_url(self, str_or_url: StrOrURL) -> URL: if self._base_url is None: return url else: - assert not url.is_absolute() and url.path.startswith("/") + assert not url.absolute and url.path.startswith("/") return self._base_url.join(url) async def _request( diff --git a/aiohttp/test_utils.py b/aiohttp/test_utils.py index 08ce5bff9e1..5ab3381f9e6 100644 --- a/aiohttp/test_utils.py +++ b/aiohttp/test_utils.py @@ -148,7 +148,7 @@ def make_url(self, path: StrOrURL) -> URL: assert self._root is not None url = URL(path) if not self.skip_url_asserts: - assert not url.is_absolute() + assert not url.absolute return self._root.join(url) else: return URL(str(self._root) + str(path)) diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index eca5063e30e..f3521153603 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -174,7 +174,7 @@ def __init__( self._version = message.version self._cache: Dict[str, Any] = {} url = message.url - if url.is_absolute(): + if url.absolute: if scheme is not None: url = url.with_scheme(scheme) if host is not None: From df0f28ba8ee89c2856308ccb0e0a56a165c6186d Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 18 Sep 2024 14:18:08 +0000 Subject: [PATCH 0573/1511] [PR #9173/d3c3c6a7 backport][3.10] Avoid calling response prepare hook if its empty (#9188) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/9173.misc.rst | 1 + aiohttp/web_request.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 CHANGES/9173.misc.rst diff --git a/CHANGES/9173.misc.rst b/CHANGES/9173.misc.rst new file mode 100644 index 00000000000..6fcc098747f --- /dev/null +++ b/CHANGES/9173.misc.rst @@ -0,0 +1 @@ +Improved performance of starting web requests when there is no response prepare hook -- by :user:`bdraco`. diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index f233afbbd44..eca71e4413a 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -915,4 +915,5 @@ async def _prepare_hook(self, response: StreamResponse) -> None: if match_info is None: return for app in match_info._apps: - await app.on_response_prepare.send(self, response) + if on_response_prepare := app.on_response_prepare: + await on_response_prepare.send(self, response) From 45dd34cde391c62559eeb2770630d899a1e07cf5 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 18 Sep 2024 14:18:18 +0000 Subject: [PATCH 0574/1511] [PR #9173/d3c3c6a7 backport][3.11] Avoid calling response prepare hook if its empty (#9189) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/9173.misc.rst | 1 + aiohttp/web_request.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 CHANGES/9173.misc.rst diff --git a/CHANGES/9173.misc.rst b/CHANGES/9173.misc.rst new file mode 100644 index 00000000000..6fcc098747f --- /dev/null +++ b/CHANGES/9173.misc.rst @@ -0,0 +1 @@ +Improved performance of starting web requests when there is no response prepare hook -- by :user:`bdraco`. 
diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index f3521153603..f7e511fa477 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -915,4 +915,5 @@ async def _prepare_hook(self, response: StreamResponse) -> None: if match_info is None: return for app in match_info._apps: - await app.on_response_prepare.send(self, response) + if on_response_prepare := app.on_response_prepare: + await on_response_prepare.send(self, response) From 31c0753dc77806585029c504d349df791211bf63 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Wed, 18 Sep 2024 16:26:42 +0200 Subject: [PATCH 0575/1511] [PR #9170/eacf2e0 backport][3.10] Move reversing slice of middleware apps into the cache (#9186) --- CHANGES/9170.misc.rst | 1 + aiohttp/web_app.py | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) create mode 120000 CHANGES/9170.misc.rst diff --git a/CHANGES/9170.misc.rst b/CHANGES/9170.misc.rst new file mode 120000 index 00000000000..e41cbad0125 --- /dev/null +++ b/CHANGES/9170.misc.rst @@ -0,0 +1 @@ +9158.misc.rst \ No newline at end of file diff --git a/aiohttp/web_app.py b/aiohttp/web_app.py index b8768064507..c4199b12271 100644 --- a/aiohttp/web_app.py +++ b/aiohttp/web_app.py @@ -84,7 +84,7 @@ def _build_middlewares( handler: Handler, apps: Tuple["Application", ...] 
) -> Callable[[Request], Awaitable[StreamResponse]]: """Apply middlewares to handler.""" - for app in apps: + for app in apps[::-1]: for m, _ in app._middlewares_handlers: # type: ignore[union-attr] handler = update_wrapper(partial(m, handler=handler), handler) # type: ignore[misc] return handler @@ -545,7 +545,7 @@ async def _handle(self, request: Request) -> StreamResponse: if self._run_middlewares: if not self._has_legacy_middlewares: - handler = _build_middlewares(handler, match_info.apps[::-1]) + handler = _build_middlewares(handler, match_info.apps) else: for app in match_info.apps[::-1]: for m, new_style in app._middlewares_handlers: # type: ignore[union-attr] From 2770e6181bd7779a15273e3b0cca88b9e46424fa Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 18 Sep 2024 15:08:47 +0000 Subject: [PATCH 0576/1511] [PR #9174/98b363e4 backport][3.10] Add a cache to must_be_empty_body (#9190) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/9174.misc.rst | 1 + aiohttp/helpers.py | 1 + 2 files changed, 2 insertions(+) create mode 100644 CHANGES/9174.misc.rst diff --git a/CHANGES/9174.misc.rst b/CHANGES/9174.misc.rst new file mode 100644 index 00000000000..13dc00ec1de --- /dev/null +++ b/CHANGES/9174.misc.rst @@ -0,0 +1 @@ +Improved performance of web requests -- by :user:`bdraco`. 
diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index 6abbe74d8cf..40705b16d71 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -973,6 +973,7 @@ def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]: return None +@functools.lru_cache def must_be_empty_body(method: str, code: int) -> bool: """Check if a request must return an empty body.""" return ( From 8d7a5ca6d1d825e286301ad89bd6f4b1324e36ed Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 18 Sep 2024 15:35:45 +0000 Subject: [PATCH 0577/1511] [PR #9174/98b363e4 backport][3.11] Add a cache to must_be_empty_body (#9191) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/9174.misc.rst | 1 + aiohttp/helpers.py | 1 + 2 files changed, 2 insertions(+) create mode 100644 CHANGES/9174.misc.rst diff --git a/CHANGES/9174.misc.rst b/CHANGES/9174.misc.rst new file mode 100644 index 00000000000..13dc00ec1de --- /dev/null +++ b/CHANGES/9174.misc.rst @@ -0,0 +1 @@ +Improved performance of web requests -- by :user:`bdraco`. diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index 88fc7412ea8..f5540a19662 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -969,6 +969,7 @@ def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]: return None +@functools.lru_cache def must_be_empty_body(method: str, code: int) -> bool: """Check if a request must return an empty body.""" return ( From c717b25d100e6e727800ed1b498fd127b495aaba Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 18 Sep 2024 16:21:18 +0000 Subject: [PATCH 0578/1511] [PR #9172/b93ef57c backport][3.10] Improve performance of starting web requests when content length is not set (#9192) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/9172.misc.rst | 1 + aiohttp/web_response.py | 6 +++--- tests/test_web_response.py | 16 ++++++++++++++++ 3 files changed, 20 insertions(+), 3 deletions(-) create mode 120000 CHANGES/9172.misc.rst diff --git a/CHANGES/9172.misc.rst b/CHANGES/9172.misc.rst new file mode 120000 index 00000000000..d6a2f2aaaab --- /dev/null +++ b/CHANGES/9172.misc.rst @@ -0,0 +1 @@ +9174.misc.rst \ No newline at end of file diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index f583789d82e..71a94eec248 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -742,10 +742,10 @@ async def write_eof(self, data: bytes = b"") -> None: await super().write_eof() async def _start(self, request: "BaseRequest") -> AbstractStreamWriter: - if should_remove_content_length(request.method, self.status): - if hdrs.CONTENT_LENGTH in self._headers: + if hdrs.CONTENT_LENGTH in self._headers: + if should_remove_content_length(request.method, self.status): del self._headers[hdrs.CONTENT_LENGTH] - elif not self._chunked and hdrs.CONTENT_LENGTH not in self._headers: + elif not self._chunked: if isinstance(self._body, Payload): if self._body.size is not None: self._headers[hdrs.CONTENT_LENGTH] = str(self._body.size) diff --git a/tests/test_web_response.py b/tests/test_web_response.py index 2e1e332e0a5..36642d3d244 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -666,6 +666,22 @@ async def write_headers(status_line, headers): assert resp.content_length is None +async def test_rm_content_length_if_204() -> None: + """Ensure content-length is removed for 204 responses.""" + writer = mock.create_autospec(StreamWriter, spec_set=True, instance=True) + + async def write_headers(status_line, headers): + assert hdrs.CONTENT_LENGTH not in headers + + writer.write_headers.side_effect = write_headers + req = make_request("GET", "/", writer=writer) + payload = BytesPayload(b"answer", headers={"Content-Length": "6"}) + resp = 
Response(body=payload, status=204) + resp.body = payload + await resp.prepare(req) + assert resp.content_length is None + + @pytest.mark.parametrize("status", (100, 101, 204, 304)) async def test_rm_transfer_encoding_rfc_9112_6_3_http_11(status: int) -> None: """Remove transfer encoding for RFC 9112 sec 6.3 with HTTP/1.1.""" From fa1307d7239b68d7c13fa9dc3bb6bda17e59b6eb Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 18 Sep 2024 16:27:26 +0000 Subject: [PATCH 0579/1511] [PR #9172/b93ef57c backport][3.11] Improve performance of starting web requests when content length is not set (#9193) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/9172.misc.rst | 1 + aiohttp/web_response.py | 6 +++--- tests/test_web_response.py | 16 ++++++++++++++++ 3 files changed, 20 insertions(+), 3 deletions(-) create mode 120000 CHANGES/9172.misc.rst diff --git a/CHANGES/9172.misc.rst b/CHANGES/9172.misc.rst new file mode 120000 index 00000000000..d6a2f2aaaab --- /dev/null +++ b/CHANGES/9172.misc.rst @@ -0,0 +1 @@ +9174.misc.rst \ No newline at end of file diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index d4f18271a83..2ba135f54d2 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -759,10 +759,10 @@ async def write_eof(self, data: bytes = b"") -> None: await super().write_eof() async def _start(self, request: "BaseRequest") -> AbstractStreamWriter: - if should_remove_content_length(request.method, self.status): - if hdrs.CONTENT_LENGTH in self._headers: + if hdrs.CONTENT_LENGTH in self._headers: + if should_remove_content_length(request.method, self.status): del self._headers[hdrs.CONTENT_LENGTH] - elif not self._chunked and hdrs.CONTENT_LENGTH not in self._headers: + elif not self._chunked: if isinstance(self._body, Payload): if self._body.size is not None: self._headers[hdrs.CONTENT_LENGTH] = str(self._body.size) diff --git a/tests/test_web_response.py 
b/tests/test_web_response.py index b71730868e4..264ca5e93ee 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -667,6 +667,22 @@ async def write_headers(status_line, headers): assert resp.content_length is None +async def test_rm_content_length_if_204() -> None: + """Ensure content-length is removed for 204 responses.""" + writer = mock.create_autospec(StreamWriter, spec_set=True, instance=True) + + async def write_headers(status_line, headers): + assert hdrs.CONTENT_LENGTH not in headers + + writer.write_headers.side_effect = write_headers + req = make_request("GET", "/", writer=writer) + payload = BytesPayload(b"answer", headers={"Content-Length": "6"}) + resp = Response(body=payload, status=204) + resp.body = payload + await resp.prepare(req) + assert resp.content_length is None + + @pytest.mark.parametrize("status", (100, 101, 204, 304)) async def test_rm_transfer_encoding_rfc_9112_6_3_http_11(status: int) -> None: """Remove transfer encoding for RFC 9112 sec 6.3 with HTTP/1.1.""" From eb685564f339679d719eb748dc615fdb0d97f604 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Wed, 18 Sep 2024 18:28:19 +0100 Subject: [PATCH 0580/1511] Add and use ClientConnectionResetError (#9137) (#9194) (cherry picked from commit f95bcaf4e0b2344d09df8dbb565150dcb4e73c0f) --- CHANGES/9137.bugfix.rst | 2 ++ aiohttp/__init__.py | 2 ++ aiohttp/base_protocol.py | 3 ++- aiohttp/client.py | 2 ++ aiohttp/client_exceptions.py | 9 +++++++-- aiohttp/http_websocket.py | 5 +++-- aiohttp/http_writer.py | 3 ++- docs/client_reference.rst | 6 ++++++ tests/test_client_ws.py | 15 ++++++++++----- tests/test_client_ws_functional.py | 4 ++-- tests/test_http_writer.py | 10 ++++++---- 11 files changed, 44 insertions(+), 17 deletions(-) create mode 100644 CHANGES/9137.bugfix.rst diff --git a/CHANGES/9137.bugfix.rst b/CHANGES/9137.bugfix.rst new file mode 100644 index 00000000000..d99802095bd --- /dev/null +++ b/CHANGES/9137.bugfix.rst @@ -0,0 +1,2 @@ +Added 
:exc:`aiohttp.ClientConnectionResetError`. Client code that previously threw :exc:`ConnectionResetError` +will now throw this -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 15602a7dc85..c5f13c6dc49 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -6,6 +6,7 @@ from .client import ( BaseConnector, ClientConnectionError, + ClientConnectionResetError, ClientConnectorCertificateError, ClientConnectorError, ClientConnectorSSLError, @@ -125,6 +126,7 @@ # client "BaseConnector", "ClientConnectionError", + "ClientConnectionResetError", "ClientConnectorCertificateError", "ClientConnectorError", "ClientConnectorSSLError", diff --git a/aiohttp/base_protocol.py b/aiohttp/base_protocol.py index dc1f24f99cd..2fc2fa65885 100644 --- a/aiohttp/base_protocol.py +++ b/aiohttp/base_protocol.py @@ -1,6 +1,7 @@ import asyncio from typing import Optional, cast +from .client_exceptions import ClientConnectionResetError from .helpers import set_exception from .tcp_helpers import tcp_nodelay @@ -85,7 +86,7 @@ def connection_lost(self, exc: Optional[BaseException]) -> None: async def _drain_helper(self) -> None: if not self.connected: - raise ConnectionResetError("Connection lost") + raise ClientConnectionResetError("Connection lost") if not self._paused: return waiter = self._drain_waiter diff --git a/aiohttp/client.py b/aiohttp/client.py index d59d03fa5ec..443335c6061 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -40,6 +40,7 @@ from .abc import AbstractCookieJar from .client_exceptions import ( ClientConnectionError, + ClientConnectionResetError, ClientConnectorCertificateError, ClientConnectorError, ClientConnectorSSLError, @@ -106,6 +107,7 @@ __all__ = ( # client_exceptions "ClientConnectionError", + "ClientConnectionResetError", "ClientConnectorCertificateError", "ClientConnectorError", "ClientConnectorSSLError", diff --git a/aiohttp/client_exceptions.py b/aiohttp/client_exceptions.py index 36bb6d1c0d8..94991c42477 
100644 --- a/aiohttp/client_exceptions.py +++ b/aiohttp/client_exceptions.py @@ -6,7 +6,6 @@ from multidict import MultiMapping -from .http_parser import RawResponseMessage from .typedefs import StrOrURL try: @@ -19,12 +18,14 @@ if TYPE_CHECKING: from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo + from .http_parser import RawResponseMessage else: - RequestInfo = ClientResponse = ConnectionKey = None + RequestInfo = ClientResponse = ConnectionKey = RawResponseMessage = None __all__ = ( "ClientError", "ClientConnectionError", + "ClientConnectionResetError", "ClientOSError", "ClientConnectorError", "ClientProxyConnectionError", @@ -159,6 +160,10 @@ class ClientConnectionError(ClientError): """Base class for client socket errors.""" +class ClientConnectionResetError(ClientConnectionError, ConnectionResetError): + """ConnectionResetError""" + + class ClientOSError(ClientConnectionError, OSError): """OSError error.""" diff --git a/aiohttp/http_websocket.py b/aiohttp/http_websocket.py index 9d03d2773c7..c6521695d94 100644 --- a/aiohttp/http_websocket.py +++ b/aiohttp/http_websocket.py @@ -25,6 +25,7 @@ ) from .base_protocol import BaseProtocol +from .client_exceptions import ClientConnectionResetError from .compression_utils import ZLibCompressor, ZLibDecompressor from .helpers import NO_EXTENSIONS, set_exception from .streams import DataQueue @@ -624,7 +625,7 @@ async def _send_frame( ) -> None: """Send a frame over the websocket with message as its payload.""" if self._closing and not (opcode & WSMsgType.CLOSE): - raise ConnectionResetError("Cannot write to closing transport") + raise ClientConnectionResetError("Cannot write to closing transport") # RSV are the reserved bits in the frame header. They are used to # indicate that the frame is using an extension. 
@@ -719,7 +720,7 @@ def _make_compress_obj(self, compress: int) -> ZLibCompressor: def _write(self, data: bytes) -> None: if self.transport is None or self.transport.is_closing(): - raise ConnectionResetError("Cannot write to closing transport") + raise ClientConnectionResetError("Cannot write to closing transport") self.transport.write(data) async def pong(self, message: Union[bytes, str] = b"") -> None: diff --git a/aiohttp/http_writer.py b/aiohttp/http_writer.py index d6b02e6f566..f54fa0f0774 100644 --- a/aiohttp/http_writer.py +++ b/aiohttp/http_writer.py @@ -8,6 +8,7 @@ from .abc import AbstractStreamWriter from .base_protocol import BaseProtocol +from .client_exceptions import ClientConnectionResetError from .compression_utils import ZLibCompressor from .helpers import NO_EXTENSIONS @@ -72,7 +73,7 @@ def _write(self, chunk: bytes) -> None: self.output_size += size transport = self.transport if not self._protocol.connected or transport is None or transport.is_closing(): - raise ConnectionResetError("Cannot write to closing transport") + raise ClientConnectionResetError("Cannot write to closing transport") transport.write(chunk) async def write( diff --git a/docs/client_reference.rst b/docs/client_reference.rst index a16443f275e..7f88fda14c9 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -2225,6 +2225,10 @@ Connection errors Derived from :exc:`ClientError` +.. class:: ClientConnectionResetError + + Derived from :exc:`ClientConnectionError` and :exc:`ConnectionResetError` + .. 
class:: ClientOSError Subset of connection errors that are initiated by an :exc:`OSError` @@ -2311,6 +2315,8 @@ Hierarchy of exceptions * :exc:`ClientConnectionError` + * :exc:`ClientConnectionResetError` + * :exc:`ClientOSError` * :exc:`ClientConnectorError` diff --git a/tests/test_client_ws.py b/tests/test_client_ws.py index 31ec7576c97..afe7983648f 100644 --- a/tests/test_client_ws.py +++ b/tests/test_client_ws.py @@ -2,14 +2,13 @@ import base64 import hashlib import os -from typing import Any +from typing import Any, Type from unittest import mock import pytest import aiohttp -from aiohttp import client, hdrs -from aiohttp.client_exceptions import ServerDisconnectedError +from aiohttp import ClientConnectionResetError, ServerDisconnectedError, client, hdrs from aiohttp.http import WS_KEY from aiohttp.streams import EofStream from aiohttp.test_utils import make_mocked_coro @@ -508,7 +507,13 @@ async def test_close_exc2(loop, ws_key, key_data) -> None: await resp.close() -async def test_send_data_after_close(ws_key, key_data, loop) -> None: +@pytest.mark.parametrize("exc", (ClientConnectionResetError, ConnectionResetError)) +async def test_send_data_after_close( + exc: Type[Exception], + ws_key: bytes, + key_data: bytes, + loop: asyncio.AbstractEventLoop, +) -> None: resp = mock.Mock() resp.status = 101 resp.headers = { @@ -533,7 +538,7 @@ async def test_send_data_after_close(ws_key, key_data, loop) -> None: (resp.send_bytes, (b"b",)), (resp.send_json, ({},)), ): - with pytest.raises(ConnectionResetError): + with pytest.raises(exc): # Verify exc can be caught with both classes await meth(*args) diff --git a/tests/test_client_ws_functional.py b/tests/test_client_ws_functional.py index 30da0dca802..0a8008f07ca 100644 --- a/tests/test_client_ws_functional.py +++ b/tests/test_client_ws_functional.py @@ -6,7 +6,7 @@ import pytest import aiohttp -from aiohttp import ServerTimeoutError, WSMsgType, hdrs, web +from aiohttp import ClientConnectionResetError, 
ServerTimeoutError, WSMsgType, hdrs, web from aiohttp.client_ws import ClientWSTimeout from aiohttp.http import WSCloseCode from aiohttp.pytest_plugin import AiohttpClient @@ -681,7 +681,7 @@ async def handler(request: web.Request) -> NoReturn: # would cancel the heartbeat task and we wouldn't get a ping assert resp._conn is not None with mock.patch.object( - resp._conn.transport, "write", side_effect=ConnectionResetError + resp._conn.transport, "write", side_effect=ClientConnectionResetError ), mock.patch.object(resp._writer, "ping", wraps=resp._writer.ping) as ping: await resp.receive() ping_count = ping.call_count diff --git a/tests/test_http_writer.py b/tests/test_http_writer.py index db50ad65f67..ed853c8744a 100644 --- a/tests/test_http_writer.py +++ b/tests/test_http_writer.py @@ -5,7 +5,7 @@ import pytest from multidict import CIMultiDict -from aiohttp import http +from aiohttp import ClientConnectionResetError, http from aiohttp.test_utils import make_mocked_coro @@ -232,12 +232,12 @@ async def test_write_to_closing_transport(protocol, transport, loop) -> None: await msg.write(b"Before closing") transport.is_closing.return_value = True - with pytest.raises(ConnectionResetError): + with pytest.raises(ClientConnectionResetError): await msg.write(b"After closing") async def test_write_to_closed_transport(protocol, transport, loop) -> None: - """Test that writing to a closed transport raises ConnectionResetError. + """Test that writing to a closed transport raises ClientConnectionResetError. The StreamWriter checks to see if protocol.transport is None before writing to the transport. If it is None, it raises ConnectionResetError. 
@@ -247,7 +247,9 @@ async def test_write_to_closed_transport(protocol, transport, loop) -> None: await msg.write(b"Before transport close") protocol.transport = None - with pytest.raises(ConnectionResetError, match="Cannot write to closing transport"): + with pytest.raises( + ClientConnectionResetError, match="Cannot write to closing transport" + ): await msg.write(b"After transport closed") From 1856c5995e507e35e807469d01191e571bc329c0 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Wed, 18 Sep 2024 18:28:31 +0100 Subject: [PATCH 0581/1511] Add and use ClientConnectionResetError (#9137) (#9195) (cherry picked from commit f95bcaf4e0b2344d09df8dbb565150dcb4e73c0f) --- CHANGES/9137.bugfix.rst | 2 ++ aiohttp/__init__.py | 2 ++ aiohttp/base_protocol.py | 3 ++- aiohttp/client.py | 2 ++ aiohttp/client_exceptions.py | 9 +++++++-- aiohttp/http_websocket.py | 5 +++-- aiohttp/http_writer.py | 3 ++- docs/client_reference.rst | 6 ++++++ tests/test_client_ws.py | 15 ++++++++++----- tests/test_client_ws_functional.py | 4 ++-- tests/test_http_writer.py | 10 ++++++---- 11 files changed, 44 insertions(+), 17 deletions(-) create mode 100644 CHANGES/9137.bugfix.rst diff --git a/CHANGES/9137.bugfix.rst b/CHANGES/9137.bugfix.rst new file mode 100644 index 00000000000..d99802095bd --- /dev/null +++ b/CHANGES/9137.bugfix.rst @@ -0,0 +1,2 @@ +Added :exc:`aiohttp.ClientConnectionResetError`. Client code that previously threw :exc:`ConnectionResetError` +will now throw this -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index f321cdaba45..63367052646 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -6,6 +6,7 @@ from .client import ( BaseConnector, ClientConnectionError, + ClientConnectionResetError, ClientConnectorCertificateError, ClientConnectorError, ClientConnectorSSLError, @@ -124,6 +125,7 @@ # client "BaseConnector", "ClientConnectionError", + "ClientConnectionResetError", "ClientConnectorCertificateError", "ClientConnectorError", "ClientConnectorSSLError", diff --git a/aiohttp/base_protocol.py b/aiohttp/base_protocol.py index dc1f24f99cd..2fc2fa65885 100644 --- a/aiohttp/base_protocol.py +++ b/aiohttp/base_protocol.py @@ -1,6 +1,7 @@ import asyncio from typing import Optional, cast +from .client_exceptions import ClientConnectionResetError from .helpers import set_exception from .tcp_helpers import tcp_nodelay @@ -85,7 +86,7 @@ def connection_lost(self, exc: Optional[BaseException]) -> None: async def _drain_helper(self) -> None: if not self.connected: - raise ConnectionResetError("Connection lost") + raise ClientConnectionResetError("Connection lost") if not self._paused: return waiter = self._drain_waiter diff --git a/aiohttp/client.py b/aiohttp/client.py index 5f9e95f4706..61bea70aa9b 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -40,6 +40,7 @@ from .abc import AbstractCookieJar from .client_exceptions import ( ClientConnectionError, + ClientConnectionResetError, ClientConnectorCertificateError, ClientConnectorError, ClientConnectorSSLError, @@ -102,6 +103,7 @@ __all__ = ( # client_exceptions "ClientConnectionError", + "ClientConnectionResetError", "ClientConnectorCertificateError", "ClientConnectorError", "ClientConnectorSSLError", diff --git a/aiohttp/client_exceptions.py b/aiohttp/client_exceptions.py index 36bb6d1c0d8..94991c42477 100644 --- a/aiohttp/client_exceptions.py +++ b/aiohttp/client_exceptions.py @@ -6,7 +6,6 @@ from multidict import MultiMapping -from .http_parser import 
RawResponseMessage from .typedefs import StrOrURL try: @@ -19,12 +18,14 @@ if TYPE_CHECKING: from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo + from .http_parser import RawResponseMessage else: - RequestInfo = ClientResponse = ConnectionKey = None + RequestInfo = ClientResponse = ConnectionKey = RawResponseMessage = None __all__ = ( "ClientError", "ClientConnectionError", + "ClientConnectionResetError", "ClientOSError", "ClientConnectorError", "ClientProxyConnectionError", @@ -159,6 +160,10 @@ class ClientConnectionError(ClientError): """Base class for client socket errors.""" +class ClientConnectionResetError(ClientConnectionError, ConnectionResetError): + """ConnectionResetError""" + + class ClientOSError(ClientConnectionError, OSError): """OSError error.""" diff --git a/aiohttp/http_websocket.py b/aiohttp/http_websocket.py index 2ea2c9191e1..fb00ebc7d35 100644 --- a/aiohttp/http_websocket.py +++ b/aiohttp/http_websocket.py @@ -25,6 +25,7 @@ ) from .base_protocol import BaseProtocol +from .client_exceptions import ClientConnectionResetError from .compression_utils import ZLibCompressor, ZLibDecompressor from .helpers import NO_EXTENSIONS, set_exception from .streams import DataQueue @@ -624,7 +625,7 @@ async def _send_frame( ) -> None: """Send a frame over the websocket with message as its payload.""" if self._closing and not (opcode & WSMsgType.CLOSE): - raise ConnectionResetError("Cannot write to closing transport") + raise ClientConnectionResetError("Cannot write to closing transport") # RSV are the reserved bits in the frame header. They are used to # indicate that the frame is using an extension. 
@@ -719,7 +720,7 @@ def _make_compress_obj(self, compress: int) -> ZLibCompressor: def _write(self, data: bytes) -> None: if self.transport is None or self.transport.is_closing(): - raise ConnectionResetError("Cannot write to closing transport") + raise ClientConnectionResetError("Cannot write to closing transport") self.transport.write(data) async def pong(self, message: Union[bytes, str] = b"") -> None: diff --git a/aiohttp/http_writer.py b/aiohttp/http_writer.py index d6b02e6f566..f54fa0f0774 100644 --- a/aiohttp/http_writer.py +++ b/aiohttp/http_writer.py @@ -8,6 +8,7 @@ from .abc import AbstractStreamWriter from .base_protocol import BaseProtocol +from .client_exceptions import ClientConnectionResetError from .compression_utils import ZLibCompressor from .helpers import NO_EXTENSIONS @@ -72,7 +73,7 @@ def _write(self, chunk: bytes) -> None: self.output_size += size transport = self.transport if not self._protocol.connected or transport is None or transport.is_closing(): - raise ConnectionResetError("Cannot write to closing transport") + raise ClientConnectionResetError("Cannot write to closing transport") transport.write(chunk) async def write( diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 1686aa7c113..7379743ae02 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -2207,6 +2207,10 @@ Connection errors Derived from :exc:`ClientError` +.. class:: ClientConnectionResetError + + Derived from :exc:`ClientConnectionError` and :exc:`ConnectionResetError` + .. 
class:: ClientOSError Subset of connection errors that are initiated by an :exc:`OSError` @@ -2293,6 +2297,8 @@ Hierarchy of exceptions * :exc:`ClientConnectionError` + * :exc:`ClientConnectionResetError` + * :exc:`ClientOSError` * :exc:`ClientConnectorError` diff --git a/tests/test_client_ws.py b/tests/test_client_ws.py index a790fba43ec..ec08db01e4c 100644 --- a/tests/test_client_ws.py +++ b/tests/test_client_ws.py @@ -2,14 +2,13 @@ import base64 import hashlib import os -from typing import Any +from typing import Any, Type from unittest import mock import pytest import aiohttp -from aiohttp import client, hdrs -from aiohttp.client_exceptions import ServerDisconnectedError +from aiohttp import ClientConnectionResetError, ServerDisconnectedError, client, hdrs from aiohttp.http import WS_KEY from aiohttp.streams import EofStream from aiohttp.test_utils import make_mocked_coro @@ -508,7 +507,13 @@ async def test_close_exc2(loop, ws_key, key_data) -> None: await resp.close() -async def test_send_data_after_close(ws_key, key_data, loop) -> None: +@pytest.mark.parametrize("exc", (ClientConnectionResetError, ConnectionResetError)) +async def test_send_data_after_close( + exc: Type[Exception], + ws_key: bytes, + key_data: bytes, + loop: asyncio.AbstractEventLoop, +) -> None: resp = mock.Mock() resp.status = 101 resp.headers = { @@ -533,7 +538,7 @@ async def test_send_data_after_close(ws_key, key_data, loop) -> None: (resp.send_bytes, (b"b",)), (resp.send_json, ({},)), ): - with pytest.raises(ConnectionResetError): + with pytest.raises(exc): # Verify exc can be caught with both classes await meth(*args) diff --git a/tests/test_client_ws_functional.py b/tests/test_client_ws_functional.py index 274092a189a..0421fb9616b 100644 --- a/tests/test_client_ws_functional.py +++ b/tests/test_client_ws_functional.py @@ -6,7 +6,7 @@ import pytest import aiohttp -from aiohttp import ServerTimeoutError, WSMsgType, hdrs, web +from aiohttp import ClientConnectionResetError, 
ServerTimeoutError, WSMsgType, hdrs, web from aiohttp.http import WSCloseCode from aiohttp.pytest_plugin import AiohttpClient @@ -620,7 +620,7 @@ async def handler(request: web.Request) -> NoReturn: # would cancel the heartbeat task and we wouldn't get a ping assert resp._conn is not None with mock.patch.object( - resp._conn.transport, "write", side_effect=ConnectionResetError + resp._conn.transport, "write", side_effect=ClientConnectionResetError ), mock.patch.object(resp._writer, "ping", wraps=resp._writer.ping) as ping: await resp.receive() ping_count = ping.call_count diff --git a/tests/test_http_writer.py b/tests/test_http_writer.py index 5649f32f792..82ad07d046f 100644 --- a/tests/test_http_writer.py +++ b/tests/test_http_writer.py @@ -5,7 +5,7 @@ import pytest from multidict import CIMultiDict -from aiohttp import http +from aiohttp import ClientConnectionResetError, http from aiohttp.test_utils import make_mocked_coro @@ -232,12 +232,12 @@ async def test_write_to_closing_transport(protocol, transport, loop) -> None: await msg.write(b"Before closing") transport.is_closing.return_value = True - with pytest.raises(ConnectionResetError): + with pytest.raises(ClientConnectionResetError): await msg.write(b"After closing") async def test_write_to_closed_transport(protocol, transport, loop) -> None: - """Test that writing to a closed transport raises ConnectionResetError. + """Test that writing to a closed transport raises ClientConnectionResetError. The StreamWriter checks to see if protocol.transport is None before writing to the transport. If it is None, it raises ConnectionResetError. 
@@ -247,7 +247,9 @@ async def test_write_to_closed_transport(protocol, transport, loop) -> None: await msg.write(b"Before transport close") protocol.transport = None - with pytest.raises(ConnectionResetError, match="Cannot write to closing transport"): + with pytest.raises( + ClientConnectionResetError, match="Cannot write to closing transport" + ): await msg.write(b"After transport closed") From 8e4678a69e365f63b237753b1042962bec4b922e Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Wed, 18 Sep 2024 21:07:33 +0200 Subject: [PATCH 0582/1511] [PR #9170/eacf2e0 backport][3.11] Move reversing slice of middleware apps into the cache (#9187) --- CHANGES/9170.misc.rst | 1 + aiohttp/web_app.py | 8 ++++---- 2 files changed, 5 insertions(+), 4 deletions(-) create mode 120000 CHANGES/9170.misc.rst diff --git a/CHANGES/9170.misc.rst b/CHANGES/9170.misc.rst new file mode 120000 index 00000000000..e41cbad0125 --- /dev/null +++ b/CHANGES/9170.misc.rst @@ -0,0 +1 @@ +9158.misc.rst \ No newline at end of file diff --git a/aiohttp/web_app.py b/aiohttp/web_app.py index 8d109f793ca..b59d0d1b0ff 100644 --- a/aiohttp/web_app.py +++ b/aiohttp/web_app.py @@ -1,7 +1,7 @@ import asyncio import logging import warnings -from functools import lru_cache, partial, update_wrapper +from functools import cache, partial, update_wrapper from typing import ( TYPE_CHECKING, Any, @@ -79,12 +79,12 @@ _Resource = TypeVar("_Resource", bound=AbstractResource) -@lru_cache(None) +@cache def _build_middlewares( handler: Handler, apps: Tuple["Application", ...] 
) -> Callable[[Request], Awaitable[StreamResponse]]: """Apply middlewares to handler.""" - for app in apps: + for app in apps[::-1]: for m, _ in app._middlewares_handlers: # type: ignore[union-attr] handler = update_wrapper(partial(m, handler=handler), handler) # type: ignore[misc] return handler @@ -545,7 +545,7 @@ async def _handle(self, request: Request) -> StreamResponse: if self._run_middlewares: if not self._has_legacy_middlewares: - handler = _build_middlewares(handler, match_info.apps[::-1]) + handler = _build_middlewares(handler, match_info.apps) else: for app in match_info.apps[::-1]: for m, new_style in app._middlewares_handlers: # type: ignore[union-attr] From e9609ad14aef19dfd62e29f99c01a25294ff8d6b Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Thu, 19 Sep 2024 13:28:31 +0100 Subject: [PATCH 0583/1511] Disallow newlines in reason (#9167) (#9198) (cherry picked from commit 88f383427e918360fd2762a9a3256897159e2d6b) --- CHANGES/9167.bugfix.rst | 1 + aiohttp/web_response.py | 2 ++ tests/test_web_exceptions.py | 5 +++++ tests/test_web_response.py | 13 +++++++++---- 4 files changed, 17 insertions(+), 4 deletions(-) create mode 100644 CHANGES/9167.bugfix.rst diff --git a/CHANGES/9167.bugfix.rst b/CHANGES/9167.bugfix.rst new file mode 100644 index 00000000000..4c33c8ad355 --- /dev/null +++ b/CHANGES/9167.bugfix.rst @@ -0,0 +1 @@ +Rejected `\n` in `reason` values to avoid sending broken HTTP messages -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 2ba135f54d2..bf184980700 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -145,6 +145,8 @@ def set_status( reason = HTTPStatus(self._status).phrase except ValueError: reason = "" + if "\n" in reason: + raise ValueError("Reason cannot contain \\n") self._reason = reason @property diff --git a/tests/test_web_exceptions.py b/tests/test_web_exceptions.py index 69deb27a062..3358a947d3d 100644 --- a/tests/test_web_exceptions.py +++ b/tests/test_web_exceptions.py @@ -270,3 +270,8 @@ def test_unicode_text_body_unauthorized() -> None: ): resp = web.HTTPUnauthorized(body="text") assert resp.status == 401 + + +def test_multiline_reason() -> None: + with pytest.raises(ValueError, match=r"Reason cannot contain \\n"): + web.HTTPOk(reason="Bad\r\nInjected-header: foo") diff --git a/tests/test_web_response.py b/tests/test_web_response.py index 264ca5e93ee..ec9522b05a5 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -945,14 +945,14 @@ async def test_start_force_close() -> None: async def test___repr__() -> None: req = make_request("GET", "/path/to") - resp = StreamResponse(reason=301) + resp = StreamResponse(reason="foo") await resp.prepare(req) - assert "<StreamResponse 301 GET /path/to >" == repr(resp) + assert "<StreamResponse foo GET /path/to >" == repr(resp) def test___repr___not_prepared() -> None: - resp = StreamResponse(reason=301) - assert "<StreamResponse 301 not prepared>" == repr(resp) + resp = StreamResponse(reason="foo") + assert "<StreamResponse foo not prepared>" == repr(resp) async def test_keep_alive_http10_default() -> None: @@ -1226,6 +1226,11 @@ async def test_render_with_body(buf, writer) -> None: ) +async def test_multiline_reason(buf, writer) -> None: + with pytest.raises(ValueError, match=r"Reason cannot contain \\n"): + Response(reason="Bad\r\nInjected-header: foo") + + async def test_send_set_cookie_header(buf, writer) -> None: resp = 
Response() resp.cookies["name"] = "value" From dd5bb073107caa1c764158b87fb8482124aad6c1 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Thu, 19 Sep 2024 13:28:41 +0100 Subject: [PATCH 0584/1511] Disallow newlines in reason (#9167) (#9199) (cherry picked from commit 88f383427e918360fd2762a9a3256897159e2d6b) --- CHANGES/9167.bugfix.rst | 1 + aiohttp/web_response.py | 2 ++ tests/test_web_exceptions.py | 5 +++++ tests/test_web_response.py | 13 +++++++++---- 4 files changed, 17 insertions(+), 4 deletions(-) create mode 100644 CHANGES/9167.bugfix.rst diff --git a/CHANGES/9167.bugfix.rst b/CHANGES/9167.bugfix.rst new file mode 100644 index 00000000000..4c33c8ad355 --- /dev/null +++ b/CHANGES/9167.bugfix.rst @@ -0,0 +1 @@ +Rejected `\n` in `reason` values to avoid sending broken HTTP messages -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 71a94eec248..c14a7544d6f 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -143,6 +143,8 @@ def set_status( reason = HTTPStatus(self._status).phrase except ValueError: reason = "" + if "\n" in reason: + raise ValueError("Reason cannot contain \\n") self._reason = reason @property diff --git a/tests/test_web_exceptions.py b/tests/test_web_exceptions.py index 69deb27a062..3358a947d3d 100644 --- a/tests/test_web_exceptions.py +++ b/tests/test_web_exceptions.py @@ -270,3 +270,8 @@ def test_unicode_text_body_unauthorized() -> None: ): resp = web.HTTPUnauthorized(body="text") assert resp.status == 401 + + +def test_multiline_reason() -> None: + with pytest.raises(ValueError, match=r"Reason cannot contain \\n"): + web.HTTPOk(reason="Bad\r\nInjected-header: foo") diff --git a/tests/test_web_response.py b/tests/test_web_response.py index 36642d3d244..3694e65948b 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -944,14 +944,14 @@ async def test_start_force_close() -> None: async def test___repr__() -> None: req = make_request("GET", 
"/path/to") - resp = StreamResponse(reason=301) + resp = StreamResponse(reason="foo") await resp.prepare(req) - assert "<StreamResponse 301 GET /path/to >" == repr(resp) + assert "<StreamResponse foo GET /path/to >" == repr(resp) def test___repr___not_prepared() -> None: - resp = StreamResponse(reason=301) - assert "<StreamResponse 301 not prepared>" == repr(resp) + resp = StreamResponse(reason="foo") + assert "<StreamResponse foo not prepared>" == repr(resp) async def test_keep_alive_http10_default() -> None: @@ -1225,6 +1225,11 @@ async def test_render_with_body(buf, writer) -> None: ) +async def test_multiline_reason(buf, writer) -> None: + with pytest.raises(ValueError, match=r"Reason cannot contain \\n"): + Response(reason="Bad\r\nInjected-header: foo") + + async def test_send_set_cookie_header(buf, writer) -> None: resp = Response() resp.cookies["name"] = "value" From 2508faca25c0f952a99aaf9cb316bb9966132bac Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Fri, 20 Sep 2024 14:06:05 +0100 Subject: [PATCH 0585/1511] Fix lost details on HttpProcessingError (#9052) (#9205) (cherry picked from commit 89114196040e6b4e435799939dfde6141223cc12) --- CHANGES/9052.bugfix.rst | 1 + aiohttp/client_proto.py | 10 +++++++++- 2 files changed, 10 insertions(+), 1 deletion(-) create mode 100644 CHANGES/9052.bugfix.rst diff --git a/CHANGES/9052.bugfix.rst b/CHANGES/9052.bugfix.rst new file mode 100644 index 00000000000..913288d3368 --- /dev/null +++ b/CHANGES/9052.bugfix.rst @@ -0,0 +1 @@ +Fixed exception information getting lost on ``HttpProcessingError`` -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index e612450c746..9230ae5145b 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -266,7 +266,15 @@ def data_received(self, data: bytes) -> None: # closed in this case self.transport.close() # should_close is True after the call - self.set_exception(HttpProcessingError(), underlying_exc) + if isinstance(underlying_exc, HttpProcessingError): + exc = HttpProcessingError( + code=underlying_exc.code, + message=underlying_exc.message, + headers=underlying_exc.headers, + ) + else: + exc = HttpProcessingError() + self.set_exception(exc, underlying_exc) return self._upgraded = upgraded From eba4d1c75df4551affc8dd390ba9ff2a3096d6b8 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Fri, 20 Sep 2024 14:19:43 +0100 Subject: [PATCH 0586/1511] Fix lost details on HttpProcessingError (#9052) (#9206) (cherry picked from commit 89114196040e6b4e435799939dfde6141223cc12) --- CHANGES/9052.bugfix.rst | 1 + aiohttp/client_proto.py | 10 +++++++++- 2 files changed, 10 insertions(+), 1 deletion(-) create mode 100644 CHANGES/9052.bugfix.rst diff --git a/CHANGES/9052.bugfix.rst b/CHANGES/9052.bugfix.rst new file mode 100644 index 00000000000..913288d3368 --- /dev/null +++ b/CHANGES/9052.bugfix.rst @@ -0,0 +1 @@ +Fixed exception information getting lost on ``HttpProcessingError`` -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index f8c83240209..c6c262d3bfe 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -269,7 +269,15 @@ def data_received(self, data: bytes) -> None: # closed in this case self.transport.close() # should_close is True after the call - self.set_exception(HttpProcessingError(), underlying_exc) + if isinstance(underlying_exc, HttpProcessingError): + exc = HttpProcessingError( + code=underlying_exc.code, + message=underlying_exc.message, + headers=underlying_exc.headers, + ) + else: + exc = HttpProcessingError() + self.set_exception(exc, underlying_exc) return self._upgraded = upgraded From 1d7b0df336cf1cb35a676f56104c2e48e895ca2a Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sat, 21 Sep 2024 11:31:40 +0200 Subject: [PATCH 0587/1511] =?UTF-8?q?[PR=C2=A0#9203/6e70c0a=20backport][3.?= =?UTF-8?q?10]=20Implement=20heapq=20for=20cookie=20expire=20times=20(#920?= =?UTF-8?q?8)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- CHANGES/9203.misc.rst | 3 + aiohttp/cookiejar.py | 100 ++++++++++++++++------- tests/test_cookiejar.py | 174 +++++++++++++++++++++++++++++++++++++++- 3 files changed, 246 insertions(+), 31 deletions(-) create mode 100644 CHANGES/9203.misc.rst diff --git a/CHANGES/9203.misc.rst b/CHANGES/9203.misc.rst new file mode 100644 index 00000000000..766fdc01a57 --- /dev/null +++ b/CHANGES/9203.misc.rst @@ -0,0 +1,3 @@ +Significantly improved performance of expiring cookies -- by :user:`bdraco`. + +Expiring cookies has been redesigned to use :mod:`heapq` instead of a linear search, to better scale. 
diff --git a/aiohttp/cookiejar.py b/aiohttp/cookiejar.py index e9997ce2935..72c431a275c 100644 --- a/aiohttp/cookiejar.py +++ b/aiohttp/cookiejar.py @@ -2,6 +2,7 @@ import calendar import contextlib import datetime +import heapq import itertools import os # noqa import pathlib @@ -10,7 +11,6 @@ import time from collections import defaultdict from http.cookies import BaseCookie, Morsel, SimpleCookie -from math import ceil from typing import ( DefaultDict, Dict, @@ -40,6 +40,11 @@ _FORMAT_PATH = "{}/{}".format _FORMAT_DOMAIN_REVERSED = "{1}.{0}".format +# The minimum number of scheduled cookie expirations before we start cleaning up +# the expiration heap. This is a performance optimization to avoid cleaning up the +# heap too often when there are only a few scheduled expirations. +_MIN_SCHEDULED_COOKIE_EXPIRATION = 100 + class CookieJar(AbstractCookieJar): """Implements cookie storage adhering to RFC 6265.""" @@ -105,7 +110,7 @@ def __init__( for url in treat_as_secure_origin ] self._treat_as_secure_origin = treat_as_secure_origin - self._next_expiration: float = ceil(time.time()) + self._expire_heap: List[Tuple[float, Tuple[str, str, str]]] = [] self._expirations: Dict[Tuple[str, str, str], float] = {} def save(self, file_path: PathLike) -> None: @@ -120,34 +125,25 @@ def load(self, file_path: PathLike) -> None: def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None: if predicate is None: - self._next_expiration = ceil(time.time()) + self._expire_heap.clear() self._cookies.clear() self._host_only_cookies.clear() self._expirations.clear() return - to_del = [] now = time.time() - for (domain, path), cookie in self._cookies.items(): - for name, morsel in cookie.items(): - key = (domain, path, name) - if ( - key in self._expirations and self._expirations[key] <= now - ) or predicate(morsel): - to_del.append(key) - - for domain, path, name in to_del: - self._host_only_cookies.discard((domain, name)) - key = (domain, path, name) - if key in 
self._expirations: - del self._expirations[(domain, path, name)] - self._cookies[(domain, path)].pop(name, None) - - self._next_expiration = ( - min(*self._expirations.values(), self.SUB_MAX_TIME) + 1 - if self._expirations - else self.MAX_TIME - ) + to_del = [ + key + for (domain, path), cookie in self._cookies.items() + for name, morsel in cookie.items() + if ( + (key := (domain, path, name)) in self._expirations + and self._expirations[key] <= now + ) + or predicate(morsel) + ] + if to_del: + self._delete_cookies(to_del) def clear_domain(self, domain: str) -> None: self.clear(lambda x: self._is_domain_match(domain, x["domain"])) @@ -166,11 +162,61 @@ def __len__(self) -> int: return sum(len(cookie.values()) for cookie in self._cookies.values()) def _do_expiration(self) -> None: - self.clear(lambda x: False) + """Remove expired cookies.""" + if not (expire_heap_len := len(self._expire_heap)): + return + + # If the expiration heap grows larger than the number expirations + # times two, we clean it up to avoid keeping expired entries in + # the heap and consuming memory. We guard this with a minimum + # threshold to avoid cleaning up the heap too often when there are + # only a few scheduled expirations. + if ( + expire_heap_len > _MIN_SCHEDULED_COOKIE_EXPIRATION + and expire_heap_len > len(self._expirations) * 2 + ): + # Remove any expired entries from the expiration heap + # that do not match the expiration time in the expirations + # as it means the cookie has been re-added to the heap + # with a different expiration time. 
+ self._expire_heap = [ + entry + for entry in self._expire_heap + if self._expirations.get(entry[1]) == entry[0] + ] + heapq.heapify(self._expire_heap) + + now = time.time() + to_del: List[Tuple[str, str, str]] = [] + # Find any expired cookies and add them to the to-delete list + while self._expire_heap: + when, cookie_key = self._expire_heap[0] + if when > now: + break + heapq.heappop(self._expire_heap) + # Check if the cookie hasn't been re-added to the heap + # with a different expiration time as it will be removed + # later when it reaches the top of the heap and its + # expiration time is met. + if self._expirations.get(cookie_key) == when: + to_del.append(cookie_key) + + if to_del: + self._delete_cookies(to_del) + + def _delete_cookies(self, to_del: List[Tuple[str, str, str]]) -> None: + for domain, path, name in to_del: + self._host_only_cookies.discard((domain, name)) + self._cookies[(domain, path)].pop(name, None) + self._expirations.pop((domain, path, name), None) def _expire_cookie(self, when: float, domain: str, path: str, name: str) -> None: - self._next_expiration = min(self._next_expiration, when) - self._expirations[(domain, path, name)] = when + cookie_key = (domain, path, name) + if self._expirations.get(cookie_key) == when: + # Avoid adding duplicates to the heap + return + heapq.heappush(self._expire_heap, (when, cookie_key)) + self._expirations[cookie_key] = when def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None: """Update cookies.""" diff --git a/tests/test_cookiejar.py b/tests/test_cookiejar.py index 91352f50c3d..248d0d419e3 100644 --- a/tests/test_cookiejar.py +++ b/tests/test_cookiejar.py @@ -1,10 +1,12 @@ import asyncio import datetime +import heapq import itertools import pathlib import pickle import unittest from http.cookies import BaseCookie, Morsel, SimpleCookie +from operator import not_ from unittest import mock import pytest @@ -847,12 +849,98 @@ async def test_cookie_jar_clear_expired(): with 
freeze_time("1980-01-01"): sut.update_cookies(cookie) - sut.clear(lambda x: False) - with freeze_time("1980-01-01"): - assert len(sut) == 0 + for _ in range(2): + sut.clear(not_) + with freeze_time("1980-01-01"): + assert len(sut) == 0 + + +async def test_cookie_jar_expired_changes() -> None: + """Test that expire time changes are handled as expected.""" + jar = CookieJar() + + cookie_eleven_am = SimpleCookie() + cookie_eleven_am["foo"] = "bar" + cookie_eleven_am["foo"]["expires"] = "Tue, 1 Jan 1990 11:00:00 GMT" + + cookie_noon = SimpleCookie() + cookie_noon["foo"] = "bar" + cookie_noon["foo"]["expires"] = "Tue, 1 Jan 1990 12:00:00 GMT" + + cookie_one_pm = SimpleCookie() + cookie_one_pm["foo"] = "bar" + cookie_one_pm["foo"]["expires"] = "Tue, 1 Jan 1990 13:00:00 GMT" + + cookie_two_pm = SimpleCookie() + cookie_two_pm["foo"] = "bar" + cookie_two_pm["foo"]["expires"] = "Tue, 1 Jan 1990 14:00:00 GMT" + + with freeze_time() as freezer: + freezer.move_to("1990-01-01 10:00:00+00:00") + jar.update_cookies(cookie_noon) + assert len(jar) == 1 + matched_cookies = jar.filter_cookies(URL("/")) + assert len(matched_cookies) == 1 + assert "foo" in matched_cookies + + jar.update_cookies(cookie_eleven_am) + matched_cookies = jar.filter_cookies(URL("/")) + assert len(matched_cookies) == 1 + assert "foo" in matched_cookies + + jar.update_cookies(cookie_one_pm) + matched_cookies = jar.filter_cookies(URL("/")) + assert len(matched_cookies) == 1 + assert "foo" in matched_cookies + + jar.update_cookies(cookie_two_pm) + matched_cookies = jar.filter_cookies(URL("/")) + assert len(matched_cookies) == 1 + assert "foo" in matched_cookies + + freezer.move_to("1990-01-01 13:00:00+00:00") + matched_cookies = jar.filter_cookies(URL("/")) + assert len(matched_cookies) == 1 + assert "foo" in matched_cookies + + freezer.move_to("1990-01-01 14:00:00+00:00") + matched_cookies = jar.filter_cookies(URL("/")) + assert len(matched_cookies) == 0 + + +async def 
test_cookie_jar_duplicates_with_expire_heap() -> None: + """Test that duplicate cookies do not grow the expires heap.""" + jar = CookieJar() + + cookie_eleven_am = SimpleCookie() + cookie_eleven_am["foo"] = "bar" + cookie_eleven_am["foo"]["expires"] = "Tue, 1 Jan 1990 11:00:00 GMT" + + cookie_two_pm = SimpleCookie() + cookie_two_pm["foo"] = "bar" + cookie_two_pm["foo"]["expires"] = "Tue, 1 Jan 1990 14:00:00 GMT" + + with freeze_time() as freezer: + freezer.move_to("1990-01-01 10:00:00+00:00") + for _ in range(10): + jar.update_cookies(cookie_eleven_am) -async def test_cookie_jar_filter_cookies_expires(): + assert len(jar) == 1 + matched_cookies = jar.filter_cookies(URL("/")) + assert len(matched_cookies) == 1 + assert "foo" in matched_cookies + + assert len(jar._expire_heap) == 1 + + freezer.move_to("1990-01-01 16:00:00+00:00") + jar.update_cookies(cookie_two_pm) + matched_cookies = jar.filter_cookies(URL("/")) + assert len(matched_cookies) == 0 + assert len(jar._expire_heap) == 0 + + +async def test_cookie_jar_filter_cookies_expires() -> None: """Test that calling filter_cookies will expire stale cookies.""" jar = CookieJar() assert len(jar) == 0 @@ -873,6 +961,84 @@ async def test_cookie_jar_filter_cookies_expires(): assert len(jar) == 0 +async def test_cookie_jar_heap_cleanup() -> None: + """Test that the heap gets cleaned up when there are many old expirations.""" + jar = CookieJar() + # The heap should not be cleaned up when there are less than 100 expiration changes + min_cookies_to_cleanup = 100 + + with freeze_time() as freezer: + freezer.move_to("1990-01-01 09:00:00+00:00") + + start_time = datetime.datetime( + 1990, 1, 1, 10, 0, 0, tzinfo=datetime.timezone.utc + ) + for i in range(min_cookies_to_cleanup): + cookie = SimpleCookie() + cookie["foo"] = "bar" + cookie["foo"]["expires"] = ( + start_time + datetime.timedelta(seconds=i) + ).strftime("%a, %d %b %Y %H:%M:%S GMT") + jar.update_cookies(cookie) + assert len(jar._expire_heap) == i + 1 + + assert 
len(jar._expire_heap) == min_cookies_to_cleanup + + # Now that we reached the minimum number of cookies to cleanup, + # add one more cookie to trigger the cleanup + cookie = SimpleCookie() + cookie["foo"] = "bar" + cookie["foo"]["expires"] = ( + start_time + datetime.timedelta(seconds=i + 1) + ).strftime("%a, %d %b %Y %H:%M:%S GMT") + jar.update_cookies(cookie) + + # Verify that the heap has been cleaned up + assert len(jar) == 1 + matched_cookies = jar.filter_cookies(URL("/")) + assert len(matched_cookies) == 1 + assert "foo" in matched_cookies + # The heap should have been cleaned up + assert len(jar._expire_heap) == 1 + + +async def test_cookie_jar_heap_maintains_order_after_cleanup() -> None: + """Test that order is maintained after cleanup.""" + jar = CookieJar() + # The heap should not be cleaned up when there are less than 100 expiration changes + min_cookies_to_cleanup = 100 + + with freeze_time() as freezer: + freezer.move_to("1990-01-01 09:00:00+00:00") + + for hour in (12, 13): + for i in range(min_cookies_to_cleanup): + cookie = SimpleCookie() + cookie["foo"] = "bar" + cookie["foo"]["domain"] = f"example{i}.com" + cookie["foo"]["expires"] = f"Tue, 1 Jan 1990 {hour}:00:00 GMT" + jar.update_cookies(cookie) + + # Get the jar into a state where the next cookie will trigger the cleanup + assert len(jar._expire_heap) == min_cookies_to_cleanup * 2 + assert len(jar._expirations) == min_cookies_to_cleanup + + cookie = SimpleCookie() + cookie["foo"] = "bar" + cookie["foo"]["domain"] = "example0.com" + cookie["foo"]["expires"] = "Tue, 1 Jan 1990 14:00:00 GMT" + jar.update_cookies(cookie) + + assert len(jar) == 100 + # The heap should have been cleaned up + assert len(jar._expire_heap) == 100 + + # Verify that the heap is still ordered + heap_before = jar._expire_heap.copy() + heapq.heapify(jar._expire_heap) + assert heap_before == jar._expire_heap + + async def test_cookie_jar_clear_domain() -> None: sut = CookieJar() cookie = SimpleCookie() From 
6608ffee20a1cec8e83765dbcc041cdbc08b23b8 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sat, 21 Sep 2024 11:45:03 +0200 Subject: [PATCH 0588/1511] =?UTF-8?q?[PR=C2=A0#9203/6e70c0a=20backport][3.?= =?UTF-8?q?11]=20Implement=20heapq=20for=20cookie=20expire=20times=20(#920?= =?UTF-8?q?9)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- CHANGES/9203.misc.rst | 3 + aiohttp/cookiejar.py | 100 ++++++++++++++++------- tests/test_cookiejar.py | 174 +++++++++++++++++++++++++++++++++++++++- 3 files changed, 246 insertions(+), 31 deletions(-) create mode 100644 CHANGES/9203.misc.rst diff --git a/CHANGES/9203.misc.rst b/CHANGES/9203.misc.rst new file mode 100644 index 00000000000..766fdc01a57 --- /dev/null +++ b/CHANGES/9203.misc.rst @@ -0,0 +1,3 @@ +Significantly improved performance of expiring cookies -- by :user:`bdraco`. + +Expiring cookies has been redesigned to use :mod:`heapq` instead of a linear search, to better scale. diff --git a/aiohttp/cookiejar.py b/aiohttp/cookiejar.py index c57604b5e59..241a9e3e0a4 100644 --- a/aiohttp/cookiejar.py +++ b/aiohttp/cookiejar.py @@ -2,6 +2,7 @@ import calendar import contextlib import datetime +import heapq import itertools import os # noqa import pathlib @@ -10,7 +11,6 @@ import time from collections import defaultdict from http.cookies import BaseCookie, Morsel, SimpleCookie -from math import ceil from typing import ( DefaultDict, Dict, @@ -40,6 +40,11 @@ _FORMAT_PATH = "{}/{}".format _FORMAT_DOMAIN_REVERSED = "{1}.{0}".format +# The minimum number of scheduled cookie expirations before we start cleaning up +# the expiration heap. This is a performance optimization to avoid cleaning up the +# heap too often when there are only a few scheduled expirations. 
+_MIN_SCHEDULED_COOKIE_EXPIRATION = 100 + class CookieJar(AbstractCookieJar): """Implements cookie storage adhering to RFC 6265.""" @@ -105,7 +110,7 @@ def __init__( for url in treat_as_secure_origin ] self._treat_as_secure_origin = treat_as_secure_origin - self._next_expiration: float = ceil(time.time()) + self._expire_heap: List[Tuple[float, Tuple[str, str, str]]] = [] self._expirations: Dict[Tuple[str, str, str], float] = {} def save(self, file_path: PathLike) -> None: @@ -120,34 +125,25 @@ def load(self, file_path: PathLike) -> None: def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None: if predicate is None: - self._next_expiration = ceil(time.time()) + self._expire_heap.clear() self._cookies.clear() self._host_only_cookies.clear() self._expirations.clear() return - to_del = [] now = time.time() - for (domain, path), cookie in self._cookies.items(): - for name, morsel in cookie.items(): - key = (domain, path, name) - if ( - key in self._expirations and self._expirations[key] <= now - ) or predicate(morsel): - to_del.append(key) - - for domain, path, name in to_del: - self._host_only_cookies.discard((domain, name)) - key = (domain, path, name) - if key in self._expirations: - del self._expirations[(domain, path, name)] - self._cookies[(domain, path)].pop(name, None) - - self._next_expiration = ( - min(*self._expirations.values(), self.SUB_MAX_TIME) + 1 - if self._expirations - else self.MAX_TIME - ) + to_del = [ + key + for (domain, path), cookie in self._cookies.items() + for name, morsel in cookie.items() + if ( + (key := (domain, path, name)) in self._expirations + and self._expirations[key] <= now + ) + or predicate(morsel) + ] + if to_del: + self._delete_cookies(to_del) def clear_domain(self, domain: str) -> None: self.clear(lambda x: self._is_domain_match(domain, x["domain"])) @@ -166,11 +162,61 @@ def __len__(self) -> int: return sum(len(cookie.values()) for cookie in self._cookies.values()) def _do_expiration(self) -> None: - 
self.clear(lambda x: False) + """Remove expired cookies.""" + if not (expire_heap_len := len(self._expire_heap)): + return + + # If the expiration heap grows larger than the number expirations + # times two, we clean it up to avoid keeping expired entries in + # the heap and consuming memory. We guard this with a minimum + # threshold to avoid cleaning up the heap too often when there are + # only a few scheduled expirations. + if ( + expire_heap_len > _MIN_SCHEDULED_COOKIE_EXPIRATION + and expire_heap_len > len(self._expirations) * 2 + ): + # Remove any expired entries from the expiration heap + # that do not match the expiration time in the expirations + # as it means the cookie has been re-added to the heap + # with a different expiration time. + self._expire_heap = [ + entry + for entry in self._expire_heap + if self._expirations.get(entry[1]) == entry[0] + ] + heapq.heapify(self._expire_heap) + + now = time.time() + to_del: List[Tuple[str, str, str]] = [] + # Find any expired cookies and add them to the to-delete list + while self._expire_heap: + when, cookie_key = self._expire_heap[0] + if when > now: + break + heapq.heappop(self._expire_heap) + # Check if the cookie hasn't been re-added to the heap + # with a different expiration time as it will be removed + # later when it reaches the top of the heap and its + # expiration time is met. 
+ if self._expirations.get(cookie_key) == when: + to_del.append(cookie_key) + + if to_del: + self._delete_cookies(to_del) + + def _delete_cookies(self, to_del: List[Tuple[str, str, str]]) -> None: + for domain, path, name in to_del: + self._host_only_cookies.discard((domain, name)) + self._cookies[(domain, path)].pop(name, None) + self._expirations.pop((domain, path, name), None) def _expire_cookie(self, when: float, domain: str, path: str, name: str) -> None: - self._next_expiration = min(self._next_expiration, when) - self._expirations[(domain, path, name)] = when + cookie_key = (domain, path, name) + if self._expirations.get(cookie_key) == when: + # Avoid adding duplicates to the heap + return + heapq.heappush(self._expire_heap, (when, cookie_key)) + self._expirations[cookie_key] = when def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None: """Update cookies.""" diff --git a/tests/test_cookiejar.py b/tests/test_cookiejar.py index 91352f50c3d..248d0d419e3 100644 --- a/tests/test_cookiejar.py +++ b/tests/test_cookiejar.py @@ -1,10 +1,12 @@ import asyncio import datetime +import heapq import itertools import pathlib import pickle import unittest from http.cookies import BaseCookie, Morsel, SimpleCookie +from operator import not_ from unittest import mock import pytest @@ -847,12 +849,98 @@ async def test_cookie_jar_clear_expired(): with freeze_time("1980-01-01"): sut.update_cookies(cookie) - sut.clear(lambda x: False) - with freeze_time("1980-01-01"): - assert len(sut) == 0 + for _ in range(2): + sut.clear(not_) + with freeze_time("1980-01-01"): + assert len(sut) == 0 + + +async def test_cookie_jar_expired_changes() -> None: + """Test that expire time changes are handled as expected.""" + jar = CookieJar() + + cookie_eleven_am = SimpleCookie() + cookie_eleven_am["foo"] = "bar" + cookie_eleven_am["foo"]["expires"] = "Tue, 1 Jan 1990 11:00:00 GMT" + + cookie_noon = SimpleCookie() + cookie_noon["foo"] = "bar" + 
cookie_noon["foo"]["expires"] = "Tue, 1 Jan 1990 12:00:00 GMT" + + cookie_one_pm = SimpleCookie() + cookie_one_pm["foo"] = "bar" + cookie_one_pm["foo"]["expires"] = "Tue, 1 Jan 1990 13:00:00 GMT" + + cookie_two_pm = SimpleCookie() + cookie_two_pm["foo"] = "bar" + cookie_two_pm["foo"]["expires"] = "Tue, 1 Jan 1990 14:00:00 GMT" + + with freeze_time() as freezer: + freezer.move_to("1990-01-01 10:00:00+00:00") + jar.update_cookies(cookie_noon) + assert len(jar) == 1 + matched_cookies = jar.filter_cookies(URL("/")) + assert len(matched_cookies) == 1 + assert "foo" in matched_cookies + + jar.update_cookies(cookie_eleven_am) + matched_cookies = jar.filter_cookies(URL("/")) + assert len(matched_cookies) == 1 + assert "foo" in matched_cookies + + jar.update_cookies(cookie_one_pm) + matched_cookies = jar.filter_cookies(URL("/")) + assert len(matched_cookies) == 1 + assert "foo" in matched_cookies + + jar.update_cookies(cookie_two_pm) + matched_cookies = jar.filter_cookies(URL("/")) + assert len(matched_cookies) == 1 + assert "foo" in matched_cookies + + freezer.move_to("1990-01-01 13:00:00+00:00") + matched_cookies = jar.filter_cookies(URL("/")) + assert len(matched_cookies) == 1 + assert "foo" in matched_cookies + + freezer.move_to("1990-01-01 14:00:00+00:00") + matched_cookies = jar.filter_cookies(URL("/")) + assert len(matched_cookies) == 0 + + +async def test_cookie_jar_duplicates_with_expire_heap() -> None: + """Test that duplicate cookies do not grow the expires heap.""" + jar = CookieJar() + + cookie_eleven_am = SimpleCookie() + cookie_eleven_am["foo"] = "bar" + cookie_eleven_am["foo"]["expires"] = "Tue, 1 Jan 1990 11:00:00 GMT" + + cookie_two_pm = SimpleCookie() + cookie_two_pm["foo"] = "bar" + cookie_two_pm["foo"]["expires"] = "Tue, 1 Jan 1990 14:00:00 GMT" + + with freeze_time() as freezer: + freezer.move_to("1990-01-01 10:00:00+00:00") + for _ in range(10): + jar.update_cookies(cookie_eleven_am) -async def test_cookie_jar_filter_cookies_expires(): + assert 
len(jar) == 1 + matched_cookies = jar.filter_cookies(URL("/")) + assert len(matched_cookies) == 1 + assert "foo" in matched_cookies + + assert len(jar._expire_heap) == 1 + + freezer.move_to("1990-01-01 16:00:00+00:00") + jar.update_cookies(cookie_two_pm) + matched_cookies = jar.filter_cookies(URL("/")) + assert len(matched_cookies) == 0 + assert len(jar._expire_heap) == 0 + + +async def test_cookie_jar_filter_cookies_expires() -> None: """Test that calling filter_cookies will expire stale cookies.""" jar = CookieJar() assert len(jar) == 0 @@ -873,6 +961,84 @@ async def test_cookie_jar_filter_cookies_expires(): assert len(jar) == 0 +async def test_cookie_jar_heap_cleanup() -> None: + """Test that the heap gets cleaned up when there are many old expirations.""" + jar = CookieJar() + # The heap should not be cleaned up when there are less than 100 expiration changes + min_cookies_to_cleanup = 100 + + with freeze_time() as freezer: + freezer.move_to("1990-01-01 09:00:00+00:00") + + start_time = datetime.datetime( + 1990, 1, 1, 10, 0, 0, tzinfo=datetime.timezone.utc + ) + for i in range(min_cookies_to_cleanup): + cookie = SimpleCookie() + cookie["foo"] = "bar" + cookie["foo"]["expires"] = ( + start_time + datetime.timedelta(seconds=i) + ).strftime("%a, %d %b %Y %H:%M:%S GMT") + jar.update_cookies(cookie) + assert len(jar._expire_heap) == i + 1 + + assert len(jar._expire_heap) == min_cookies_to_cleanup + + # Now that we reached the minimum number of cookies to cleanup, + # add one more cookie to trigger the cleanup + cookie = SimpleCookie() + cookie["foo"] = "bar" + cookie["foo"]["expires"] = ( + start_time + datetime.timedelta(seconds=i + 1) + ).strftime("%a, %d %b %Y %H:%M:%S GMT") + jar.update_cookies(cookie) + + # Verify that the heap has been cleaned up + assert len(jar) == 1 + matched_cookies = jar.filter_cookies(URL("/")) + assert len(matched_cookies) == 1 + assert "foo" in matched_cookies + # The heap should have been cleaned up + assert len(jar._expire_heap) == 
1 + + +async def test_cookie_jar_heap_maintains_order_after_cleanup() -> None: + """Test that order is maintained after cleanup.""" + jar = CookieJar() + # The heap should not be cleaned up when there are less than 100 expiration changes + min_cookies_to_cleanup = 100 + + with freeze_time() as freezer: + freezer.move_to("1990-01-01 09:00:00+00:00") + + for hour in (12, 13): + for i in range(min_cookies_to_cleanup): + cookie = SimpleCookie() + cookie["foo"] = "bar" + cookie["foo"]["domain"] = f"example{i}.com" + cookie["foo"]["expires"] = f"Tue, 1 Jan 1990 {hour}:00:00 GMT" + jar.update_cookies(cookie) + + # Get the jar into a state where the next cookie will trigger the cleanup + assert len(jar._expire_heap) == min_cookies_to_cleanup * 2 + assert len(jar._expirations) == min_cookies_to_cleanup + + cookie = SimpleCookie() + cookie["foo"] = "bar" + cookie["foo"]["domain"] = "example0.com" + cookie["foo"]["expires"] = "Tue, 1 Jan 1990 14:00:00 GMT" + jar.update_cookies(cookie) + + assert len(jar) == 100 + # The heap should have been cleaned up + assert len(jar._expire_heap) == 100 + + # Verify that the heap is still ordered + heap_before = jar._expire_heap.copy() + heapq.heapify(jar._expire_heap) + assert heap_before == jar._expire_heap + + async def test_cookie_jar_clear_domain() -> None: sut = CookieJar() cookie = SimpleCookie() From 0408ba675de2c008028752df653e73e28a9f64d1 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 21 Sep 2024 09:58:13 +0000 Subject: [PATCH 0589/1511] [PR #9168/5e15ea61 backport][3.11] Avoid creating handler waiter until shutdown (#9211) --- CHANGES/9168.misc.rst | 1 + aiohttp/web_protocol.py | 14 +++++++++++--- 2 files changed, 12 insertions(+), 3 deletions(-) create mode 120000 CHANGES/9168.misc.rst diff --git a/CHANGES/9168.misc.rst b/CHANGES/9168.misc.rst new file mode 120000 index 00000000000..d6a2f2aaaab --- /dev/null +++ b/CHANGES/9168.misc.rst @@ -0,0 +1 @@ +9174.misc.rst \ 
No newline at end of file diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index a7f7b546903..dd819de7236 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -162,6 +162,7 @@ class RequestHandler(BaseProtocol): "_force_close", "_current_request", "_timeout_ceil_threshold", + "_request_in_progress", ) def __init__( @@ -238,6 +239,7 @@ def __init__( self._close = False self._force_close = False + self._request_in_progress = False def __repr__(self) -> str: return "<{} {}>".format( @@ -261,7 +263,11 @@ async def shutdown(self, timeout: Optional[float] = 15.0) -> None: self._keepalive_handle.cancel() # Wait for graceful handler completion - if self._handler_waiter is not None: + if self._request_in_progress: + # The future is only created when we are shutting + # down while the handler is still processing a request + # to avoid creating a future for every request. + self._handler_waiter = self._loop.create_future() with suppress(asyncio.CancelledError, asyncio.TimeoutError): async with ceil_timeout(timeout): await self._handler_waiter @@ -446,7 +452,7 @@ async def _handle_request( start_time: float, request_handler: Callable[[BaseRequest], Awaitable[StreamResponse]], ) -> Tuple[StreamResponse, bool]: - self._handler_waiter = self._loop.create_future() + self._request_in_progress = True try: try: self._current_request = request @@ -477,7 +483,9 @@ async def _handle_request( resp, reset = await self.finish_response(request, resp, start_time) finally: - self._handler_waiter.set_result(None) + self._request_in_progress = False + if self._handler_waiter is not None: + self._handler_waiter.set_result(None) return resp, reset From 2b11f5e5252d7696b4089c7498cc1612404304de Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 21 Sep 2024 09:58:13 +0000 Subject: [PATCH 0590/1511] [PR #9168/5e15ea61 backport][3.10] Avoid creating handler waiter until shutdown (#9210) --- CHANGES/9168.misc.rst 
| 1 + aiohttp/web_protocol.py | 14 +++++++++++--- 2 files changed, 12 insertions(+), 3 deletions(-) create mode 120000 CHANGES/9168.misc.rst diff --git a/CHANGES/9168.misc.rst b/CHANGES/9168.misc.rst new file mode 120000 index 00000000000..d6a2f2aaaab --- /dev/null +++ b/CHANGES/9168.misc.rst @@ -0,0 +1 @@ +9174.misc.rst \ No newline at end of file diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index a7f7b546903..dd819de7236 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -162,6 +162,7 @@ class RequestHandler(BaseProtocol): "_force_close", "_current_request", "_timeout_ceil_threshold", + "_request_in_progress", ) def __init__( @@ -238,6 +239,7 @@ def __init__( self._close = False self._force_close = False + self._request_in_progress = False def __repr__(self) -> str: return "<{} {}>".format( @@ -261,7 +263,11 @@ async def shutdown(self, timeout: Optional[float] = 15.0) -> None: self._keepalive_handle.cancel() # Wait for graceful handler completion - if self._handler_waiter is not None: + if self._request_in_progress: + # The future is only created when we are shutting + # down while the handler is still processing a request + # to avoid creating a future for every request. 
+ self._handler_waiter = self._loop.create_future() with suppress(asyncio.CancelledError, asyncio.TimeoutError): async with ceil_timeout(timeout): await self._handler_waiter @@ -446,7 +452,7 @@ async def _handle_request( start_time: float, request_handler: Callable[[BaseRequest], Awaitable[StreamResponse]], ) -> Tuple[StreamResponse, bool]: - self._handler_waiter = self._loop.create_future() + self._request_in_progress = True try: try: self._current_request = request @@ -477,7 +483,9 @@ async def _handle_request( resp, reset = await self.finish_response(request, resp, start_time) finally: - self._handler_waiter.set_result(None) + self._request_in_progress = False + if self._handler_waiter is not None: + self._handler_waiter.set_result(None) return resp, reset From e232f61b8433b058f1763c5fa4b985ef65fc21c6 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 21 Sep 2024 20:17:14 +0000 Subject: [PATCH 0591/1511] [PR #9175/756fae80 backport][3.11] Speed up finding the reason if its unset (#9213) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/9175.misc.rst | 1 + aiohttp/web_response.py | 14 ++++++++------ tests/test_web_response.py | 8 ++++++++ 3 files changed, 17 insertions(+), 6 deletions(-) create mode 120000 CHANGES/9175.misc.rst diff --git a/CHANGES/9175.misc.rst b/CHANGES/9175.misc.rst new file mode 120000 index 00000000000..d6a2f2aaaab --- /dev/null +++ b/CHANGES/9175.misc.rst @@ -0,0 +1 @@ +9174.misc.rst \ No newline at end of file diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index bf184980700..0cbbe84260f 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -42,6 +42,8 @@ from .payload import Payload from .typedefs import JSONEncoder, LooseHeaders +REASON_PHRASES = {http_status.value: http_status.phrase for http_status in HTTPStatus} + __all__ = ("ContentCoding", "StreamResponse", "Response", "json_response") @@ -102,7 +104,7 @@ def __init__( else: self._headers = CIMultiDict() - self.set_status(status, reason) + self._set_status(status, reason) @property def prepared(self) -> bool: @@ -139,13 +141,13 @@ def set_status( assert ( not self.prepared ), "Cannot change the response status code after the headers have been sent" + self._set_status(status, reason) + + def _set_status(self, status: int, reason: Optional[str]) -> None: self._status = int(status) if reason is None: - try: - reason = HTTPStatus(self._status).phrase - except ValueError: - reason = "" - if "\n" in reason: + reason = REASON_PHRASES.get(self._status, "") + elif "\n" in reason: raise ValueError("Reason cannot contain \\n") self._reason = reason diff --git a/tests/test_web_response.py b/tests/test_web_response.py index ec9522b05a5..080edaf57c3 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -933,6 +933,14 @@ def test_set_status_with_reason() -> None: assert "Everything is fine!" 
== resp.reason +def test_set_status_with_empty_reason() -> None: + resp = StreamResponse() + + resp.set_status(200, "") + assert resp.status == 200 + assert resp.reason == "" + + async def test_start_force_close() -> None: req = make_request("GET", "/") resp = StreamResponse() From 00e06f038c99f1236af5f632b9d15ebf262aa679 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sat, 21 Sep 2024 22:17:17 +0200 Subject: [PATCH 0592/1511] [PR #9175/756fae80 backport][3.10] Speed up finding the reason if its unset (#9214) Co-authored-by: pre-commit-ci[bot] --- CHANGES/9175.misc.rst | 1 + aiohttp/web_response.py | 20 +++++++++++--------- tests/test_web_response.py | 8 ++++++++ 3 files changed, 20 insertions(+), 9 deletions(-) create mode 120000 CHANGES/9175.misc.rst diff --git a/CHANGES/9175.misc.rst b/CHANGES/9175.misc.rst new file mode 120000 index 00000000000..d6a2f2aaaab --- /dev/null +++ b/CHANGES/9175.misc.rst @@ -0,0 +1 @@ +9174.misc.rst \ No newline at end of file diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index c14a7544d6f..3188ac59c77 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -41,6 +41,8 @@ from .payload import Payload from .typedefs import JSONEncoder, LooseHeaders +REASON_PHRASES = {http_status.value: http_status.phrase for http_status in HTTPStatus} + __all__ = ("ContentCoding", "StreamResponse", "Response", "json_response") @@ -100,7 +102,7 @@ def __init__( else: self._headers = CIMultiDict() - self.set_status(status, reason) + self._set_status(status, reason) @property def prepared(self) -> bool: @@ -134,16 +136,16 @@ def set_status( status: int, reason: Optional[str] = None, ) -> None: - assert not self.prepared, ( - "Cannot change the response status code after " "the headers have been sent" - ) + assert ( + not self.prepared + ), "Cannot change the response status code after the headers have been sent" + self._set_status(status, reason) + + def _set_status(self, status: int, reason: 
Optional[str]) -> None: self._status = int(status) if reason is None: - try: - reason = HTTPStatus(self._status).phrase - except ValueError: - reason = "" - if "\n" in reason: + reason = REASON_PHRASES.get(self._status, "") + elif "\n" in reason: raise ValueError("Reason cannot contain \\n") self._reason = reason diff --git a/tests/test_web_response.py b/tests/test_web_response.py index 3694e65948b..25e464f7bed 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -932,6 +932,14 @@ def test_set_status_with_reason() -> None: assert "Everything is fine!" == resp.reason +def test_set_status_with_empty_reason() -> None: + resp = StreamResponse() + + resp.set_status(200, "") + assert resp.status == 200 + assert resp.reason == "" + + async def test_start_force_close() -> None: req = make_request("GET", "/") resp = StreamResponse() From 206afde308a9138254129eb829d8c52e318e3f33 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Sun, 22 Sep 2024 13:28:18 +0100 Subject: [PATCH 0593/1511] Backport some changes from #9215 (#9217) --- aiohttp/pytest_plugin.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/aiohttp/pytest_plugin.py b/aiohttp/pytest_plugin.py index c862b409566..55964ead041 100644 --- a/aiohttp/pytest_plugin.py +++ b/aiohttp/pytest_plugin.py @@ -16,8 +16,6 @@ import pytest -from aiohttp.web import Application - from .test_utils import ( BaseTestServer, RawTestServer, @@ -28,14 +26,14 @@ teardown_test_loop, unused_port as _unused_port, ) +from .web import Application +from .web_protocol import _RequestHandler try: import uvloop except ImportError: # pragma: no cover uvloop = None # type: ignore[assignment] -AiohttpRawServer = Callable[[Application], Awaitable[RawTestServer]] - class AiohttpClient(Protocol): def __call__( @@ -53,6 +51,12 @@ def __call__( ) -> Awaitable[TestServer]: ... 
+class AiohttpRawServer(Protocol): + def __call__( + self, handler: _RequestHandler, *, port: Optional[int] = None, **kwargs: Any + ) -> Awaitable[RawTestServer]: ... + + def pytest_addoption(parser): # type: ignore[no-untyped-def] parser.addoption( "--aiohttp-fast", @@ -321,7 +325,9 @@ def aiohttp_raw_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpRawSe """ servers = [] - async def go(handler, *, port=None, **kwargs): # type: ignore[no-untyped-def] + async def go( + handler: _RequestHandler, *, port: Optional[int] = None, **kwargs: Any + ) -> RawTestServer: server = RawTestServer(handler, port=port) await server.start_server(loop=loop, **kwargs) servers.append(server) From ec5de6c9391fa4682f9b6966f6ceb7ac26f14f23 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 22 Sep 2024 14:42:14 +0100 Subject: [PATCH 0594/1511] [PR #9217/206afde3 backport][3.10] Backport some changes from #9215 (#9218) **This is a backport of PR #9217 as merged into 3.11 (206afde308a9138254129eb829d8c52e318e3f33).** Co-authored-by: Sam Bull <git@sambull.org> --- aiohttp/pytest_plugin.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/aiohttp/pytest_plugin.py b/aiohttp/pytest_plugin.py index c862b409566..55964ead041 100644 --- a/aiohttp/pytest_plugin.py +++ b/aiohttp/pytest_plugin.py @@ -16,8 +16,6 @@ import pytest -from aiohttp.web import Application - from .test_utils import ( BaseTestServer, RawTestServer, @@ -28,14 +26,14 @@ teardown_test_loop, unused_port as _unused_port, ) +from .web import Application +from .web_protocol import _RequestHandler try: import uvloop except ImportError: # pragma: no cover uvloop = None # type: ignore[assignment] -AiohttpRawServer = Callable[[Application], Awaitable[RawTestServer]] - class AiohttpClient(Protocol): def __call__( @@ -53,6 +51,12 @@ def __call__( ) -> Awaitable[TestServer]: ... 
+class AiohttpRawServer(Protocol): + def __call__( + self, handler: _RequestHandler, *, port: Optional[int] = None, **kwargs: Any + ) -> Awaitable[RawTestServer]: ... + + def pytest_addoption(parser): # type: ignore[no-untyped-def] parser.addoption( "--aiohttp-fast", @@ -321,7 +325,9 @@ def aiohttp_raw_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpRawSe """ servers = [] - async def go(handler, *, port=None, **kwargs): # type: ignore[no-untyped-def] + async def go( + handler: _RequestHandler, *, port: Optional[int] = None, **kwargs: Any + ) -> RawTestServer: server = RawTestServer(handler, port=port) await server.start_server(loop=loop, **kwargs) servers.append(server) From 689bf1f4706a07ba346422b4528ec24e69279a07 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 22 Sep 2024 11:00:05 -0400 Subject: [PATCH 0595/1511] [PR #9216/3644101 backport][3.10] Small cleanup to should_close in client_proto (#9219) --- aiohttp/client_proto.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index c6c262d3bfe..8055811e40d 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -50,15 +50,13 @@ def upgraded(self) -> bool: @property def should_close(self) -> bool: - if self._payload is not None and not self._payload.is_eof() or self._upgraded: - return True - return ( self._should_close + or (self._payload is not None and not self._payload.is_eof()) or self._upgraded - or self.exception() is not None + or self._exception is not None or self._payload_parser is not None - or len(self) > 0 + or bool(self._buffer) or bool(self._tail) ) From c8f89ce7d2a512757974f69bda45f978de0c7d77 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sun, 22 Sep 2024 11:00:05 -0400 Subject: [PATCH 0596/1511] [PR #9216/3644101 backport][3.11] Small cleanup to should_close in client_proto (#9220) --- aiohttp/client_proto.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index 9230ae5145b..a3e29c01cc6 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -50,15 +50,13 @@ def upgraded(self) -> bool: @property def should_close(self) -> bool: - if self._payload is not None and not self._payload.is_eof() or self._upgraded: - return True - return ( self._should_close + or (self._payload is not None and not self._payload.is_eof()) or self._upgraded - or self.exception() is not None + or self._exception is not None or self._payload_parser is not None - or len(self) > 0 + or bool(self._buffer) or bool(self._tail) ) From 985c00f91e2d69b53f6b81a4ba028e7bd3374ac2 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 22 Sep 2024 15:31:14 +0000 Subject: [PATCH 0597/1511] [PR #9169/fce4f8ec backport][3.10] Speed up starting compression (#9222) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/9169.misc.rst | 1 + aiohttp/web_response.py | 6 ++++-- 2 files changed, 5 insertions(+), 2 deletions(-) create mode 120000 CHANGES/9169.misc.rst diff --git a/CHANGES/9169.misc.rst b/CHANGES/9169.misc.rst new file mode 120000 index 00000000000..d6a2f2aaaab --- /dev/null +++ b/CHANGES/9169.misc.rst @@ -0,0 +1 @@ +9174.misc.rst \ No newline at end of file diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 3188ac59c77..4d5095a4fea 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -65,6 +65,8 @@ class ContentCoding(enum.Enum): identity = "identity" +CONTENT_CODINGS = {coding.value: coding for coding in ContentCoding} + ############################################################ # HTTP Response classes ############################################################ @@ -410,8 +412,8 @@ async def _start_compression(self, request: "BaseRequest") -> None: # Encoding comparisons should be case-insensitive # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1 accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() - for coding in ContentCoding: - if coding.value in accept_encoding: + for value, coding in CONTENT_CODINGS.items(): + if value in accept_encoding: await self._do_start_compression(coding) return From 1401f6edf445b1d9951b1978d3386b9305cfdfdf Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 22 Sep 2024 15:31:34 +0000 Subject: [PATCH 0598/1511] [PR #9169/fce4f8ec backport][3.11] Speed up starting compression (#9223) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/9169.misc.rst | 1 + aiohttp/web_response.py | 6 ++++-- 2 files changed, 5 insertions(+), 2 deletions(-) create mode 120000 CHANGES/9169.misc.rst diff --git a/CHANGES/9169.misc.rst b/CHANGES/9169.misc.rst new file mode 120000 index 00000000000..d6a2f2aaaab --- /dev/null +++ b/CHANGES/9169.misc.rst @@ -0,0 +1 @@ +9174.misc.rst \ No newline at end of file diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 0cbbe84260f..5c0a3be1d21 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -66,6 +66,8 @@ class ContentCoding(enum.Enum): identity = "identity" +CONTENT_CODINGS = {coding.value: coding for coding in ContentCoding} + ############################################################ # HTTP Response classes ############################################################ @@ -427,8 +429,8 @@ async def _start_compression(self, request: "BaseRequest") -> None: # Encoding comparisons should be case-insensitive # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1 accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() - for coding in ContentCoding: - if coding.value in accept_encoding: + for value, coding in CONTENT_CODINGS.items(): + if value in accept_encoding: await self._do_start_compression(coding) return From f43762449252cb6ee4b38f53a685fee8dd652215 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sun, 22 Sep 2024 11:44:41 -0400 Subject: [PATCH 0599/1511] [PR #9064/7b11e23 backport][3.11] Fix sendfile test fixture (#9225) Co-authored-by: pre-commit-ci[bot] Co-authored-by: Sam Bull <git@sambull.org> --- tests/test_web_sendfile_functional.py | 31 ++++++++++++++++----------- 1 file changed, 18 insertions(+), 13 deletions(-) diff --git a/tests/test_web_sendfile_functional.py b/tests/test_web_sendfile_functional.py index e2cfb7a1f0e..c9189a21fb0 100644 --- a/tests/test_web_sendfile_functional.py +++ b/tests/test_web_sendfile_functional.py @@ -5,6 +5,7 @@ import socket import zlib from typing import Any, Iterable, Optional +from unittest import mock import pytest @@ -47,15 +48,6 @@ def hello_txt(request, tmp_path_factory) -> pathlib.Path: return hello[encoding] -@pytest.fixture -def loop_without_sendfile(loop): - def sendfile(*args, **kwargs): - raise NotImplementedError - - loop.sendfile = sendfile - return loop - - @pytest.fixture def loop_with_mocked_native_sendfile(loop: Any): def sendfile(transport, fobj, offset, count): @@ -68,14 +60,27 @@ def sendfile(transport, fobj, offset, count): @pytest.fixture(params=["sendfile", "no_sendfile"], ids=["sendfile", "no_sendfile"]) -def sender(request, loop_without_sendfile): +def sender(request: Any, loop: Any): + sendfile_mock = None + def maker(*args, **kwargs): ret = web.FileResponse(*args, **kwargs) - if request.param == "no_sendfile": - asyncio.set_event_loop(loop_without_sendfile) + rloop = asyncio.get_running_loop() + is_patched = rloop.sendfile is sendfile_mock + assert is_patched if request.param == "no_sendfile" else not is_patched return ret - return maker + if request.param == "no_sendfile": + with mock.patch.object( + loop, + "sendfile", + autospec=True, + spec_set=True, + side_effect=NotImplementedError, + ) as sendfile_mock: + yield maker + else: + yield maker @pytest.fixture From 34ac151daa2a6be6910f7c97d368cb391a35eb7c Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sun, 22 Sep 2024 11:44:48 -0400 Subject: [PATCH 0600/1511] [PR #9064/7b11e23 backport][3.10] Fix sendfile test fixture (#9224) Co-authored-by: pre-commit-ci[bot] Co-authored-by: Sam Bull <git@sambull.org> --- tests/test_web_sendfile_functional.py | 31 ++++++++++++++++----------- 1 file changed, 18 insertions(+), 13 deletions(-) diff --git a/tests/test_web_sendfile_functional.py b/tests/test_web_sendfile_functional.py index e2cfb7a1f0e..c9189a21fb0 100644 --- a/tests/test_web_sendfile_functional.py +++ b/tests/test_web_sendfile_functional.py @@ -5,6 +5,7 @@ import socket import zlib from typing import Any, Iterable, Optional +from unittest import mock import pytest @@ -47,15 +48,6 @@ def hello_txt(request, tmp_path_factory) -> pathlib.Path: return hello[encoding] -@pytest.fixture -def loop_without_sendfile(loop): - def sendfile(*args, **kwargs): - raise NotImplementedError - - loop.sendfile = sendfile - return loop - - @pytest.fixture def loop_with_mocked_native_sendfile(loop: Any): def sendfile(transport, fobj, offset, count): @@ -68,14 +60,27 @@ def sendfile(transport, fobj, offset, count): @pytest.fixture(params=["sendfile", "no_sendfile"], ids=["sendfile", "no_sendfile"]) -def sender(request, loop_without_sendfile): +def sender(request: Any, loop: Any): + sendfile_mock = None + def maker(*args, **kwargs): ret = web.FileResponse(*args, **kwargs) - if request.param == "no_sendfile": - asyncio.set_event_loop(loop_without_sendfile) + rloop = asyncio.get_running_loop() + is_patched = rloop.sendfile is sendfile_mock + assert is_patched if request.param == "no_sendfile" else not is_patched return ret - return maker + if request.param == "no_sendfile": + with mock.patch.object( + loop, + "sendfile", + autospec=True, + spec_set=True, + side_effect=NotImplementedError, + ) as sendfile_mock: + yield maker + else: + yield maker @pytest.fixture From a3699f1059a9e76e4ba0ed81794063fb3cb47b4d Mon Sep 17 00:00:00 2001 From: "patchback[bot]" 
<45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 22 Sep 2024 16:31:13 +0000 Subject: [PATCH 0601/1511] [PR #9221/e079c413 backport][3.10] Small cleanups to cookiejar (#9227) Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/cookiejar.py | 27 +++++++++++---------------- 1 file changed, 11 insertions(+), 16 deletions(-) diff --git a/aiohttp/cookiejar.py b/aiohttp/cookiejar.py index 72c431a275c..0a6e35461f7 100644 --- a/aiohttp/cookiejar.py +++ b/aiohttp/cookiejar.py @@ -238,7 +238,7 @@ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> No domain = cookie["domain"] # ignore domains with trailing dots - if domain.endswith("."): + if domain and domain[-1] == ".": domain = "" del cookie["domain"] @@ -248,7 +248,7 @@ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> No self._host_only_cookies.add((hostname, name)) domain = cookie["domain"] = hostname - if domain.startswith("."): + if domain and domain[0] == ".": # Remove leading dot domain = domain[1:] cookie["domain"] = domain @@ -258,7 +258,7 @@ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> No continue path = cookie["path"] - if not path or not path.startswith("/"): + if not path or path[0] != "/": # Set the cookie's path to the response path path = response_url.path if not path.startswith("/"): @@ -269,8 +269,7 @@ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> No cookie["path"] = path path = path.rstrip("/") - max_age = cookie["max-age"] - if max_age: + if max_age := cookie["max-age"]: try: delta_seconds = int(max_age) max_age_expiration = min(time.time() + delta_seconds, self.MAX_TIME) @@ -278,14 +277,11 @@ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> No except ValueError: cookie["max-age"] = "" - else: - expires = cookie["expires"] - if expires: - expire_time = self._parse_date(expires) - if expire_time: - self._expire_cookie(expire_time, 
domain, path, name) - else: - cookie["expires"] = "" + elif expires := cookie["expires"]: + if expire_time := self._parse_date(expires): + self._expire_cookie(expire_time, domain, path, name) + else: + cookie["expires"] = "" self._cookies[(domain, path)][name] = cookie @@ -341,9 +337,8 @@ def filter_cookies(self, request_url: URL = URL()) -> "BaseCookie[str]": name = cookie.key domain = cookie["domain"] - if (domain, name) in self._host_only_cookies: - if domain != hostname: - continue + if (domain, name) in self._host_only_cookies and domain != hostname: + continue # Skip edge case when the cookie has a trailing slash but request doesn't. if len(cookie["path"]) > path_len: From d0a55c28cc55be94b74d51b6660e7bd59a12f10e Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 22 Sep 2024 17:50:51 +0000 Subject: [PATCH 0602/1511] [PR #9221/e079c413 backport][3.11] Small cleanups to cookiejar (#9228) Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/cookiejar.py | 27 +++++++++++---------------- 1 file changed, 11 insertions(+), 16 deletions(-) diff --git a/aiohttp/cookiejar.py b/aiohttp/cookiejar.py index 241a9e3e0a4..448ab7aa7e0 100644 --- a/aiohttp/cookiejar.py +++ b/aiohttp/cookiejar.py @@ -238,7 +238,7 @@ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> No domain = cookie["domain"] # ignore domains with trailing dots - if domain.endswith("."): + if domain and domain[-1] == ".": domain = "" del cookie["domain"] @@ -248,7 +248,7 @@ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> No self._host_only_cookies.add((hostname, name)) domain = cookie["domain"] = hostname - if domain.startswith("."): + if domain and domain[0] == ".": # Remove leading dot domain = domain[1:] cookie["domain"] = domain @@ -258,7 +258,7 @@ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> No continue path = cookie["path"] - if not path or not 
path.startswith("/"): + if not path or path[0] != "/": # Set the cookie's path to the response path path = response_url.path if not path.startswith("/"): @@ -269,8 +269,7 @@ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> No cookie["path"] = path path = path.rstrip("/") - max_age = cookie["max-age"] - if max_age: + if max_age := cookie["max-age"]: try: delta_seconds = int(max_age) max_age_expiration = min(time.time() + delta_seconds, self.MAX_TIME) @@ -278,14 +277,11 @@ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> No except ValueError: cookie["max-age"] = "" - else: - expires = cookie["expires"] - if expires: - expire_time = self._parse_date(expires) - if expire_time: - self._expire_cookie(expire_time, domain, path, name) - else: - cookie["expires"] = "" + elif expires := cookie["expires"]: + if expire_time := self._parse_date(expires): + self._expire_cookie(expire_time, domain, path, name) + else: + cookie["expires"] = "" self._cookies[(domain, path)][name] = cookie @@ -341,9 +337,8 @@ def filter_cookies(self, request_url: URL = URL()) -> "BaseCookie[str]": name = cookie.key domain = cookie["domain"] - if (domain, name) in self._host_only_cookies: - if domain != hostname: - continue + if (domain, name) in self._host_only_cookies and domain != hostname: + continue # Skip edge case when the cookie has a trailing slash but request doesn't. if len(cookie["path"]) > path_len: From 16b4bc4e081d19a6d6ee14850ce38576d8fdef03 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 22 Sep 2024 18:10:59 +0000 Subject: [PATCH 0603/1511] [PR #8983/ceb7ae51 backport][3.10] Add more info to add_static() warning (#9229) Co-authored-by: Sam Bull <git@sambull.org> Fixes #7232. 
--- docs/web_reference.rst | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/docs/web_reference.rst b/docs/web_reference.rst index 4efba726fa9..06c1c03f598 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -1878,7 +1878,10 @@ Application and Router Use :meth:`add_static` for development only. In production, static content should be processed by web servers like *nginx* - or *apache*. + or *apache*. Such web servers will be able to provide significantly + better performance and security for static assets. Several past security + vulnerabilities in aiohttp only affected applications using + :meth:`add_static`. :param str prefix: URL path prefix for handled static files From 75459a5ac68947b48c8782a91d0a6a6cc6438d87 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 22 Sep 2024 18:11:02 +0000 Subject: [PATCH 0604/1511] [PR #8983/ceb7ae51 backport][3.11] Add more info to add_static() warning (#9230) Co-authored-by: Sam Bull <git@sambull.org> Fixes #7232. --- docs/web_reference.rst | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/docs/web_reference.rst b/docs/web_reference.rst index f0da3237bd0..931210894c8 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -1881,7 +1881,10 @@ Application and Router Use :meth:`add_static` for development only. In production, static content should be processed by web servers like *nginx* - or *apache*. + or *apache*. Such web servers will be able to provide significantly + better performance and security for static assets. Several past security + vulnerabilities in aiohttp only affected applications using + :meth:`add_static`. :param str prefix: URL path prefix for handled static files From 066e69a8eba6e0b2777099824bb4146d7dc6ea09 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sun, 22 Sep 2024 14:11:06 -0400 Subject: [PATCH 0605/1511] [PR #9200/42930b0 backport][3.11] Improve middleware performance (#9232) --- CHANGES/9200.breaking.rst | 3 +++ aiohttp/web_middlewares.py | 7 ++++++- aiohttp/web_urldispatcher.py | 10 ++-------- 3 files changed, 11 insertions(+), 9 deletions(-) create mode 100644 CHANGES/9200.breaking.rst diff --git a/CHANGES/9200.breaking.rst b/CHANGES/9200.breaking.rst new file mode 100644 index 00000000000..0282e165c41 --- /dev/null +++ b/CHANGES/9200.breaking.rst @@ -0,0 +1,3 @@ +Improved middleware performance -- by :user:`bdraco`. + +The ``set_current_app`` method was removed from ``UrlMappingMatchInfo`` because it is no longer used, and it was unlikely external caller would ever use it. diff --git a/aiohttp/web_middlewares.py b/aiohttp/web_middlewares.py index 5da1533c0df..2f1f5f58e6e 100644 --- a/aiohttp/web_middlewares.py +++ b/aiohttp/web_middlewares.py @@ -110,7 +110,12 @@ async def impl(request: Request, handler: Handler) -> StreamResponse: def _fix_request_current_app(app: "Application") -> Middleware: @middleware async def impl(request: Request, handler: Handler) -> StreamResponse: - with request.match_info.set_current_app(app): + match_info = request.match_info + prev = match_info.current_app + match_info.current_app = app + try: return await handler(request) + finally: + match_info.current_app = prev return impl diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 07c8f6e6ff3..9c07f4ee9ad 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -10,7 +10,6 @@ import re import sys import warnings -from contextlib import contextmanager from functools import wraps from pathlib import Path from types import MappingProxyType @@ -293,8 +292,8 @@ def current_app(self) -> "Application": assert app is not None return app - @contextmanager - def set_current_app(self, app: "Application") -> Generator[None, None, None]: + 
@current_app.setter + def current_app(self, app: "Application") -> None: if DEBUG: # pragma: no cover if app not in self._apps: raise RuntimeError( @@ -302,12 +301,7 @@ def set_current_app(self, app: "Application") -> Generator[None, None, None]: self._apps, app ) ) - prev = self._current_app self._current_app = app - try: - yield - finally: - self._current_app = prev def freeze(self) -> None: self._frozen = True From febf52528877c88df7db5fbfc53bf581dff05772 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 22 Sep 2024 14:11:19 -0400 Subject: [PATCH 0606/1511] [PR #9200/42930b0 backport][3.10] Improve middleware performance (#9231) --- CHANGES/9200.breaking.rst | 3 +++ aiohttp/web_middlewares.py | 7 ++++++- aiohttp/web_urldispatcher.py | 10 ++-------- 3 files changed, 11 insertions(+), 9 deletions(-) create mode 100644 CHANGES/9200.breaking.rst diff --git a/CHANGES/9200.breaking.rst b/CHANGES/9200.breaking.rst new file mode 100644 index 00000000000..0282e165c41 --- /dev/null +++ b/CHANGES/9200.breaking.rst @@ -0,0 +1,3 @@ +Improved middleware performance -- by :user:`bdraco`. + +The ``set_current_app`` method was removed from ``UrlMappingMatchInfo`` because it is no longer used, and it was unlikely external caller would ever use it. 
diff --git a/aiohttp/web_middlewares.py b/aiohttp/web_middlewares.py index 5da1533c0df..2f1f5f58e6e 100644 --- a/aiohttp/web_middlewares.py +++ b/aiohttp/web_middlewares.py @@ -110,7 +110,12 @@ async def impl(request: Request, handler: Handler) -> StreamResponse: def _fix_request_current_app(app: "Application") -> Middleware: @middleware async def impl(request: Request, handler: Handler) -> StreamResponse: - with request.match_info.set_current_app(app): + match_info = request.match_info + prev = match_info.current_app + match_info.current_app = app + try: return await handler(request) + finally: + match_info.current_app = prev return impl diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index c302351500b..0f6d1b2bcd6 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -10,7 +10,6 @@ import re import sys import warnings -from contextlib import contextmanager from functools import wraps from pathlib import Path from types import MappingProxyType @@ -293,8 +292,8 @@ def current_app(self) -> "Application": assert app is not None return app - @contextmanager - def set_current_app(self, app: "Application") -> Generator[None, None, None]: + @current_app.setter + def current_app(self, app: "Application") -> None: if DEBUG: # pragma: no cover if app not in self._apps: raise RuntimeError( @@ -302,12 +301,7 @@ def set_current_app(self, app: "Application") -> Generator[None, None, None]: self._apps, app ) ) - prev = self._current_app self._current_app = app - try: - yield - finally: - self._current_app = prev def freeze(self) -> None: self._frozen = True From 871fedcb014c0e568f4eed3ef5c74ae94c59c936 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sun, 22 Sep 2024 14:14:08 -0400 Subject: [PATCH 0607/1511] [PR #9033/d0e60d5 backport][3.11] Don't listen on TCP when only path is passed (#9233) Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/9033.misc.rst | 1 + aiohttp/web.py | 18 ++++++++++++------ docs/web_quickstart.rst | 4 ++++ tests/test_web_cli.py | 32 +++++++++++++++++++++++++++++++- 4 files changed, 48 insertions(+), 7 deletions(-) create mode 100644 CHANGES/9033.misc.rst diff --git a/CHANGES/9033.misc.rst b/CHANGES/9033.misc.rst new file mode 100644 index 00000000000..07a017ffdda --- /dev/null +++ b/CHANGES/9033.misc.rst @@ -0,0 +1 @@ +Changed web entry point to not listen on TCP when only a Unix path is passed -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/web.py b/aiohttp/web.py index 1d18691f401..f975b665331 100644 --- a/aiohttp/web.py +++ b/aiohttp/web.py @@ -545,21 +545,21 @@ def main(argv: List[str]) -> None: arg_parser.add_argument( "-H", "--hostname", - help="TCP/IP hostname to serve on (default: %(default)r)", - default="localhost", + help="TCP/IP hostname to serve on (default: localhost)", + default=None, ) arg_parser.add_argument( "-P", "--port", help="TCP/IP port to serve on (default: %(default)r)", type=int, - default="8080", + default=8080, ) arg_parser.add_argument( "-U", "--path", - help="Unix file system path to serve on. Specifying a path will cause " - "hostname and port arguments to be ignored.", + help="Unix file system path to serve on. 
Can be combined with hostname " + "to serve on both Unix and TCP.", ) args, extra_argv = arg_parser.parse_known_args(argv) @@ -586,8 +586,14 @@ def main(argv: List[str]) -> None: logging.basicConfig(level=logging.DEBUG) + if args.path and args.hostname is None: + host = port = None + else: + host = args.hostname or "localhost" + port = args.port + app = func(extra_argv) - run_app(app, host=args.hostname, port=args.port, path=args.path) + run_app(app, host=host, port=port, path=args.path) arg_parser.exit(message="Stopped\n") diff --git a/docs/web_quickstart.rst b/docs/web_quickstart.rst index fcd2b686d06..2d8c3368b82 100644 --- a/docs/web_quickstart.rst +++ b/docs/web_quickstart.rst @@ -85,6 +85,10 @@ accepts a list of any non-parsed command-line arguments and returns an return app +.. note:: + For local development we typically recommend using + `aiohttp-devtools <https://github.com/aio-libs/aiohttp-devtools>`_. + .. _aiohttp-web-handler: Handler diff --git a/tests/test_web_cli.py b/tests/test_web_cli.py index 381aaf6cd82..b320ed35667 100644 --- a/tests/test_web_cli.py +++ b/tests/test_web_cli.py @@ -1,3 +1,7 @@ +import sys +from typing import Any +from unittest import mock + import pytest from aiohttp import web @@ -80,7 +84,33 @@ def test_entry_func_non_existent_attribute(mocker) -> None: ) -def test_path_when_unsupported(mocker, monkeypatch) -> None: +@pytest.mark.skipif(sys.platform.startswith("win32"), reason="Windows not Unix") +def test_path_no_host(mocker: Any, monkeypatch: Any) -> None: + argv = "--path=test_path.sock alpha.beta:func".split() + mocker.patch("aiohttp.web.import_module") + + run_app = mocker.patch("aiohttp.web.run_app") + with pytest.raises(SystemExit): + web.main(argv) + + run_app.assert_called_with(mock.ANY, path="test_path.sock", host=None, port=None) + + +@pytest.mark.skipif(sys.platform.startswith("win32"), reason="Windows not Unix") +def test_path_and_host(mocker: Any, monkeypatch: Any) -> None: + argv = "--path=test_path.sock 
--host=localhost --port=8000 alpha.beta:func".split() + mocker.patch("aiohttp.web.import_module") + + run_app = mocker.patch("aiohttp.web.run_app") + with pytest.raises(SystemExit): + web.main(argv) + + run_app.assert_called_with( + mock.ANY, path="test_path.sock", host="localhost", port=8000 + ) + + +def test_path_when_unsupported(mocker: Any, monkeypatch: Any) -> None: argv = "--path=test_path.sock alpha.beta:func".split() mocker.patch("aiohttp.web.import_module") monkeypatch.delattr("socket.AF_UNIX", raising=False) From d8c040e72adfcb9e37c154f59e77ff485e75eee0 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 22 Sep 2024 14:18:44 -0400 Subject: [PATCH 0608/1511] [PR #9018/63813fe backport][3.10] Reject data after close message (#9235) Co-authored-by: pre-commit-ci[bot] Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/9018.bugfix.rst | 1 + aiohttp/http_parser.py | 5 +++++ tests/test_http_parser.py | 11 ++++++++++- 3 files changed, 16 insertions(+), 1 deletion(-) create mode 100644 CHANGES/9018.bugfix.rst diff --git a/CHANGES/9018.bugfix.rst b/CHANGES/9018.bugfix.rst new file mode 100644 index 00000000000..2de6d142900 --- /dev/null +++ b/CHANGES/9018.bugfix.rst @@ -0,0 +1 @@ +Updated Python parser to reject messages after a close message, matching C parser behaviour -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index f46cf833c03..686a2d02e28 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -317,6 +317,7 @@ def feed_data( start_pos = 0 loop = self.loop + should_close = False while start_pos < data_len: # read HTTP message (request/response line + headers), \r\n\r\n @@ -329,6 +330,9 @@ def feed_data( continue if pos >= start_pos: + if should_close: + raise BadHttpMessage("Data after `Connection: close`") + # line found line = data[start_pos:pos] if SEP == b"\n": # For lax response parsing @@ -438,6 +442,7 @@ def get_content_length() -> Optional[int]: payload = EMPTY_PAYLOAD messages.append((msg, payload)) + should_close = msg.should_close else: self._tail = data[start_pos:] data = EMPTY diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index 78abe528cb0..09f4f0746a5 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -17,6 +17,7 @@ NO_EXTENSIONS, DeflateBuffer, HttpPayloadParser, + HttpRequestParser, HttpRequestParserPy, HttpResponseParserPy, HttpVersion, @@ -826,7 +827,15 @@ def test_http_request_bad_status_line_whitespace(parser: Any) -> None: parser.feed_data(text) -def test_http_request_upgrade(parser: Any) -> None: +def test_http_request_message_after_close(parser: HttpRequestParser) -> None: + text = b"GET / HTTP/1.1\r\nConnection: close\r\n\r\nInvalid\r\n\r\n" + with pytest.raises( + http_exceptions.BadHttpMessage, match="Data after `Connection: close`" + ): + parser.feed_data(text) + + +def test_http_request_upgrade(parser: HttpRequestParser) -> None: text = ( b"GET /test HTTP/1.1\r\n" b"connection: upgrade\r\n" From d2b52d7c32a744379438c466e68ad2cb471deeba Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sun, 22 Sep 2024 14:27:52 -0400 Subject: [PATCH 0609/1511] [PR #9018/63813fe backport][3.11] Reject data after close message (#9236) Co-authored-by: pre-commit-ci[bot] Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/9018.bugfix.rst | 1 + aiohttp/http_parser.py | 5 +++++ tests/test_http_parser.py | 11 ++++++++++- 3 files changed, 16 insertions(+), 1 deletion(-) create mode 100644 CHANGES/9018.bugfix.rst diff --git a/CHANGES/9018.bugfix.rst b/CHANGES/9018.bugfix.rst new file mode 100644 index 00000000000..2de6d142900 --- /dev/null +++ b/CHANGES/9018.bugfix.rst @@ -0,0 +1 @@ +Updated Python parser to reject messages after a close message, matching C parser behaviour -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index f46cf833c03..686a2d02e28 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -317,6 +317,7 @@ def feed_data( start_pos = 0 loop = self.loop + should_close = False while start_pos < data_len: # read HTTP message (request/response line + headers), \r\n\r\n @@ -329,6 +330,9 @@ def feed_data( continue if pos >= start_pos: + if should_close: + raise BadHttpMessage("Data after `Connection: close`") + # line found line = data[start_pos:pos] if SEP == b"\n": # For lax response parsing @@ -438,6 +442,7 @@ def get_content_length() -> Optional[int]: payload = EMPTY_PAYLOAD messages.append((msg, payload)) + should_close = msg.should_close else: self._tail = data[start_pos:] data = EMPTY diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index 75276df1a07..edd676190f6 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -17,6 +17,7 @@ NO_EXTENSIONS, DeflateBuffer, HttpPayloadParser, + HttpRequestParser, HttpRequestParserPy, HttpResponseParserPy, HttpVersion, @@ -826,7 +827,15 @@ def test_http_request_bad_status_line_whitespace(parser: Any) -> None: parser.feed_data(text) -def test_http_request_upgrade(parser: Any) -> None: +def 
test_http_request_message_after_close(parser: HttpRequestParser) -> None: + text = b"GET / HTTP/1.1\r\nConnection: close\r\n\r\nInvalid\r\n\r\n" + with pytest.raises( + http_exceptions.BadHttpMessage, match="Data after `Connection: close`" + ): + parser.feed_data(text) + + +def test_http_request_upgrade(parser: HttpRequestParser) -> None: text = ( b"GET /test HTTP/1.1\r\n" b"connection: upgrade\r\n" From 6fbe6771b95eb54cef11eefe12b5d35eef31777e Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 22 Sep 2024 18:28:25 +0000 Subject: [PATCH 0610/1511] [PR #9063/3c1ca455 backport][3.10] Fix If-None-Match not using weak comparison (#9237) Co-authored-by: Sam Bull <git@sambull.org> Fixes #8462. --- CHANGES/9063.bugfix.rst | 1 + aiohttp/web_fileresponse.py | 17 +++++++++---- tests/test_web_sendfile_functional.py | 36 +++++++++++++++++++++++++-- 3 files changed, 47 insertions(+), 7 deletions(-) create mode 100644 CHANGES/9063.bugfix.rst diff --git a/CHANGES/9063.bugfix.rst b/CHANGES/9063.bugfix.rst new file mode 100644 index 00000000000..e512677b9c8 --- /dev/null +++ b/CHANGES/9063.bugfix.rst @@ -0,0 +1 @@ +Fixed ``If-None-Match`` not using weak comparison -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index 0c23e375d25..f0de75e9f1b 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -136,10 +136,12 @@ async def _sendfile( return writer @staticmethod - def _strong_etag_match(etag_value: str, etags: Tuple[ETag, ...]) -> bool: + def _etag_match(etag_value: str, etags: Tuple[ETag, ...], *, weak: bool) -> bool: if len(etags) == 1 and etags[0].value == ETAG_ANY: return True - return any(etag.value == etag_value for etag in etags if not etag.is_weak) + return any( + etag.value == etag_value for etag in etags if weak or not etag.is_weak + ) async def _not_modified( self, request: "BaseRequest", etag_value: str, last_modified: float @@ -208,9 +210,11 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}" last_modified = st.st_mtime - # https://tools.ietf.org/html/rfc7232#section-6 + # https://www.rfc-editor.org/rfc/rfc9110#section-13.1.1-2 ifmatch = request.if_match - if ifmatch is not None and not self._strong_etag_match(etag_value, ifmatch): + if ifmatch is not None and not self._etag_match( + etag_value, ifmatch, weak=False + ): return await self._precondition_failed(request) unmodsince = request.if_unmodified_since @@ -221,8 +225,11 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter ): return await self._precondition_failed(request) + # https://www.rfc-editor.org/rfc/rfc9110#section-13.1.2-2 ifnonematch = request.if_none_match - if ifnonematch is not None and self._strong_etag_match(etag_value, ifnonematch): + if ifnonematch is not None and self._etag_match( + etag_value, ifnonematch, weak=True + ): return await self._not_modified(request, etag_value, last_modified) modsince = request.if_modified_since diff --git a/tests/test_web_sendfile_functional.py b/tests/test_web_sendfile_functional.py index c9189a21fb0..256cf4d243a 100644 --- 
a/tests/test_web_sendfile_functional.py +++ b/tests/test_web_sendfile_functional.py @@ -513,10 +513,9 @@ async def test_static_file_if_none_match( resp = await client.get("/") assert 200 == resp.status - original_etag = resp.headers.get("ETag") + original_etag = resp.headers["ETag"] assert resp.headers.get("Last-Modified") is not None - assert original_etag is not None resp.close() await resp.release() @@ -555,6 +554,39 @@ async def test_static_file_if_none_match_star( await client.close() +@pytest.mark.parametrize("if_modified_since", ("", "Fri, 31 Dec 9999 23:59:59 GMT")) +async def test_static_file_if_none_match_weak( + aiohttp_client: Any, + app_with_static_route: web.Application, + if_modified_since: str, +) -> None: + client = await aiohttp_client(app_with_static_route) + + resp = await client.get("/") + assert 200 == resp.status + original_etag = resp.headers["ETag"] + + assert resp.headers.get("Last-Modified") is not None + resp.close() + resp.release() + + weak_etag = f"W/{original_etag}" + + resp = await client.get( + "/", + headers={"If-None-Match": weak_etag, "If-Modified-Since": if_modified_since}, + ) + body = await resp.read() + assert 304 == resp.status + assert resp.headers.get("Content-Length") is None + assert resp.headers.get("ETag") == original_etag + assert b"" == body + resp.close() + resp.release() + + await client.close() + + @pytest.mark.skipif(not ssl, reason="ssl not supported") async def test_static_file_ssl( aiohttp_server, From 51870a9d1fd13cc9393e803795428bfba3999606 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 22 Sep 2024 18:40:32 +0000 Subject: [PATCH 0611/1511] [PR #9063/3c1ca455 backport][3.11] Fix If-None-Match not using weak comparison (#9238) Co-authored-by: Sam Bull <git@sambull.org> Fixes #8462. 
--- CHANGES/9063.bugfix.rst | 1 + aiohttp/web_fileresponse.py | 17 +++++++++---- tests/test_web_sendfile_functional.py | 36 +++++++++++++++++++++++++-- 3 files changed, 47 insertions(+), 7 deletions(-) create mode 100644 CHANGES/9063.bugfix.rst diff --git a/CHANGES/9063.bugfix.rst b/CHANGES/9063.bugfix.rst new file mode 100644 index 00000000000..e512677b9c8 --- /dev/null +++ b/CHANGES/9063.bugfix.rst @@ -0,0 +1 @@ +Fixed ``If-None-Match`` not using weak comparison -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index 2c253e03b0a..e7951acea16 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -132,10 +132,12 @@ async def _sendfile( return writer @staticmethod - def _strong_etag_match(etag_value: str, etags: Tuple[ETag, ...]) -> bool: + def _etag_match(etag_value: str, etags: Tuple[ETag, ...], *, weak: bool) -> bool: if len(etags) == 1 and etags[0].value == ETAG_ANY: return True - return any(etag.value == etag_value for etag in etags if not etag.is_weak) + return any( + etag.value == etag_value for etag in etags if weak or not etag.is_weak + ) async def _not_modified( self, request: "BaseRequest", etag_value: str, last_modified: float @@ -204,9 +206,11 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}" last_modified = st.st_mtime - # https://tools.ietf.org/html/rfc7232#section-6 + # https://www.rfc-editor.org/rfc/rfc9110#section-13.1.1-2 ifmatch = request.if_match - if ifmatch is not None and not self._strong_etag_match(etag_value, ifmatch): + if ifmatch is not None and not self._etag_match( + etag_value, ifmatch, weak=False + ): return await self._precondition_failed(request) unmodsince = request.if_unmodified_since @@ -217,8 +221,11 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter ): return await self._precondition_failed(request) + # 
https://www.rfc-editor.org/rfc/rfc9110#section-13.1.2-2 ifnonematch = request.if_none_match - if ifnonematch is not None and self._strong_etag_match(etag_value, ifnonematch): + if ifnonematch is not None and self._etag_match( + etag_value, ifnonematch, weak=True + ): return await self._not_modified(request, etag_value, last_modified) modsince = request.if_modified_since diff --git a/tests/test_web_sendfile_functional.py b/tests/test_web_sendfile_functional.py index c9189a21fb0..256cf4d243a 100644 --- a/tests/test_web_sendfile_functional.py +++ b/tests/test_web_sendfile_functional.py @@ -513,10 +513,9 @@ async def test_static_file_if_none_match( resp = await client.get("/") assert 200 == resp.status - original_etag = resp.headers.get("ETag") + original_etag = resp.headers["ETag"] assert resp.headers.get("Last-Modified") is not None - assert original_etag is not None resp.close() await resp.release() @@ -555,6 +554,39 @@ async def test_static_file_if_none_match_star( await client.close() +@pytest.mark.parametrize("if_modified_since", ("", "Fri, 31 Dec 9999 23:59:59 GMT")) +async def test_static_file_if_none_match_weak( + aiohttp_client: Any, + app_with_static_route: web.Application, + if_modified_since: str, +) -> None: + client = await aiohttp_client(app_with_static_route) + + resp = await client.get("/") + assert 200 == resp.status + original_etag = resp.headers["ETag"] + + assert resp.headers.get("Last-Modified") is not None + resp.close() + resp.release() + + weak_etag = f"W/{original_etag}" + + resp = await client.get( + "/", + headers={"If-None-Match": weak_etag, "If-Modified-Since": if_modified_since}, + ) + body = await resp.read() + assert 304 == resp.status + assert resp.headers.get("Content-Length") is None + assert resp.headers.get("ETag") == original_etag + assert b"" == body + resp.close() + resp.release() + + await client.close() + + @pytest.mark.skipif(not ssl, reason="ssl not supported") async def test_static_file_ssl( aiohttp_server, From 
255ec0281a55992c5521f449786e0f9e491b3a73 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 22 Sep 2024 21:19:45 +0000 Subject: [PATCH 0612/1511] [PR #9204/34a626da backport][3.10] Significantly speed up filter_cookies (#9243) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/9204.misc.rst | 1 + aiohttp/cookiejar.py | 53 +++++++++++++++++++++++++++---------------- 2 files changed, 34 insertions(+), 20 deletions(-) create mode 100644 CHANGES/9204.misc.rst diff --git a/CHANGES/9204.misc.rst b/CHANGES/9204.misc.rst new file mode 100644 index 00000000000..da12a7df6f7 --- /dev/null +++ b/CHANGES/9204.misc.rst @@ -0,0 +1 @@ +Significantly speed up filtering cookies -- by :user:`bdraco`. diff --git a/aiohttp/cookiejar.py b/aiohttp/cookiejar.py index 0a6e35461f7..c78d5fa7e72 100644 --- a/aiohttp/cookiejar.py +++ b/aiohttp/cookiejar.py @@ -95,6 +95,9 @@ def __init__( self._cookies: DefaultDict[Tuple[str, str], SimpleCookie] = defaultdict( SimpleCookie ) + self._morsel_cache: DefaultDict[Tuple[str, str], Dict[str, Morsel[str]]] = ( + defaultdict(dict) + ) self._host_only_cookies: Set[Tuple[str, str]] = set() self._unsafe = unsafe self._quote_cookie = quote_cookie @@ -127,6 +130,7 @@ def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None: if predicate is None: self._expire_heap.clear() self._cookies.clear() + self._morsel_cache.clear() self._host_only_cookies.clear() self._expirations.clear() return @@ -208,6 +212,7 @@ def _delete_cookies(self, to_del: List[Tuple[str, str, str]]) -> None: for domain, path, name in to_del: self._host_only_cookies.discard((domain, name)) self._cookies[(domain, path)].pop(name, None) + self._morsel_cache[(domain, path)].pop(name, None) self._expirations.pop((domain, path, name), None) def _expire_cookie(self, when: float, domain: str, path: str, name: str) -> None: @@ -283,7 +288,12 @@ def update_cookies(self, cookies: LooseCookies, response_url: URL = 
URL()) -> No else: cookie["expires"] = "" - self._cookies[(domain, path)][name] = cookie + key = (domain, path) + if self._cookies[key].get(name) != cookie: + # Don't blow away the cache if the same + # cookie gets set again + self._cookies[key][name] = cookie + self._morsel_cache[key].pop(name, None) self._do_expiration() @@ -328,30 +338,33 @@ def filter_cookies(self, request_url: URL = URL()) -> "BaseCookie[str]": # Create every combination of (domain, path) pairs. pairs = itertools.product(domains, paths) - # Point 2: https://www.rfc-editor.org/rfc/rfc6265.html#section-5.4 - cookies = itertools.chain.from_iterable( - self._cookies[p].values() for p in pairs - ) path_len = len(request_url.path) - for cookie in cookies: - name = cookie.key - domain = cookie["domain"] + # Point 2: https://www.rfc-editor.org/rfc/rfc6265.html#section-5.4 + for p in pairs: + for name, cookie in self._cookies[p].items(): + domain = cookie["domain"] - if (domain, name) in self._host_only_cookies and domain != hostname: - continue + if (domain, name) in self._host_only_cookies and domain != hostname: + continue - # Skip edge case when the cookie has a trailing slash but request doesn't. - if len(cookie["path"]) > path_len: - continue + # Skip edge case when the cookie has a trailing slash but request doesn't. 
+ if len(cookie["path"]) > path_len: + continue - if is_not_secure and cookie["secure"]: - continue + if is_not_secure and cookie["secure"]: + continue + + # We already built the Morsel so reuse it here + if name in self._morsel_cache[p]: + filtered[name] = self._morsel_cache[p][name] + continue - # It's critical we use the Morsel so the coded_value - # (based on cookie version) is preserved - mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel())) - mrsl_val.set(cookie.key, cookie.value, cookie.coded_value) - filtered[name] = mrsl_val + # It's critical we use the Morsel so the coded_value + # (based on cookie version) is preserved + mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel())) + mrsl_val.set(cookie.key, cookie.value, cookie.coded_value) + self._morsel_cache[p][name] = mrsl_val + filtered[name] = mrsl_val return filtered From 643369bd304fc6528a11e4c7494f7a70540020e2 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 22 Sep 2024 21:19:52 +0000 Subject: [PATCH 0613/1511] [PR #9204/34a626da backport][3.11] Significantly speed up filter_cookies (#9244) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/9204.misc.rst | 1 + aiohttp/cookiejar.py | 53 +++++++++++++++++++++++++++---------------- 2 files changed, 34 insertions(+), 20 deletions(-) create mode 100644 CHANGES/9204.misc.rst diff --git a/CHANGES/9204.misc.rst b/CHANGES/9204.misc.rst new file mode 100644 index 00000000000..da12a7df6f7 --- /dev/null +++ b/CHANGES/9204.misc.rst @@ -0,0 +1 @@ +Significantly speed up filtering cookies -- by :user:`bdraco`. 
diff --git a/aiohttp/cookiejar.py b/aiohttp/cookiejar.py index 448ab7aa7e0..3ffa4198fc7 100644 --- a/aiohttp/cookiejar.py +++ b/aiohttp/cookiejar.py @@ -95,6 +95,9 @@ def __init__( self._cookies: DefaultDict[Tuple[str, str], SimpleCookie] = defaultdict( SimpleCookie ) + self._morsel_cache: DefaultDict[Tuple[str, str], Dict[str, Morsel[str]]] = ( + defaultdict(dict) + ) self._host_only_cookies: Set[Tuple[str, str]] = set() self._unsafe = unsafe self._quote_cookie = quote_cookie @@ -127,6 +130,7 @@ def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None: if predicate is None: self._expire_heap.clear() self._cookies.clear() + self._morsel_cache.clear() self._host_only_cookies.clear() self._expirations.clear() return @@ -208,6 +212,7 @@ def _delete_cookies(self, to_del: List[Tuple[str, str, str]]) -> None: for domain, path, name in to_del: self._host_only_cookies.discard((domain, name)) self._cookies[(domain, path)].pop(name, None) + self._morsel_cache[(domain, path)].pop(name, None) self._expirations.pop((domain, path, name), None) def _expire_cookie(self, when: float, domain: str, path: str, name: str) -> None: @@ -283,7 +288,12 @@ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> No else: cookie["expires"] = "" - self._cookies[(domain, path)][name] = cookie + key = (domain, path) + if self._cookies[key].get(name) != cookie: + # Don't blow away the cache if the same + # cookie gets set again + self._cookies[key][name] = cookie + self._morsel_cache[key].pop(name, None) self._do_expiration() @@ -328,30 +338,33 @@ def filter_cookies(self, request_url: URL = URL()) -> "BaseCookie[str]": # Create every combination of (domain, path) pairs. 
pairs = itertools.product(domains, paths) - # Point 2: https://www.rfc-editor.org/rfc/rfc6265.html#section-5.4 - cookies = itertools.chain.from_iterable( - self._cookies[p].values() for p in pairs - ) path_len = len(request_url.path) - for cookie in cookies: - name = cookie.key - domain = cookie["domain"] + # Point 2: https://www.rfc-editor.org/rfc/rfc6265.html#section-5.4 + for p in pairs: + for name, cookie in self._cookies[p].items(): + domain = cookie["domain"] - if (domain, name) in self._host_only_cookies and domain != hostname: - continue + if (domain, name) in self._host_only_cookies and domain != hostname: + continue - # Skip edge case when the cookie has a trailing slash but request doesn't. - if len(cookie["path"]) > path_len: - continue + # Skip edge case when the cookie has a trailing slash but request doesn't. + if len(cookie["path"]) > path_len: + continue - if is_not_secure and cookie["secure"]: - continue + if is_not_secure and cookie["secure"]: + continue + + # We already built the Morsel so reuse it here + if name in self._morsel_cache[p]: + filtered[name] = self._morsel_cache[p][name] + continue - # It's critical we use the Morsel so the coded_value - # (based on cookie version) is preserved - mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel())) - mrsl_val.set(cookie.key, cookie.value, cookie.coded_value) - filtered[name] = mrsl_val + # It's critical we use the Morsel so the coded_value + # (based on cookie version) is preserved + mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel())) + mrsl_val.set(cookie.key, cookie.value, cookie.coded_value) + self._morsel_cache[p][name] = mrsl_val + filtered[name] = mrsl_val return filtered From 8b5a90c407766615476d921f2e63d475b7f0e2cb Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sun, 22 Sep 2024 17:15:54 -0500 Subject: [PATCH 0614/1511] [PR #9234/0246f2d backport][3.11] Remove unnecessary string operations in update_content_encoding (#9246) --- aiohttp/client_reqrep.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 3fe34e21968..10144f2a9c4 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -499,8 +499,7 @@ def update_content_encoding(self, data: Any) -> None: self.compress = None return - enc = self.headers.get(hdrs.CONTENT_ENCODING, "").lower() - if enc: + if self.headers.get(hdrs.CONTENT_ENCODING): if self.compress: raise ValueError( "compress can not be set if Content-Encoding header is set" From 595a01d99693e282be8cff12eabad6ff5e2824b5 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 22 Sep 2024 17:16:07 -0500 Subject: [PATCH 0615/1511] [PR #9234/0246f2d backport][3.10] Remove unnecessary string operations in update_content_encoding (#9247) --- aiohttp/client_reqrep.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 57f3323a60c..cfe44c3c563 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -508,8 +508,7 @@ def update_content_encoding(self, data: Any) -> None: self.compress = None return - enc = self.headers.get(hdrs.CONTENT_ENCODING, "").lower() - if enc: + if self.headers.get(hdrs.CONTENT_ENCODING): if self.compress: raise ValueError( "compress can not be set " "if Content-Encoding header is set" From b31b82de5ee25774cfa2d1dcd9c46bec09041dda Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Sun, 22 Sep 2024 23:18:39 +0100 Subject: [PATCH 0616/1511] Add Generic for TestClient.app (#8977) (#9245) Co-authored-by: J. 
Nick Koston <nick@koston.org> (cherry picked from commit 6db2c747572b3e77a2d64e3f6c5a01ecae74fcff) --- CHANGES/8977.bugfix.rst | 1 + aiohttp/pytest_plugin.py | 41 +++++++++++++++++++++------ aiohttp/test_utils.py | 49 +++++++++++++++++++++++++++++---- tests/test_client_functional.py | 2 +- tests/test_test_utils.py | 49 +++++++++++++++++++++------------ 5 files changed, 108 insertions(+), 34 deletions(-) create mode 100644 CHANGES/8977.bugfix.rst diff --git a/CHANGES/8977.bugfix.rst b/CHANGES/8977.bugfix.rst new file mode 100644 index 00000000000..7d21fe0c3fa --- /dev/null +++ b/CHANGES/8977.bugfix.rst @@ -0,0 +1 @@ +Made ``TestClient.app`` a ``Generic`` so type checkers will know the correct type (avoiding unneeded ``client.app is not None`` checks) -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/pytest_plugin.py b/aiohttp/pytest_plugin.py index 55964ead041..6da4852ab46 100644 --- a/aiohttp/pytest_plugin.py +++ b/aiohttp/pytest_plugin.py @@ -12,6 +12,7 @@ Protocol, Type, Union, + overload, ) import pytest @@ -26,7 +27,7 @@ teardown_test_loop, unused_port as _unused_port, ) -from .web import Application +from .web import Application, BaseRequest, Request from .web_protocol import _RequestHandler try: @@ -36,13 +37,22 @@ class AiohttpClient(Protocol): - def __call__( + @overload + async def __call__( self, - __param: Union[Application, BaseTestServer], + __param: Application, *, server_kwargs: Optional[Dict[str, Any]] = None, **kwargs: Any - ) -> Awaitable[TestClient]: ... + ) -> TestClient[Request, Application]: ... + @overload + async def __call__( + self, + __param: BaseTestServer, + *, + server_kwargs: Optional[Dict[str, Any]] = None, + **kwargs: Any + ) -> TestClient[BaseRequest, None]: ... 
class AiohttpServer(Protocol): @@ -355,9 +365,7 @@ def raw_test_server( # type: ignore[no-untyped-def] # pragma: no cover @pytest.fixture -def aiohttp_client( - loop: asyncio.AbstractEventLoop, -) -> Iterator[AiohttpClient]: +def aiohttp_client(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpClient]: """Factory to create a TestClient instance. aiohttp_client(app, **kwargs) @@ -366,13 +374,28 @@ def aiohttp_client( """ clients = [] + @overload + async def go( + __param: Application, + *, + server_kwargs: Optional[Dict[str, Any]] = None, + **kwargs: Any + ) -> TestClient[Request, Application]: ... + + @overload + async def go( + __param: BaseTestServer, + *, + server_kwargs: Optional[Dict[str, Any]] = None, + **kwargs: Any + ) -> TestClient[BaseRequest, None]: ... + async def go( __param: Union[Application, BaseTestServer], *args: Any, server_kwargs: Optional[Dict[str, Any]] = None, **kwargs: Any - ) -> TestClient: - + ) -> TestClient[Any, Any]: if isinstance(__param, Callable) and not isinstance( # type: ignore[arg-type] __param, (Application, BaseTestServer) ): diff --git a/aiohttp/test_utils.py b/aiohttp/test_utils.py index 5ab3381f9e6..a85662b9fb2 100644 --- a/aiohttp/test_utils.py +++ b/aiohttp/test_utils.py @@ -11,7 +11,19 @@ import warnings from abc import ABC, abstractmethod from types import TracebackType -from typing import TYPE_CHECKING, Any, Callable, Iterator, List, Optional, Type, cast +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Generic, + Iterator, + List, + Optional, + Type, + TypeVar, + cast, + overload, +) from unittest import IsolatedAsyncioTestCase, mock from aiosignal import Signal @@ -36,6 +48,7 @@ from .web import ( Application, AppRunner, + BaseRequest, BaseRunner, Request, Server, @@ -53,6 +66,14 @@ if sys.version_info >= (3, 11) and TYPE_CHECKING: from typing import Unpack +if sys.version_info >= (3, 11): + from typing import Self +else: + Self = Any + +_ApplicationNone = TypeVar("_ApplicationNone", Application, None) 
+_Request = TypeVar("_Request", bound=BaseRequest) + REUSE_ADDRESS = os.name == "posix" and sys.platform != "cygwin" @@ -249,7 +270,7 @@ async def _make_runner(self, debug: bool = True, **kwargs: Any) -> ServerRunner: return ServerRunner(srv, debug=debug, **kwargs) -class TestClient: +class TestClient(Generic[_Request, _ApplicationNone]): """ A test client implementation. @@ -259,6 +280,22 @@ class TestClient: __test__ = False + @overload + def __init__( + self: "TestClient[Request, Application]", + server: TestServer, + *, + cookie_jar: Optional[AbstractCookieJar] = None, + **kwargs: Any, + ) -> None: ... + @overload + def __init__( + self: "TestClient[_Request, None]", + server: BaseTestServer, + *, + cookie_jar: Optional[AbstractCookieJar] = None, + **kwargs: Any, + ) -> None: ... def __init__( self, server: BaseTestServer, @@ -296,8 +333,8 @@ def server(self) -> BaseTestServer: return self._server @property - def app(self) -> Optional[Application]: - return cast(Optional[Application], getattr(self._server, "app", None)) + def app(self) -> _ApplicationNone: + return getattr(self._server, "app", None) # type: ignore[return-value] @property def session(self) -> ClientSession: @@ -465,7 +502,7 @@ def __exit__( # __exit__ should exist in pair with __enter__ but never executed pass # pragma: no cover - async def __aenter__(self) -> "TestClient": + async def __aenter__(self) -> Self: await self.start_server() return self @@ -530,7 +567,7 @@ async def get_server(self, app: Application) -> TestServer: """Return a TestServer instance.""" return TestServer(app, loop=self.loop) - async def get_client(self, server: TestServer) -> TestClient: + async def get_client(self, server: TestServer) -> TestClient[Request, Application]: """Return a TestClient instance.""" return TestClient(server, loop=self.loop) diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 70c5bf16096..137164c7d0b 100644 --- a/tests/test_client_functional.py +++ 
b/tests/test_client_functional.py @@ -3699,7 +3699,7 @@ async def handler(request): await resp.write(b"1" * 1000) await asyncio.sleep(0.01) - async def request(client): + async def request(client: TestClient[web.Request, web.Application]) -> None: timeout = aiohttp.ClientTimeout(total=0.5) async with await client.get("/", timeout=timeout) as resp: with pytest.raises(asyncio.TimeoutError): diff --git a/tests/test_test_utils.py b/tests/test_test_utils.py index a9c5179aedc..70d74fb69f0 100644 --- a/tests/test_test_utils.py +++ b/tests/test_test_utils.py @@ -1,6 +1,8 @@ +import asyncio import gzip +import sys from socket import socket -from typing import Any +from typing import Any, Iterator from unittest import mock import pytest @@ -12,14 +14,19 @@ from aiohttp.test_utils import ( AioHTTPTestCase, RawTestServer as _RawTestServer, - TestClient as _TestClient, - TestServer as _TestServer, + TestClient, + TestServer, get_port_socket, loop_context, make_mocked_request, unittest_run_loop, ) +if sys.version_info >= (3, 11): + from typing import assert_type + +_TestClient = TestClient[web.Request, web.Application] + _hello_world_str = "Hello, world" _hello_world_bytes = _hello_world_str.encode("utf-8") _hello_world_gz = gzip.compress(_hello_world_bytes) @@ -67,9 +74,11 @@ def app(): @pytest.fixture -def test_client(loop, app) -> None: - async def make_client(): - return _TestClient(_TestServer(app, loop=loop), loop=loop) +def test_client( + loop: asyncio.AbstractEventLoop, app: web.Application +) -> Iterator[_TestClient]: + async def make_client() -> TestClient[web.Request, web.Application]: + return TestClient(TestServer(app)) client = loop.run_until_complete(make_client()) @@ -81,14 +90,14 @@ async def make_client(): def test_with_test_server_fails(loop) -> None: app = _create_example_app() with pytest.raises(TypeError): - with _TestServer(app, loop=loop): + with TestServer(app, loop=loop): pass async def test_with_client_fails(loop) -> None: app = _create_example_app() 
with pytest.raises(TypeError): - with _TestClient(_TestServer(app, loop=loop), loop=loop): + with _TestClient(TestServer(app, loop=loop), loop=loop): pass @@ -96,7 +105,7 @@ async def test_aiohttp_client_close_is_idempotent() -> None: # a test client, called multiple times, should # not attempt to close the server again. app = _create_example_app() - client = _TestClient(_TestServer(app)) + client = _TestClient(TestServer(app)) await client.close() await client.close() @@ -252,12 +261,14 @@ def test_make_mocked_request_transport() -> None: async def test_test_client_props(loop) -> None: app = _create_example_app() - client = _TestClient(_TestServer(app, host="127.0.0.1", loop=loop), loop=loop) + client = _TestClient(TestServer(app, host="127.0.0.1", loop=loop), loop=loop) assert client.host == "127.0.0.1" assert client.port is None async with client: assert isinstance(client.port, int) assert client.server is not None + if sys.version_info >= (3, 11): + assert_type(client.app, web.Application) assert client.app is not None assert client.port is None @@ -272,13 +283,15 @@ async def hello(request): async with client: assert isinstance(client.port, int) assert client.server is not None + if sys.version_info >= (3, 11): + assert_type(client.app, None) assert client.app is None assert client.port is None async def test_test_server_context_manager(loop) -> None: app = _create_example_app() - async with _TestServer(app, loop=loop) as server: + async with TestServer(app, loop=loop) as server: client = aiohttp.ClientSession(loop=loop) resp = await client.head(server.make_url("/")) assert resp.status == 200 @@ -288,7 +301,7 @@ async def test_test_server_context_manager(loop) -> None: def test_client_unsupported_arg() -> None: with pytest.raises(TypeError) as e: - _TestClient("string") + TestClient("string") # type: ignore[call-overload] assert ( str(e.value) == "server must be TestServer instance, found type: <class 'str'>" @@ -297,7 +310,7 @@ def 
test_client_unsupported_arg() -> None: async def test_server_make_url_yarl_compatibility(loop) -> None: app = _create_example_app() - async with _TestServer(app, loop=loop) as server: + async with TestServer(app, loop=loop) as server: make_url = server.make_url assert make_url(URL("/foo")) == make_url("/foo") with pytest.raises(AssertionError): @@ -322,7 +335,7 @@ def test_noop(self) -> None: async def test_server_context_manager(app, loop) -> None: - async with _TestServer(app, loop=loop) as server: + async with TestServer(app, loop=loop) as server: async with aiohttp.ClientSession(loop=loop) as client: async with client.head(server.make_url("/")) as resp: assert resp.status == 200 @@ -332,7 +345,7 @@ async def test_server_context_manager(app, loop) -> None: "method", ["head", "get", "post", "options", "post", "put", "patch", "delete"] ) async def test_client_context_manager_response(method, app, loop) -> None: - async with _TestClient(_TestServer(app), loop=loop) as client: + async with _TestClient(TestServer(app), loop=loop) as client: async with getattr(client, method)("/") as resp: assert resp.status == 200 if method != "head": @@ -342,7 +355,7 @@ async def test_client_context_manager_response(method, app, loop) -> None: async def test_custom_port(loop, app, aiohttp_unused_port) -> None: port = aiohttp_unused_port() - client = _TestClient(_TestServer(app, loop=loop, port=port), loop=loop) + client = _TestClient(TestServer(app, loop=loop, port=port), loop=loop) await client.start_server() assert client.server.port == port @@ -355,7 +368,7 @@ async def test_custom_port(loop, app, aiohttp_unused_port) -> None: await client.close() -@pytest.mark.parametrize("test_server_cls", [_TestServer, _RawTestServer]) +@pytest.mark.parametrize("test_server_cls", [TestServer, _RawTestServer]) async def test_base_test_server_socket_factory( test_server_cls: type, app: Any, loop: Any ) -> None: @@ -379,7 +392,7 @@ def factory(*args, **kwargs) -> socket: ) async def 
test_test_server_hostnames(hostname, expected_host, loop) -> None: app = _create_example_app() - server = _TestServer(app, host=hostname, loop=loop) + server = TestServer(app, host=hostname, loop=loop) async with server: pass assert server.host == expected_host From 8c20e730b2a2f7a8cf9f0b46ef48c90ab60ffea4 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 22 Sep 2024 22:22:51 +0000 Subject: [PATCH 0617/1511] [PR #9242/76c6010f backport][3.10] Remove duplicate checks in the StreamWriter (#9248) Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/http_writer.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/aiohttp/http_writer.py b/aiohttp/http_writer.py index f54fa0f0774..dc07a358c70 100644 --- a/aiohttp/http_writer.py +++ b/aiohttp/http_writer.py @@ -71,8 +71,8 @@ def _write(self, chunk: bytes) -> None: size = len(chunk) self.buffer_size += size self.output_size += size - transport = self.transport - if not self._protocol.connected or transport is None or transport.is_closing(): + transport = self._protocol.transport + if transport is None or transport.is_closing(): raise ClientConnectionResetError("Cannot write to closing transport") transport.write(chunk) From ae4ba606bf6b968dd71d6b3b22c5001b1e5656fc Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 22 Sep 2024 22:23:21 +0000 Subject: [PATCH 0618/1511] [PR #9242/76c6010f backport][3.11] Remove duplicate checks in the StreamWriter (#9249) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/http_writer.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/aiohttp/http_writer.py b/aiohttp/http_writer.py index f54fa0f0774..dc07a358c70 100644 --- a/aiohttp/http_writer.py +++ b/aiohttp/http_writer.py @@ -71,8 +71,8 @@ def _write(self, chunk: bytes) -> None: size = len(chunk) self.buffer_size += size self.output_size += size - transport = self.transport - if not self._protocol.connected or transport is None or transport.is_closing(): + transport = self._protocol.transport + if transport is None or transport.is_closing(): raise ClientConnectionResetError("Cannot write to closing transport") transport.write(chunk) From b823b4ecd452cd43317522fc30fa6f5742ff89f7 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 22 Sep 2024 18:24:26 -0500 Subject: [PATCH 0619/1511] [PR #9240/9604461 backport][3.11] Small cleanups to Application._handle (#9251) --- aiohttp/web_app.py | 42 ++++++++++++++++++++---------------------- 1 file changed, 20 insertions(+), 22 deletions(-) diff --git a/aiohttp/web_app.py b/aiohttp/web_app.py index b59d0d1b0ff..c29f32df413 100644 --- a/aiohttp/web_app.py +++ b/aiohttp/web_app.py @@ -533,32 +533,30 @@ async def _handle(self, request: Request) -> StreamResponse: match_info.freeze() - resp = None request._match_info = match_info - expect = request.headers.get(hdrs.EXPECT) - if expect: + + if request.headers.get(hdrs.EXPECT): resp = await match_info.expect_handler(request) await request.writer.drain() + if resp is not None: + return resp + + handler = match_info.handler - if resp is None: - handler = match_info.handler - - if self._run_middlewares: - if not self._has_legacy_middlewares: - handler = _build_middlewares(handler, match_info.apps) - else: - for app in match_info.apps[::-1]: - for m, new_style in app._middlewares_handlers: # type: ignore[union-attr] - if new_style: - handler = update_wrapper( - partial(m, handler=handler), handler # type: 
ignore[misc] - ) - else: - handler = await m(app, handler) # type: ignore[arg-type,assignment] - - resp = await handler(request) - - return resp + if self._run_middlewares: + if not self._has_legacy_middlewares: + handler = _build_middlewares(handler, match_info.apps) + else: + for app in match_info.apps[::-1]: + for m, new_style in app._middlewares_handlers: # type: ignore[union-attr] + if new_style: + handler = update_wrapper( + partial(m, handler=handler), handler # type: ignore[misc] + ) + else: + handler = await m(app, handler) # type: ignore[arg-type,assignment] + + return await handler(request) def __call__(self) -> "Application": """gunicorn compatibility""" From 2abb604c01b8ca551e8fa69758094bebd39acb54 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 22 Sep 2024 18:24:26 -0500 Subject: [PATCH 0620/1511] [PR #9240/9604461 backport][3.10] Small cleanups to Application._handle (#9250) --- aiohttp/web_app.py | 42 ++++++++++++++++++++---------------------- 1 file changed, 20 insertions(+), 22 deletions(-) diff --git a/aiohttp/web_app.py b/aiohttp/web_app.py index c4199b12271..78b1a67bacc 100644 --- a/aiohttp/web_app.py +++ b/aiohttp/web_app.py @@ -533,32 +533,30 @@ async def _handle(self, request: Request) -> StreamResponse: match_info.freeze() - resp = None request._match_info = match_info - expect = request.headers.get(hdrs.EXPECT) - if expect: + + if request.headers.get(hdrs.EXPECT): resp = await match_info.expect_handler(request) await request.writer.drain() + if resp is not None: + return resp + + handler = match_info.handler - if resp is None: - handler = match_info.handler - - if self._run_middlewares: - if not self._has_legacy_middlewares: - handler = _build_middlewares(handler, match_info.apps) - else: - for app in match_info.apps[::-1]: - for m, new_style in app._middlewares_handlers: # type: ignore[union-attr] - if new_style: - handler = update_wrapper( - partial(m, handler=handler), handler # type: ignore[misc] - ) - 
else: - handler = await m(app, handler) # type: ignore[arg-type,assignment] - - resp = await handler(request) - - return resp + if self._run_middlewares: + if not self._has_legacy_middlewares: + handler = _build_middlewares(handler, match_info.apps) + else: + for app in match_info.apps[::-1]: + for m, new_style in app._middlewares_handlers: # type: ignore[union-attr] + if new_style: + handler = update_wrapper( + partial(m, handler=handler), handler # type: ignore[misc] + ) + else: + handler = await m(app, handler) # type: ignore[arg-type,assignment] + + return await handler(request) def __call__(self) -> "Application": """gunicorn compatibility""" From 80730b5eb73a5e550401a31f126425fd429e6494 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 22 Sep 2024 19:00:28 -0500 Subject: [PATCH 0621/1511] [PR #9241/7e0ef07 backport][3.11] Small speed up to starting web requests (#9253) --- CHANGES/9241.misc.rst | 1 + aiohttp/web_protocol.py | 7 +++++-- 2 files changed, 6 insertions(+), 2 deletions(-) create mode 120000 CHANGES/9241.misc.rst diff --git a/CHANGES/9241.misc.rst b/CHANGES/9241.misc.rst new file mode 120000 index 00000000000..d6a2f2aaaab --- /dev/null +++ b/CHANGES/9241.misc.rst @@ -0,0 +1 @@ +9174.misc.rst \ No newline at end of file diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index dd819de7236..8fa8535b93a 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -27,7 +27,7 @@ from .abc import AbstractAccessLogger, AbstractStreamWriter from .base_protocol import BaseProtocol -from .helpers import ceil_timeout, set_exception +from .helpers import ceil_timeout from .http import ( HttpProcessingError, HttpRequestParser, @@ -84,6 +84,9 @@ class PayloadAccessError(Exception): """Payload was accessed after response was sent.""" +_PAYLOAD_ACCESS_ERROR = PayloadAccessError() + + @attr.s(auto_attribs=True, frozen=True, slots=True) class _ErrInfo: status: int @@ -578,7 +581,7 @@ async def start(self) -> None: 
self.log_debug("Uncompleted request.") self.close() - set_exception(payload, PayloadAccessError()) + payload.set_exception(_PAYLOAD_ACCESS_ERROR) except asyncio.CancelledError: self.log_debug("Ignored premature client disconnection ") From 76284b5abbe1d78f1813d6568b0611fe4843173c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 22 Sep 2024 19:00:34 -0500 Subject: [PATCH 0622/1511] [PR #9241/7e0ef07 backport][3.10] Small speed up to starting web requests (#9252) --- CHANGES/9241.misc.rst | 1 + aiohttp/web_protocol.py | 7 +++++-- 2 files changed, 6 insertions(+), 2 deletions(-) create mode 120000 CHANGES/9241.misc.rst diff --git a/CHANGES/9241.misc.rst b/CHANGES/9241.misc.rst new file mode 120000 index 00000000000..d6a2f2aaaab --- /dev/null +++ b/CHANGES/9241.misc.rst @@ -0,0 +1 @@ +9174.misc.rst \ No newline at end of file diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index dd819de7236..8fa8535b93a 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -27,7 +27,7 @@ from .abc import AbstractAccessLogger, AbstractStreamWriter from .base_protocol import BaseProtocol -from .helpers import ceil_timeout, set_exception +from .helpers import ceil_timeout from .http import ( HttpProcessingError, HttpRequestParser, @@ -84,6 +84,9 @@ class PayloadAccessError(Exception): """Payload was accessed after response was sent.""" +_PAYLOAD_ACCESS_ERROR = PayloadAccessError() + + @attr.s(auto_attribs=True, frozen=True, slots=True) class _ErrInfo: status: int @@ -578,7 +581,7 @@ async def start(self) -> None: self.log_debug("Uncompleted request.") self.close() - set_exception(payload, PayloadAccessError()) + payload.set_exception(_PAYLOAD_ACCESS_ERROR) except asyncio.CancelledError: self.log_debug("Ignored premature client disconnection ") From 7ecc9c943104db7185fda7d51aa14749e4ba2992 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Mon, 23 Sep 2024 01:55:50 +0100 Subject: [PATCH 0623/1511] Stop 
raise_for_status() releasing when in a context (#9239) (#9255) (cherry picked from commit 63cfd5f96b3ce819c7feac67dfa4183ac4731acd) --- CHANGES/9239.bugfix.rst | 1 + aiohttp/client.py | 32 ++++++------------------- aiohttp/client_reqrep.py | 10 +++++++- aiohttp/client_ws.py | 14 ++++++++++- tests/test_client_functional.py | 41 +++++++++++++++++++++++++++++++-- 5 files changed, 69 insertions(+), 29 deletions(-) create mode 100644 CHANGES/9239.bugfix.rst diff --git a/CHANGES/9239.bugfix.rst b/CHANGES/9239.bugfix.rst new file mode 100644 index 00000000000..95b229742ce --- /dev/null +++ b/CHANGES/9239.bugfix.rst @@ -0,0 +1 @@ +Changed :py:meth:`ClientResponse.raise_for_status() <aiohttp.ClientResponse.raise_for_status>` to only release the connection when invoked outside an ``async with`` context -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/client.py b/aiohttp/client.py index 443335c6061..c893b06bb11 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -218,7 +218,7 @@ class ClientTimeout: # https://www.rfc-editor.org/rfc/rfc9110#section-9.2.2 IDEMPOTENT_METHODS = frozenset({"GET", "HEAD", "OPTIONS", "TRACE", "PUT", "DELETE"}) -_RetType = TypeVar("_RetType") +_RetType = TypeVar("_RetType", ClientResponse, ClientWebSocketResponse) _CharsetResolver = Callable[[ClientResponse, bytes], str] @@ -1364,7 +1364,7 @@ class _BaseRequestContextManager(Coroutine[Any, Any, _RetType], Generic[_RetType __slots__ = ("_coro", "_resp") def __init__(self, coro: Coroutine["asyncio.Future[Any]", None, _RetType]) -> None: - self._coro = coro + self._coro: Coroutine["asyncio.Future[Any]", None, _RetType] = coro def send(self, arg: None) -> "asyncio.Future[Any]": return self._coro.send(arg) @@ -1383,12 +1383,8 @@ def __iter__(self) -> Generator[Any, None, _RetType]: return self.__await__() async def __aenter__(self) -> _RetType: - self._resp = await self._coro - return self._resp - - -class _RequestContextManager(_BaseRequestContextManager[ClientResponse]): - __slots__ = () + 
self._resp: _RetType = await self._coro + return await self._resp.__aenter__() async def __aexit__( self, @@ -1396,25 +1392,11 @@ async def __aexit__( exc: Optional[BaseException], tb: Optional[TracebackType], ) -> None: - # We're basing behavior on the exception as it can be caused by - # user code unrelated to the status of the connection. If you - # would like to close a connection you must do that - # explicitly. Otherwise connection error handling should kick in - # and close/recycle the connection as required. - self._resp.release() - await self._resp.wait_for_close() + await self._resp.__aexit__(exc_type, exc, tb) -class _WSRequestContextManager(_BaseRequestContextManager[ClientWebSocketResponse]): - __slots__ = () - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]], - exc: Optional[BaseException], - tb: Optional[TracebackType], - ) -> None: - await self._resp.close() +_RequestContextManager = _BaseRequestContextManager[ClientResponse] +_WSRequestContextManager = _BaseRequestContextManager[ClientWebSocketResponse] class _SessionRequestContextManager: diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 10144f2a9c4..09b8659264e 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -805,6 +805,7 @@ class ClientResponse(HeadersMixin): # post-init stage allows to not change ctor signature _closed = True # to allow __del__ for non-initialized properly response _released = False + _in_context = False __writer = None def __init__( @@ -1094,7 +1095,12 @@ def raise_for_status(self) -> None: if not self.ok: # reason should always be not None for a started response assert self.reason is not None - self.release() + + # If we're in a context we can rely on __aexit__() to release as the + # exception propagates. 
+ if not self._in_context: + self.release() + raise ClientResponseError( self.request_info, self.history, @@ -1221,6 +1227,7 @@ async def json( return loads(stripped.decode(encoding)) async def __aenter__(self) -> "ClientResponse": + self._in_context = True return self async def __aexit__( @@ -1229,6 +1236,7 @@ async def __aexit__( exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: + self._in_context = False # similar to _RequestContextManager, we do not need to check # for exceptions, response object can close connection # if state is broken diff --git a/aiohttp/client_ws.py b/aiohttp/client_ws.py index 6246234b8e0..58409ed71e5 100644 --- a/aiohttp/client_ws.py +++ b/aiohttp/client_ws.py @@ -2,7 +2,8 @@ import asyncio import sys -from typing import Any, Optional, cast +from types import TracebackType +from typing import Any, Optional, Type, cast import attr @@ -393,3 +394,14 @@ async def __anext__(self) -> WSMessage: if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED): raise StopAsyncIteration return msg + + async def __aenter__(self) -> "ClientWebSocketResponse": + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + await self.close() diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 137164c7d0b..d39addc29a1 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -23,6 +23,7 @@ from aiohttp import Fingerprint, ServerFingerprintMismatch, hdrs, web from aiohttp.abc import AbstractResolver from aiohttp.client_exceptions import ( + ClientResponseError, InvalidURL, InvalidUrlClientError, InvalidUrlRedirectClientError, @@ -3592,8 +3593,44 @@ async def handler(request): await resp.read() -async def test_read_from_closed_content(aiohttp_client) -> None: - async def handler(request): +async def 
test_read_after_catch_raise_for_status(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: + return web.Response(body=b"data", status=404) + + app = web.Application() + app.add_routes([web.get("/", handler)]) + + client = await aiohttp_client(app) + + async with client.get("/") as resp: + with pytest.raises(ClientResponseError, match="404"): + # Should not release response when in async with context. + resp.raise_for_status() + + result = await resp.read() + assert result == b"data" + + +async def test_read_after_raise_outside_context(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: + return web.Response(body=b"data", status=404) + + app = web.Application() + app.add_routes([web.get("/", handler)]) + + client = await aiohttp_client(app) + + resp = await client.get("/") + with pytest.raises(ClientResponseError, match="404"): + # No async with, so should release and therefore read() will fail. 
+ resp.raise_for_status() + + with pytest.raises(aiohttp.ClientConnectionError, match=r"^Connection closed$"): + await resp.read() + + +async def test_read_from_closed_content(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: return web.Response(body=b"data") app = web.Application() From 5fe63257ad49813a753ba5f0591779eafe7c4e29 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Mon, 23 Sep 2024 01:56:24 +0100 Subject: [PATCH 0624/1511] Stop raise_for_status() releasing when in a context (#9239) (#9256) (cherry picked from commit 63cfd5f96b3ce819c7feac67dfa4183ac4731acd) --- CHANGES/9239.bugfix.rst | 1 + aiohttp/client.py | 32 ++++++------------------- aiohttp/client_reqrep.py | 10 +++++++- aiohttp/client_ws.py | 14 ++++++++++- tests/test_client_functional.py | 41 +++++++++++++++++++++++++++++++-- 5 files changed, 69 insertions(+), 29 deletions(-) create mode 100644 CHANGES/9239.bugfix.rst diff --git a/CHANGES/9239.bugfix.rst b/CHANGES/9239.bugfix.rst new file mode 100644 index 00000000000..95b229742ce --- /dev/null +++ b/CHANGES/9239.bugfix.rst @@ -0,0 +1 @@ +Changed :py:meth:`ClientResponse.raise_for_status() <aiohttp.ClientResponse.raise_for_status>` to only release the connection when invoked outside an ``async with`` context -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/client.py b/aiohttp/client.py index 61bea70aa9b..da89ee2a790 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -213,7 +213,7 @@ class ClientTimeout: # https://www.rfc-editor.org/rfc/rfc9110#section-9.2.2 IDEMPOTENT_METHODS = frozenset({"GET", "HEAD", "OPTIONS", "TRACE", "PUT", "DELETE"}) -_RetType = TypeVar("_RetType") +_RetType = TypeVar("_RetType", ClientResponse, ClientWebSocketResponse) _CharsetResolver = Callable[[ClientResponse, bytes], str] @@ -1333,7 +1333,7 @@ class _BaseRequestContextManager(Coroutine[Any, Any, _RetType], Generic[_RetType __slots__ = ("_coro", "_resp") def __init__(self, coro: Coroutine["asyncio.Future[Any]", None, _RetType]) -> None: - self._coro = coro + self._coro: Coroutine["asyncio.Future[Any]", None, _RetType] = coro def send(self, arg: None) -> "asyncio.Future[Any]": return self._coro.send(arg) @@ -1352,12 +1352,8 @@ def __iter__(self) -> Generator[Any, None, _RetType]: return self.__await__() async def __aenter__(self) -> _RetType: - self._resp = await self._coro - return self._resp - - -class _RequestContextManager(_BaseRequestContextManager[ClientResponse]): - __slots__ = () + self._resp: _RetType = await self._coro + return await self._resp.__aenter__() async def __aexit__( self, @@ -1365,25 +1361,11 @@ async def __aexit__( exc: Optional[BaseException], tb: Optional[TracebackType], ) -> None: - # We're basing behavior on the exception as it can be caused by - # user code unrelated to the status of the connection. If you - # would like to close a connection you must do that - # explicitly. Otherwise connection error handling should kick in - # and close/recycle the connection as required. 
- self._resp.release() - await self._resp.wait_for_close() + await self._resp.__aexit__(exc_type, exc, tb) -class _WSRequestContextManager(_BaseRequestContextManager[ClientWebSocketResponse]): - __slots__ = () - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]], - exc: Optional[BaseException], - tb: Optional[TracebackType], - ) -> None: - await self._resp.close() +_RequestContextManager = _BaseRequestContextManager[ClientResponse] +_WSRequestContextManager = _BaseRequestContextManager[ClientWebSocketResponse] class _SessionRequestContextManager: diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index cfe44c3c563..a80d28f7fd8 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -814,6 +814,7 @@ class ClientResponse(HeadersMixin): # post-init stage allows to not change ctor signature _closed = True # to allow __del__ for non-initialized properly response _released = False + _in_context = False __writer = None def __init__( @@ -1103,7 +1104,12 @@ def raise_for_status(self) -> None: if not self.ok: # reason should always be not None for a started response assert self.reason is not None - self.release() + + # If we're in a context we can rely on __aexit__() to release as the + # exception propagates. 
+ if not self._in_context: + self.release() + raise ClientResponseError( self.request_info, self.history, @@ -1230,6 +1236,7 @@ async def json( return loads(stripped.decode(encoding)) async def __aenter__(self) -> "ClientResponse": + self._in_context = True return self async def __aexit__( @@ -1238,6 +1245,7 @@ async def __aexit__( exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: + self._in_context = False # similar to _RequestContextManager, we do not need to check # for exceptions, response object can close connection # if state is broken diff --git a/aiohttp/client_ws.py b/aiohttp/client_ws.py index 7b3a5bf952d..c6b5da5103b 100644 --- a/aiohttp/client_ws.py +++ b/aiohttp/client_ws.py @@ -2,7 +2,8 @@ import asyncio import sys -from typing import Any, Optional, cast +from types import TracebackType +from typing import Any, Optional, Type, cast from .client_exceptions import ClientError, ServerTimeoutError from .client_reqrep import ClientResponse @@ -384,3 +385,14 @@ async def __anext__(self) -> WSMessage: if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED): raise StopAsyncIteration return msg + + async def __aenter__(self) -> "ClientWebSocketResponse": + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + await self.close() diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 082db6f3e9a..60af4930f14 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -23,6 +23,7 @@ from aiohttp import Fingerprint, ServerFingerprintMismatch, client_reqrep, hdrs, web from aiohttp.abc import AbstractResolver from aiohttp.client_exceptions import ( + ClientResponseError, InvalidURL, InvalidUrlClientError, InvalidUrlRedirectClientError, @@ -3474,8 +3475,44 @@ async def handler(request): await resp.read() -async def 
test_read_from_closed_content(aiohttp_client) -> None: - async def handler(request): +async def test_read_after_catch_raise_for_status(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: + return web.Response(body=b"data", status=404) + + app = web.Application() + app.add_routes([web.get("/", handler)]) + + client = await aiohttp_client(app) + + async with client.get("/") as resp: + with pytest.raises(ClientResponseError, match="404"): + # Should not release response when in async with context. + resp.raise_for_status() + + result = await resp.read() + assert result == b"data" + + +async def test_read_after_raise_outside_context(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: + return web.Response(body=b"data", status=404) + + app = web.Application() + app.add_routes([web.get("/", handler)]) + + client = await aiohttp_client(app) + + resp = await client.get("/") + with pytest.raises(ClientResponseError, match="404"): + # No async with, so should release and therefore read() will fail. + resp.raise_for_status() + + with pytest.raises(aiohttp.ClientConnectionError, match=r"^Connection closed$"): + await resp.read() + + +async def test_read_from_closed_content(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: return web.Response(body=b"data") app = web.Application() From 3f1a8b10328ac8e65e87af9e66e61dcbc960ccb3 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Mon, 23 Sep 2024 02:34:30 +0100 Subject: [PATCH 0625/1511] Fix cancellations being swallowed (#9030) (#9257) Co-authored-by: J. 
Nick Koston <nick@koston.org> (cherry picked from commit 1a77ad933f07ab0e7ba0c16f7ca8f02fa8ab044e) --- CHANGES/9030.bugfix.rst | 1 + aiohttp/client_reqrep.py | 37 +++++++++++++++++++++++++-------- aiohttp/web_protocol.py | 38 ++++++++++++++++++++++++++-------- tests/test_client_request.py | 19 ++++++++++++++++- tests/test_web_functional.py | 40 ++++++++++++++++++++++++++++++++++-- 5 files changed, 115 insertions(+), 20 deletions(-) create mode 100644 CHANGES/9030.bugfix.rst diff --git a/CHANGES/9030.bugfix.rst b/CHANGES/9030.bugfix.rst new file mode 100644 index 00000000000..2e9d48f5359 --- /dev/null +++ b/CHANGES/9030.bugfix.rst @@ -0,0 +1 @@ +Fixed (on Python 3.11+) some edge cases where a task cancellation may get incorrectly suppressed -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 09b8659264e..627966dbca6 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -625,11 +625,8 @@ async def write_bytes( """Support coroutines that yields bytes objects.""" # 100 response if self._continue is not None: - try: - await writer.drain() - await self._continue - except asyncio.CancelledError: - return + await writer.drain() + await self._continue protocol = conn.protocol assert protocol is not None @@ -658,6 +655,7 @@ async def write_bytes( except asyncio.CancelledError: # Body hasn't been fully sent, so connection can't be reused. 
conn.close() + raise except Exception as underlying_exc: set_exception( protocol, @@ -764,8 +762,15 @@ async def send(self, conn: "Connection") -> "ClientResponse": async def close(self) -> None: if self._writer is not None: - with contextlib.suppress(asyncio.CancelledError): + try: await self._writer + except asyncio.CancelledError: + if ( + sys.version_info >= (3, 11) + and (task := asyncio.current_task()) + and task.cancelling() + ): + raise def terminate(self) -> None: if self._writer is not None: @@ -1119,7 +1124,15 @@ def _release_connection(self) -> None: async def _wait_released(self) -> None: if self._writer is not None: - await self._writer + try: + await self._writer + except asyncio.CancelledError: + if ( + sys.version_info >= (3, 11) + and (task := asyncio.current_task()) + and task.cancelling() + ): + raise self._release_connection() def _cleanup_writer(self) -> None: @@ -1135,7 +1148,15 @@ def _notify_content(self) -> None: async def wait_for_close(self) -> None: if self._writer is not None: - await self._writer + try: + await self._writer + except asyncio.CancelledError: + if ( + sys.version_info >= (3, 11) + and (task := asyncio.current_task()) + and task.cancelling() + ): + raise self.release() async def read(self) -> bytes: diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index 8fa8535b93a..85eb70d5a0b 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -271,17 +271,32 @@ async def shutdown(self, timeout: Optional[float] = 15.0) -> None: # down while the handler is still processing a request # to avoid creating a future for every request. 
self._handler_waiter = self._loop.create_future() - with suppress(asyncio.CancelledError, asyncio.TimeoutError): + try: async with ceil_timeout(timeout): await self._handler_waiter + except (asyncio.CancelledError, asyncio.TimeoutError): + self._handler_waiter = None + if ( + sys.version_info >= (3, 11) + and (task := asyncio.current_task()) + and task.cancelling() + ): + raise # Then cancel handler and wait - with suppress(asyncio.CancelledError, asyncio.TimeoutError): + try: async with ceil_timeout(timeout): if self._current_request is not None: self._current_request._cancel(asyncio.CancelledError()) if self._task_handler is not None and not self._task_handler.done(): - await self._task_handler + await asyncio.shield(self._task_handler) + except (asyncio.CancelledError, asyncio.TimeoutError): + if ( + sys.version_info >= (3, 11) + and (task := asyncio.current_task()) + and task.cancelling() + ): + raise # force-close non-idle handler if self._task_handler is not None: @@ -517,8 +532,6 @@ async def start(self) -> None: # wait for next request self._waiter = loop.create_future() await self._waiter - except asyncio.CancelledError: - break finally: self._waiter = None @@ -545,7 +558,7 @@ async def start(self) -> None: task = loop.create_task(coro) try: resp, reset = await task - except (asyncio.CancelledError, ConnectionError): + except ConnectionError: self.log_debug("Ignored premature client disconnection") break @@ -569,12 +582,19 @@ async def start(self) -> None: now = loop.time() end_t = now + lingering_time - with suppress(asyncio.TimeoutError, asyncio.CancelledError): + try: while not payload.is_eof() and now < end_t: async with ceil_timeout(end_t - now): # read and ignore await payload.readany() now = loop.time() + except (asyncio.CancelledError, asyncio.TimeoutError): + if ( + sys.version_info >= (3, 11) + and (t := asyncio.current_task()) + and t.cancelling() + ): + raise # if payload still uncompleted if not payload.is_eof() and not self._force_close: @@ 
-584,8 +604,8 @@ async def start(self) -> None: payload.set_exception(_PAYLOAD_ACCESS_ERROR) except asyncio.CancelledError: - self.log_debug("Ignored premature client disconnection ") - break + self.log_debug("Ignored premature client disconnection") + raise except Exception as exc: self.log_exception("Unhandled exception", exc_info=exc) self.force_close() diff --git a/tests/test_client_request.py b/tests/test_client_request.py index 2d70ebdd4f2..f2eff019504 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -2,6 +2,7 @@ import hashlib import io import pathlib +import sys import urllib.parse import zlib from http.cookies import BaseCookie, Morsel, SimpleCookie @@ -1213,7 +1214,23 @@ async def test_oserror_on_write_bytes(loop, conn) -> None: await req.close() -async def test_terminate(loop, conn) -> None: +@pytest.mark.skipif(sys.version_info < (3, 11), reason="Needs Task.cancelling()") +async def test_cancel_close(loop: asyncio.AbstractEventLoop, conn: mock.Mock) -> None: + req = ClientRequest("get", URL("http://python.org"), loop=loop) + req._writer = asyncio.Future() # type: ignore[assignment] + + t = asyncio.create_task(req.close()) + + # Start waiting on _writer + await asyncio.sleep(0) + + t.cancel() + # Cancellation should not be suppressed. 
+ with pytest.raises(asyncio.CancelledError): + await t + + +async def test_terminate(loop: asyncio.AbstractEventLoop, conn: mock.Mock) -> None: req = ClientRequest("get", URL("http://python.org"), loop=loop) async def _mock_write_bytes(*args, **kwargs): diff --git a/tests/test_web_functional.py b/tests/test_web_functional.py index 5b2e5fe9353..e46a23c5857 100644 --- a/tests/test_web_functional.py +++ b/tests/test_web_functional.py @@ -3,6 +3,7 @@ import json import pathlib import socket +import sys import zlib from typing import Any, NoReturn, Optional from unittest import mock @@ -22,6 +23,7 @@ web, ) from aiohttp.hdrs import CONTENT_LENGTH, CONTENT_TYPE, TRANSFER_ENCODING +from aiohttp.pytest_plugin import AiohttpClient from aiohttp.test_utils import make_mocked_coro from aiohttp.typedefs import Handler from aiohttp.web_protocol import RequestHandler @@ -187,8 +189,42 @@ async def handler(request): await resp.release() -async def test_post_form(aiohttp_client) -> None: - async def handler(request): +@pytest.mark.skipif(sys.version_info < (3, 11), reason="Needs Task.cancelling()") +async def test_cancel_shutdown(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: + t = asyncio.create_task(request.protocol.shutdown()) + # Ensure it's started waiting + await asyncio.sleep(0) + + t.cancel() + # Cancellation should not be suppressed + with pytest.raises(asyncio.CancelledError): + await t + + # Repeat for second waiter in shutdown() + with mock.patch.object(request.protocol, "_request_in_progress", False): + with mock.patch.object(request.protocol, "_current_request", None): + t = asyncio.create_task(request.protocol.shutdown()) + await asyncio.sleep(0) + + t.cancel() + with pytest.raises(asyncio.CancelledError): + await t + + return web.Response(body=b"OK") + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + async with client.get("/") as resp: + assert resp.status == 
200 + txt = await resp.text() + assert txt == "OK" + + +async def test_post_form(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: data = await request.post() assert {"a": "1", "b": "2", "c": ""} == data return web.Response(body=b"OK") From 9876a61beb4413c2ca720754291b71a79d2fce21 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Mon, 23 Sep 2024 02:34:44 +0100 Subject: [PATCH 0626/1511] Fix cancellations being swallowed (#9030) (#9258) Co-authored-by: J. Nick Koston <nick@koston.org> (cherry picked from commit 1a77ad933f07ab0e7ba0c16f7ca8f02fa8ab044e) --- CHANGES/9030.bugfix.rst | 1 + aiohttp/client_reqrep.py | 37 +++++++++++++++++++++++++-------- aiohttp/web_protocol.py | 38 ++++++++++++++++++++++++++-------- tests/test_client_request.py | 19 ++++++++++++++++- tests/test_web_functional.py | 40 ++++++++++++++++++++++++++++++++++-- 5 files changed, 115 insertions(+), 20 deletions(-) create mode 100644 CHANGES/9030.bugfix.rst diff --git a/CHANGES/9030.bugfix.rst b/CHANGES/9030.bugfix.rst new file mode 100644 index 00000000000..2e9d48f5359 --- /dev/null +++ b/CHANGES/9030.bugfix.rst @@ -0,0 +1 @@ +Fixed (on Python 3.11+) some edge cases where a task cancellation may get incorrectly suppressed -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index a80d28f7fd8..aa8f54e67b8 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -634,11 +634,8 @@ async def write_bytes( """Support coroutines that yields bytes objects.""" # 100 response if self._continue is not None: - try: - await writer.drain() - await self._continue - except asyncio.CancelledError: - return + await writer.drain() + await self._continue protocol = conn.protocol assert protocol is not None @@ -667,6 +664,7 @@ async def write_bytes( except asyncio.CancelledError: # Body hasn't been fully sent, so connection can't be reused. 
conn.close() + raise except Exception as underlying_exc: set_exception( protocol, @@ -773,8 +771,15 @@ async def send(self, conn: "Connection") -> "ClientResponse": async def close(self) -> None: if self._writer is not None: - with contextlib.suppress(asyncio.CancelledError): + try: await self._writer + except asyncio.CancelledError: + if ( + sys.version_info >= (3, 11) + and (task := asyncio.current_task()) + and task.cancelling() + ): + raise def terminate(self) -> None: if self._writer is not None: @@ -1128,7 +1133,15 @@ def _release_connection(self) -> None: async def _wait_released(self) -> None: if self._writer is not None: - await self._writer + try: + await self._writer + except asyncio.CancelledError: + if ( + sys.version_info >= (3, 11) + and (task := asyncio.current_task()) + and task.cancelling() + ): + raise self._release_connection() def _cleanup_writer(self) -> None: @@ -1144,7 +1157,15 @@ def _notify_content(self) -> None: async def wait_for_close(self) -> None: if self._writer is not None: - await self._writer + try: + await self._writer + except asyncio.CancelledError: + if ( + sys.version_info >= (3, 11) + and (task := asyncio.current_task()) + and task.cancelling() + ): + raise self.release() async def read(self) -> bytes: diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index 8fa8535b93a..85eb70d5a0b 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -271,17 +271,32 @@ async def shutdown(self, timeout: Optional[float] = 15.0) -> None: # down while the handler is still processing a request # to avoid creating a future for every request. 
self._handler_waiter = self._loop.create_future() - with suppress(asyncio.CancelledError, asyncio.TimeoutError): + try: async with ceil_timeout(timeout): await self._handler_waiter + except (asyncio.CancelledError, asyncio.TimeoutError): + self._handler_waiter = None + if ( + sys.version_info >= (3, 11) + and (task := asyncio.current_task()) + and task.cancelling() + ): + raise # Then cancel handler and wait - with suppress(asyncio.CancelledError, asyncio.TimeoutError): + try: async with ceil_timeout(timeout): if self._current_request is not None: self._current_request._cancel(asyncio.CancelledError()) if self._task_handler is not None and not self._task_handler.done(): - await self._task_handler + await asyncio.shield(self._task_handler) + except (asyncio.CancelledError, asyncio.TimeoutError): + if ( + sys.version_info >= (3, 11) + and (task := asyncio.current_task()) + and task.cancelling() + ): + raise # force-close non-idle handler if self._task_handler is not None: @@ -517,8 +532,6 @@ async def start(self) -> None: # wait for next request self._waiter = loop.create_future() await self._waiter - except asyncio.CancelledError: - break finally: self._waiter = None @@ -545,7 +558,7 @@ async def start(self) -> None: task = loop.create_task(coro) try: resp, reset = await task - except (asyncio.CancelledError, ConnectionError): + except ConnectionError: self.log_debug("Ignored premature client disconnection") break @@ -569,12 +582,19 @@ async def start(self) -> None: now = loop.time() end_t = now + lingering_time - with suppress(asyncio.TimeoutError, asyncio.CancelledError): + try: while not payload.is_eof() and now < end_t: async with ceil_timeout(end_t - now): # read and ignore await payload.readany() now = loop.time() + except (asyncio.CancelledError, asyncio.TimeoutError): + if ( + sys.version_info >= (3, 11) + and (t := asyncio.current_task()) + and t.cancelling() + ): + raise # if payload still uncompleted if not payload.is_eof() and not self._force_close: @@ 
-584,8 +604,8 @@ async def start(self) -> None: payload.set_exception(_PAYLOAD_ACCESS_ERROR) except asyncio.CancelledError: - self.log_debug("Ignored premature client disconnection ") - break + self.log_debug("Ignored premature client disconnection") + raise except Exception as exc: self.log_exception("Unhandled exception", exc_info=exc) self.force_close() diff --git a/tests/test_client_request.py b/tests/test_client_request.py index 2d70ebdd4f2..f2eff019504 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -2,6 +2,7 @@ import hashlib import io import pathlib +import sys import urllib.parse import zlib from http.cookies import BaseCookie, Morsel, SimpleCookie @@ -1213,7 +1214,23 @@ async def test_oserror_on_write_bytes(loop, conn) -> None: await req.close() -async def test_terminate(loop, conn) -> None: +@pytest.mark.skipif(sys.version_info < (3, 11), reason="Needs Task.cancelling()") +async def test_cancel_close(loop: asyncio.AbstractEventLoop, conn: mock.Mock) -> None: + req = ClientRequest("get", URL("http://python.org"), loop=loop) + req._writer = asyncio.Future() # type: ignore[assignment] + + t = asyncio.create_task(req.close()) + + # Start waiting on _writer + await asyncio.sleep(0) + + t.cancel() + # Cancellation should not be suppressed. 
+ with pytest.raises(asyncio.CancelledError): + await t + + +async def test_terminate(loop: asyncio.AbstractEventLoop, conn: mock.Mock) -> None: req = ClientRequest("get", URL("http://python.org"), loop=loop) async def _mock_write_bytes(*args, **kwargs): diff --git a/tests/test_web_functional.py b/tests/test_web_functional.py index 969153b1603..eadb43b1ecb 100644 --- a/tests/test_web_functional.py +++ b/tests/test_web_functional.py @@ -3,6 +3,7 @@ import json import pathlib import socket +import sys import zlib from typing import Any, NoReturn, Optional from unittest import mock @@ -22,6 +23,7 @@ web, ) from aiohttp.hdrs import CONTENT_LENGTH, CONTENT_TYPE, TRANSFER_ENCODING +from aiohttp.pytest_plugin import AiohttpClient from aiohttp.test_utils import make_mocked_coro from aiohttp.typedefs import Handler from aiohttp.web_protocol import RequestHandler @@ -187,8 +189,42 @@ async def handler(request): await resp.release() -async def test_post_form(aiohttp_client) -> None: - async def handler(request): +@pytest.mark.skipif(sys.version_info < (3, 11), reason="Needs Task.cancelling()") +async def test_cancel_shutdown(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: + t = asyncio.create_task(request.protocol.shutdown()) + # Ensure it's started waiting + await asyncio.sleep(0) + + t.cancel() + # Cancellation should not be suppressed + with pytest.raises(asyncio.CancelledError): + await t + + # Repeat for second waiter in shutdown() + with mock.patch.object(request.protocol, "_request_in_progress", False): + with mock.patch.object(request.protocol, "_current_request", None): + t = asyncio.create_task(request.protocol.shutdown()) + await asyncio.sleep(0) + + t.cancel() + with pytest.raises(asyncio.CancelledError): + await t + + return web.Response(body=b"OK") + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + async with client.get("/") as resp: + assert resp.status == 
200 + txt = await resp.text() + assert txt == "OK" + + +async def test_post_form(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: data = await request.post() assert {"a": "1", "b": "2", "c": ""} == data return web.Response(body=b"OK") From 750e333c2f992d299f87a270252268b6a7e50a1c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 22 Sep 2024 21:28:44 -0500 Subject: [PATCH 0627/1511] [PR #9261/2383b9ca backport][3.10] Cleanup changelog messages (#9262) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/4650.bugfix | 2 +- CHANGES/8845.bugfix.rst | 2 +- CHANGES/9031.misc.rst | 2 +- CHANGES/9204.misc.rst | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/CHANGES/4650.bugfix b/CHANGES/4650.bugfix index 5c9fc17ff60..e3e17b00ae8 100644 --- a/CHANGES/4650.bugfix +++ b/CHANGES/4650.bugfix @@ -1 +1 @@ -Implement binding to IPv6 addresses in the pytest server fixture. +Implemented binding to IPv6 addresses in the pytest server fixture. diff --git a/CHANGES/8845.bugfix.rst b/CHANGES/8845.bugfix.rst index ff0016ac14b..c37a0095ed3 100644 --- a/CHANGES/8845.bugfix.rst +++ b/CHANGES/8845.bugfix.rst @@ -1 +1 @@ -Changed behaviour when returning an invalid response to send a 500 response -- by :user:`Dreamsorcerer`. +Changed behavior when returning an invalid response to send a 500 response -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/9031.misc.rst b/CHANGES/9031.misc.rst index 1deab5230f7..1874a4deddd 100644 --- a/CHANGES/9031.misc.rst +++ b/CHANGES/9031.misc.rst @@ -1 +1 @@ -Tracing overhead is avoided in the http writer when there are no active traces -- by user:`bdraco`. +Avoided tracing overhead in the http writer when there are no active traces -- by user:`bdraco`. 
diff --git a/CHANGES/9204.misc.rst b/CHANGES/9204.misc.rst index da12a7df6f7..9f3196fa5be 100644 --- a/CHANGES/9204.misc.rst +++ b/CHANGES/9204.misc.rst @@ -1 +1 @@ -Significantly speed up filtering cookies -- by :user:`bdraco`. +Significantly sped up filtering cookies -- by :user:`bdraco`. From 3eb72824a7ddddb41e57a4275224df7b46b58134 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 22 Sep 2024 21:33:43 -0500 Subject: [PATCH 0628/1511] Release 3.10.6rc0 --- CHANGES.rst | 449 ++++++++++++++++++++++++++++++++++++++++++++ aiohttp/__init__.py | 2 +- 2 files changed, 450 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index 2bd19de71d6..9a944423642 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,455 @@ .. towncrier release notes start +3.10.6rc0 (2024-09-22) +====================== + +Bug fixes +--------- + +- Implemented binding to IPv6 addresses in the pytest server fixture. + + + *Related issues and pull requests on GitHub:* + :issue:`4650`. + + + +- Fixed StreamResponse.prepared to return True after EOF is sent -- by :user:`arthurdarcet`. + + + *Related issues and pull requests on GitHub:* + :issue:`5343`. + + + +- Fixed ``Response.text`` when body is a ``Payload`` -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`6485`. + + + +- Added support for URL credentials with empty (zero-length) username, e.g. ``https://:password@host`` -- by :user:`shuckc` + + + *Related issues and pull requests on GitHub:* + :issue:`6494`. + + + +- Fixed handling of some file-like objects (e.g. ``tarfile.extractfile()``) which raise ``AttributeError`` instead of ``OSError`` when ``fileno`` fails for streaming payload data -- by :user:`ReallyReivax`. + + + *Related issues and pull requests on GitHub:* + :issue:`6732`. + + + +- Stopped logging exceptions from ``web.run_app()`` that would be raised regardless -- by :user:`Dreamsorcerer`. 
+ + + *Related issues and pull requests on GitHub:* + :issue:`6807`. + + + +- Changed ``make_mocked_request()`` to use empty payload by default -- by :user:`rahulnht`. + + + *Related issues and pull requests on GitHub:* + :issue:`7167`. + + + +- Used more precise type for ``ClientResponseError.headers``, fixing some type errors when using them -- by :user:`Dreamorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8768`. + + + +- Fixed Python parser chunked handling with multiple Transfer-Encoding values -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8823`. + + + +- Changed behavior when returning an invalid response to send a 500 response -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8845`. + + + +- Stopped adding a default Content-Type header when response has no content -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8858`. + + + +- Fixed an unclosed transport ``ResourceWarning`` on web handlers -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8875`. + + + +- Fixed error handling after 100-continue so server sends 500 response instead of disconnecting -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8876`. + + + +- Fixed response reading from closed session to throw an error immediately instead of timing out -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8878`. + + + +- Fixed web router not matching pre-encoded URLs (requires yarl 1.9.6+) -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8898`. + + + +- Fixed ``CancelledError`` from one cleanup context stopping other contexts from completing -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8908`. 
+ + + +- Fixed ``Site.name`` when host is an empty string -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8929`. + + + +- Fixed resolve_host() 'Task was destroyed but is pending' errors -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8967`. + + + +- Fixed changing scheme/host in ``Response.clone()`` for absolute URLs -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8990`. + + + +- Fixed client incorrectly reusing a connection when the previous message had not been fully sent -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8992`. + + + +- Fixed an error when trying to add a route for multiple methods with a path containing a regex pattern -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8998`. + + + +- Updated Python parser to reject messages after a close message, matching C parser behaviour -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`9018`. + + + +- Fixed creation of ``SSLContext`` inside of :py:class:`aiohttp.TCPConnector` with multiple event loops in different threads -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9029`. + + + +- Fixed (on Python 3.11+) some edge cases where a task cancellation may get incorrectly suppressed -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`9030`. + + + +- Fixed the incorrect use of flags for ``getnameinfo()`` in the Resolver --by :user:`GitNMLee` + + Link-Local IPv6 addresses can now be handled by the Resolver correctly. + + + *Related issues and pull requests on GitHub:* + :issue:`9032`. + + + +- Fixed exception information getting lost on ``HttpProcessingError`` -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`9052`. 
+ + + +- Fixed ``If-None-Match`` not using weak comparison -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`9063`. + + + +- Fixed compressed requests failing when no body was provided -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`9108`. + + + +- Added :exc:`aiohttp.ClientConnectionResetError`. Client code that previously threw :exc:`ConnectionResetError` + will now throw this -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`9137`. + + + +- Fixed race condition that could cause server to close connection incorrectly at keepalive timeout -- by :user:`Dreamosorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`9140`. + + + +- Fixed badly encoded charset crashing when getting response text instead of falling back to charset detector. + + + *Related issues and pull requests on GitHub:* + :issue:`9160`. + + + +- Rejected `\n` in `reason` values to avoid sending broken HTTP messages -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`9167`. + + + +- Changed :py:meth:`ClientResponse.raise_for_status() <aiohttp.ClientResponse.raise_for_status>` to only release the connection when invoked outside an ``async with`` context -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`9239`. + + + + +Features +-------- + +- Improved type on ``params`` to match the underlying type allowed by ``yarl`` -- by :user:`lpetre`. + + + *Related issues and pull requests on GitHub:* + :issue:`8564`. + + + + +Removals and backward incompatible breaking changes +--------------------------------------------------- + +- Improved middleware performance -- by :user:`bdraco`. + + The ``set_current_app`` method was removed from ``UrlMappingMatchInfo`` because it is no longer used, and it was unlikely external caller would ever use it. 
+ + + *Related issues and pull requests on GitHub:* + :issue:`9200`. + + + + +Improved documentation +---------------------- + +- Clarified that ``GracefulExit`` needs to be handled in ``AppRunner`` and ``ServerRunner`` when using ``handle_signals=True``. -- by :user:`Daste745` + + + *Related issues and pull requests on GitHub:* + :issue:`4414`. + + + +- Clarified that auth parameter in ClientSession will persist and be included with any request to any origin, even during redirects to different origins. -- by :user:`MaximZemskov`. + + + *Related issues and pull requests on GitHub:* + :issue:`6764`. + + + +- Clarified which timeout exceptions happen on which timeouts -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8968`. + + + +- Updated ``ClientSession`` parameters to match current code -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8991`. + + + + +Packaging updates and notes for downstreams +------------------------------------------- + +- Fixed ``test_client_session_timeout_zero`` to not require internet access -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`9004`. + + + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of making requests when there are no auto headers to skip -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8847`. + + + +- Exported ``aiohttp.TraceRequestHeadersSentParams`` -- by :user:`Hadock-is-ok`. + + + *Related issues and pull requests on GitHub:* + :issue:`8947`. + + + +- Avoided tracing overhead in the http writer when there are no active traces -- by user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9031`. + + + +- Improved performance of reify Cython implementation -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9054`. 
+ + + +- Use :meth:`URL.extend_query() <yarl.URL.extend_query>` to extend query params (requires yarl 1.11.0+) -- by :user:`bdraco`. + + If yarl is older than 1.11.0, the previous slower hand rolled version will be used. + + + *Related issues and pull requests on GitHub:* + :issue:`9068`. + + + +- Improved performance of checking if a host is an IP Address -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9095`. + + + +- Significantly improved performance of middlewares -- by :user:`bdraco`. + + The construction of the middleware wrappers is now cached and is built once per handler instead of on every request. + + + *Related issues and pull requests on GitHub:* + :issue:`9158`, :issue:`9170`. + + + +- Improved performance of web requests -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9168`, :issue:`9169`, :issue:`9172`, :issue:`9174`, :issue:`9175`, :issue:`9241`. + + + +- Improved performance of starting web requests when there is no response prepare hook -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9173`. + + + +- Significantly improved performance of expiring cookies -- by :user:`bdraco`. + + Expiring cookies has been redesigned to use :mod:`heapq` instead of a linear search, to better scale. + + + *Related issues and pull requests on GitHub:* + :issue:`9203`. + + + +- Significantly sped up filtering cookies -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9204`. 
+ + + + +---- + + 3.10.5 (2024-08-19) ========================= diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 63367052646..111ce98440c 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.10.6.dev0" +__version__ = "3.10.6rc0" from typing import TYPE_CHECKING, Tuple From bf1490951b6c26d71e2febd5e2b58f3e871fabd5 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 23 Sep 2024 03:31:00 +0000 Subject: [PATCH 0629/1511] [PR #9261/2383b9ca backport][3.11] Cleanup changelog messages (#9263) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/4650.bugfix | 2 +- CHANGES/8845.bugfix.rst | 2 +- CHANGES/9031.misc.rst | 2 +- CHANGES/9204.misc.rst | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/CHANGES/4650.bugfix b/CHANGES/4650.bugfix index 5c9fc17ff60..e3e17b00ae8 100644 --- a/CHANGES/4650.bugfix +++ b/CHANGES/4650.bugfix @@ -1 +1 @@ -Implement binding to IPv6 addresses in the pytest server fixture. +Implemented binding to IPv6 addresses in the pytest server fixture. diff --git a/CHANGES/8845.bugfix.rst b/CHANGES/8845.bugfix.rst index ff0016ac14b..c37a0095ed3 100644 --- a/CHANGES/8845.bugfix.rst +++ b/CHANGES/8845.bugfix.rst @@ -1 +1 @@ -Changed behaviour when returning an invalid response to send a 500 response -- by :user:`Dreamsorcerer`. +Changed behavior when returning an invalid response to send a 500 response -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/9031.misc.rst b/CHANGES/9031.misc.rst index 1deab5230f7..1874a4deddd 100644 --- a/CHANGES/9031.misc.rst +++ b/CHANGES/9031.misc.rst @@ -1 +1 @@ -Tracing overhead is avoided in the http writer when there are no active traces -- by user:`bdraco`. +Avoided tracing overhead in the http writer when there are no active traces -- by user:`bdraco`. 
diff --git a/CHANGES/9204.misc.rst b/CHANGES/9204.misc.rst index da12a7df6f7..9f3196fa5be 100644 --- a/CHANGES/9204.misc.rst +++ b/CHANGES/9204.misc.rst @@ -1 +1 @@ -Significantly speed up filtering cookies -- by :user:`bdraco`. +Significantly sped up filtering cookies -- by :user:`bdraco`. From 84a69ce291ff1652ce1b2474566cd44182a467c7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 23 Sep 2024 10:51:49 +0000 Subject: [PATCH 0630/1511] Bump proxy-py from 2.4.7 to 2.4.8 (#9264) Bumps [proxy-py](https://github.com/abhinavsingh/proxy.py) from 2.4.7 to 2.4.8. <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/48842552a130b028ee0409d9ba549b581d1137fb"><code>4884255</code></a> Bump actions/download-artifact from 3 to 4 in /.github/workflows in the githu...</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/ebf3599abba0e4267b7a1c0362561f63ad5d8430"><code>ebf3599</code></a> Bump pip group for benchmark comparisions to fix security scanner reports (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1">#1</a>...</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/c07436c084f6142f52e6e026c0172312a49cc206"><code>c07436c</code></a> Bump workflows to use Ubuntu 24.04 (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1478">#1478</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/f5f18e4aead7ecf7f998e466bd6526122c8d6352"><code>f5f18e4</code></a> TLS Intercept conditionally (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1476">#1476</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/a51ddaae4149f5b5452c1f4283539e1a3fea3846"><code>a51ddaa</code></a> Add clarity on how <code>HttpParser</code> class works (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1466">#1466</a>)</li> <li><a 
href="https://github.com/abhinavsingh/proxy.py/commit/05ac28862016509326a0ab4da75d7d454ea55982"><code>05ac288</code></a> Add support for <code>Upgrade: Derp</code> custom protocol (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1463">#1463</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/447b68e924432c4ddbd4c67b159b4acc4efd22be"><code>447b68e</code></a> Add <code>pip</code> and <code>Selenium Base</code> to projects using <code>proxy.py</code></li> <li>See full diff in <a href="https://github.com/abhinavsingh/proxy.py/compare/v2.4.7...v2.4.8">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=proxy-py&package-manager=pip&previous-version=2.4.7&new-version=2.4.8)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index d1069b4f590..6ce841dc2c9 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -146,7 +146,7 @@ pluggy==1.5.0 # via pytest pre-commit==3.5.0 # via -r requirements/lint.in -proxy-py==2.4.7 +proxy-py==2.4.8 # via -r requirements/test.in pycares==4.4.0 # via aiodns diff --git a/requirements/dev.txt b/requirements/dev.txt index 46be3754db8..e4e6c6ac172 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -143,7 +143,7 @@ pluggy==1.5.0 # via pytest pre-commit==3.5.0 # via -r requirements/lint.in -proxy-py==2.4.7 +proxy-py==2.4.8 # via -r requirements/test.in pycares==4.4.0 # via aiodns diff --git a/requirements/test.txt b/requirements/test.txt index bf1be8a9e33..a479b9a5e38 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -71,7 +71,7 @@ packaging==24.1 # pytest pluggy==1.5.0 # via pytest -proxy-py==2.4.7 +proxy-py==2.4.8 # via -r requirements/test.in pycares==4.4.0 # 
via aiodns From c5a3d812e519f2d94425f50b41cfd92eb8341867 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 23 Sep 2024 17:07:23 -0500 Subject: [PATCH 0631/1511] [PR #9267/947b9c4 backport][3.10] Fix double unquoting in url dispatcher (#9269) Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/9267.breaking.rst | 1 + CHANGES/9267.bugfix.rst | 1 + aiohttp/web_urldispatcher.py | 24 ++++++++++++------------ requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.in | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- setup.cfg | 2 +- tests/test_urldispatch.py | 17 ++++++++++++++--- tests/test_web_urldispatcher.py | 18 +++++++++++++++++- 12 files changed, 52 insertions(+), 23 deletions(-) create mode 100644 CHANGES/9267.breaking.rst create mode 120000 CHANGES/9267.bugfix.rst diff --git a/CHANGES/9267.breaking.rst b/CHANGES/9267.breaking.rst new file mode 100644 index 00000000000..82fec1d21b4 --- /dev/null +++ b/CHANGES/9267.breaking.rst @@ -0,0 +1 @@ +Increased minimum yarl version to 1.12.0 -- by :user:`bdraco`. 
diff --git a/CHANGES/9267.bugfix.rst b/CHANGES/9267.bugfix.rst new file mode 120000 index 00000000000..2a85c7ec63c --- /dev/null +++ b/CHANGES/9267.bugfix.rst @@ -0,0 +1 @@ +8898.bugfix.rst \ No newline at end of file diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 0f6d1b2bcd6..89abdc43fa6 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -375,7 +375,7 @@ def register_route(self, route: "ResourceRoute") -> None: async def resolve(self, request: Request) -> _Resolve: allowed_methods: Set[str] = set() - match_dict = self._match(request.rel_url.path) + match_dict = self._match(request.rel_url.path_safe) if match_dict is None: return None, allowed_methods @@ -425,8 +425,7 @@ def _match(self, path: str) -> Optional[Dict[str, str]]: # string comparison is about 10 times faster than regexp matching if self._path == path: return {} - else: - return None + return None def raw_match(self, path: str) -> bool: return self._path == path @@ -497,10 +496,9 @@ def _match(self, path: str) -> Optional[Dict[str, str]]: match = self._pattern.fullmatch(path) if match is None: return None - else: - return { - key: _unquote_path(value) for key, value in match.groupdict().items() - } + return { + key: _unquote_path_safe(value) for key, value in match.groupdict().items() + } def raw_match(self, path: str) -> bool: return self._orig_path == path @@ -645,7 +643,7 @@ def set_options_route(self, handler: Handler) -> None: ) async def resolve(self, request: Request) -> _Resolve: - path = request.rel_url.path + path = request.rel_url.path_safe method = request.method allowed_methods = set(self._routes) if not path.startswith(self._prefix2) and path != self._prefix: @@ -654,7 +652,7 @@ async def resolve(self, request: Request) -> _Resolve: if method not in allowed_methods: return None, allowed_methods - match_dict = {"filename": _unquote_path(path[len(self._prefix) + 1 :])} + match_dict = {"filename": 
_unquote_path_safe(path[len(self._prefix) + 1 :])} return (UrlMappingMatchInfo(match_dict, self._routes[method]), allowed_methods) def __len__(self) -> int: @@ -1035,7 +1033,7 @@ async def resolve(self, request: Request) -> UrlMappingMatchInfo: # candidates for a given url part because there are multiple resources # registered for the same canonical path, we resolve them in a linear # fashion to ensure registration order is respected. - url_part = request.rel_url.path + url_part = request.rel_url.path_safe while url_part: for candidate in resource_index.get(url_part, ()): match_dict, allowed = await candidate.resolve(request) @@ -1286,8 +1284,10 @@ def _quote_path(value: str) -> str: return URL.build(path=value, encoded=False).raw_path -def _unquote_path(value: str) -> str: - return URL.build(path=value, encoded=True).path.replace("%2F", "/") +def _unquote_path_safe(value: str) -> str: + if "%" not in value: + return value + return value.replace("%2F", "/").replace("%25", "%") def _requote_path(value: str) -> str: diff --git a/requirements/base.txt b/requirements/base.txt index 6604aa0f6c5..4dd5dc1f05b 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -38,5 +38,5 @@ pycparser==2.21 # via cffi uvloop==0.19.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in -yarl==1.11.0 +yarl==1.12.0 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index f3109af5fb7..59f7a130b75 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -274,7 +274,7 @@ webcolors==1.11.1 # via blockdiag wheel==0.37.0 # via pip-tools -yarl==1.11.0 +yarl==1.12.0 # via -r requirements/runtime-deps.in zipp==3.17.0 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 54c0157b01a..d6ee12fe9d4 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -261,7 +261,7 @@ webcolors==1.13 # via blockdiag wheel==0.41.0 # via pip-tools 
-yarl==1.11.0 +yarl==1.12.0 # via -r requirements/runtime-deps.in zipp==3.17.0 # via diff --git a/requirements/runtime-deps.in b/requirements/runtime-deps.in index 2299584a463..9a199453d55 100644 --- a/requirements/runtime-deps.in +++ b/requirements/runtime-deps.in @@ -9,4 +9,4 @@ Brotli; platform_python_implementation == 'CPython' brotlicffi; platform_python_implementation != 'CPython' frozenlist >= 1.1.1 multidict >=4.5, < 7.0 -yarl >= 1.0, < 2.0 +yarl >= 1.12.0, < 2.0 diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 279a9525fc5..b7a3828955e 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -32,5 +32,5 @@ pycares==4.3.0 # via aiodns pycparser==2.21 # via cffi -yarl==1.11.0 +yarl==1.12.0 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index ad9ec0ace39..a42a5c0dbb3 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -125,5 +125,5 @@ uvloop==0.19.0 ; platform_system != "Windows" and implementation_name == "cpytho # via -r requirements/base.in wait-for-it==2.2.2 # via -r requirements/test.in -yarl==1.11.0 +yarl==1.12.0 # via -r requirements/runtime-deps.in diff --git a/setup.cfg b/setup.cfg index bd93b00cb2f..a3edc74fc8c 100644 --- a/setup.cfg +++ b/setup.cfg @@ -55,7 +55,7 @@ install_requires = attrs >= 17.3.0 frozenlist >= 1.1.1 multidict >=4.5, < 7.0 - yarl >= 1.0, < 2.0 + yarl >= 1.12.0, < 2.0 [options.exclude_package_data] * = diff --git a/tests/test_urldispatch.py b/tests/test_urldispatch.py index f06f73edc21..de6815589b0 100644 --- a/tests/test_urldispatch.py +++ b/tests/test_urldispatch.py @@ -1,7 +1,7 @@ import pathlib import re from collections.abc import Container, Iterable, Mapping, MutableMapping, Sized -from urllib.parse import unquote +from urllib.parse import quote, unquote import pytest from re_assert import Matches @@ -457,7 +457,7 @@ def test_add_static_quoting(router) -> None: ) assert router["static"] is resource url = 
resource.url_for(filename="/1 2/файл%2F.txt") - assert url.path == "/пре %2Fфикс/1 2/файл%2F.txt" + assert url.path == "/пре /фикс/1 2/файл%2F.txt" assert str(url) == ( "/%D0%BF%D1%80%D0%B5%20%2F%D1%84%D0%B8%D0%BA%D1%81" "/1%202/%D1%84%D0%B0%D0%B9%D0%BB%252F.txt" @@ -630,7 +630,7 @@ def test_route_dynamic_quoting(router) -> None: route = router.add_route("GET", r"/пре %2Fфикс/{arg}", handler) url = route.url_for(arg="1 2/текст%2F") - assert url.path == "/пре %2Fфикс/1 2/текст%2F" + assert url.path == "/пре /фикс/1 2/текст%2F" assert str(url) == ( "/%D0%BF%D1%80%D0%B5%20%2F%D1%84%D0%B8%D0%BA%D1%81" "/1%202/%D1%82%D0%B5%D0%BA%D1%81%D1%82%252F" @@ -742,6 +742,17 @@ async def test_dynamic_match_unquoted_path(router) -> None: assert match_info == {"path": "path", "subpath": unquote(resource_id)} +async def test_dynamic_match_double_quoted_path(router: web.UrlDispatcher) -> None: + """Verify that double-quoted path is unquoted only once.""" + handler = make_handler() + router.add_route("GET", "/{path}/{subpath}", handler) + resource_id = quote("my/path|with!some%strange$characters", safe="") + double_quoted_resource_id = quote(resource_id, safe="") + req = make_mocked_request("GET", f"/path/{double_quoted_resource_id}") + match_info = await router.resolve(req) + assert match_info == {"path": "path", "subpath": resource_id} + + def test_add_route_not_started_with_slash(router) -> None: with pytest.raises(ValueError): handler = make_handler() diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 7991cfe821e..eca365d2a25 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -5,7 +5,7 @@ import socket import sys from stat import S_IFIFO, S_IMODE -from typing import Any, Generator, Optional +from typing import Any, Generator, NoReturn, Optional import pytest import yarl @@ -885,6 +885,22 @@ async def handler(request: web.Request) -> web.Response: assert resp.status == expected_http_resp_status +async def 
test_decoded_raw_match_regex(aiohttp_client: AiohttpClient) -> None: + """Verify that raw_match only matches decoded url.""" + app = web.Application() + + async def handler(request: web.Request) -> NoReturn: + assert False + + app.router.add_get("/467%2C802%2C24834%2C24952%2C25362%2C40574/hello", handler) + client = await aiohttp_client(app) + + async with client.get( + yarl.URL("/467%2C802%2C24834%2C24952%2C25362%2C40574/hello", encoded=True) + ) as resp: + assert resp.status == 404 # should only match decoded url + + async def test_order_is_preserved(aiohttp_client: AiohttpClient) -> None: """Test route order is preserved. From be348691da356ea45b88f2074831a22168c41ee6 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 23 Sep 2024 17:11:55 -0500 Subject: [PATCH 0632/1511] Release 3.10.6rc1 --- CHANGES.rst | 18 ++++++++++++++++++ aiohttp/__init__.py | 2 +- 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index 9a944423642..36f05700df3 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,24 @@ .. towncrier release notes start +3.10.6rc1 (2024-09-22) +====================== + +Removals and backward incompatible breaking changes +--------------------------------------------------- + +- Increased minimum yarl version to 1.12.0 -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9267`. + + + + +---- + + 3.10.6rc0 (2024-09-22) ====================== diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 111ce98440c..25d7f39ffe5 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.10.6rc0" +__version__ = "3.10.6rc1" from typing import TYPE_CHECKING, Tuple From 22a9243a32d7dc16149a0ca74170acc7ab6a7372 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Mon, 23 Sep 2024 17:14:47 -0500 Subject: [PATCH 0633/1511] [PR #9267/947b9c4 backport][3.11] Fix double unquoting in url dispatcher (#9270) Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/9267.breaking.rst | 1 + CHANGES/9267.bugfix.rst | 1 + aiohttp/web_urldispatcher.py | 24 ++++++++++++------------ requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.in | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- setup.cfg | 2 +- tests/test_urldispatch.py | 17 ++++++++++++++--- tests/test_web_urldispatcher.py | 18 +++++++++++++++++- 12 files changed, 52 insertions(+), 23 deletions(-) create mode 100644 CHANGES/9267.breaking.rst create mode 120000 CHANGES/9267.bugfix.rst diff --git a/CHANGES/9267.breaking.rst b/CHANGES/9267.breaking.rst new file mode 100644 index 00000000000..82fec1d21b4 --- /dev/null +++ b/CHANGES/9267.breaking.rst @@ -0,0 +1 @@ +Increased minimum yarl version to 1.12.0 -- by :user:`bdraco`. 
diff --git a/CHANGES/9267.bugfix.rst b/CHANGES/9267.bugfix.rst new file mode 120000 index 00000000000..2a85c7ec63c --- /dev/null +++ b/CHANGES/9267.bugfix.rst @@ -0,0 +1 @@ +8898.bugfix.rst \ No newline at end of file diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 9c07f4ee9ad..8c1eef9094a 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -375,7 +375,7 @@ def register_route(self, route: "ResourceRoute") -> None: async def resolve(self, request: Request) -> _Resolve: allowed_methods: Set[str] = set() - match_dict = self._match(request.rel_url.path) + match_dict = self._match(request.rel_url.path_safe) if match_dict is None: return None, allowed_methods @@ -425,8 +425,7 @@ def _match(self, path: str) -> Optional[Dict[str, str]]: # string comparison is about 10 times faster than regexp matching if self._path == path: return {} - else: - return None + return None def raw_match(self, path: str) -> bool: return self._path == path @@ -497,10 +496,9 @@ def _match(self, path: str) -> Optional[Dict[str, str]]: match = self._pattern.fullmatch(path) if match is None: return None - else: - return { - key: _unquote_path(value) for key, value in match.groupdict().items() - } + return { + key: _unquote_path_safe(value) for key, value in match.groupdict().items() + } def raw_match(self, path: str) -> bool: return self._orig_path == path @@ -645,7 +643,7 @@ def set_options_route(self, handler: Handler) -> None: ) async def resolve(self, request: Request) -> _Resolve: - path = request.rel_url.path + path = request.rel_url.path_safe method = request.method allowed_methods = set(self._routes) if not path.startswith(self._prefix2) and path != self._prefix: @@ -654,7 +652,7 @@ async def resolve(self, request: Request) -> _Resolve: if method not in allowed_methods: return None, allowed_methods - match_dict = {"filename": _unquote_path(path[len(self._prefix) + 1 :])} + match_dict = {"filename": 
_unquote_path_safe(path[len(self._prefix) + 1 :])} return (UrlMappingMatchInfo(match_dict, self._routes[method]), allowed_methods) def __len__(self) -> int: @@ -1035,7 +1033,7 @@ async def resolve(self, request: Request) -> UrlMappingMatchInfo: # candidates for a given url part because there are multiple resources # registered for the same canonical path, we resolve them in a linear # fashion to ensure registration order is respected. - url_part = request.rel_url.path + url_part = request.rel_url.path_safe while url_part: for candidate in resource_index.get(url_part, ()): match_dict, allowed = await candidate.resolve(request) @@ -1286,8 +1284,10 @@ def _quote_path(value: str) -> str: return URL.build(path=value, encoded=False).raw_path -def _unquote_path(value: str) -> str: - return URL.build(path=value, encoded=True).path.replace("%2F", "/") +def _unquote_path_safe(value: str) -> str: + if "%" not in value: + return value + return value.replace("%2F", "/").replace("%25", "%") def _requote_path(value: str) -> str: diff --git a/requirements/base.txt b/requirements/base.txt index d947f437f98..1eb566b4627 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -40,5 +40,5 @@ typing-extensions==4.12.2 # via multidict uvloop==0.20.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in -yarl==1.11.1 +yarl==1.12.0 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 6ce841dc2c9..4f90ea80755 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -287,7 +287,7 @@ webcolors==24.8.0 # via blockdiag wheel==0.44.0 # via pip-tools -yarl==1.11.1 +yarl==1.12.0 # via -r requirements/runtime-deps.in zipp==3.20.2 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index e4e6c6ac172..611f438c8b4 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -279,7 +279,7 @@ webcolors==24.8.0 # via blockdiag wheel==0.44.0 # via 
pip-tools -yarl==1.11.1 +yarl==1.12.0 # via -r requirements/runtime-deps.in zipp==3.20.2 # via diff --git a/requirements/runtime-deps.in b/requirements/runtime-deps.in index 1b440bc7c68..9a199453d55 100644 --- a/requirements/runtime-deps.in +++ b/requirements/runtime-deps.in @@ -9,4 +9,4 @@ Brotli; platform_python_implementation == 'CPython' brotlicffi; platform_python_implementation != 'CPython' frozenlist >= 1.1.1 multidict >=4.5, < 7.0 -yarl >= 1.11.0, < 2.0 +yarl >= 1.12.0, < 2.0 diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index eea3d44a539..87d55bfa80c 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -34,5 +34,5 @@ pycparser==2.22 # via cffi typing-extensions==4.12.2 # via multidict -yarl==1.11.1 +yarl==1.12.0 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index a479b9a5e38..c2b9653fd4a 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -137,5 +137,5 @@ uvloop==0.20.0 ; platform_system != "Windows" and implementation_name == "cpytho # via -r requirements/base.in wait-for-it==2.2.2 # via -r requirements/test.in -yarl==1.11.1 +yarl==1.12.0 # via -r requirements/runtime-deps.in diff --git a/setup.cfg b/setup.cfg index c5258115f11..91f4385aedc 100644 --- a/setup.cfg +++ b/setup.cfg @@ -54,7 +54,7 @@ install_requires = attrs >= 17.3.0 frozenlist >= 1.1.1 multidict >=4.5, < 7.0 - yarl >= 1.11.0, < 2.0 + yarl >= 1.12.0, < 2.0 [options.exclude_package_data] * = diff --git a/tests/test_urldispatch.py b/tests/test_urldispatch.py index d0efa91593e..8c3eaed13b7 100644 --- a/tests/test_urldispatch.py +++ b/tests/test_urldispatch.py @@ -1,7 +1,7 @@ import pathlib import re from collections.abc import Container, Iterable, Mapping, MutableMapping, Sized -from urllib.parse import unquote +from urllib.parse import quote, unquote import pytest from re_assert import Matches @@ -457,7 +457,7 @@ def test_add_static_quoting(router) -> None: ) assert 
router["static"] is resource url = resource.url_for(filename="/1 2/файл%2F.txt") - assert url.path == "/пре %2Fфикс/1 2/файл%2F.txt" + assert url.path == "/пре /фикс/1 2/файл%2F.txt" assert str(url) == ( "/%D0%BF%D1%80%D0%B5%20%2F%D1%84%D0%B8%D0%BA%D1%81" "/1%202/%D1%84%D0%B0%D0%B9%D0%BB%252F.txt" @@ -630,7 +630,7 @@ def test_route_dynamic_quoting(router) -> None: route = router.add_route("GET", r"/пре %2Fфикс/{arg}", handler) url = route.url_for(arg="1 2/текст%2F") - assert url.path == "/пре %2Fфикс/1 2/текст%2F" + assert url.path == "/пре /фикс/1 2/текст%2F" assert str(url) == ( "/%D0%BF%D1%80%D0%B5%20%2F%D1%84%D0%B8%D0%BA%D1%81" "/1%202/%D1%82%D0%B5%D0%BA%D1%81%D1%82%252F" @@ -742,6 +742,17 @@ async def test_dynamic_match_unquoted_path(router) -> None: assert match_info == {"path": "path", "subpath": unquote(resource_id)} +async def test_dynamic_match_double_quoted_path(router: web.UrlDispatcher) -> None: + """Verify that double-quoted path is unquoted only once.""" + handler = make_handler() + router.add_route("GET", "/{path}/{subpath}", handler) + resource_id = quote("my/path|with!some%strange$characters", safe="") + double_quoted_resource_id = quote(resource_id, safe="") + req = make_mocked_request("GET", f"/path/{double_quoted_resource_id}") + match_info = await router.resolve(req) + assert match_info == {"path": "path", "subpath": resource_id} + + def test_add_route_not_started_with_slash(router) -> None: with pytest.raises(ValueError): handler = make_handler() diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 7991cfe821e..eca365d2a25 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -5,7 +5,7 @@ import socket import sys from stat import S_IFIFO, S_IMODE -from typing import Any, Generator, Optional +from typing import Any, Generator, NoReturn, Optional import pytest import yarl @@ -885,6 +885,22 @@ async def handler(request: web.Request) -> web.Response: assert resp.status == 
expected_http_resp_status +async def test_decoded_raw_match_regex(aiohttp_client: AiohttpClient) -> None: + """Verify that raw_match only matches decoded url.""" + app = web.Application() + + async def handler(request: web.Request) -> NoReturn: + assert False + + app.router.add_get("/467%2C802%2C24834%2C24952%2C25362%2C40574/hello", handler) + client = await aiohttp_client(app) + + async with client.get( + yarl.URL("/467%2C802%2C24834%2C24952%2C25362%2C40574/hello", encoded=True) + ) as resp: + assert resp.status == 404 # should only match decoded url + + async def test_order_is_preserved(aiohttp_client: AiohttpClient) -> None: """Test route order is preserved. From 361db7c64bb69c91fc5dee7869e5c64ee254573e Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 23 Sep 2024 20:23:31 -0500 Subject: [PATCH 0634/1511] Release 3.10.6rc2 --- CHANGES.rst | 9 +++++++++ aiohttp/__init__.py | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index 36f05700df3..cff8a8a0e2e 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,15 @@ .. towncrier release notes start +3.10.6rc2 (2024-09-23) +====================== + +No significant changes. + + +---- + + 3.10.6rc1 (2024-09-22) ====================== diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 25d7f39ffe5..02fde739bdb 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.10.6rc1" +__version__ = "3.10.6rc2" from typing import TYPE_CHECKING, Tuple From eb938088cb88dc6169f3688ef03cb1034066d2e8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 24 Sep 2024 10:58:18 +0000 Subject: [PATCH 0635/1511] Bump yarl from 1.12.0 to 1.12.1 (#9274) Bumps [yarl](https://github.com/aio-libs/yarl) from 1.12.0 to 1.12.1. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/yarl/releases">yarl's releases</a>.</em></p> <blockquote> <h2>1.12.1</h2> <p>No significant changes.</p> <p>This release was created because the signatures failed to upload for 1.12.0</p> <hr /> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/yarl/blob/master/CHANGES.rst">yarl's changelog</a>.</em></p> <blockquote> <h1>1.12.1</h1> <p><em>(2024-09-23)</em></p> <p>No significant changes.</p> <hr /> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/yarl/commit/19f25164d5f941cf3b6d47174b6ad876be96123f"><code>19f2516</code></a> Release 1.12.1</li> <li><a href="https://github.com/aio-libs/yarl/commit/cf5a99630719ab15966c2e85639c54c8a78b2d70"><code>cf5a996</code></a> Increase release timeout from 7 to 14 minutes (<a href="https://redirect.github.com/aio-libs/yarl/issues/1153">#1153</a>)</li> <li>See full diff in <a href="https://github.com/aio-libs/yarl/compare/v1.12.0...v1.12.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=yarl&package-manager=pip&previous-version=1.12.0&new-version=1.12.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 1eb566b4627..c508dd9665e 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -40,5 +40,5 @@ typing-extensions==4.12.2 # via multidict 
uvloop==0.20.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in -yarl==1.12.0 +yarl==1.12.1 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 4f90ea80755..7af5c4eea6c 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -287,7 +287,7 @@ webcolors==24.8.0 # via blockdiag wheel==0.44.0 # via pip-tools -yarl==1.12.0 +yarl==1.12.1 # via -r requirements/runtime-deps.in zipp==3.20.2 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 611f438c8b4..13a28f247ae 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -279,7 +279,7 @@ webcolors==24.8.0 # via blockdiag wheel==0.44.0 # via pip-tools -yarl==1.12.0 +yarl==1.12.1 # via -r requirements/runtime-deps.in zipp==3.20.2 # via diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 87d55bfa80c..9a28aaf8c00 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -34,5 +34,5 @@ pycparser==2.22 # via cffi typing-extensions==4.12.2 # via multidict -yarl==1.12.0 +yarl==1.12.1 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index c2b9653fd4a..15f15d16bc5 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -137,5 +137,5 @@ uvloop==0.20.0 ; platform_system != "Windows" and implementation_name == "cpytho # via -r requirements/base.in wait-for-it==2.2.2 # via -r requirements/test.in -yarl==1.12.0 +yarl==1.12.1 # via -r requirements/runtime-deps.in From cf1cad30bd586b1e376e5733a4cbd72bb40019a5 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 24 Sep 2024 14:18:31 +0100 Subject: [PATCH 0636/1511] [PR #9276/ccbd2c5e backport][3.11] Bump uvloop (#9277) **This is a backport of PR #9276 as merged into master (ccbd2c5e3364dbf0425f097ae911ab84f44758dc).** Co-authored-by: Sam Bull <git@sambull.org> --- 
.github/workflows/ci-cd.yml | 4 ++-- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 54ceb6b74cd..0c32a97f647 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -125,7 +125,7 @@ jobs: needs: gen_llhttp strategy: matrix: - pyver: [3.9, '3.10', '3.11', '3.12'] + pyver: [3.9, '3.10', '3.11', '3.12', '3.13'] no-extensions: ['', 'Y'] os: [ubuntu, macos, windows] experimental: [false] @@ -140,7 +140,7 @@ jobs: os: ubuntu experimental: false - os: ubuntu - pyver: "3.13" + pyver: "3.14" experimental: true no-extensions: 'Y' fail-fast: true diff --git a/requirements/base.txt b/requirements/base.txt index c508dd9665e..c4c189c3e4e 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -38,7 +38,7 @@ pycparser==2.22 # via cffi typing-extensions==4.12.2 # via multidict -uvloop==0.20.0 ; platform_system != "Windows" and implementation_name == "cpython" +uvloop==0.21.0b1 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in yarl==1.12.1 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 7af5c4eea6c..fdf1e30f283 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -275,7 +275,7 @@ uritemplate==4.1.1 # via gidgethub urllib3==2.2.3 # via requests -uvloop==0.20.0 ; platform_system != "Windows" +uvloop==0.21.0b1 ; platform_system != "Windows" # via # -r requirements/base.in # -r requirements/lint.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 13a28f247ae..81b1c1f6988 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -267,7 +267,7 @@ uritemplate==4.1.1 # via gidgethub urllib3==2.2.3 # via requests -uvloop==0.20.0 ; platform_system != "Windows" and 
implementation_name == "cpython" +uvloop==0.21.0b1 ; platform_system != "Windows" and implementation_name == "cpython" # via # -r requirements/base.in # -r requirements/lint.in diff --git a/requirements/lint.txt b/requirements/lint.txt index f5bdccfd695..299b2b09f4a 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -117,7 +117,7 @@ typing-extensions==4.12.2 # typer urllib3==2.2.3 # via requests -uvloop==0.20.0 ; platform_system != "Windows" +uvloop==0.21.0b1 ; platform_system != "Windows" # via -r requirements/lint.in virtualenv==20.26.5 # via pre-commit diff --git a/requirements/test.txt b/requirements/test.txt index 15f15d16bc5..2280e990965 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -133,7 +133,7 @@ typing-extensions==4.12.2 # typer urllib3==2.2.3 # via requests -uvloop==0.20.0 ; platform_system != "Windows" and implementation_name == "cpython" +uvloop==0.21.0b1 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in wait-for-it==2.2.2 # via -r requirements/test.in From 91c3162a285ffeff25887c8654ab6bfb801724c8 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Tue, 24 Sep 2024 09:17:52 -0500 Subject: [PATCH 0637/1511] [3.10] Bump pydantic and deps for python 3.13 compat (#9279) --- requirements/constraints.txt | 6 +++--- requirements/dev.txt | 6 +++--- requirements/lint.txt | 6 +++--- requirements/test.txt | 6 +++--- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 59f7a130b75..9badce63e06 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -18,7 +18,7 @@ aiosignal==1.3.1 # via -r requirements/runtime-deps.in alabaster==0.7.12 # via sphinx -annotated-types==0.5.0 +annotated-types==0.7.0 # via pydantic async-timeout==4.0.3 ; python_version < "3.11" # via @@ -148,9 +148,9 @@ pycares==4.3.0 # via aiodns pycparser==2.21 # via cffi -pydantic==2.2.0 +pydantic==2.9.2 # via python-on-whales -pydantic-core==2.6.0 +pydantic-core==2.23.4 # via pydantic pyenchant==3.2.2 # via sphinxcontrib-spelling diff --git a/requirements/dev.txt b/requirements/dev.txt index d6ee12fe9d4..1f42ee2d4bc 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -18,7 +18,7 @@ aiosignal==1.3.1 # via -r requirements/runtime-deps.in alabaster==0.7.13 # via sphinx -annotated-types==0.5.0 +annotated-types==0.7.0 # via pydantic async-timeout==4.0.3 ; python_version < "3.11" # via @@ -143,9 +143,9 @@ pycares==4.3.0 # via aiodns pycparser==2.21 # via cffi -pydantic==2.2.0 +pydantic==2.9.2 # via python-on-whales -pydantic-core==2.6.0 +pydantic-core==2.23.4 # via pydantic pygments==2.15.1 # via sphinx diff --git a/requirements/lint.txt b/requirements/lint.txt index 7ca49ba88d7..e1c8ca0e234 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 # via -r requirements/lint.in aioredis==2.0.1 # via -r requirements/lint.in -annotated-types==0.6.0 +annotated-types==0.7.0 # via pydantic async-timeout==4.0.3 # via aioredis @@ -62,9 +62,9 @@ pycares==4.4.0 # via aiodns pycparser==2.22 
# via cffi -pydantic==2.7.1 +pydantic==2.9.2 # via python-on-whales -pydantic-core==2.18.2 +pydantic-core==2.23.4 # via pydantic pygments==2.17.2 # via rich diff --git a/requirements/test.txt b/requirements/test.txt index a42a5c0dbb3..83592f6ef70 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -10,7 +10,7 @@ aiohappyeyeballs==2.3.4 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in -annotated-types==0.5.0 +annotated-types==0.7.0 # via pydantic async-timeout==4.0.3 ; python_version < "3.11" # via -r requirements/runtime-deps.in @@ -73,9 +73,9 @@ pycares==4.3.0 # via aiodns pycparser==2.21 # via cffi -pydantic==2.2.0 +pydantic==2.9.2 # via python-on-whales -pydantic-core==2.6.0 +pydantic-core==2.23.4 # via pydantic pytest==8.3.2 # via From 84c25b78a8a7b67b4915d5dd0e77112b623f043f Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 24 Sep 2024 15:18:58 +0100 Subject: [PATCH 0638/1511] [PR #8748/2816002b backport][3.10] Add Python 3.13 classifier (#9280) **This is a backport of PR #8748 as merged into master (2816002b0b67bd517ec841afa65f70402997507d).** Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/8748.feature.rst | 1 + setup.cfg | 1 + 2 files changed, 2 insertions(+) create mode 100644 CHANGES/8748.feature.rst diff --git a/CHANGES/8748.feature.rst b/CHANGES/8748.feature.rst new file mode 100644 index 00000000000..7794d16e4dc --- /dev/null +++ b/CHANGES/8748.feature.rst @@ -0,0 +1 @@ +Declared Python 3.13 supported -- by :user:`bdraco`. 
diff --git a/setup.cfg b/setup.cfg index a3edc74fc8c..d998e736b45 100644 --- a/setup.cfg +++ b/setup.cfg @@ -38,6 +38,7 @@ classifiers = Programming Language :: Python :: 3.10 Programming Language :: Python :: 3.11 Programming Language :: Python :: 3.12 + Programming Language :: Python :: 3.13 Topic :: Internet :: WWW/HTTP From 1928fea21d6bc77199b2b312ff2771f00c3ee2a9 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 24 Sep 2024 14:34:05 +0000 Subject: [PATCH 0639/1511] [PR #8748/2816002b backport][3.11] Add Python 3.13 classifier (#9281) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/8748.feature.rst | 1 + setup.cfg | 1 + 2 files changed, 2 insertions(+) create mode 100644 CHANGES/8748.feature.rst diff --git a/CHANGES/8748.feature.rst b/CHANGES/8748.feature.rst new file mode 100644 index 00000000000..7794d16e4dc --- /dev/null +++ b/CHANGES/8748.feature.rst @@ -0,0 +1 @@ +Declared Python 3.13 supported -- by :user:`bdraco`. diff --git a/setup.cfg b/setup.cfg index 91f4385aedc..8168dac4408 100644 --- a/setup.cfg +++ b/setup.cfg @@ -37,6 +37,7 @@ classifiers = Programming Language :: Python :: 3.10 Programming Language :: Python :: 3.11 Programming Language :: Python :: 3.12 + Programming Language :: Python :: 3.13 Topic :: Internet :: WWW/HTTP From 3ba15873997ec8dadb1f0cf7aaf9ba2d81bf70ec Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Tue, 24 Sep 2024 09:54:14 -0500 Subject: [PATCH 0640/1511] [3.10] Bump typing-extensions to 4.12.2 (#9282) --- requirements/constraints.txt | 2 +- requirements/cython.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 9badce63e06..d08dae712ab 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -250,7 +250,7 @@ trustme==1.1.0 ; platform_machine != "i686" # via -r requirements/test.in typer==0.6.1 # via python-on-whales -typing-extensions==4.11.0 +typing-extensions==4.12.2 # via # aioredis # annotated-types diff --git a/requirements/cython.txt b/requirements/cython.txt index 72b9a67af98..053c390dd02 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -8,5 +8,5 @@ cython==3.0.10 # via -r requirements/cython.in multidict==6.0.5 # via -r requirements/multidict.in -typing-extensions==4.11.0 +typing-extensions==4.12.2 # via -r requirements/typing-extensions.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 1f42ee2d4bc..f34ba86729f 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -236,7 +236,7 @@ trustme==1.1.0 ; platform_machine != "i686" # via -r requirements/test.in typer==0.9.0 # via python-on-whales -typing-extensions==4.11.0 +typing-extensions==4.12.2 # via # aioredis # annotated-types diff --git a/requirements/lint.txt b/requirements/lint.txt index e1c8ca0e234..57f4824ceac 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -95,7 +95,7 @@ trustme==1.1.0 # via -r requirements/lint.in typer==0.12.3 # via python-on-whales -typing-extensions==4.11.0 +typing-extensions==4.12.2 # via # aioredis # annotated-types diff --git a/requirements/test.txt b/requirements/test.txt index 83592f6ef70..2685f179b00 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -111,7 
+111,7 @@ trustme==1.1.0 ; platform_machine != "i686" # via -r requirements/test.in typer==0.9.0 # via python-on-whales -typing-extensions==4.11.0 +typing-extensions==4.12.2 # via # annotated-types # mypy From 6ff4ac97e6b0e98bdf761bf7620258968ecd6fa1 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Tue, 24 Sep 2024 16:48:58 +0100 Subject: [PATCH 0641/1511] Bump uvloop (#9276) (#9278) (cherry picked from commit ccbd2c5e3364dbf0425f097ae911ab84f44758dc) --- .github/workflows/ci-cd.yml | 4 ++-- requirements/base.txt | 2 +- requirements/constraints.txt | 4 ++-- requirements/dev.txt | 4 ++-- requirements/lint.txt | 2 +- requirements/test.txt | 4 ++-- 6 files changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 17632dba6e6..4d77978d1e3 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -125,7 +125,7 @@ jobs: needs: gen_llhttp strategy: matrix: - pyver: [3.8, 3.9, '3.10', '3.11', '3.12'] + pyver: [3.8, 3.9, '3.10', '3.11', '3.12', '3.13'] no-extensions: ['', 'Y'] os: [ubuntu, macos, windows] experimental: [false] @@ -142,7 +142,7 @@ jobs: os: ubuntu experimental: false - os: ubuntu - pyver: "3.13" + pyver: "3.14" experimental: true no-extensions: 'Y' fail-fast: true diff --git a/requirements/base.txt b/requirements/base.txt index 4dd5dc1f05b..6359f4d60c7 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -36,7 +36,7 @@ pycares==4.3.0 # via aiodns pycparser==2.21 # via cffi -uvloop==0.19.0 ; platform_system != "Windows" and implementation_name == "cpython" +uvloop==0.21.0b1 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in yarl==1.12.0 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index d08dae712ab..485480490a6 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -188,7 +188,7 @@ pyyaml==6.0.1 # via pre-commit 
re-assert==1.1.0 # via -r requirements/test.in -regex==2021.11.10 +regex==2024.9.11 # via re-assert requests==2.31.0 # via @@ -262,7 +262,7 @@ uritemplate==4.1.1 # via gidgethub urllib3==1.26.7 # via requests -uvloop==0.19.0 ; platform_system != "Windows" +uvloop==0.21.0b1 ; platform_system != "Windows" # via # -r requirements/base.in # -r requirements/lint.in diff --git a/requirements/dev.txt b/requirements/dev.txt index f34ba86729f..a7e4fd44ecb 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -179,7 +179,7 @@ pyyaml==6.0.1 # via pre-commit re-assert==1.1.0 # via -r requirements/test.in -regex==2023.6.3 +regex==2024.9.11 # via re-assert requests==2.31.0 # via @@ -249,7 +249,7 @@ uritemplate==4.1.1 # via gidgethub urllib3==2.0.4 # via requests -uvloop==0.19.0 ; platform_system != "Windows" and implementation_name == "cpython" +uvloop==0.21.0b1 ; platform_system != "Windows" and implementation_name == "cpython" # via # -r requirements/base.in # -r requirements/lint.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 57f4824ceac..5f1b068cb1c 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -107,7 +107,7 @@ typing-extensions==4.12.2 # typer urllib3==2.2.1 # via requests -uvloop==0.19.0 ; platform_system != "Windows" +uvloop==0.21.0b1 ; platform_system != "Windows" # via -r requirements/lint.in virtualenv==20.24.2 # via pre-commit diff --git a/requirements/test.txt b/requirements/test.txt index 2685f179b00..9900af5bd19 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -92,7 +92,7 @@ python-on-whales==0.72.0 # via -r requirements/test.in re-assert==1.1.0 # via -r requirements/test.in -regex==2023.6.3 +regex==2024.9.11 # via re-assert requests==2.31.0 # via python-on-whales @@ -121,7 +121,7 @@ typing-extensions==4.12.2 # typer urllib3==2.0.4 # via requests -uvloop==0.19.0 ; platform_system != "Windows" and implementation_name == "cpython" +uvloop==0.21.0b1 ; platform_system != "Windows" and 
implementation_name == "cpython" # via -r requirements/base.in wait-for-it==2.2.2 # via -r requirements/test.in From 81bb9cb46c644ed6e0803af75fed1ac37017ab2c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 24 Sep 2024 19:09:39 +0100 Subject: [PATCH 0642/1511] [PR #9284/a4f9eca4 backport][3.10] Bump aiohttp-theme (#9286) **This is a backport of PR #9284 as merged into master (a4f9eca487d09cf4b960aa27b4ea5e8129e29697).** Co-authored-by: Sam Bull <git@sambull.org> --- .readthedocs.yml | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index 7e9c11160e5..1b66ee7c0e4 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -11,7 +11,7 @@ submodules: recursive: true build: - os: ubuntu-22.04 + os: ubuntu-24.04 tools: python: "3.11" diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 485480490a6..b9a6c80e86f 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -10,7 +10,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # -r requirements/runtime-deps.in aiohappyeyeballs==2.3.4 # via -r requirements/runtime-deps.in -aiohttp-theme==0.1.6 +aiohttp-theme==0.1.7 # via -r requirements/doc.in aioredis==2.0.1 # via -r requirements/lint.in diff --git a/requirements/dev.txt b/requirements/dev.txt index a7e4fd44ecb..92ec10aa4c1 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -10,7 +10,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # -r requirements/runtime-deps.in aiohappyeyeballs==2.3.4 # via -r requirements/runtime-deps.in -aiohttp-theme==0.1.6 +aiohttp-theme==0.1.7 # via -r requirements/doc.in aioredis==2.0.1 # via -r requirements/lint.in diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 
9ee15189662..5b8419574af 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/doc-spelling.txt --resolver=backtracking --strip-extras requirements/doc-spelling.in # -aiohttp-theme==0.1.6 +aiohttp-theme==0.1.7 # via -r requirements/doc.in alabaster==0.7.13 # via sphinx diff --git a/requirements/doc.txt b/requirements/doc.txt index d9e7fb0ad7f..5a27f896ad9 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/doc.txt --resolver=backtracking --strip-extras requirements/doc.in # -aiohttp-theme==0.1.6 +aiohttp-theme==0.1.7 # via -r requirements/doc.in alabaster==0.7.13 # via sphinx From 238991c9f5aadbfe7b743d66e560b3e2c9e027ca Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 24 Sep 2024 19:17:09 +0100 Subject: [PATCH 0643/1511] [PR #9284/a4f9eca4 backport][3.11] Bump aiohttp-theme (#9287) **This is a backport of PR #9284 as merged into master (a4f9eca487d09cf4b960aa27b4ea5e8129e29697).** Co-authored-by: Sam Bull <git@sambull.org> --- .readthedocs.yml | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index 7e9c11160e5..1b66ee7c0e4 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -11,7 +11,7 @@ submodules: recursive: true build: - os: ubuntu-22.04 + os: ubuntu-24.04 tools: python: "3.11" diff --git a/requirements/constraints.txt b/requirements/constraints.txt index fdf1e30f283..64ab607cf31 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -10,7 +10,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # -r requirements/runtime-deps.in aiohappyeyeballs==2.4.0 # via -r requirements/runtime-deps.in 
-aiohttp-theme==0.1.6 +aiohttp-theme==0.1.7 # via -r requirements/doc.in aioredis==2.0.1 # via -r requirements/lint.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 81b1c1f6988..e170cac365f 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -10,7 +10,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # -r requirements/runtime-deps.in aiohappyeyeballs==2.4.0 # via -r requirements/runtime-deps.in -aiohttp-theme==0.1.6 +aiohttp-theme==0.1.7 # via -r requirements/doc.in aioredis==2.0.1 # via -r requirements/lint.in diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 57cfa253fe7..fae36c2f105 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/doc-spelling.txt --resolver=backtracking --strip-extras requirements/doc-spelling.in # -aiohttp-theme==0.1.6 +aiohttp-theme==0.1.7 # via -r requirements/doc.in alabaster==0.7.13 # via sphinx diff --git a/requirements/doc.txt b/requirements/doc.txt index 49c8f3864ac..e696c59b1a6 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/doc.txt --resolver=backtracking --strip-extras requirements/doc.in # -aiohttp-theme==0.1.6 +aiohttp-theme==0.1.7 # via -r requirements/doc.in alabaster==0.7.13 # via sphinx From 2272c2b952e87048922cb69ce6daa3a7340677c1 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Tue, 24 Sep 2024 19:40:11 +0100 Subject: [PATCH 0644/1511] Release v3.10.6 (#9288) --- CHANGES.rst | 169 +++++++++++++++++-------------------- CHANGES/4414.doc | 1 - CHANGES/4650.bugfix | 1 - CHANGES/5343.bugfix | 1 - CHANGES/6485.bugfix.rst | 1 - CHANGES/6494.bugfix.rst | 1 - CHANGES/6732.bugfix | 1 - CHANGES/6764.doc.rst | 1 - CHANGES/6807.bugfix.rst | 1 - CHANGES/7167.bugfix.rst | 1 - CHANGES/8564.feature.rst | 1 - CHANGES/8748.feature.rst | 1 - 
CHANGES/8768.bugfix.rst | 1 - CHANGES/8823.bugfix.rst | 1 - CHANGES/8845.bugfix.rst | 1 - CHANGES/8847.misc.rst | 1 - CHANGES/8858.bugfix.rst | 1 - CHANGES/8875.bugfix.rst | 1 - CHANGES/8876.bugfix.rst | 1 - CHANGES/8878.bugfix.rst | 1 - CHANGES/8898.bugfix.rst | 1 - CHANGES/8908.bugfix.rst | 1 - CHANGES/8929.bugfix.rst | 1 - CHANGES/8947.misc.rst | 1 - CHANGES/8967.bugfix.rst | 1 - CHANGES/8968.doc.rst | 1 - CHANGES/8990.bugfix.rst | 1 - CHANGES/8991.doc.rst | 1 - CHANGES/8992.bugfix.rst | 1 - CHANGES/8998.bugfix.rst | 1 - CHANGES/9004.packaging.rst | 1 - CHANGES/9018.bugfix.rst | 1 - CHANGES/9029.bugfix.rst | 1 - CHANGES/9030.bugfix.rst | 1 - CHANGES/9031.misc.rst | 1 - CHANGES/9032.bugfix.rst | 3 - CHANGES/9052.bugfix.rst | 1 - CHANGES/9054.misc.rst | 1 - CHANGES/9063.bugfix.rst | 1 - CHANGES/9068.misc.rst | 3 - CHANGES/9095.misc.rst | 1 - CHANGES/9108.bugfix.rst | 1 - CHANGES/9137.bugfix.rst | 2 - CHANGES/9140.bugfix.rst | 1 - CHANGES/9158.misc.rst | 3 - CHANGES/9160.bugfix | 1 - CHANGES/9167.bugfix.rst | 1 - CHANGES/9168.misc.rst | 1 - CHANGES/9169.misc.rst | 1 - CHANGES/9170.misc.rst | 1 - CHANGES/9172.misc.rst | 1 - CHANGES/9173.misc.rst | 1 - CHANGES/9174.misc.rst | 1 - CHANGES/9175.misc.rst | 1 - CHANGES/9200.breaking.rst | 3 - CHANGES/9203.misc.rst | 3 - CHANGES/9204.misc.rst | 1 - CHANGES/9239.bugfix.rst | 1 - CHANGES/9241.misc.rst | 1 - CHANGES/9267.breaking.rst | 1 - CHANGES/9267.bugfix.rst | 1 - aiohttp/__init__.py | 2 +- 62 files changed, 80 insertions(+), 162 deletions(-) delete mode 100644 CHANGES/4414.doc delete mode 100644 CHANGES/4650.bugfix delete mode 100644 CHANGES/5343.bugfix delete mode 100644 CHANGES/6485.bugfix.rst delete mode 100644 CHANGES/6494.bugfix.rst delete mode 100644 CHANGES/6732.bugfix delete mode 100644 CHANGES/6764.doc.rst delete mode 100644 CHANGES/6807.bugfix.rst delete mode 100644 CHANGES/7167.bugfix.rst delete mode 100644 CHANGES/8564.feature.rst delete mode 100644 CHANGES/8748.feature.rst delete mode 100644 
CHANGES/8768.bugfix.rst delete mode 100644 CHANGES/8823.bugfix.rst delete mode 100644 CHANGES/8845.bugfix.rst delete mode 100644 CHANGES/8847.misc.rst delete mode 100644 CHANGES/8858.bugfix.rst delete mode 100644 CHANGES/8875.bugfix.rst delete mode 100644 CHANGES/8876.bugfix.rst delete mode 100644 CHANGES/8878.bugfix.rst delete mode 100644 CHANGES/8898.bugfix.rst delete mode 100644 CHANGES/8908.bugfix.rst delete mode 100644 CHANGES/8929.bugfix.rst delete mode 100644 CHANGES/8947.misc.rst delete mode 100644 CHANGES/8967.bugfix.rst delete mode 100644 CHANGES/8968.doc.rst delete mode 100644 CHANGES/8990.bugfix.rst delete mode 100644 CHANGES/8991.doc.rst delete mode 100644 CHANGES/8992.bugfix.rst delete mode 100644 CHANGES/8998.bugfix.rst delete mode 100644 CHANGES/9004.packaging.rst delete mode 100644 CHANGES/9018.bugfix.rst delete mode 100644 CHANGES/9029.bugfix.rst delete mode 100644 CHANGES/9030.bugfix.rst delete mode 100644 CHANGES/9031.misc.rst delete mode 100644 CHANGES/9032.bugfix.rst delete mode 100644 CHANGES/9052.bugfix.rst delete mode 100644 CHANGES/9054.misc.rst delete mode 100644 CHANGES/9063.bugfix.rst delete mode 100644 CHANGES/9068.misc.rst delete mode 100644 CHANGES/9095.misc.rst delete mode 100644 CHANGES/9108.bugfix.rst delete mode 100644 CHANGES/9137.bugfix.rst delete mode 100644 CHANGES/9140.bugfix.rst delete mode 100644 CHANGES/9158.misc.rst delete mode 100644 CHANGES/9160.bugfix delete mode 100644 CHANGES/9167.bugfix.rst delete mode 120000 CHANGES/9168.misc.rst delete mode 120000 CHANGES/9169.misc.rst delete mode 120000 CHANGES/9170.misc.rst delete mode 120000 CHANGES/9172.misc.rst delete mode 100644 CHANGES/9173.misc.rst delete mode 100644 CHANGES/9174.misc.rst delete mode 120000 CHANGES/9175.misc.rst delete mode 100644 CHANGES/9200.breaking.rst delete mode 100644 CHANGES/9203.misc.rst delete mode 100644 CHANGES/9204.misc.rst delete mode 100644 CHANGES/9239.bugfix.rst delete mode 120000 CHANGES/9241.misc.rst delete mode 100644 
CHANGES/9267.breaking.rst delete mode 120000 CHANGES/9267.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index cff8a8a0e2e..f6010b9840a 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,100 +10,90 @@ .. towncrier release notes start -3.10.6rc2 (2024-09-23) -====================== +3.10.6 (2024-09-24) +=================== -No significant changes. +Bug fixes +--------- +- Added :exc:`aiohttp.ClientConnectionResetError`. Client code that previously threw :exc:`ConnectionResetError` + will now throw this -- by :user:`Dreamsorcerer`. ----- + *Related issues and pull requests on GitHub:* + :issue:`9137`. -3.10.6rc1 (2024-09-22) -====================== -Removals and backward incompatible breaking changes ---------------------------------------------------- -- Increased minimum yarl version to 1.12.0 -- by :user:`bdraco`. +- Fixed an unclosed transport ``ResourceWarning`` on web handlers -- by :user:`Dreamsorcerer`. *Related issues and pull requests on GitHub:* - :issue:`9267`. - - - - ----- + :issue:`8875`. -3.10.6rc0 (2024-09-22) -====================== -Bug fixes ---------- - -- Implemented binding to IPv6 addresses in the pytest server fixture. +- Fixed resolve_host() 'Task was destroyed but is pending' errors -- by :user:`Dreamsorcerer`. *Related issues and pull requests on GitHub:* - :issue:`4650`. + :issue:`8967`. -- Fixed StreamResponse.prepared to return True after EOF is sent -- by :user:`arthurdarcet`. +- Fixed handling of some file-like objects (e.g. ``tarfile.extractfile()``) which raise ``AttributeError`` instead of ``OSError`` when ``fileno`` fails for streaming payload data -- by :user:`ReallyReivax`. *Related issues and pull requests on GitHub:* - :issue:`5343`. + :issue:`6732`. -- Fixed ``Response.text`` when body is a ``Payload`` -- by :user:`Dreamsorcerer`. +- Fixed web router not matching pre-encoded URLs (requires yarl 1.9.6+) -- by :user:`Dreamsorcerer`. *Related issues and pull requests on GitHub:* - :issue:`6485`. + :issue:`8898`, :issue:`9267`. 
-- Added support for URL credentials with empty (zero-length) username, e.g. ``https://:password@host`` -- by :user:`shuckc` +- Fixed an error when trying to add a route for multiple methods with a path containing a regex pattern -- by :user:`Dreamsorcerer`. *Related issues and pull requests on GitHub:* - :issue:`6494`. + :issue:`8998`. -- Fixed handling of some file-like objects (e.g. ``tarfile.extractfile()``) which raise ``AttributeError`` instead of ``OSError`` when ``fileno`` fails for streaming payload data -- by :user:`ReallyReivax`. +- Fixed ``Response.text`` when body is a ``Payload`` -- by :user:`Dreamsorcerer`. *Related issues and pull requests on GitHub:* - :issue:`6732`. + :issue:`6485`. -- Stopped logging exceptions from ``web.run_app()`` that would be raised regardless -- by :user:`Dreamsorcerer`. +- Fixed compressed requests failing when no body was provided -- by :user:`Dreamsorcerer`. *Related issues and pull requests on GitHub:* - :issue:`6807`. + :issue:`9108`. -- Changed ``make_mocked_request()`` to use empty payload by default -- by :user:`rahulnht`. +- Fixed client incorrectly reusing a connection when the previous message had not been fully sent -- by :user:`Dreamsorcerer`. *Related issues and pull requests on GitHub:* - :issue:`7167`. + :issue:`8992`. -- Used more precise type for ``ClientResponseError.headers``, fixing some type errors when using them -- by :user:`Dreamorcerer`. +- Fixed race condition that could cause server to close connection incorrectly at keepalive timeout -- by :user:`Dreamsorcerer`. *Related issues and pull requests on GitHub:* - :issue:`8768`. + :issue:`9140`. @@ -115,11 +105,11 @@ Bug fixes -- Changed behavior when returning an invalid response to send a 500 response -- by :user:`Dreamsorcerer`. +- Fixed error handling after 100-continue so server sends 500 response instead of disconnecting -- by :user:`Dreamsorcerer`. *Related issues and pull requests on GitHub:* - :issue:`8845`. + :issue:`8876`. 
@@ -131,158 +121,141 @@ Bug fixes -- Fixed an unclosed transport ``ResourceWarning`` on web handlers -- by :user:`Dreamsorcerer`. - - - *Related issues and pull requests on GitHub:* - :issue:`8875`. - - - -- Fixed error handling after 100-continue so server sends 500 response instead of disconnecting -- by :user:`Dreamsorcerer`. +- Added support for URL credentials with empty (zero-length) username, e.g. ``https://:password@host`` -- by :user:`shuckc` *Related issues and pull requests on GitHub:* - :issue:`8876`. + :issue:`6494`. -- Fixed response reading from closed session to throw an error immediately instead of timing out -- by :user:`Dreamsorcerer`. +- Stopped logging exceptions from ``web.run_app()`` that would be raised regardless -- by :user:`Dreamsorcerer`. *Related issues and pull requests on GitHub:* - :issue:`8878`. + :issue:`6807`. -- Fixed web router not matching pre-encoded URLs (requires yarl 1.9.6+) -- by :user:`Dreamsorcerer`. +- Implemented binding to IPv6 addresses in the pytest server fixture. *Related issues and pull requests on GitHub:* - :issue:`8898`. - - - -- Fixed ``CancelledError`` from one cleanup context stopping other contexts from completing -- by :user:`Dreamsorcerer`. + :issue:`4650`. - *Related issues and pull requests on GitHub:* - :issue:`8908`. - +- Fixed the incorrect use of flags for ``getnameinfo()`` in the Resolver --by :user:`GitNMLee` -- Fixed ``Site.name`` when host is an empty string -- by :user:`Dreamsorcerer`. + Link-Local IPv6 addresses can now be handled by the Resolver correctly. *Related issues and pull requests on GitHub:* - :issue:`8929`. + :issue:`9032`. -- Fixed resolve_host() 'Task was destroyed but is pending' errors -- by :user:`Dreamsorcerer`. +- Fixed StreamResponse.prepared to return True after EOF is sent -- by :user:`arthurdarcet`. *Related issues and pull requests on GitHub:* - :issue:`8967`. + :issue:`5343`. 
-- Fixed changing scheme/host in ``Response.clone()`` for absolute URLs -- by :user:`Dreamsorcerer`. +- Changed ``make_mocked_request()`` to use empty payload by default -- by :user:`rahulnht`. *Related issues and pull requests on GitHub:* - :issue:`8990`. + :issue:`7167`. -- Fixed client incorrectly reusing a connection when the previous message had not been fully sent -- by :user:`Dreamsorcerer`. +- Used more precise type for ``ClientResponseError.headers``, fixing some type errors when using them -- by :user:`Dreamsorcerer`. *Related issues and pull requests on GitHub:* - :issue:`8992`. + :issue:`8768`. -- Fixed an error when trying to add a route for multiple methods with a path containing a regex pattern -- by :user:`Dreamsorcerer`. +- Changed behavior when returning an invalid response to send a 500 response -- by :user:`Dreamsorcerer`. *Related issues and pull requests on GitHub:* - :issue:`8998`. + :issue:`8845`. -- Updated Python parser to reject messages after a close message, matching C parser behaviour -- by :user:`Dreamsorcerer`. +- Fixed response reading from closed session to throw an error immediately instead of timing out -- by :user:`Dreamsorcerer`. *Related issues and pull requests on GitHub:* - :issue:`9018`. + :issue:`8878`. -- Fixed creation of ``SSLContext`` inside of :py:class:`aiohttp.TCPConnector` with multiple event loops in different threads -- by :user:`bdraco`. +- Fixed ``CancelledError`` from one cleanup context stopping other contexts from completing -- by :user:`Dreamsorcerer`. *Related issues and pull requests on GitHub:* - :issue:`9029`. + :issue:`8908`. -- Fixed (on Python 3.11+) some edge cases where a task cancellation may get incorrectly suppressed -- by :user:`Dreamsorcerer`. +- Fixed changing scheme/host in ``Response.clone()`` for absolute URLs -- by :user:`Dreamsorcerer`. *Related issues and pull requests on GitHub:* - :issue:`9030`. - + :issue:`8990`. 
-- Fixed the incorrect use of flags for ``getnameinfo()`` in the Resolver --by :user:`GitNMLee` - Link-Local IPv6 addresses can now be handled by the Resolver correctly. +- Fixed ``Site.name`` when host is an empty string -- by :user:`Dreamsorcerer`. *Related issues and pull requests on GitHub:* - :issue:`9032`. + :issue:`8929`. -- Fixed exception information getting lost on ``HttpProcessingError`` -- by :user:`Dreamsorcerer`. +- Updated Python parser to reject messages after a close message, matching C parser behaviour -- by :user:`Dreamsorcerer`. *Related issues and pull requests on GitHub:* - :issue:`9052`. + :issue:`9018`. -- Fixed ``If-None-Match`` not using weak comparison -- by :user:`Dreamsorcerer`. +- Fixed creation of ``SSLContext`` inside of :py:class:`aiohttp.TCPConnector` with multiple event loops in different threads -- by :user:`bdraco`. *Related issues and pull requests on GitHub:* - :issue:`9063`. + :issue:`9029`. -- Fixed compressed requests failing when no body was provided -- by :user:`Dreamsorcerer`. +- Fixed (on Python 3.11+) some edge cases where a task cancellation may get incorrectly suppressed -- by :user:`Dreamsorcerer`. *Related issues and pull requests on GitHub:* - :issue:`9108`. + :issue:`9030`. -- Added :exc:`aiohttp.ClientConnectionResetError`. Client code that previously threw :exc:`ConnectionResetError` - will now throw this -- by :user:`Dreamsorcerer`. +- Fixed exception information getting lost on ``HttpProcessingError`` -- by :user:`Dreamsorcerer`. *Related issues and pull requests on GitHub:* - :issue:`9137`. + :issue:`9052`. -- Fixed race condition that could cause server to close connection incorrectly at keepalive timeout -- by :user:`Dreamosorcerer`. +- Fixed ``If-None-Match`` not using weak comparison -- by :user:`Dreamsorcerer`. *Related issues and pull requests on GitHub:* - :issue:`9140`. + :issue:`9063`. @@ -322,6 +295,14 @@ Features +- Declared Python 3.13 supported -- by :user:`bdraco`. 
+ + + *Related issues and pull requests on GitHub:* + :issue:`8748`. + + + Removals and backward incompatible breaking changes --------------------------------------------------- @@ -336,6 +317,14 @@ Removals and backward incompatible breaking changes +- Increased minimum yarl version to 1.12.0 -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9267`. + + + Improved documentation ---------------------- diff --git a/CHANGES/4414.doc b/CHANGES/4414.doc deleted file mode 100644 index b4be46afee8..00000000000 --- a/CHANGES/4414.doc +++ /dev/null @@ -1 +0,0 @@ -Clarified that ``GracefulExit`` needs to be handled in ``AppRunner`` and ``ServerRunner`` when using ``handle_signals=True``. -- by :user:`Daste745` diff --git a/CHANGES/4650.bugfix b/CHANGES/4650.bugfix deleted file mode 100644 index e3e17b00ae8..00000000000 --- a/CHANGES/4650.bugfix +++ /dev/null @@ -1 +0,0 @@ -Implemented binding to IPv6 addresses in the pytest server fixture. diff --git a/CHANGES/5343.bugfix b/CHANGES/5343.bugfix deleted file mode 100644 index 4e33071ea94..00000000000 --- a/CHANGES/5343.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fixed StreamResponse.prepared to return True after EOF is sent -- by :user:`arthurdarcet`. diff --git a/CHANGES/6485.bugfix.rst b/CHANGES/6485.bugfix.rst deleted file mode 100644 index b1d912f1579..00000000000 --- a/CHANGES/6485.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed ``Response.text`` when body is a ``Payload`` -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/6494.bugfix.rst b/CHANGES/6494.bugfix.rst deleted file mode 100644 index 3827644f0d1..00000000000 --- a/CHANGES/6494.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Added support for URL credentials with empty (zero-length) username, e.g. 
``https://:password@host`` -- by :user:`shuckc` diff --git a/CHANGES/6732.bugfix b/CHANGES/6732.bugfix deleted file mode 100644 index a460d7cd695..00000000000 --- a/CHANGES/6732.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fixed handling of some file-like objects (e.g. ``tarfile.extractfile()``) which raise ``AttributeError`` instead of ``OSError`` when ``fileno`` fails for streaming payload data -- by :user:`ReallyReivax`. diff --git a/CHANGES/6764.doc.rst b/CHANGES/6764.doc.rst deleted file mode 100644 index dea2019fc76..00000000000 --- a/CHANGES/6764.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Clarified that auth parameter in ClientSession will persist and be included with any request to any origin, even during redirects to different origins. -- by :user:`MaximZemskov`. diff --git a/CHANGES/6807.bugfix.rst b/CHANGES/6807.bugfix.rst deleted file mode 100644 index 4eb07b9e0da..00000000000 --- a/CHANGES/6807.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Stopped logging exceptions from ``web.run_app()`` that would be raised regardless -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/7167.bugfix.rst b/CHANGES/7167.bugfix.rst deleted file mode 100644 index 766f1438b66..00000000000 --- a/CHANGES/7167.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Changed ``make_mocked_request()`` to use empty payload by default -- by :user:`rahulnht`. diff --git a/CHANGES/8564.feature.rst b/CHANGES/8564.feature.rst deleted file mode 100644 index 1eac9d12217..00000000000 --- a/CHANGES/8564.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Improved type on ``params`` to match the underlying type allowed by ``yarl`` -- by :user:`lpetre`. diff --git a/CHANGES/8748.feature.rst b/CHANGES/8748.feature.rst deleted file mode 100644 index 7794d16e4dc..00000000000 --- a/CHANGES/8748.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Declared Python 3.13 supported -- by :user:`bdraco`. 
diff --git a/CHANGES/8768.bugfix.rst b/CHANGES/8768.bugfix.rst deleted file mode 100644 index 18512163572..00000000000 --- a/CHANGES/8768.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Used more precise type for ``ClientResponseError.headers``, fixing some type errors when using them -- by :user:`Dreamorcerer`. diff --git a/CHANGES/8823.bugfix.rst b/CHANGES/8823.bugfix.rst deleted file mode 100644 index ea18e65fd4a..00000000000 --- a/CHANGES/8823.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed Python parser chunked handling with multiple Transfer-Encoding values -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8845.bugfix.rst b/CHANGES/8845.bugfix.rst deleted file mode 100644 index c37a0095ed3..00000000000 --- a/CHANGES/8845.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Changed behavior when returning an invalid response to send a 500 response -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8847.misc.rst b/CHANGES/8847.misc.rst deleted file mode 100644 index 58f61d48420..00000000000 --- a/CHANGES/8847.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performance of making requests when there are no auto headers to skip -- by :user:`bdraco`. diff --git a/CHANGES/8858.bugfix.rst b/CHANGES/8858.bugfix.rst deleted file mode 100644 index e4efa91a2fd..00000000000 --- a/CHANGES/8858.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Stopped adding a default Content-Type header when response has no content -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8875.bugfix.rst b/CHANGES/8875.bugfix.rst deleted file mode 100644 index fa33df05ae2..00000000000 --- a/CHANGES/8875.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed an unclosed transport ``ResourceWarning`` on web handlers -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8876.bugfix.rst b/CHANGES/8876.bugfix.rst deleted file mode 100644 index 539eeb4c7d3..00000000000 --- a/CHANGES/8876.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed error handling after 100-continue so server sends 500 response instead of disconnecting -- by :user:`Dreamsorcerer`. 
diff --git a/CHANGES/8878.bugfix.rst b/CHANGES/8878.bugfix.rst deleted file mode 100644 index df53dea3c35..00000000000 --- a/CHANGES/8878.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed response reading from closed session to throw an error immediately instead of timing out -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8898.bugfix.rst b/CHANGES/8898.bugfix.rst deleted file mode 100644 index 0de6646c8cb..00000000000 --- a/CHANGES/8898.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed web router not matching pre-encoded URLs (requires yarl 1.9.6+) -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8908.bugfix.rst b/CHANGES/8908.bugfix.rst deleted file mode 100644 index 0eb450431db..00000000000 --- a/CHANGES/8908.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed ``CancelledError`` from one cleanup context stopping other contexts from completing -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8929.bugfix.rst b/CHANGES/8929.bugfix.rst deleted file mode 100644 index 229d5abd0e7..00000000000 --- a/CHANGES/8929.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed ``Site.name`` when host is an empty string -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8947.misc.rst b/CHANGES/8947.misc.rst deleted file mode 100644 index 277ba915c50..00000000000 --- a/CHANGES/8947.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Exported ``aiohttp.TraceRequestHeadersSentParams`` -- by :user:`Hadock-is-ok`. diff --git a/CHANGES/8967.bugfix.rst b/CHANGES/8967.bugfix.rst deleted file mode 100644 index 1046f36bd8b..00000000000 --- a/CHANGES/8967.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed resolve_host() 'Task was destroyed but is pending' errors -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8968.doc.rst b/CHANGES/8968.doc.rst deleted file mode 100644 index 3420794586f..00000000000 --- a/CHANGES/8968.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Clarified which timeout exceptions happen on which timeouts -- by :user:`Dreamsorcerer`. 
diff --git a/CHANGES/8990.bugfix.rst b/CHANGES/8990.bugfix.rst deleted file mode 100644 index 9a9783103fd..00000000000 --- a/CHANGES/8990.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed changing scheme/host in ``Response.clone()`` for absolute URLs -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8991.doc.rst b/CHANGES/8991.doc.rst deleted file mode 100644 index c29850c4f3c..00000000000 --- a/CHANGES/8991.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Updated ``ClientSession`` parameters to match current code -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8992.bugfix.rst b/CHANGES/8992.bugfix.rst deleted file mode 100644 index bc41d5feb81..00000000000 --- a/CHANGES/8992.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed client incorrectly reusing a connection when the previous message had not been fully sent -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8998.bugfix.rst b/CHANGES/8998.bugfix.rst deleted file mode 100644 index 1b6b189e7ea..00000000000 --- a/CHANGES/8998.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed an error when trying to add a route for multiple methods with a path containing a regex pattern -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/9004.packaging.rst b/CHANGES/9004.packaging.rst deleted file mode 100644 index f6b0f8ff2a3..00000000000 --- a/CHANGES/9004.packaging.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed ``test_client_session_timeout_zero`` to not require internet access -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/9018.bugfix.rst b/CHANGES/9018.bugfix.rst deleted file mode 100644 index 2de6d142900..00000000000 --- a/CHANGES/9018.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Updated Python parser to reject messages after a close message, matching C parser behaviour -- by :user:`Dreamsorcerer`. 
diff --git a/CHANGES/9029.bugfix.rst b/CHANGES/9029.bugfix.rst deleted file mode 100644 index 7ca956e3832..00000000000 --- a/CHANGES/9029.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed creation of ``SSLContext`` inside of :py:class:`aiohttp.TCPConnector` with multiple event loops in different threads -- by :user:`bdraco`. diff --git a/CHANGES/9030.bugfix.rst b/CHANGES/9030.bugfix.rst deleted file mode 100644 index 2e9d48f5359..00000000000 --- a/CHANGES/9030.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed (on Python 3.11+) some edge cases where a task cancellation may get incorrectly suppressed -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/9031.misc.rst b/CHANGES/9031.misc.rst deleted file mode 100644 index 1874a4deddd..00000000000 --- a/CHANGES/9031.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Avoided tracing overhead in the http writer when there are no active traces -- by user:`bdraco`. diff --git a/CHANGES/9032.bugfix.rst b/CHANGES/9032.bugfix.rst deleted file mode 100644 index 8c8d81f6319..00000000000 --- a/CHANGES/9032.bugfix.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fixed the incorrect use of flags for ``getnameinfo()`` in the Resolver --by :user:`GitNMLee` - -Link-Local IPv6 addresses can now be handled by the Resolver correctly. diff --git a/CHANGES/9052.bugfix.rst b/CHANGES/9052.bugfix.rst deleted file mode 100644 index 913288d3368..00000000000 --- a/CHANGES/9052.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed exception information getting lost on ``HttpProcessingError`` -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/9054.misc.rst b/CHANGES/9054.misc.rst deleted file mode 100644 index ddc71f453e5..00000000000 --- a/CHANGES/9054.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performance of reify Cython implementation -- by :user:`bdraco`. 
diff --git a/CHANGES/9063.bugfix.rst b/CHANGES/9063.bugfix.rst deleted file mode 100644 index e512677b9c8..00000000000 --- a/CHANGES/9063.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed ``If-None-Match`` not using weak comparison -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/9068.misc.rst b/CHANGES/9068.misc.rst deleted file mode 100644 index 7ce5ec5c839..00000000000 --- a/CHANGES/9068.misc.rst +++ /dev/null @@ -1,3 +0,0 @@ -Use :meth:`URL.extend_query() <yarl.URL.extend_query>` to extend query params (requires yarl 1.11.0+) -- by :user:`bdraco`. - -If yarl is older than 1.11.0, the previous slower hand rolled version will be used. diff --git a/CHANGES/9095.misc.rst b/CHANGES/9095.misc.rst deleted file mode 100644 index f4a06cb09d6..00000000000 --- a/CHANGES/9095.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performance of checking if a host is an IP Address -- by :user:`bdraco`. diff --git a/CHANGES/9108.bugfix.rst b/CHANGES/9108.bugfix.rst deleted file mode 100644 index 8be000575e8..00000000000 --- a/CHANGES/9108.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed compressed requests failing when no body was provided -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/9137.bugfix.rst b/CHANGES/9137.bugfix.rst deleted file mode 100644 index d99802095bd..00000000000 --- a/CHANGES/9137.bugfix.rst +++ /dev/null @@ -1,2 +0,0 @@ -Added :exc:`aiohttp.ClientConnectionResetError`. Client code that previously threw :exc:`ConnectionResetError` -will now throw this -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/9140.bugfix.rst b/CHANGES/9140.bugfix.rst deleted file mode 100644 index c9b8f7bf4ea..00000000000 --- a/CHANGES/9140.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed race condition that could cause server to close connection incorrectly at keepalive timeout -- by :user:`Dreamosorcerer`. 
diff --git a/CHANGES/9158.misc.rst b/CHANGES/9158.misc.rst deleted file mode 100644 index 8d87623c056..00000000000 --- a/CHANGES/9158.misc.rst +++ /dev/null @@ -1,3 +0,0 @@ -Significantly improved performance of middlewares -- by :user:`bdraco`. - -The construction of the middleware wrappers is now cached and is built once per handler instead of on every request. diff --git a/CHANGES/9160.bugfix b/CHANGES/9160.bugfix deleted file mode 100644 index 253cfd07d50..00000000000 --- a/CHANGES/9160.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fixed badly encoded charset crashing when getting response text instead of falling back to charset detector. diff --git a/CHANGES/9167.bugfix.rst b/CHANGES/9167.bugfix.rst deleted file mode 100644 index 4c33c8ad355..00000000000 --- a/CHANGES/9167.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Rejected `\n` in `reason` values to avoid sending broken HTTP messages -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/9168.misc.rst b/CHANGES/9168.misc.rst deleted file mode 120000 index d6a2f2aaaab..00000000000 --- a/CHANGES/9168.misc.rst +++ /dev/null @@ -1 +0,0 @@ -9174.misc.rst \ No newline at end of file diff --git a/CHANGES/9169.misc.rst b/CHANGES/9169.misc.rst deleted file mode 120000 index d6a2f2aaaab..00000000000 --- a/CHANGES/9169.misc.rst +++ /dev/null @@ -1 +0,0 @@ -9174.misc.rst \ No newline at end of file diff --git a/CHANGES/9170.misc.rst b/CHANGES/9170.misc.rst deleted file mode 120000 index e41cbad0125..00000000000 --- a/CHANGES/9170.misc.rst +++ /dev/null @@ -1 +0,0 @@ -9158.misc.rst \ No newline at end of file diff --git a/CHANGES/9172.misc.rst b/CHANGES/9172.misc.rst deleted file mode 120000 index d6a2f2aaaab..00000000000 --- a/CHANGES/9172.misc.rst +++ /dev/null @@ -1 +0,0 @@ -9174.misc.rst \ No newline at end of file diff --git a/CHANGES/9173.misc.rst b/CHANGES/9173.misc.rst deleted file mode 100644 index 6fcc098747f..00000000000 --- a/CHANGES/9173.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performance of starting web requests when 
there is no response prepare hook -- by :user:`bdraco`. diff --git a/CHANGES/9174.misc.rst b/CHANGES/9174.misc.rst deleted file mode 100644 index 13dc00ec1de..00000000000 --- a/CHANGES/9174.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performance of web requests -- by :user:`bdraco`. diff --git a/CHANGES/9175.misc.rst b/CHANGES/9175.misc.rst deleted file mode 120000 index d6a2f2aaaab..00000000000 --- a/CHANGES/9175.misc.rst +++ /dev/null @@ -1 +0,0 @@ -9174.misc.rst \ No newline at end of file diff --git a/CHANGES/9200.breaking.rst b/CHANGES/9200.breaking.rst deleted file mode 100644 index 0282e165c41..00000000000 --- a/CHANGES/9200.breaking.rst +++ /dev/null @@ -1,3 +0,0 @@ -Improved middleware performance -- by :user:`bdraco`. - -The ``set_current_app`` method was removed from ``UrlMappingMatchInfo`` because it is no longer used, and it was unlikely external caller would ever use it. diff --git a/CHANGES/9203.misc.rst b/CHANGES/9203.misc.rst deleted file mode 100644 index 766fdc01a57..00000000000 --- a/CHANGES/9203.misc.rst +++ /dev/null @@ -1,3 +0,0 @@ -Significantly improved performance of expiring cookies -- by :user:`bdraco`. - -Expiring cookies has been redesigned to use :mod:`heapq` instead of a linear search, to better scale. diff --git a/CHANGES/9204.misc.rst b/CHANGES/9204.misc.rst deleted file mode 100644 index 9f3196fa5be..00000000000 --- a/CHANGES/9204.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Significantly sped up filtering cookies -- by :user:`bdraco`. diff --git a/CHANGES/9239.bugfix.rst b/CHANGES/9239.bugfix.rst deleted file mode 100644 index 95b229742ce..00000000000 --- a/CHANGES/9239.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Changed :py:meth:`ClientResponse.raise_for_status() <aiohttp.ClientResponse.raise_for_status>` to only release the connection when invoked outside an ``async with`` context -- by :user:`Dreamsorcerer`. 
diff --git a/CHANGES/9241.misc.rst b/CHANGES/9241.misc.rst deleted file mode 120000 index d6a2f2aaaab..00000000000 --- a/CHANGES/9241.misc.rst +++ /dev/null @@ -1 +0,0 @@ -9174.misc.rst \ No newline at end of file diff --git a/CHANGES/9267.breaking.rst b/CHANGES/9267.breaking.rst deleted file mode 100644 index 82fec1d21b4..00000000000 --- a/CHANGES/9267.breaking.rst +++ /dev/null @@ -1 +0,0 @@ -Increased minimum yarl version to 1.12.0 -- by :user:`bdraco`. diff --git a/CHANGES/9267.bugfix.rst b/CHANGES/9267.bugfix.rst deleted file mode 120000 index 2a85c7ec63c..00000000000 --- a/CHANGES/9267.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -8898.bugfix.rst \ No newline at end of file diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 02fde739bdb..8830d340940 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.10.6rc2" +__version__ = "3.10.6" from typing import TYPE_CHECKING, Tuple From 2a92a5cd6b5c390a479c167d0079e153fae9ff12 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Tue, 24 Sep 2024 21:45:21 +0100 Subject: [PATCH 0645/1511] Bump version --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 8830d340940..63367052646 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.10.6" +__version__ = "3.10.6.dev0" from typing import TYPE_CHECKING, Tuple From 0b82655f873c7c3883dc66aa3bd5ead0636a2a46 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Tue, 24 Sep 2024 16:02:39 -0500 Subject: [PATCH 0646/1511] [PR #9083/a6dd415 backport][3.10] Remove unused backwards compatibility code for old yarl versions (#9289) --- aiohttp/client_reqrep.py | 11 ++--------- aiohttp/typedefs.py | 13 +------------ tests/test_client_functional.py | 26 +++++++------------------- 3 files changed, 10 insertions(+), 40 deletions(-) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index aa8f54e67b8..aeabf0161b8 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -27,7 +27,7 @@ import attr from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy -from yarl import URL, __version__ as yarl_version +from yarl import URL from . import hdrs, helpers, http, multipart, payload from .abc import AbstractStreamWriter @@ -89,7 +89,6 @@ _CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]") -_YARL_SUPPORTS_EXTEND_QUERY = tuple(map(int, yarl_version.split(".")[:2])) >= (1, 11) json_re = re.compile(r"^application/(?:[\w.+-]+?\+)?json") @@ -303,13 +302,7 @@ def __init__( # assert session is not None self._session = cast("ClientSession", session) if params: - if _YARL_SUPPORTS_EXTEND_QUERY: - url = url.extend_query(params) - else: - q = MultiDict(url.query) - url2 = url.with_query(params) - q.extend(url2.query) - url = url.with_query(q) + url = url.extend_query(params) self.original_url = url self.url = url.with_fragment(None) self.method = method.upper() diff --git a/aiohttp/typedefs.py b/aiohttp/typedefs.py index 2e285fa2561..cc8c0825b4e 100644 --- a/aiohttp/typedefs.py +++ b/aiohttp/typedefs.py @@ -8,23 +8,12 @@ Iterable, Mapping, Protocol, - Sequence, Tuple, Union, ) from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr -from yarl import URL - -try: - # Available in yarl>=1.10.0 - from yarl import Query as _Query -except ImportError: # pragma: no cover - SimpleQuery = Union[str, int, float] # pragma: no 
cover - QueryVariable = Union[SimpleQuery, "Sequence[SimpleQuery]"] # pragma: no cover - _Query = Union[ # type: ignore[misc] # pragma: no cover - None, str, "Mapping[str, QueryVariable]", "Sequence[Tuple[str, QueryVariable]]" - ] +from yarl import URL, Query as _Query Query = _Query diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 60af4930f14..f1b9c89ff97 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -20,7 +20,7 @@ from yarl import URL import aiohttp -from aiohttp import Fingerprint, ServerFingerprintMismatch, client_reqrep, hdrs, web +from aiohttp import Fingerprint, ServerFingerprintMismatch, hdrs, web from aiohttp.abc import AbstractResolver from aiohttp.client_exceptions import ( ClientResponseError, @@ -676,10 +676,7 @@ async def handler(request): assert 200 == resp.status -@pytest.mark.parametrize("yarl_supports_extend_query", [True, False]) -async def test_params_and_query_string( - aiohttp_client: AiohttpClient, yarl_supports_extend_query: bool -) -> None: +async def test_params_and_query_string(aiohttp_client: AiohttpClient) -> None: """Test combining params with an existing query_string.""" async def handler(request: web.Request) -> web.Response: @@ -690,18 +687,13 @@ async def handler(request: web.Request) -> web.Response: app.router.add_route("GET", "/", handler) client = await aiohttp_client(app) - # Ensure the old path is tested for old yarl versions - with mock.patch.object( - client_reqrep, "_YARL_SUPPORTS_EXTEND_QUERY", yarl_supports_extend_query - ): - async with client.get("/?q=abc", params="q=test&d=dog") as resp: - assert resp.status == 200 + async with client.get("/?q=abc", params="q=test&d=dog") as resp: + assert resp.status == 200 @pytest.mark.parametrize("params", [None, "", {}, MultiDict()]) -@pytest.mark.parametrize("yarl_supports_extend_query", [True, False]) async def test_empty_params_and_query_string( - aiohttp_client: AiohttpClient, params: Any, 
yarl_supports_extend_query: bool + aiohttp_client: AiohttpClient, params: Any ) -> None: """Test combining empty params with an existing query_string.""" @@ -713,12 +705,8 @@ async def handler(request: web.Request) -> web.Response: app.router.add_route("GET", "/", handler) client = await aiohttp_client(app) - # Ensure the old path is tested for old yarl versions - with mock.patch.object( - client_reqrep, "_YARL_SUPPORTS_EXTEND_QUERY", yarl_supports_extend_query - ): - async with client.get("/?q=abc", params=params) as resp: - assert resp.status == 200 + async with client.get("/?q=abc", params=params) as resp: + assert resp.status == 200 async def test_drop_params_on_redirect(aiohttp_client: AiohttpClient) -> None: From e6bcfbe302806558d3d6d08e411a1f9f3187dcac Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 24 Sep 2024 21:18:12 +0000 Subject: [PATCH 0647/1511] [PR #9171/0462ae6b backport][3.10] Switch to using `yarl.URL.absolute` over `yarl.URL.is_absolute()` (#9291) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/9171.misc.rst | 3 +++ aiohttp/client.py | 2 +- aiohttp/test_utils.py | 2 +- aiohttp/web_request.py | 2 +- 4 files changed, 6 insertions(+), 3 deletions(-) create mode 100644 CHANGES/9171.misc.rst diff --git a/CHANGES/9171.misc.rst b/CHANGES/9171.misc.rst new file mode 100644 index 00000000000..c6742edd891 --- /dev/null +++ b/CHANGES/9171.misc.rst @@ -0,0 +1,3 @@ +Improved performance of determining if a URL is absolute -- by :user:`bdraco`. + +The property :attr:`~yarl.URL.absolute` is more performant than the method ``URL.is_absolute()`` and preferred when newer versions of yarl are used. 
diff --git a/aiohttp/client.py b/aiohttp/client.py index da89ee2a790..e50d216cf5a 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -444,7 +444,7 @@ def _build_url(self, str_or_url: StrOrURL) -> URL: if self._base_url is None: return url else: - assert not url.is_absolute() and url.path.startswith("/") + assert not url.absolute and url.path.startswith("/") return self._base_url.join(url) async def _request( diff --git a/aiohttp/test_utils.py b/aiohttp/test_utils.py index 01496b6711a..850efcb8b65 100644 --- a/aiohttp/test_utils.py +++ b/aiohttp/test_utils.py @@ -148,7 +148,7 @@ def make_url(self, path: StrOrURL) -> URL: assert self._root is not None url = URL(path) if not self.skip_url_asserts: - assert not url.is_absolute() + assert not url.absolute return self._root.join(url) else: return URL(str(self._root) + str(path)) diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index eca71e4413a..91fc1f42bfe 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -174,7 +174,7 @@ def __init__( self._version = message.version self._cache: Dict[str, Any] = {} url = message.url - if url.is_absolute(): + if url.absolute: if scheme is not None: url = url.with_scheme(scheme) if host is not None: From 4026191b880471d4af723b5c8e576b46f8723b6a Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Wed, 25 Sep 2024 16:27:39 +0100 Subject: [PATCH 0648/1511] Increase default keepalive_timeout server-side. 
(#9285) (#9293) (cherry picked from commit de997af2069781a7d50639fe59ad9ac8e3e9847e) --- CHANGES/9285.misc.rst | 1 + aiohttp/web_protocol.py | 3 ++- docs/web_reference.rst | 12 ++++++++++-- 3 files changed, 13 insertions(+), 3 deletions(-) create mode 100644 CHANGES/9285.misc.rst diff --git a/CHANGES/9285.misc.rst b/CHANGES/9285.misc.rst new file mode 100644 index 00000000000..78c8a773cf0 --- /dev/null +++ b/CHANGES/9285.misc.rst @@ -0,0 +1 @@ +Changed web ``keepalive_timeout`` default to around an hour in order to reduce race conditions on reverse proxies -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index 85eb70d5a0b..f5e4a0c5b68 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -173,7 +173,8 @@ def __init__( manager: "Server", *, loop: asyncio.AbstractEventLoop, - keepalive_timeout: float = 75.0, # NGINX default is 75 secs + # Default should be high enough that it's likely longer than a reverse proxy. + keepalive_timeout: float = 3630, tcp_keepalive: bool = True, logger: Logger = server_logger, access_log_class: Type[AbstractAccessLogger] = AccessLogger, diff --git a/docs/web_reference.rst b/docs/web_reference.rst index 931210894c8..faf68cb9c43 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -2750,7 +2750,9 @@ application on specific TCP or Unix socket, e.g.:: :param bool tcp_keepalive: Enable TCP Keep-Alive. Default: ``True``. :param int keepalive_timeout: Number of seconds before closing Keep-Alive - connection. Default: ``75`` seconds (NGINX's default value). + connection. Default: ``3630`` seconds (when deployed behind a reverse proxy + it's important for this value to be higher than the proxy's timeout. To avoid + race conditions we always want the proxy to close the connection). :param logger: Custom logger object. Default: :data:`aiohttp.log.server_logger`. :param access_log: Custom logging object. Default: @@ -2985,7 +2987,7 @@ Utilities .. 
function:: run_app(app, *, host=None, port=None, path=None, \ sock=None, shutdown_timeout=60.0, \ - keepalive_timeout=75.0, \ + keepalive_timeout=3630, \ ssl_context=None, print=print, backlog=128, \ access_log_class=aiohttp.helpers.AccessLogger, \ access_log_format=aiohttp.helpers.AccessLogger.LOG_FORMAT, \ @@ -3052,6 +3054,12 @@ Utilities closed after a HTTP request. The delay allows for reuse of a TCP connection. + When deployed behind a reverse proxy + it's important for this value to be + higher than the proxy's timeout. To avoid + race conditions, we always want the proxy + to handle connection closing. + .. versionadded:: 3.8 :param ssl_context: :class:`ssl.SSLContext` for HTTPS server, From 552dea531d06d9388ce7e110a52960c515228b16 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Wed, 25 Sep 2024 19:31:26 +0100 Subject: [PATCH 0649/1511] Backport type fix from #9226 (#9294) --- aiohttp/web_ws.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index 382223097ea..bf35f3bb1f6 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -379,14 +379,14 @@ async def pong(self, message: bytes = b"") -> None: raise RuntimeError("Call .prepare() first") await self._writer.pong(message) - async def send_str(self, data: str, compress: Optional[bool] = None) -> None: + async def send_str(self, data: str, compress: Optional[int] = None) -> None: if self._writer is None: raise RuntimeError("Call .prepare() first") if not isinstance(data, str): raise TypeError("data argument must be str (%r)" % type(data)) await self._writer.send(data, binary=False, compress=compress) - async def send_bytes(self, data: bytes, compress: Optional[bool] = None) -> None: + async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None: if self._writer is None: raise RuntimeError("Call .prepare() first") if not isinstance(data, (bytes, bytearray, memoryview)): @@ -396,7 +396,7 @@ async def 
send_bytes(self, data: bytes, compress: Optional[bool] = None) -> None async def send_json( self, data: Any, - compress: Optional[bool] = None, + compress: Optional[int] = None, *, dumps: JSONEncoder = json.dumps, ) -> None: From d6d2bcc2ba6dc2d8933b89365dfbbfc22f259ca9 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 26 Sep 2024 19:38:22 +0100 Subject: [PATCH 0650/1511] [PR #9294/552dea53 backport][3.10] Backport type fix from #9226 (#9299) **This is a backport of PR #9294 as merged into 3.11 (552dea531d06d9388ce7e110a52960c515228b16).** Co-authored-by: Sam Bull <git@sambull.org> --- aiohttp/web_ws.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index 382223097ea..bf35f3bb1f6 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -379,14 +379,14 @@ async def pong(self, message: bytes = b"") -> None: raise RuntimeError("Call .prepare() first") await self._writer.pong(message) - async def send_str(self, data: str, compress: Optional[bool] = None) -> None: + async def send_str(self, data: str, compress: Optional[int] = None) -> None: if self._writer is None: raise RuntimeError("Call .prepare() first") if not isinstance(data, str): raise TypeError("data argument must be str (%r)" % type(data)) await self._writer.send(data, binary=False, compress=compress) - async def send_bytes(self, data: bytes, compress: Optional[bool] = None) -> None: + async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None: if self._writer is None: raise RuntimeError("Call .prepare() first") if not isinstance(data, (bytes, bytearray, memoryview)): @@ -396,7 +396,7 @@ async def send_bytes(self, data: bytes, compress: Optional[bool] = None) -> None async def send_json( self, data: Any, - compress: Optional[bool] = None, + compress: Optional[int] = None, *, dumps: JSONEncoder = json.dumps, ) -> None: From 31b14d3824f785ddfb000930ff288400d4d4dbe0 Mon 
Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 27 Sep 2024 02:14:58 +0000 Subject: [PATCH 0651/1511] [PR #9302/0b8be7f6 backport][3.11] Bump yarl to 1.13.0 (#9303) Co-authored-by: J. Nick Koston <nick@koston.org> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index c4c189c3e4e..89e28622522 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -40,5 +40,5 @@ typing-extensions==4.12.2 # via multidict uvloop==0.21.0b1 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in -yarl==1.12.1 +yarl==1.13.0 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 64ab607cf31..ae130c09032 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -287,7 +287,7 @@ webcolors==24.8.0 # via blockdiag wheel==0.44.0 # via pip-tools -yarl==1.12.1 +yarl==1.13.0 # via -r requirements/runtime-deps.in zipp==3.20.2 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index e170cac365f..80b486f0762 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -279,7 +279,7 @@ webcolors==24.8.0 # via blockdiag wheel==0.44.0 # via pip-tools -yarl==1.12.1 +yarl==1.13.0 # via -r requirements/runtime-deps.in zipp==3.20.2 # via diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 9a28aaf8c00..14b6c85f48d 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -34,5 +34,5 @@ pycparser==2.22 # via cffi typing-extensions==4.12.2 # via multidict -yarl==1.12.1 +yarl==1.13.0 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 2280e990965..7137147708e 100644 --- 
a/requirements/test.txt +++ b/requirements/test.txt @@ -137,5 +137,5 @@ uvloop==0.21.0b1 ; platform_system != "Windows" and implementation_name == "cpyt # via -r requirements/base.in wait-for-it==2.2.2 # via -r requirements/test.in -yarl==1.12.1 +yarl==1.13.0 # via -r requirements/runtime-deps.in From fd5ece6dfd5f68d02144280813aa44005431e271 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Thu, 26 Sep 2024 21:23:09 -0500 Subject: [PATCH 0652/1511] Bump yarl to 1.13.0 (#9302) (#9304) --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 6359f4d60c7..e089d2bd2c1 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -38,5 +38,5 @@ pycparser==2.21 # via cffi uvloop==0.21.0b1 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in -yarl==1.12.0 +yarl==1.13.0 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index b9a6c80e86f..30356d04664 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -274,7 +274,7 @@ webcolors==1.11.1 # via blockdiag wheel==0.37.0 # via pip-tools -yarl==1.12.0 +yarl==1.13.0 # via -r requirements/runtime-deps.in zipp==3.17.0 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 92ec10aa4c1..8aeea4ee844 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -261,7 +261,7 @@ webcolors==1.13 # via blockdiag wheel==0.41.0 # via pip-tools -yarl==1.12.0 +yarl==1.13.0 # via -r requirements/runtime-deps.in zipp==3.17.0 # via diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index b7a3828955e..a70b633b88c 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -32,5 +32,5 @@ pycares==4.3.0 # via aiodns 
pycparser==2.21 # via cffi -yarl==1.12.0 +yarl==1.13.0 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 9900af5bd19..8be1e493f2d 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -125,5 +125,5 @@ uvloop==0.21.0b1 ; platform_system != "Windows" and implementation_name == "cpyt # via -r requirements/base.in wait-for-it==2.2.2 # via -r requirements/test.in -yarl==1.12.0 +yarl==1.13.0 # via -r requirements/runtime-deps.in From 1ffe9faa912cca8e0a6d07dcb97d72bdb5cf5e0c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 27 Sep 2024 11:19:42 -0500 Subject: [PATCH 0653/1511] [PR #9305/9b33c814 backport][3.11] Increase minimum yarl requirement to 1.13.0 (#9313) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/9305.breaking.rst | 1 + requirements/runtime-deps.in | 2 +- setup.cfg | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 CHANGES/9305.breaking.rst diff --git a/CHANGES/9305.breaking.rst b/CHANGES/9305.breaking.rst new file mode 100644 index 00000000000..82fec1d21b4 --- /dev/null +++ b/CHANGES/9305.breaking.rst @@ -0,0 +1 @@ +Increased minimum yarl version to 1.13.0 -- by :user:`bdraco`. 
diff --git a/requirements/runtime-deps.in b/requirements/runtime-deps.in index 9a199453d55..4b8ab98dd08 100644 --- a/requirements/runtime-deps.in +++ b/requirements/runtime-deps.in @@ -9,4 +9,4 @@ Brotli; platform_python_implementation == 'CPython' brotlicffi; platform_python_implementation != 'CPython' frozenlist >= 1.1.1 multidict >=4.5, < 7.0 -yarl >= 1.12.0, < 2.0 +yarl >= 1.13.0, < 2.0 diff --git a/setup.cfg b/setup.cfg index 8168dac4408..781fc4ca40f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -55,7 +55,7 @@ install_requires = attrs >= 17.3.0 frozenlist >= 1.1.1 multidict >=4.5, < 7.0 - yarl >= 1.12.0, < 2.0 + yarl >= 1.13.0, < 2.0 [options.exclude_package_data] * = From d32d5805e39fa8b75f7a688788de5b73b6ce0077 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Fri, 27 Sep 2024 12:41:34 -0500 Subject: [PATCH 0654/1511] [PR #9301/c240b52 backport][3.10] Replace code that can now be handled by yarl (#9314) --- CHANGES/9301.misc.rst | 1 + aiohttp/client_reqrep.py | 32 +++++++++++++++++++++----------- tests/test_client_request.py | 15 +++++++++++---- tests/test_proxy_functional.py | 24 ++++++++++++++++-------- 4 files changed, 49 insertions(+), 23 deletions(-) create mode 100644 CHANGES/9301.misc.rst diff --git a/CHANGES/9301.misc.rst b/CHANGES/9301.misc.rst new file mode 100644 index 00000000000..a751bdfc6dc --- /dev/null +++ b/CHANGES/9301.misc.rst @@ -0,0 +1 @@ +Replaced code that can now be handled by ``yarl`` -- by :user:`bdraco`. diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index aeabf0161b8..10682f57885 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -27,7 +27,7 @@ import attr from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy -from yarl import URL +from yarl import URL, __version__ as yarl_version from . 
import hdrs, helpers, http, multipart, payload from .abc import AbstractStreamWriter @@ -90,6 +90,10 @@ _CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]") json_re = re.compile(r"^application/(?:[\w.+-]+?\+)?json") +_YARL_SUPPORTS_HOST_SUBCOMPONENT = tuple(map(int, yarl_version.split(".")[:2])) >= ( + 1, + 13, +) def _gen_default_accept_encoding() -> str: @@ -429,9 +433,13 @@ def update_headers(self, headers: Optional[LooseHeaders]) -> None: self.headers: CIMultiDict[str] = CIMultiDict() # add host - netloc = cast(str, self.url.raw_host) - if helpers.is_ipv6_address(netloc): - netloc = f"[{netloc}]" + if _YARL_SUPPORTS_HOST_SUBCOMPONENT: + netloc = self.url.host_subcomponent + assert netloc is not None + else: + netloc = cast(str, self.url.raw_host) + if helpers.is_ipv6_address(netloc): + netloc = f"[{netloc}]" # See https://github.com/aio-libs/aiohttp/issues/3636. netloc = netloc.rstrip(".") if self.url.port is not None and not self.url.is_default_port(): @@ -676,17 +684,19 @@ async def send(self, conn: "Connection") -> "ClientResponse": # - not CONNECT proxy must send absolute form URI # - most common is origin form URI if self.method == hdrs.METH_CONNECT: - connect_host = self.url.raw_host - assert connect_host is not None - if helpers.is_ipv6_address(connect_host): - connect_host = f"[{connect_host}]" + if _YARL_SUPPORTS_HOST_SUBCOMPONENT: + connect_host = self.url.host_subcomponent + assert connect_host is not None + else: + connect_host = self.url.raw_host + assert connect_host is not None + if helpers.is_ipv6_address(connect_host): + connect_host = f"[{connect_host}]" path = f"{connect_host}:{self.url.port}" elif self.proxy and not self.is_ssl(): path = str(self.url) else: - path = self.url.raw_path - if self.url.raw_query_string: - path += "?" 
+ self.url.raw_query_string + path = self.url.raw_path_qs protocol = conn.protocol assert protocol is not None diff --git a/tests/test_client_request.py b/tests/test_client_request.py index f2eff019504..7853b541fc9 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -14,7 +14,7 @@ from yarl import URL import aiohttp -from aiohttp import BaseConnector, hdrs, helpers, payload +from aiohttp import BaseConnector, client_reqrep, hdrs, helpers, payload from aiohttp.client_exceptions import ClientConnectionError from aiohttp.client_reqrep import ( ClientRequest, @@ -280,9 +280,16 @@ def test_host_header_ipv4(make_request) -> None: assert req.headers["HOST"] == "127.0.0.2" -def test_host_header_ipv6(make_request) -> None: - req = make_request("get", "http://[::2]") - assert req.headers["HOST"] == "[::2]" +@pytest.mark.parametrize("yarl_supports_host_subcomponent", [True, False]) +def test_host_header_ipv6(make_request, yarl_supports_host_subcomponent: bool) -> None: + # Ensure the old path is tested for old yarl versions + with mock.patch.object( + client_reqrep, + "_YARL_SUPPORTS_HOST_SUBCOMPONENT", + yarl_supports_host_subcomponent, + ): + req = make_request("get", "http://[::2]") + assert req.headers["HOST"] == "[::2]" def test_host_header_ipv4_with_port(make_request) -> None: diff --git a/tests/test_proxy_functional.py b/tests/test_proxy_functional.py index c15ca326288..4b11d11e3a7 100644 --- a/tests/test_proxy_functional.py +++ b/tests/test_proxy_functional.py @@ -12,7 +12,7 @@ from yarl import URL import aiohttp -from aiohttp import web +from aiohttp import client_reqrep, web from aiohttp.client_exceptions import ClientConnectionError from aiohttp.helpers import IS_MACOS, IS_WINDOWS @@ -95,6 +95,7 @@ async def handler(*args, **kwargs): reason="asyncio on this python does not support TLS in TLS", ) @pytest.mark.parametrize("web_server_endpoint_type", ("http", "https")) +@pytest.mark.parametrize("yarl_supports_host_subcomponent", [True, 
False]) @pytest.mark.filterwarnings(r"ignore:.*ssl.OP_NO_SSL*") # Filter out the warning from # https://github.com/abhinavsingh/proxy.py/blob/30574fd0414005dfa8792a6e797023e862bdcf43/proxy/common/utils.py#L226 @@ -104,18 +105,25 @@ async def test_secure_https_proxy_absolute_path( secure_proxy_url: URL, web_server_endpoint_url: str, web_server_endpoint_payload: str, + yarl_supports_host_subcomponent: bool, ) -> None: """Ensure HTTP(S) sites are accessible through a secure proxy.""" conn = aiohttp.TCPConnector() sess = aiohttp.ClientSession(connector=conn) - async with sess.get( - web_server_endpoint_url, - proxy=secure_proxy_url, - ssl=client_ssl_ctx, # used for both proxy and endpoint connections - ) as response: - assert response.status == 200 - assert await response.text() == web_server_endpoint_payload + # Ensure the old path is tested for old yarl versions + with mock.patch.object( + client_reqrep, + "_YARL_SUPPORTS_HOST_SUBCOMPONENT", + yarl_supports_host_subcomponent, + ): + async with sess.get( + web_server_endpoint_url, + proxy=secure_proxy_url, + ssl=client_ssl_ctx, # used for both proxy and endpoint connections + ) as response: + assert response.status == 200 + assert await response.text() == web_server_endpoint_payload await sess.close() await conn.close() From 0604da9f1d74a1f81d681efed10f89cf906dd9b0 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Fri, 27 Sep 2024 12:50:08 -0500 Subject: [PATCH 0655/1511] [PR #9301/c240b52 backport][3.11] Replace code that can now be handled by yarl (#9315) --- CHANGES/9301.misc.rst | 1 + aiohttp/client_reqrep.py | 32 +++++++++++++++++++++----------- tests/test_client_request.py | 15 +++++++++++---- tests/test_proxy_functional.py | 24 ++++++++++++++++-------- 4 files changed, 49 insertions(+), 23 deletions(-) create mode 100644 CHANGES/9301.misc.rst diff --git a/CHANGES/9301.misc.rst b/CHANGES/9301.misc.rst new file mode 100644 index 00000000000..a751bdfc6dc --- /dev/null +++ b/CHANGES/9301.misc.rst @@ -0,0 +1 @@ +Replaced code that can now be handled by ``yarl`` -- by :user:`bdraco`. diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 627966dbca6..01b7b3002d9 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -27,7 +27,7 @@ import attr from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy -from yarl import URL +from yarl import URL, __version__ as yarl_version from . import hdrs, helpers, http, multipart, payload from .abc import AbstractStreamWriter @@ -90,6 +90,10 @@ _CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]") json_re = re.compile(r"^application/(?:[\w.+-]+?\+)?json") +_YARL_SUPPORTS_HOST_SUBCOMPONENT = tuple(map(int, yarl_version.split(".")[:2])) >= ( + 1, + 13, +) def _gen_default_accept_encoding() -> str: @@ -427,9 +431,13 @@ def update_headers(self, headers: Optional[LooseHeaders]) -> None: self.headers: CIMultiDict[str] = CIMultiDict() # add host - netloc = cast(str, self.url.raw_host) - if helpers.is_ipv6_address(netloc): - netloc = f"[{netloc}]" + if _YARL_SUPPORTS_HOST_SUBCOMPONENT: + netloc = self.url.host_subcomponent + assert netloc is not None + else: + netloc = cast(str, self.url.raw_host) + if helpers.is_ipv6_address(netloc): + netloc = f"[{netloc}]" # See https://github.com/aio-libs/aiohttp/issues/3636. 
netloc = netloc.rstrip(".") if self.url.port is not None and not self.url.is_default_port(): @@ -674,17 +682,19 @@ async def send(self, conn: "Connection") -> "ClientResponse": # - not CONNECT proxy must send absolute form URI # - most common is origin form URI if self.method == hdrs.METH_CONNECT: - connect_host = self.url.raw_host - assert connect_host is not None - if helpers.is_ipv6_address(connect_host): - connect_host = f"[{connect_host}]" + if _YARL_SUPPORTS_HOST_SUBCOMPONENT: + connect_host = self.url.host_subcomponent + assert connect_host is not None + else: + connect_host = self.url.raw_host + assert connect_host is not None + if helpers.is_ipv6_address(connect_host): + connect_host = f"[{connect_host}]" path = f"{connect_host}:{self.url.port}" elif self.proxy and not self.is_ssl(): path = str(self.url) else: - path = self.url.raw_path - if self.url.raw_query_string: - path += "?" + self.url.raw_query_string + path = self.url.raw_path_qs protocol = conn.protocol assert protocol is not None diff --git a/tests/test_client_request.py b/tests/test_client_request.py index f2eff019504..7853b541fc9 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -14,7 +14,7 @@ from yarl import URL import aiohttp -from aiohttp import BaseConnector, hdrs, helpers, payload +from aiohttp import BaseConnector, client_reqrep, hdrs, helpers, payload from aiohttp.client_exceptions import ClientConnectionError from aiohttp.client_reqrep import ( ClientRequest, @@ -280,9 +280,16 @@ def test_host_header_ipv4(make_request) -> None: assert req.headers["HOST"] == "127.0.0.2" -def test_host_header_ipv6(make_request) -> None: - req = make_request("get", "http://[::2]") - assert req.headers["HOST"] == "[::2]" +@pytest.mark.parametrize("yarl_supports_host_subcomponent", [True, False]) +def test_host_header_ipv6(make_request, yarl_supports_host_subcomponent: bool) -> None: + # Ensure the old path is tested for old yarl versions + with mock.patch.object( + 
client_reqrep, + "_YARL_SUPPORTS_HOST_SUBCOMPONENT", + yarl_supports_host_subcomponent, + ): + req = make_request("get", "http://[::2]") + assert req.headers["HOST"] == "[::2]" def test_host_header_ipv4_with_port(make_request) -> None: diff --git a/tests/test_proxy_functional.py b/tests/test_proxy_functional.py index c15ca326288..4b11d11e3a7 100644 --- a/tests/test_proxy_functional.py +++ b/tests/test_proxy_functional.py @@ -12,7 +12,7 @@ from yarl import URL import aiohttp -from aiohttp import web +from aiohttp import client_reqrep, web from aiohttp.client_exceptions import ClientConnectionError from aiohttp.helpers import IS_MACOS, IS_WINDOWS @@ -95,6 +95,7 @@ async def handler(*args, **kwargs): reason="asyncio on this python does not support TLS in TLS", ) @pytest.mark.parametrize("web_server_endpoint_type", ("http", "https")) +@pytest.mark.parametrize("yarl_supports_host_subcomponent", [True, False]) @pytest.mark.filterwarnings(r"ignore:.*ssl.OP_NO_SSL*") # Filter out the warning from # https://github.com/abhinavsingh/proxy.py/blob/30574fd0414005dfa8792a6e797023e862bdcf43/proxy/common/utils.py#L226 @@ -104,18 +105,25 @@ async def test_secure_https_proxy_absolute_path( secure_proxy_url: URL, web_server_endpoint_url: str, web_server_endpoint_payload: str, + yarl_supports_host_subcomponent: bool, ) -> None: """Ensure HTTP(S) sites are accessible through a secure proxy.""" conn = aiohttp.TCPConnector() sess = aiohttp.ClientSession(connector=conn) - async with sess.get( - web_server_endpoint_url, - proxy=secure_proxy_url, - ssl=client_ssl_ctx, # used for both proxy and endpoint connections - ) as response: - assert response.status == 200 - assert await response.text() == web_server_endpoint_payload + # Ensure the old path is tested for old yarl versions + with mock.patch.object( + client_reqrep, + "_YARL_SUPPORTS_HOST_SUBCOMPONENT", + yarl_supports_host_subcomponent, + ): + async with sess.get( + web_server_endpoint_url, + proxy=secure_proxy_url, + 
ssl=client_ssl_ctx, # used for both proxy and endpoint connections + ) as response: + assert response.status == 200 + assert await response.text() == web_server_endpoint_payload await sess.close() await conn.close() From 74fd3e546f40787eafb47315ead6755e64d7d1ef Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Fri, 27 Sep 2024 13:45:10 -0500 Subject: [PATCH 0656/1511] Remove yarl back-compat shims for yarl older than 1.13.0 (#9316) (#9317) --- aiohttp/client_reqrep.py | 25 +++++-------------------- tests/test_client_request.py | 15 ++++----------- tests/test_proxy_functional.py | 24 ++++++++---------------- 3 files changed, 17 insertions(+), 47 deletions(-) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 01b7b3002d9..88b87c5da56 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -27,7 +27,7 @@ import attr from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy -from yarl import URL, __version__ as yarl_version +from yarl import URL from . import hdrs, helpers, http, multipart, payload from .abc import AbstractStreamWriter @@ -90,10 +90,6 @@ _CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]") json_re = re.compile(r"^application/(?:[\w.+-]+?\+)?json") -_YARL_SUPPORTS_HOST_SUBCOMPONENT = tuple(map(int, yarl_version.split(".")[:2])) >= ( - 1, - 13, -) def _gen_default_accept_encoding() -> str: @@ -431,13 +427,8 @@ def update_headers(self, headers: Optional[LooseHeaders]) -> None: self.headers: CIMultiDict[str] = CIMultiDict() # add host - if _YARL_SUPPORTS_HOST_SUBCOMPONENT: - netloc = self.url.host_subcomponent - assert netloc is not None - else: - netloc = cast(str, self.url.raw_host) - if helpers.is_ipv6_address(netloc): - netloc = f"[{netloc}]" + netloc = self.url.host_subcomponent + assert netloc is not None # See https://github.com/aio-libs/aiohttp/issues/3636. 
netloc = netloc.rstrip(".") if self.url.port is not None and not self.url.is_default_port(): @@ -682,14 +673,8 @@ async def send(self, conn: "Connection") -> "ClientResponse": # - not CONNECT proxy must send absolute form URI # - most common is origin form URI if self.method == hdrs.METH_CONNECT: - if _YARL_SUPPORTS_HOST_SUBCOMPONENT: - connect_host = self.url.host_subcomponent - assert connect_host is not None - else: - connect_host = self.url.raw_host - assert connect_host is not None - if helpers.is_ipv6_address(connect_host): - connect_host = f"[{connect_host}]" + connect_host = self.url.host_subcomponent + assert connect_host is not None path = f"{connect_host}:{self.url.port}" elif self.proxy and not self.is_ssl(): path = str(self.url) diff --git a/tests/test_client_request.py b/tests/test_client_request.py index 7853b541fc9..f2eff019504 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -14,7 +14,7 @@ from yarl import URL import aiohttp -from aiohttp import BaseConnector, client_reqrep, hdrs, helpers, payload +from aiohttp import BaseConnector, hdrs, helpers, payload from aiohttp.client_exceptions import ClientConnectionError from aiohttp.client_reqrep import ( ClientRequest, @@ -280,16 +280,9 @@ def test_host_header_ipv4(make_request) -> None: assert req.headers["HOST"] == "127.0.0.2" -@pytest.mark.parametrize("yarl_supports_host_subcomponent", [True, False]) -def test_host_header_ipv6(make_request, yarl_supports_host_subcomponent: bool) -> None: - # Ensure the old path is tested for old yarl versions - with mock.patch.object( - client_reqrep, - "_YARL_SUPPORTS_HOST_SUBCOMPONENT", - yarl_supports_host_subcomponent, - ): - req = make_request("get", "http://[::2]") - assert req.headers["HOST"] == "[::2]" +def test_host_header_ipv6(make_request) -> None: + req = make_request("get", "http://[::2]") + assert req.headers["HOST"] == "[::2]" def test_host_header_ipv4_with_port(make_request) -> None: diff --git 
a/tests/test_proxy_functional.py b/tests/test_proxy_functional.py index 4b11d11e3a7..c15ca326288 100644 --- a/tests/test_proxy_functional.py +++ b/tests/test_proxy_functional.py @@ -12,7 +12,7 @@ from yarl import URL import aiohttp -from aiohttp import client_reqrep, web +from aiohttp import web from aiohttp.client_exceptions import ClientConnectionError from aiohttp.helpers import IS_MACOS, IS_WINDOWS @@ -95,7 +95,6 @@ async def handler(*args, **kwargs): reason="asyncio on this python does not support TLS in TLS", ) @pytest.mark.parametrize("web_server_endpoint_type", ("http", "https")) -@pytest.mark.parametrize("yarl_supports_host_subcomponent", [True, False]) @pytest.mark.filterwarnings(r"ignore:.*ssl.OP_NO_SSL*") # Filter out the warning from # https://github.com/abhinavsingh/proxy.py/blob/30574fd0414005dfa8792a6e797023e862bdcf43/proxy/common/utils.py#L226 @@ -105,25 +104,18 @@ async def test_secure_https_proxy_absolute_path( secure_proxy_url: URL, web_server_endpoint_url: str, web_server_endpoint_payload: str, - yarl_supports_host_subcomponent: bool, ) -> None: """Ensure HTTP(S) sites are accessible through a secure proxy.""" conn = aiohttp.TCPConnector() sess = aiohttp.ClientSession(connector=conn) - # Ensure the old path is tested for old yarl versions - with mock.patch.object( - client_reqrep, - "_YARL_SUPPORTS_HOST_SUBCOMPONENT", - yarl_supports_host_subcomponent, - ): - async with sess.get( - web_server_endpoint_url, - proxy=secure_proxy_url, - ssl=client_ssl_ctx, # used for both proxy and endpoint connections - ) as response: - assert response.status == 200 - assert await response.text() == web_server_endpoint_payload + async with sess.get( + web_server_endpoint_url, + proxy=secure_proxy_url, + ssl=client_ssl_ctx, # used for both proxy and endpoint connections + ) as response: + assert response.status == 200 + assert await response.text() == web_server_endpoint_payload await sess.close() await conn.close() From 8999d9b0563a4139701335fef1ae632e972e2e65 
Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 27 Sep 2024 19:06:21 +0000 Subject: [PATCH 0657/1511] [PR #9309/e4028333 backport][3.11] Fix building the URL in BaseRequest when the host contains a port or IPv6 address (#9319) Co-authored-by: J. Nick Koston <nick@koston.org> fixes #9307 --- CHANGES/9309.bugfix.rst | 1 + aiohttp/web_request.py | 10 ++++++++-- tests/test_web_request.py | 10 ++++++++++ 3 files changed, 19 insertions(+), 2 deletions(-) create mode 100644 CHANGES/9309.bugfix.rst diff --git a/CHANGES/9309.bugfix.rst b/CHANGES/9309.bugfix.rst new file mode 100644 index 00000000000..73870da1938 --- /dev/null +++ b/CHANGES/9309.bugfix.rst @@ -0,0 +1 @@ +Fixed assembling the :class:`~yarl.URL` for web requests when the host contains a non-default port or IPv6 address -- by :user:`bdraco`. diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index f7e511fa477..502a93d247a 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -431,6 +431,10 @@ def host(self) -> str: - overridden value by .clone(host=new_host) call. - HOST HTTP header - socket.getfqdn() value + + For example, 'example.com' or 'localhost:8080'. + + For historical reasons, the port number may be included. 
""" host = self._message.headers.get(hdrs.HOST) if host is not None: @@ -454,8 +458,10 @@ def remote(self) -> Optional[str]: @reify def url(self) -> URL: - url = URL.build(scheme=self.scheme, host=self.host) - return url.join(self._rel_url) + """The full URL of the request.""" + # authority is used here because it may include the port number + # and we want yarl to parse it correctly + return URL.build(scheme=self.scheme, authority=self.host).join(self._rel_url) @reify def path(self) -> str: diff --git a/tests/test_web_request.py b/tests/test_web_request.py index ff22e19d5b4..c22b3b17921 100644 --- a/tests/test_web_request.py +++ b/tests/test_web_request.py @@ -526,6 +526,16 @@ def test_url_url() -> None: assert URL("http://example.com/path") == req.url +def test_url_non_default_port() -> None: + req = make_mocked_request("GET", "/path", headers={"HOST": "example.com:8123"}) + assert req.url == URL("http://example.com:8123/path") + + +def test_url_ipv6() -> None: + req = make_mocked_request("GET", "/path", headers={"HOST": "[::1]:8123"}) + assert req.url == URL("http://[::1]:8123/path") + + def test_clone() -> None: req = make_mocked_request("GET", "/path") req2 = req.clone() From 8220ced9f7515901cbf0976fb48ed867ff541241 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 27 Sep 2024 19:15:12 +0000 Subject: [PATCH 0658/1511] [PR #9309/e4028333 backport][3.10] Fix building the URL in BaseRequest when the host contains a port or IPv6 address (#9318) Co-authored-by: J. 
Nick Koston <nick@koston.org> fixes #9307 --- CHANGES/9309.bugfix.rst | 1 + aiohttp/web_request.py | 10 ++++++++-- tests/test_web_request.py | 10 ++++++++++ 3 files changed, 19 insertions(+), 2 deletions(-) create mode 100644 CHANGES/9309.bugfix.rst diff --git a/CHANGES/9309.bugfix.rst b/CHANGES/9309.bugfix.rst new file mode 100644 index 00000000000..73870da1938 --- /dev/null +++ b/CHANGES/9309.bugfix.rst @@ -0,0 +1 @@ +Fixed assembling the :class:`~yarl.URL` for web requests when the host contains a non-default port or IPv6 address -- by :user:`bdraco`. diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index 91fc1f42bfe..62a08ea248b 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -431,6 +431,10 @@ def host(self) -> str: - overridden value by .clone(host=new_host) call. - HOST HTTP header - socket.getfqdn() value + + For example, 'example.com' or 'localhost:8080'. + + For historical reasons, the port number may be included. """ host = self._message.headers.get(hdrs.HOST) if host is not None: @@ -454,8 +458,10 @@ def remote(self) -> Optional[str]: @reify def url(self) -> URL: - url = URL.build(scheme=self.scheme, host=self.host) - return url.join(self._rel_url) + """The full URL of the request.""" + # authority is used here because it may include the port number + # and we want yarl to parse it correctly + return URL.build(scheme=self.scheme, authority=self.host).join(self._rel_url) @reify def path(self) -> str: diff --git a/tests/test_web_request.py b/tests/test_web_request.py index ba12d6f54e7..9e613bb6613 100644 --- a/tests/test_web_request.py +++ b/tests/test_web_request.py @@ -526,6 +526,16 @@ def test_url_url() -> None: assert URL("http://example.com/path") == req.url +def test_url_non_default_port() -> None: + req = make_mocked_request("GET", "/path", headers={"HOST": "example.com:8123"}) + assert req.url == URL("http://example.com:8123/path") + + +def test_url_ipv6() -> None: + req = make_mocked_request("GET", "/path", 
headers={"HOST": "[::1]:8123"}) + assert req.url == URL("http://[::1]:8123/path") + + def test_clone() -> None: req = make_mocked_request("GET", "/path") req2 = req.clone() From f9a9e853802a26a9bdef4cfe4ac2328b6ab21190 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Fri, 27 Sep 2024 14:48:37 -0500 Subject: [PATCH 0659/1511] Release 3.10.7 (#9320) --- CHANGES.rst | 40 ++++++++++++++++++++++++++++++++++++++++ CHANGES/9171.misc.rst | 3 --- CHANGES/9301.misc.rst | 1 - CHANGES/9309.bugfix.rst | 1 - aiohttp/__init__.py | 2 +- 5 files changed, 41 insertions(+), 6 deletions(-) delete mode 100644 CHANGES/9171.misc.rst delete mode 100644 CHANGES/9301.misc.rst delete mode 100644 CHANGES/9309.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index f6010b9840a..443c62a184d 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,46 @@ .. towncrier release notes start +3.10.7 (2024-09-27) +=================== + +Bug fixes +--------- + +- Fixed assembling the :class:`~yarl.URL` for web requests when the host contains a non-default port or IPv6 address -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9309`. + + + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of determining if a URL is absolute -- by :user:`bdraco`. + + The property :attr:`~yarl.URL.absolute` is more performant than the method ``URL.is_absolute()`` and preferred when newer versions of yarl are used. + + + *Related issues and pull requests on GitHub:* + :issue:`9171`. + + + +- Replaced code that can now be handled by ``yarl`` -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9301`. 
+ + + + +---- + + 3.10.6 (2024-09-24) =================== diff --git a/CHANGES/9171.misc.rst b/CHANGES/9171.misc.rst deleted file mode 100644 index c6742edd891..00000000000 --- a/CHANGES/9171.misc.rst +++ /dev/null @@ -1,3 +0,0 @@ -Improved performance of determining if a URL is absolute -- by :user:`bdraco`. - -The property :attr:`~yarl.URL.absolute` is more performant than the method ``URL.is_absolute()`` and preferred when newer versions of yarl are used. diff --git a/CHANGES/9301.misc.rst b/CHANGES/9301.misc.rst deleted file mode 100644 index a751bdfc6dc..00000000000 --- a/CHANGES/9301.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Replaced code that can now be handled by ``yarl`` -- by :user:`bdraco`. diff --git a/CHANGES/9309.bugfix.rst b/CHANGES/9309.bugfix.rst deleted file mode 100644 index 73870da1938..00000000000 --- a/CHANGES/9309.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed assembling the :class:`~yarl.URL` for web requests when the host contains a non-default port or IPv6 address -- by :user:`bdraco`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 63367052646..cd3834cd3ff 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.10.6.dev0" +__version__ = "3.10.7" from typing import TYPE_CHECKING, Tuple From 609c6e331d2d262514b7fc4ca821264d37faa0e1 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Fri, 27 Sep 2024 17:33:40 -0500 Subject: [PATCH 0660/1511] Increment version to 3.10.8.dev0 --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index cd3834cd3ff..6a1382e5671 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.10.7" +__version__ = "3.10.8.dev0" from typing import TYPE_CHECKING, Tuple From 4e3797a800496e28d87ddea59ff70f3b129dd794 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Sat, 28 Sep 2024 02:38:08 +0100 Subject: [PATCH 0661/1511] Fix custom cookies example (#9321) (#9323) (cherry picked from commit a4b148e84dacfaac3b17c6cb5f5ca3025b0e4914) --- docs/client_advanced.rst | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/docs/client_advanced.rst b/docs/client_advanced.rst index 26594a21b1c..524b0877450 100644 --- a/docs/client_advanced.rst +++ b/docs/client_advanced.rst @@ -82,14 +82,14 @@ parameter of :class:`ClientSession` constructor:: between multiple requests:: async with aiohttp.ClientSession() as session: - await session.get( - 'http://httpbin.org/cookies/set?my_cookie=my_value') - filtered = session.cookie_jar.filter_cookies( - 'http://httpbin.org') - assert filtered['my_cookie'].value == 'my_value' - async with session.get('http://httpbin.org/cookies') as r: + async with session.get( + "http://httpbin.org/cookies/set?my_cookie=my_value", + allow_redirects=False + ) as resp: + assert resp.cookies["my_cookie"].value == "my_value" + async with session.get("http://httpbin.org/cookies") as r: json_body = await r.json() - assert json_body['cookies']['my_cookie'] == 'my_value' + assert json_body["cookies"]["my_cookie"] == "my_value" Response Headers and Cookies ---------------------------- From 52e0b917ddbba93cececb32587928583d66f5f61 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Sat, 28 Sep 2024 02:38:21 +0100 Subject: [PATCH 
0662/1511] Fix custom cookies example (#9321) (#9324) (cherry picked from commit a4b148e84dacfaac3b17c6cb5f5ca3025b0e4914) --- docs/client_advanced.rst | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/docs/client_advanced.rst b/docs/client_advanced.rst index 26594a21b1c..524b0877450 100644 --- a/docs/client_advanced.rst +++ b/docs/client_advanced.rst @@ -82,14 +82,14 @@ parameter of :class:`ClientSession` constructor:: between multiple requests:: async with aiohttp.ClientSession() as session: - await session.get( - 'http://httpbin.org/cookies/set?my_cookie=my_value') - filtered = session.cookie_jar.filter_cookies( - 'http://httpbin.org') - assert filtered['my_cookie'].value == 'my_value' - async with session.get('http://httpbin.org/cookies') as r: + async with session.get( + "http://httpbin.org/cookies/set?my_cookie=my_value", + allow_redirects=False + ) as resp: + assert resp.cookies["my_cookie"].value == "my_value" + async with session.get("http://httpbin.org/cookies") as r: json_body = await r.json() - assert json_body['cookies']['my_cookie'] == 'my_value' + assert json_body["cookies"]["my_cookie"] == "my_value" Response Headers and Cookies ---------------------------- From a308f748de85708735a0a07f30551ebee77c75e5 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sat, 28 Sep 2024 14:11:18 -0500 Subject: [PATCH 0663/1511] [PR #9326/fe26ae2 backport][3.10] Fix TimerContext not uncancelling the current task (#9328) --- CHANGES/9326.bugfix.rst | 1 + aiohttp/helpers.py | 23 ++++++++++++++--- tests/test_helpers.py | 56 ++++++++++++++++++++++++++++++++++++++++- 3 files changed, 76 insertions(+), 4 deletions(-) create mode 100644 CHANGES/9326.bugfix.rst diff --git a/CHANGES/9326.bugfix.rst b/CHANGES/9326.bugfix.rst new file mode 100644 index 00000000000..4689941708f --- /dev/null +++ b/CHANGES/9326.bugfix.rst @@ -0,0 +1 @@ +Fixed cancellation leaking upwards on timeout -- by :user:`bdraco`. 
diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index 40705b16d71..ee2a91cec46 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -686,6 +686,7 @@ def __init__(self, loop: asyncio.AbstractEventLoop) -> None: self._loop = loop self._tasks: List[asyncio.Task[Any]] = [] self._cancelled = False + self._cancelling = 0 def assert_timeout(self) -> None: """Raise TimeoutError if timer has already been cancelled.""" @@ -694,12 +695,17 @@ def assert_timeout(self) -> None: def __enter__(self) -> BaseTimerContext: task = asyncio.current_task(loop=self._loop) - if task is None: raise RuntimeError( "Timeout context manager should be used " "inside a task" ) + if sys.version_info >= (3, 11): + # Remember if the task was already cancelling + # so when we __exit__ we can decide if we should + # raise asyncio.TimeoutError or let the cancellation propagate + self._cancelling = task.cancelling() + if self._cancelled: raise asyncio.TimeoutError from None @@ -712,11 +718,22 @@ def __exit__( exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> Optional[bool]: + enter_task: Optional[asyncio.Task[Any]] = None if self._tasks: - self._tasks.pop() + enter_task = self._tasks.pop() if exc_type is asyncio.CancelledError and self._cancelled: - raise asyncio.TimeoutError from None + assert enter_task is not None + # The timeout was hit, and the task was cancelled + # so we need to uncancel the last task that entered the context manager + # since the cancellation should not leak out of the context manager + if sys.version_info >= (3, 11): + # If the task was already cancelling don't raise + # asyncio.TimeoutError and instead return None + # to allow the cancellation to propagate + if enter_task.uncancel() > self._cancelling: + return None + raise asyncio.TimeoutError from exc_val return None def timeout(self) -> None: diff --git a/tests/test_helpers.py b/tests/test_helpers.py index 2d6e098aae5..f79f9bebe09 100644 --- a/tests/test_helpers.py +++ 
b/tests/test_helpers.py @@ -397,7 +397,61 @@ def test_timer_context_not_cancelled() -> None: assert not m_asyncio.current_task.return_value.cancel.called -def test_timer_context_no_task(loop) -> None: +@pytest.mark.skipif( + sys.version_info < (3, 11), reason="Python 3.11+ is required for .cancelling()" +) +async def test_timer_context_timeout_does_not_leak_upward() -> None: + """Verify that the TimerContext does not leak cancellation outside the context manager.""" + loop = asyncio.get_running_loop() + ctx = helpers.TimerContext(loop) + current_task = asyncio.current_task() + assert current_task is not None + with pytest.raises(asyncio.TimeoutError): + with ctx: + assert current_task.cancelling() == 0 + loop.call_soon(ctx.timeout) + await asyncio.sleep(1) + + # After the context manager exits, the task should no longer be cancelling + assert current_task.cancelling() == 0 + + +@pytest.mark.skipif( + sys.version_info < (3, 11), reason="Python 3.11+ is required for .cancelling()" +) +async def test_timer_context_timeout_does_swallow_cancellation() -> None: + """Verify that the TimerContext does not swallow cancellation.""" + loop = asyncio.get_running_loop() + current_task = asyncio.current_task() + assert current_task is not None + ctx = helpers.TimerContext(loop) + + async def task_with_timeout() -> None: + nonlocal ctx + new_task = asyncio.current_task() + assert new_task is not None + with pytest.raises(asyncio.TimeoutError): + with ctx: + assert new_task.cancelling() == 0 + await asyncio.sleep(1) + + task = asyncio.create_task(task_with_timeout()) + await asyncio.sleep(0) + task.cancel() + assert task.cancelling() == 1 + ctx.timeout() + + # Cancellation should not leak into the current task + assert current_task.cancelling() == 0 + # Cancellation should not be swallowed if the task is cancelled + # and it also times out + await asyncio.sleep(0) + with pytest.raises(asyncio.CancelledError): + await task + assert task.cancelling() == 1 + + +def 
test_timer_context_no_task(loop: asyncio.AbstractEventLoop) -> None: with pytest.raises(RuntimeError): with helpers.TimerContext(loop): pass From 4187b8776ff68dfe55d3f3aa92600b12b728ce27 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sat, 28 Sep 2024 14:11:28 -0500 Subject: [PATCH 0664/1511] [PR #9326/fe26ae2 backport][3.11] Fix TimerContext not uncancelling the current task (#9329) --- CHANGES/9326.bugfix.rst | 1 + aiohttp/helpers.py | 23 ++++++++++++++--- tests/test_helpers.py | 56 ++++++++++++++++++++++++++++++++++++++++- 3 files changed, 76 insertions(+), 4 deletions(-) create mode 100644 CHANGES/9326.bugfix.rst diff --git a/CHANGES/9326.bugfix.rst b/CHANGES/9326.bugfix.rst new file mode 100644 index 00000000000..4689941708f --- /dev/null +++ b/CHANGES/9326.bugfix.rst @@ -0,0 +1 @@ +Fixed cancellation leaking upwards on timeout -- by :user:`bdraco`. diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index f5540a19662..5e83f40a310 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -684,6 +684,7 @@ def __init__(self, loop: asyncio.AbstractEventLoop) -> None: self._loop = loop self._tasks: List[asyncio.Task[Any]] = [] self._cancelled = False + self._cancelling = 0 def assert_timeout(self) -> None: """Raise TimeoutError if timer has already been cancelled.""" @@ -692,10 +693,15 @@ def assert_timeout(self) -> None: def __enter__(self) -> BaseTimerContext: task = asyncio.current_task(loop=self._loop) - if task is None: raise RuntimeError("Timeout context manager should be used inside a task") + if sys.version_info >= (3, 11): + # Remember if the task was already cancelling + # so when we __exit__ we can decide if we should + # raise asyncio.TimeoutError or let the cancellation propagate + self._cancelling = task.cancelling() + if self._cancelled: raise asyncio.TimeoutError from None @@ -708,11 +714,22 @@ def __exit__( exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> Optional[bool]: + enter_task: 
Optional[asyncio.Task[Any]] = None if self._tasks: - self._tasks.pop() + enter_task = self._tasks.pop() if exc_type is asyncio.CancelledError and self._cancelled: - raise asyncio.TimeoutError from None + assert enter_task is not None + # The timeout was hit, and the task was cancelled + # so we need to uncancel the last task that entered the context manager + # since the cancellation should not leak out of the context manager + if sys.version_info >= (3, 11): + # If the task was already cancelling don't raise + # asyncio.TimeoutError and instead return None + # to allow the cancellation to propagate + if enter_task.uncancel() > self._cancelling: + return None + raise asyncio.TimeoutError from exc_val return None def timeout(self) -> None: diff --git a/tests/test_helpers.py b/tests/test_helpers.py index 13d73a312fc..3ccca3a7e15 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -398,7 +398,61 @@ def test_timer_context_not_cancelled() -> None: assert not m_asyncio.current_task.return_value.cancel.called -def test_timer_context_no_task(loop) -> None: +@pytest.mark.skipif( + sys.version_info < (3, 11), reason="Python 3.11+ is required for .cancelling()" +) +async def test_timer_context_timeout_does_not_leak_upward() -> None: + """Verify that the TimerContext does not leak cancellation outside the context manager.""" + loop = asyncio.get_running_loop() + ctx = helpers.TimerContext(loop) + current_task = asyncio.current_task() + assert current_task is not None + with pytest.raises(asyncio.TimeoutError): + with ctx: + assert current_task.cancelling() == 0 + loop.call_soon(ctx.timeout) + await asyncio.sleep(1) + + # After the context manager exits, the task should no longer be cancelling + assert current_task.cancelling() == 0 + + +@pytest.mark.skipif( + sys.version_info < (3, 11), reason="Python 3.11+ is required for .cancelling()" +) +async def test_timer_context_timeout_does_swallow_cancellation() -> None: + """Verify that the TimerContext does not 
swallow cancellation.""" + loop = asyncio.get_running_loop() + current_task = asyncio.current_task() + assert current_task is not None + ctx = helpers.TimerContext(loop) + + async def task_with_timeout() -> None: + nonlocal ctx + new_task = asyncio.current_task() + assert new_task is not None + with pytest.raises(asyncio.TimeoutError): + with ctx: + assert new_task.cancelling() == 0 + await asyncio.sleep(1) + + task = asyncio.create_task(task_with_timeout()) + await asyncio.sleep(0) + task.cancel() + assert task.cancelling() == 1 + ctx.timeout() + + # Cancellation should not leak into the current task + assert current_task.cancelling() == 0 + # Cancellation should not be swallowed if the task is cancelled + # and it also times out + await asyncio.sleep(0) + with pytest.raises(asyncio.CancelledError): + await task + assert task.cancelling() == 1 + + +def test_timer_context_no_task(loop: asyncio.AbstractEventLoop) -> None: with pytest.raises(RuntimeError): with helpers.TimerContext(loop): pass From ce12066a228b857a95f2869f017c8b9ac08bda8f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 28 Sep 2024 19:43:01 +0000 Subject: [PATCH 0665/1511] Bump aiohappyeyeballs from 2.4.0 to 2.4.2 (#9310) Bumps [aiohappyeyeballs](https://github.com/aio-libs/aiohappyeyeballs) from 2.4.0 to 2.4.2. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/aiohappyeyeballs/releases">aiohappyeyeballs's releases</a>.</em></p> <blockquote> <h1>v2.4.2 (2024-09-27)</h1> <h2>Fix</h2> <ul> <li>fix: copy staggered from standard lib for python 3.12+ (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/95">#95</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/c5a4023d904b3e72f30b8a9f56913894dda4c9d0"><code>c5a4023</code></a>)</li> </ul> <h1>v2.4.1 (2024-09-26)</h1> <h2>Fix</h2> <ul> <li>fix: avoid passing loop to staggered.staggered_race (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/94">#94</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/5f80b7951f32d727039d9db776a17a6eba8877cd"><code>5f80b79</code></a>)</li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/aiohappyeyeballs/blob/main/CHANGELOG.md">aiohappyeyeballs's changelog</a>.</em></p> <blockquote> <h2>v2.4.2 (2024-09-27)</h2> <h3>Fix</h3> <ul> <li>Copy staggered from standard lib for python 3.12+ (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/95">#95</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/c5a4023d904b3e72f30b8a9f56913894dda4c9d0"><code>c5a4023</code></a>)</li> </ul> <h2>v2.4.1 (2024-09-26)</h2> <h3>Fix</h3> <ul> <li>Avoid passing loop to staggered.staggered_race (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/94">#94</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/5f80b7951f32d727039d9db776a17a6eba8877cd"><code>5f80b79</code></a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/04dbbe5be7c38bdbf8ec9e31491fe4d052d15908"><code>04dbbe5</code></a> 2.4.2</li> <li><a 
href="https://github.com/aio-libs/aiohappyeyeballs/commit/c5a4023d904b3e72f30b8a9f56913894dda4c9d0"><code>c5a4023</code></a> fix: copy staggered from standard lib for python 3.12+ (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/95">#95</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/04c42b48b0bf79ec8fda77f1b5691e1b1e3fd296"><code>04c42b4</code></a> 2.4.1</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/5f80b7951f32d727039d9db776a17a6eba8877cd"><code>5f80b79</code></a> fix: avoid passing loop to staggered.staggered_race (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/94">#94</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/b5192ad714315d212fc9bd87c560e3bf2b600f43"><code>b5192ad</code></a> chore(pre-commit.ci): pre-commit autoupdate (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/92">#92</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/097c9fa75fa6efcb22cf947a90f4db25ef39ebe5"><code>097c9fa</code></a> chore(pre-commit.ci): pre-commit autoupdate (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/91">#91</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/fdd35ac44eeebb33b916c10337161dabf130cd65"><code>fdd35ac</code></a> chore: fix trivial typo in readme</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/7038b2d087af1eacfd45cb4c22e7b65bea07b6c7"><code>7038b2d</code></a> chore: update readme to include license information (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/90">#90</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/e45003fffe08c117fbbde01ace18ce0123134be9"><code>e45003f</code></a> chore(pre-commit.ci): pre-commit autoupdate (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/88">#88</a>)</li> <li>See full diff in <a 
href="https://github.com/aio-libs/aiohappyeyeballs/compare/v2.4.0...v2.4.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=aiohappyeyeballs&package-manager=pip&previous-version=2.4.0&new-version=2.4.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: J. Nick Koston <nick@koston.org> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 89e28622522..5e33e096224 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.4.0 +aiohappyeyeballs==2.4.2 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index ae130c09032..b1991f84a6b 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via # -r requirements/lint.in # -r requirements/runtime-deps.in -aiohappyeyeballs==2.4.0 +aiohappyeyeballs==2.4.2 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.7 # via -r requirements/doc.in diff 
--git a/requirements/dev.txt b/requirements/dev.txt index 80b486f0762..7ef54a516d8 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via # -r requirements/lint.in # -r requirements/runtime-deps.in -aiohappyeyeballs==2.4.0 +aiohappyeyeballs==2.4.2 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.7 # via -r requirements/doc.in diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 14b6c85f48d..62e5eacb4fc 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.4.0 +aiohappyeyeballs==2.4.2 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 7137147708e..11ce369747b 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.4.0 +aiohappyeyeballs==2.4.2 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in From 8a7ce946650a24198c10da61059f4d38bd30708e Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sat, 28 Sep 2024 15:08:03 -0500 Subject: [PATCH 0666/1511] Release 3.10.8 (#9330) --- CHANGES.rst | 18 ++++++++++++++++++ CHANGES/9326.bugfix.rst | 1 - aiohttp/__init__.py | 2 +- 3 files changed, 19 insertions(+), 2 deletions(-) delete mode 100644 CHANGES/9326.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index 443c62a184d..0cf93a5887c 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,24 @@ .. towncrier release notes start +3.10.8 (2024-09-28) +=================== + +Bug fixes +--------- + +- Fixed cancellation leaking upwards on timeout -- by :user:`bdraco`. 
+ + + *Related issues and pull requests on GitHub:* + :issue:`9326`. + + + + +---- + + 3.10.7 (2024-09-27) =================== diff --git a/CHANGES/9326.bugfix.rst b/CHANGES/9326.bugfix.rst deleted file mode 100644 index 4689941708f..00000000000 --- a/CHANGES/9326.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed cancellation leaking upwards on timeout -- by :user:`bdraco`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 6a1382e5671..dfa44f798cc 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.10.8.dev0" +__version__ = "3.10.8" from typing import TYPE_CHECKING, Tuple From b959358119e7bcb899266a7cd961ab9ed111a745 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sat, 28 Sep 2024 16:45:57 -0500 Subject: [PATCH 0667/1511] Increment version to 3.10.9.dev0 --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index dfa44f798cc..491cfded254 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.10.8" +__version__ = "3.10.9.dev0" from typing import TYPE_CHECKING, Tuple From a62f6a38f28c157414f6173751176950fb20708e Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 29 Sep 2024 13:51:28 +0000 Subject: [PATCH 0668/1511] [PR #9333/2ac962cc backport][3.11] Small cleanups to connector ConnectionKey usage (#9339) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/connector.py | 46 ++++++++++++++++++++++---------------------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/aiohttp/connector.py b/aiohttp/connector.py index bbdb140187d..fe85b9e6a36 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -260,7 +260,9 @@ def __init__( self._force_close = force_close # {host_key: FIFO list of waiters} - self._waiters = defaultdict(deque) # type: ignore[var-annotated] + self._waiters: DefaultDict[ConnectionKey, deque[asyncio.Future[None]]] = ( + defaultdict(deque) + ) self._loop = loop self._factory = functools.partial(ResponseHandler, loop=loop) @@ -390,13 +392,10 @@ def _cleanup(self) -> None: def _drop_acquired_per_host( self, key: "ConnectionKey", val: ResponseHandler ) -> None: - acquired_per_host = self._acquired_per_host - if key not in acquired_per_host: - return - conns = acquired_per_host[key] - conns.remove(val) - if not conns: - del self._acquired_per_host[key] + if conns := self._acquired_per_host.get(key): + conns.remove(val) + if not conns: + del self._acquired_per_host[key] def _cleanup_closed(self) -> None: """Double confirmation for transport close. @@ -514,7 +513,7 @@ async def connect( # Wait if there are no available connections or if there are/were # waiters (i.e. don't steal connection from a waiter about to wake up) if available <= 0 or key in self._waiters: - fut = self._loop.create_future() + fut: asyncio.Future[None] = self._loop.create_future() # This connection will now count towards the limit. 
self._waiters[key].append(fut) @@ -681,20 +680,21 @@ def _release( if key.is_ssl and not self._cleanup_closed_disabled: self._cleanup_closed_transports.append(transport) - else: - conns = self._conns.get(key) - if conns is None: - conns = self._conns[key] = [] - conns.append((protocol, self._loop.time())) - - if self._cleanup_handle is None: - self._cleanup_handle = helpers.weakref_handle( - self, - "_cleanup", - self._keepalive_timeout, - self._loop, - timeout_ceil_threshold=self._timeout_ceil_threshold, - ) + return + + conns = self._conns.get(key) + if conns is None: + conns = self._conns[key] = [] + conns.append((protocol, self._loop.time())) + + if self._cleanup_handle is None: + self._cleanup_handle = helpers.weakref_handle( + self, + "_cleanup", + self._keepalive_timeout, + self._loop, + timeout_ceil_threshold=self._timeout_ceil_threshold, + ) async def _create_connection( self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" From e1890ae56c8a878ac9603bd4a007de9106bab4ce Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Sun, 29 Sep 2024 14:53:18 +0100 Subject: [PATCH 0669/1511] Add default_to_mulitpart parameter (#9335) (#9337) Co-authored-by: Yevhenii Hyzyla <hyzyla@gmail.com> (cherry picked from commit d9a9368f0923e1447231880075271e7fe38683d8) --- CHANGES/9335.feature.rst | 1 + aiohttp/formdata.py | 4 +++- tests/test_formdata.py | 12 ++++++++++-- 3 files changed, 14 insertions(+), 3 deletions(-) create mode 100644 CHANGES/9335.feature.rst diff --git a/CHANGES/9335.feature.rst b/CHANGES/9335.feature.rst new file mode 100644 index 00000000000..9a2e0684b44 --- /dev/null +++ b/CHANGES/9335.feature.rst @@ -0,0 +1 @@ +Added ``default_to_multipart`` parameter to ``FormData``. 
diff --git a/aiohttp/formdata.py b/aiohttp/formdata.py index 39ca8539acc..73056f4bc45 100644 --- a/aiohttp/formdata.py +++ b/aiohttp/formdata.py @@ -23,10 +23,12 @@ def __init__( fields: Iterable[Any] = (), quote_fields: bool = True, charset: Optional[str] = None, + *, + default_to_multipart: bool = False, ) -> None: self._writer = multipart.MultipartWriter("form-data") self._fields: List[Any] = [] - self._is_multipart = False + self._is_multipart = default_to_multipart self._is_processed = False self._quote_fields = quote_fields self._charset = charset diff --git a/tests/test_formdata.py b/tests/test_formdata.py index db1a3861c56..4b7c94ac4cd 100644 --- a/tests/test_formdata.py +++ b/tests/test_formdata.py @@ -22,14 +22,22 @@ async def write(chunk): return writer -def test_formdata_multipart(buf, writer) -> None: - form = FormData() +def test_formdata_multipart(buf: bytearray) -> None: + form = FormData(default_to_multipart=False) assert not form.is_multipart form.add_field("test", b"test", filename="test.txt") assert form.is_multipart +def test_form_data_is_multipart_param(buf: bytearray) -> None: + form = FormData(default_to_multipart=True) + assert form.is_multipart + + form.add_field("test", "test") + assert form.is_multipart + + def test_invalid_formdata_payload() -> None: form = FormData() form.add_field("test", object(), filename="test.txt") From 873fad9c8972de06ea32f4646afa49b9e55b3570 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 29 Sep 2024 14:30:14 +0000 Subject: [PATCH 0670/1511] [PR #9333/2ac962cc backport][3.10] Small cleanups to connector ConnectionKey usage (#9338) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/connector.py | 46 ++++++++++++++++++++++---------------------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/aiohttp/connector.py b/aiohttp/connector.py index c25f184bbbe..8123483f683 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -260,7 +260,9 @@ def __init__( self._force_close = force_close # {host_key: FIFO list of waiters} - self._waiters = defaultdict(deque) # type: ignore[var-annotated] + self._waiters: DefaultDict[ConnectionKey, deque[asyncio.Future[None]]] = ( + defaultdict(deque) + ) self._loop = loop self._factory = functools.partial(ResponseHandler, loop=loop) @@ -390,13 +392,10 @@ def _cleanup(self) -> None: def _drop_acquired_per_host( self, key: "ConnectionKey", val: ResponseHandler ) -> None: - acquired_per_host = self._acquired_per_host - if key not in acquired_per_host: - return - conns = acquired_per_host[key] - conns.remove(val) - if not conns: - del self._acquired_per_host[key] + if conns := self._acquired_per_host.get(key): + conns.remove(val) + if not conns: + del self._acquired_per_host[key] def _cleanup_closed(self) -> None: """Double confirmation for transport close. @@ -514,7 +513,7 @@ async def connect( # Wait if there are no available connections or if there are/were # waiters (i.e. don't steal connection from a waiter about to wake up) if available <= 0 or key in self._waiters: - fut = self._loop.create_future() + fut: asyncio.Future[None] = self._loop.create_future() # This connection will now count towards the limit. 
self._waiters[key].append(fut) @@ -681,20 +680,21 @@ def _release( if key.is_ssl and not self._cleanup_closed_disabled: self._cleanup_closed_transports.append(transport) - else: - conns = self._conns.get(key) - if conns is None: - conns = self._conns[key] = [] - conns.append((protocol, self._loop.time())) - - if self._cleanup_handle is None: - self._cleanup_handle = helpers.weakref_handle( - self, - "_cleanup", - self._keepalive_timeout, - self._loop, - timeout_ceil_threshold=self._timeout_ceil_threshold, - ) + return + + conns = self._conns.get(key) + if conns is None: + conns = self._conns[key] = [] + conns.append((protocol, self._loop.time())) + + if self._cleanup_handle is None: + self._cleanup_handle = helpers.weakref_handle( + self, + "_cleanup", + self._keepalive_timeout, + self._loop, + timeout_ceil_threshold=self._timeout_ceil_threshold, + ) async def _create_connection( self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" From 523c4ea57ff125e7c32f313262422726b564280c Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sun, 29 Sep 2024 10:17:19 -0500 Subject: [PATCH 0671/1511] [PR #9340/8a97e03 backport][3.10] Use dunder writer internally in ClientResponse (#9341) --- aiohttp/client_reqrep.py | 46 ++++++++++++++++++++++------------------ 1 file changed, 25 insertions(+), 21 deletions(-) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 10682f57885..f732b70c20a 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -762,7 +762,7 @@ async def send(self, conn: "Connection") -> "ClientResponse": self.response = response_class( self.method, self.original_url, - writer=self._writer, + writer=task, continue100=self._continue, timer=self._timer, request_info=self.request_info, @@ -773,9 +773,9 @@ async def send(self, conn: "Connection") -> "ClientResponse": return self.response async def close(self) -> None: - if self._writer is not None: + if self.__writer is not None: try: - await self._writer + await self.__writer except asyncio.CancelledError: if ( sys.version_info >= (3, 11) @@ -785,11 +785,11 @@ async def close(self) -> None: raise def terminate(self) -> None: - if self._writer is not None: + if self.__writer is not None: if not self.loop.is_closed(): - self._writer.cancel() - self._writer.remove_done_callback(self.__reset_writer) - self._writer = None + self.__writer.cancel() + self.__writer.remove_done_callback(self.__reset_writer) + self.__writer = None async def _on_chunk_request_sent(self, method: str, url: URL, chunk: bytes) -> None: for trace in self._traces: @@ -845,8 +845,8 @@ def __init__( self._real_url = url self._url = url.with_fragment(None) - self._body: Any = None - self._writer: Optional[asyncio.Task[None]] = writer + self._body: Optional[bytes] = None + self._writer = writer self._continue = continue100 # None by default self._closed = True self._history: Tuple[ClientResponse, ...] 
= () @@ -874,10 +874,16 @@ def __reset_writer(self, _: object = None) -> None: @property def _writer(self) -> Optional["asyncio.Task[None]"]: + """The writer task for streaming data. + + _writer is only provided for backwards compatibility + for subclasses that may need to access it. + """ return self.__writer @_writer.setter def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None: + """Set the writer task for streaming data.""" if self.__writer is not None: self.__writer.remove_done_callback(self.__reset_writer) self.__writer = writer @@ -1128,16 +1134,16 @@ def raise_for_status(self) -> None: def _release_connection(self) -> None: if self._connection is not None: - if self._writer is None: + if self.__writer is None: self._connection.release() self._connection = None else: - self._writer.add_done_callback(lambda f: self._release_connection()) + self.__writer.add_done_callback(lambda f: self._release_connection()) async def _wait_released(self) -> None: - if self._writer is not None: + if self.__writer is not None: try: - await self._writer + await self.__writer except asyncio.CancelledError: if ( sys.version_info >= (3, 11) @@ -1148,8 +1154,8 @@ async def _wait_released(self) -> None: self._release_connection() def _cleanup_writer(self) -> None: - if self._writer is not None: - self._writer.cancel() + if self.__writer is not None: + self.__writer.cancel() self._session = None def _notify_content(self) -> None: @@ -1159,9 +1165,9 @@ def _notify_content(self) -> None: self._released = True async def wait_for_close(self) -> None: - if self._writer is not None: + if self.__writer is not None: try: - await self._writer + await self.__writer except asyncio.CancelledError: if ( sys.version_info >= (3, 11) @@ -1189,7 +1195,7 @@ async def read(self) -> bytes: protocol = self._connection and self._connection.protocol if protocol is None or not protocol.upgraded: await self._wait_released() # Underlying connection released - return self._body # type: 
ignore[no-any-return] + return self._body def get_encoding(self) -> str: ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower() @@ -1222,9 +1228,7 @@ async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> if encoding is None: encoding = self.get_encoding() - return self._body.decode( # type: ignore[no-any-return,union-attr] - encoding, errors=errors - ) + return self._body.decode(encoding, errors=errors) # type: ignore[union-attr] async def json( self, From ebee448c6174fa54c6aeaf5842be5195b1fd6e3c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 29 Sep 2024 10:28:54 -0500 Subject: [PATCH 0672/1511] [PR #9340/8a97e03 backport][3.11] Use dunder writer internally in ClientResponse (#9342) --- aiohttp/client_reqrep.py | 46 ++++++++++++++++++++++------------------ 1 file changed, 25 insertions(+), 21 deletions(-) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 88b87c5da56..7de45b67007 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -745,7 +745,7 @@ async def send(self, conn: "Connection") -> "ClientResponse": self.response = response_class( self.method, self.original_url, - writer=self._writer, + writer=task, continue100=self._continue, timer=self._timer, request_info=self.request_info, @@ -756,9 +756,9 @@ async def send(self, conn: "Connection") -> "ClientResponse": return self.response async def close(self) -> None: - if self._writer is not None: + if self.__writer is not None: try: - await self._writer + await self.__writer except asyncio.CancelledError: if ( sys.version_info >= (3, 11) @@ -768,11 +768,11 @@ async def close(self) -> None: raise def terminate(self) -> None: - if self._writer is not None: + if self.__writer is not None: if not self.loop.is_closed(): - self._writer.cancel() - self._writer.remove_done_callback(self.__reset_writer) - self._writer = None + self.__writer.cancel() + self.__writer.remove_done_callback(self.__reset_writer) + self.__writer = 
None async def _on_chunk_request_sent(self, method: str, url: URL, chunk: bytes) -> None: for trace in self._traces: @@ -828,8 +828,8 @@ def __init__( self._real_url = url self._url = url.with_fragment(None) - self._body: Any = None - self._writer: Optional[asyncio.Task[None]] = writer + self._body: Optional[bytes] = None + self._writer = writer self._continue = continue100 # None by default self._closed = True self._history: Tuple[ClientResponse, ...] = () @@ -857,10 +857,16 @@ def __reset_writer(self, _: object = None) -> None: @property def _writer(self) -> Optional["asyncio.Task[None]"]: + """The writer task for streaming data. + + _writer is only provided for backwards compatibility + for subclasses that may need to access it. + """ return self.__writer @_writer.setter def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None: + """Set the writer task for streaming data.""" if self.__writer is not None: self.__writer.remove_done_callback(self.__reset_writer) self.__writer = writer @@ -1111,16 +1117,16 @@ def raise_for_status(self) -> None: def _release_connection(self) -> None: if self._connection is not None: - if self._writer is None: + if self.__writer is None: self._connection.release() self._connection = None else: - self._writer.add_done_callback(lambda f: self._release_connection()) + self.__writer.add_done_callback(lambda f: self._release_connection()) async def _wait_released(self) -> None: - if self._writer is not None: + if self.__writer is not None: try: - await self._writer + await self.__writer except asyncio.CancelledError: if ( sys.version_info >= (3, 11) @@ -1131,8 +1137,8 @@ async def _wait_released(self) -> None: self._release_connection() def _cleanup_writer(self) -> None: - if self._writer is not None: - self._writer.cancel() + if self.__writer is not None: + self.__writer.cancel() self._session = None def _notify_content(self) -> None: @@ -1142,9 +1148,9 @@ def _notify_content(self) -> None: self._released = True async def 
wait_for_close(self) -> None: - if self._writer is not None: + if self.__writer is not None: try: - await self._writer + await self.__writer except asyncio.CancelledError: if ( sys.version_info >= (3, 11) @@ -1172,7 +1178,7 @@ async def read(self) -> bytes: protocol = self._connection and self._connection.protocol if protocol is None or not protocol.upgraded: await self._wait_released() # Underlying connection released - return self._body # type: ignore[no-any-return] + return self._body def get_encoding(self) -> str: ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower() @@ -1205,9 +1211,7 @@ async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> if encoding is None: encoding = self.get_encoding() - return self._body.decode( # type: ignore[no-any-return,union-attr] - encoding, errors=errors - ) + return self._body.decode(encoding, errors=errors) # type: ignore[union-attr] async def json( self, From 23ab28e0c312e5bf01272df374bbeb3a6ff9d828 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 29 Sep 2024 16:09:56 +0000 Subject: [PATCH 0673/1511] [PR #9343/5762ed61 backport][3.10] Create dns resolution task eagerly on python 3.12+ (#9345) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/9342.misc.rst | 1 + aiohttp/connector.py | 17 ++++++++++++----- 2 files changed, 13 insertions(+), 5 deletions(-) create mode 100644 CHANGES/9342.misc.rst diff --git a/CHANGES/9342.misc.rst b/CHANGES/9342.misc.rst new file mode 100644 index 00000000000..379e52dfc90 --- /dev/null +++ b/CHANGES/9342.misc.rst @@ -0,0 +1 @@ +Improved performance of resolving hosts with Python 3.12+ -- by :user:`bdraco`. 
diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 8123483f683..81d49083837 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -929,11 +929,18 @@ async def _resolve_host( # the underlying lookup or else the cancel event will get broadcast to # all the waiters across all connections. # - resolved_host_task = asyncio.create_task( - self._resolve_host_with_throttle(key, host, port, traces) - ) - self._resolve_host_tasks.add(resolved_host_task) - resolved_host_task.add_done_callback(self._resolve_host_tasks.discard) + coro = self._resolve_host_with_throttle(key, host, port, traces) + loop = asyncio.get_running_loop() + if sys.version_info >= (3, 12): + # Optimization for Python 3.12, try to send immediately + resolved_host_task = asyncio.Task(coro, loop=loop, eager_start=True) + else: + resolved_host_task = loop.create_task(coro) + + if not resolved_host_task.done(): + self._resolve_host_tasks.add(resolved_host_task) + resolved_host_task.add_done_callback(self._resolve_host_tasks.discard) + try: return await asyncio.shield(resolved_host_task) except asyncio.CancelledError: From 35b4c9062950f1d06da4789c5798d30ab9fdc70b Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 29 Sep 2024 16:10:50 +0000 Subject: [PATCH 0674/1511] [PR #9343/5762ed61 backport][3.11] Create dns resolution task eagerly on python 3.12+ (#9346) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/9342.misc.rst | 1 + aiohttp/connector.py | 17 ++++++++++++----- 2 files changed, 13 insertions(+), 5 deletions(-) create mode 100644 CHANGES/9342.misc.rst diff --git a/CHANGES/9342.misc.rst b/CHANGES/9342.misc.rst new file mode 100644 index 00000000000..379e52dfc90 --- /dev/null +++ b/CHANGES/9342.misc.rst @@ -0,0 +1 @@ +Improved performance of resolving hosts with Python 3.12+ -- by :user:`bdraco`. 
Removed the ``is_ipv6_address`` and ``is_ipv4_address`` helpers as they are no longer used -- by :user:`bdraco`.
diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index aad38fda3df..097f711a1da 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -469,11 +469,8 @@ def __set__(self, inst: _TSelf[_T], value: _T) -> None: pass -def is_ipv4_address(host: Optional[Union[str, bytes]]) -> bool: - """Check if host looks like an IPv4 address. - - This function does not validate that the format is correct, only that - the host is a str or bytes, and its all numeric. +def is_ip_address(host: Optional[str]) -> bool: + """Check if host looks like an IP Address. This check is only meant as a heuristic to ensure that a host is not a domain name. @@ -481,39 +478,8 @@ def is_ipv4_address(host: Optional[Union[str, bytes]]) -> bool: if not host: return False # For a host to be an ipv4 address, it must be all numeric. - if isinstance(host, str): - return host.replace(".", "").isdigit() - if isinstance(host, (bytes, bytearray, memoryview)): - return host.decode("ascii").replace(".", "").isdigit() - raise TypeError(f"{host} [{type(host)}] is not a str or bytes") - - -def is_ipv6_address(host: Optional[Union[str, bytes]]) -> bool: - """Check if host looks like an IPv6 address. - - This function does not validate that the format is correct, only that - the host contains a colon and that it is a str or bytes. - - This check is only meant as a heuristic to ensure that - a host is not a domain name. - """ - if not host: - return False # The host must contain a colon to be an IPv6 address. - if isinstance(host, str): - return ":" in host - if isinstance(host, (bytes, bytearray, memoryview)): - return b":" in host - raise TypeError(f"{host} [{type(host)}] is not a str or bytes") - - -def is_ip_address(host: Optional[Union[str, bytes, bytearray, memoryview]]) -> bool: - """Check if host looks like an IP Address. - - This check is only meant as a heuristic to ensure that - a host is not a domain name. 
- """ - return is_ipv4_address(host) or is_ipv6_address(host) + return ":" in host or host.replace(".", "").isdigit() _cached_current_datetime: Optional[int] = None diff --git a/tests/test_helpers.py b/tests/test_helpers.py index 3ccca3a7e15..a9e31d13249 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -268,16 +268,6 @@ def test_is_ip_address() -> None: assert not helpers.is_ip_address("www.example.com") -def test_is_ip_address_bytes() -> None: - assert helpers.is_ip_address(b"127.0.0.1") - assert helpers.is_ip_address(b"::1") - assert helpers.is_ip_address(b"FE80:0000:0000:0000:0202:B3FF:FE1E:8329") - - # Hostnames - assert not helpers.is_ip_address(b"localhost") - assert not helpers.is_ip_address(b"www.example.com") - - def test_ipv4_addresses() -> None: ip_addresses = [ "0.0.0.0", @@ -285,8 +275,6 @@ def test_ipv4_addresses() -> None: "255.255.255.255", ] for address in ip_addresses: - assert helpers.is_ipv4_address(address) - assert not helpers.is_ipv6_address(address) assert helpers.is_ip_address(address) @@ -302,8 +290,6 @@ def test_ipv6_addresses() -> None: "1::1", ] for address in ip_addresses: - assert not helpers.is_ipv4_address(address) - assert helpers.is_ipv6_address(address) assert helpers.is_ip_address(address) @@ -325,18 +311,6 @@ def test_is_ip_address_invalid_type() -> None: with pytest.raises(TypeError): helpers.is_ip_address(object()) - with pytest.raises(TypeError): - helpers.is_ipv4_address(123) # type: ignore[arg-type] - - with pytest.raises(TypeError): - helpers.is_ipv4_address(object()) # type: ignore[arg-type] - - with pytest.raises(TypeError): - helpers.is_ipv6_address(123) # type: ignore[arg-type] - - with pytest.raises(TypeError): - helpers.is_ipv6_address(object()) # type: ignore[arg-type] - # ----------------------------------- TimeoutHandle ------------------- From 5c5436a1092f3a55df74508eec7aea04c338a28a Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sun, 29 Sep 2024 14:46:02 -0500 Subject: [PATCH 0676/1511] [PR #9348/2628256 backport][3.11] Add new send_frame method to WebSockets (#9350) --- CHANGES/9348.feature.rst | 1 + aiohttp/client_ws.py | 12 +++++- aiohttp/http_websocket.py | 22 ++-------- aiohttp/web_ws.py | 14 ++++++- docs/client_reference.rst | 26 ++++++++++++ docs/web_reference.rst | 30 ++++++++++++- tests/test_client_ws.py | 18 ++++++-- tests/test_client_ws_functional.py | 24 ++++++++++- tests/test_web_websocket.py | 34 +++++++++++---- tests/test_web_websocket_functional.py | 2 +- tests/test_websocket_writer.py | 58 +++++++++++++------------- 11 files changed, 175 insertions(+), 66 deletions(-) create mode 100644 CHANGES/9348.feature.rst diff --git a/CHANGES/9348.feature.rst b/CHANGES/9348.feature.rst new file mode 100644 index 00000000000..66fa5c1a06e --- /dev/null +++ b/CHANGES/9348.feature.rst @@ -0,0 +1 @@ +Added :py:meth:`~aiohttp.ClientWebSocketResponse.send_frame` and :py:meth:`~aiohttp.web.WebSocketResponse.send_frame` for WebSockets -- by :user:`bdraco`. 
diff --git a/aiohttp/client_ws.py b/aiohttp/client_ws.py index 58409ed71e5..fb9fbee4a26 100644 --- a/aiohttp/client_ws.py +++ b/aiohttp/client_ws.py @@ -229,15 +229,23 @@ async def ping(self, message: bytes = b"") -> None: async def pong(self, message: bytes = b"") -> None: await self._writer.pong(message) + async def send_frame( + self, message: bytes, opcode: WSMsgType, compress: Optional[int] = None + ) -> None: + """Send a frame over the websocket.""" + await self._writer.send_frame(message, opcode, compress) + async def send_str(self, data: str, compress: Optional[int] = None) -> None: if not isinstance(data, str): raise TypeError("data argument must be str (%r)" % type(data)) - await self._writer.send(data, binary=False, compress=compress) + await self._writer.send_frame( + data.encode("utf-8"), WSMsgType.TEXT, compress=compress + ) async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None: if not isinstance(data, (bytes, bytearray, memoryview)): raise TypeError("data argument must be byte-ish (%r)" % type(data)) - await self._writer.send(data, binary=True, compress=compress) + await self._writer.send_frame(data, WSMsgType.BINARY, compress=compress) async def send_json( self, diff --git a/aiohttp/http_websocket.py b/aiohttp/http_websocket.py index c6521695d94..62628e66d78 100644 --- a/aiohttp/http_websocket.py +++ b/aiohttp/http_websocket.py @@ -620,7 +620,7 @@ def __init__( self._output_size = 0 self._compressobj: Any = None # actually compressobj - async def _send_frame( + async def send_frame( self, message: bytes, opcode: int, compress: Optional[int] = None ) -> None: """Send a frame over the websocket with message as its payload.""" @@ -727,34 +727,20 @@ async def pong(self, message: Union[bytes, str] = b"") -> None: """Send pong message.""" if isinstance(message, str): message = message.encode("utf-8") - await self._send_frame(message, WSMsgType.PONG) + await self.send_frame(message, WSMsgType.PONG) async def ping(self, message: 
Union[bytes, str] = b"") -> None: """Send ping message.""" if isinstance(message, str): message = message.encode("utf-8") - await self._send_frame(message, WSMsgType.PING) - - async def send( - self, - message: Union[str, bytes], - binary: bool = False, - compress: Optional[int] = None, - ) -> None: - """Send a frame over the websocket with message as its payload.""" - if isinstance(message, str): - message = message.encode("utf-8") - if binary: - await self._send_frame(message, WSMsgType.BINARY, compress) - else: - await self._send_frame(message, WSMsgType.TEXT, compress) + await self.send_frame(message, WSMsgType.PING) async def close(self, code: int = 1000, message: Union[bytes, str] = b"") -> None: """Close the websocket, sending the specified code and message.""" if isinstance(message, str): message = message.encode("utf-8") try: - await self._send_frame( + await self.send_frame( PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE ) finally: diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index bf35f3bb1f6..787c5cb1d39 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -379,19 +379,29 @@ async def pong(self, message: bytes = b"") -> None: raise RuntimeError("Call .prepare() first") await self._writer.pong(message) + async def send_frame( + self, message: bytes, opcode: WSMsgType, compress: Optional[int] = None + ) -> None: + """Send a frame over the websocket.""" + if self._writer is None: + raise RuntimeError("Call .prepare() first") + await self._writer.send_frame(message, opcode, compress) + async def send_str(self, data: str, compress: Optional[int] = None) -> None: if self._writer is None: raise RuntimeError("Call .prepare() first") if not isinstance(data, str): raise TypeError("data argument must be str (%r)" % type(data)) - await self._writer.send(data, binary=False, compress=compress) + await self._writer.send_frame( + data.encode("utf-8"), WSMsgType.TEXT, compress=compress + ) async def send_bytes(self, data: bytes, compress: 
Optional[int] = None) -> None: if self._writer is None: raise RuntimeError("Call .prepare() first") if not isinstance(data, (bytes, bytearray, memoryview)): raise TypeError("data argument must be byte-ish (%r)" % type(data)) - await self._writer.send(data, binary=True, compress=compress) + await self._writer.send_frame(data, WSMsgType.BINARY, compress=compress) async def send_json( self, diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 7f88fda14c9..05325045ee2 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -1610,6 +1610,32 @@ manually. The method is converted into :term:`coroutine`, *compress* parameter added. + .. method:: send_frame(message, opcode, compress=None) + :async: + + Send a :const:`~aiohttp.WSMsgType` message *message* to peer. + + This method is low-level and should be used with caution as it + only accepts bytes which must conform to the correct message type + for *message*. + + It is recommended to use the :meth:`send_str`, :meth:`send_bytes` + or :meth:`send_json` methods instead of this method. + + The primary use case for this method is to send bytes that are + have already been encoded without having to decode and + re-encode them. + + :param bytes message: message to send. + + :param ~aiohttp.WSMsgType opcode: opcode of the message. + + :param int compress: sets specific level of compression for + single message, + ``None`` for not overriding per-socket setting. + + .. versionadded:: 3.11 + .. method:: close(*, code=WSCloseCode.OK, message=b'') :async: diff --git a/docs/web_reference.rst b/docs/web_reference.rst index faf68cb9c43..784cfa6e717 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -966,8 +966,8 @@ and :ref:`aiohttp-web-signals` handlers:: To enable back-pressure from slow websocket clients treat methods :meth:`ping`, :meth:`pong`, :meth:`send_str`, - :meth:`send_bytes`, :meth:`send_json` as coroutines. By - default write buffer size is set to 64k. 
+ :meth:`send_bytes`, :meth:`send_json`, :meth:`send_frame` as coroutines. + By default write buffer size is set to 64k. :param bool autoping: Automatically send :const:`~aiohttp.WSMsgType.PONG` on @@ -1181,6 +1181,32 @@ and :ref:`aiohttp-web-signals` handlers:: The method is converted into :term:`coroutine`, *compress* parameter added. + .. method:: send_frame(message, opcode, compress=None) + :async: + + Send a :const:`~aiohttp.WSMsgType` message *message* to peer. + + This method is low-level and should be used with caution as it + only accepts bytes which must conform to the correct message type + for *message*. + + It is recommended to use the :meth:`send_str`, :meth:`send_bytes` + or :meth:`send_json` methods instead of this method. + + The primary use case for this method is to send bytes that are + have already been encoded without having to decode and + re-encode them. + + :param bytes message: message to send. + + :param ~aiohttp.WSMsgType opcode: opcode of the message. + + :param int compress: sets specific level of compression for + single message, + ``None`` for not overriding per-socket setting. + + .. versionadded:: 3.11 + .. 
method:: close(*, code=WSCloseCode.OK, message=b'', drain=True) :async: diff --git a/tests/test_client_ws.py b/tests/test_client_ws.py index afe7983648f..eedf65f86a1 100644 --- a/tests/test_client_ws.py +++ b/tests/test_client_ws.py @@ -537,6 +537,7 @@ async def test_send_data_after_close( (resp.send_str, ("s",)), (resp.send_bytes, (b"b",)), (resp.send_json, ({},)), + (resp.send_frame, (b"", aiohttp.WSMsgType.BINARY)), ): with pytest.raises(exc): # Verify exc can be caught with both classes await meth(*args) @@ -725,19 +726,28 @@ async def test_ws_connect_deflate_per_message(loop, ws_key, key_data) -> None: m_req.return_value = loop.create_future() m_req.return_value.set_result(resp) writer = WebSocketWriter.return_value = mock.Mock() - send = writer.send = make_mocked_coro() + send_frame = writer.send_frame = make_mocked_coro() session = aiohttp.ClientSession(loop=loop) resp = await session.ws_connect("http://test.org") await resp.send_str("string", compress=-1) - send.assert_called_with("string", binary=False, compress=-1) + send_frame.assert_called_with( + b"string", aiohttp.WSMsgType.TEXT, compress=-1 + ) await resp.send_bytes(b"bytes", compress=15) - send.assert_called_with(b"bytes", binary=True, compress=15) + send_frame.assert_called_with( + b"bytes", aiohttp.WSMsgType.BINARY, compress=15 + ) await resp.send_json([{}], compress=-9) - send.assert_called_with("[{}]", binary=False, compress=-9) + send_frame.assert_called_with( + b"[{}]", aiohttp.WSMsgType.TEXT, compress=-9 + ) + + await resp.send_frame(b"[{}]", aiohttp.WSMsgType.TEXT, compress=-9) + send_frame.assert_called_with(b"[{}]", aiohttp.WSMsgType.TEXT, -9) await session.close() diff --git a/tests/test_client_ws_functional.py b/tests/test_client_ws_functional.py index 0a8008f07ca..0c4a081eafa 100644 --- a/tests/test_client_ws_functional.py +++ b/tests/test_client_ws_functional.py @@ -131,7 +131,29 @@ async def handler(request): await resp.close() -async def test_ping_pong(aiohttp_client) -> None: +async 
def test_send_recv_frame(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.WebSocketResponse: + ws = web.WebSocketResponse() + await ws.prepare(request) + + data = await ws.receive() + await ws.send_frame(data.data, data.type) + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + resp = await client.ws_connect("/") + await resp.send_frame(b"test", WSMsgType.BINARY) + + data = await resp.receive() + assert data.data == b"test" + assert data.type is WSMsgType.BINARY + await resp.close() + + +async def test_ping_pong(aiohttp_client: AiohttpClient) -> None: loop = asyncio.get_event_loop() closed = loop.create_future() diff --git a/tests/test_web_websocket.py b/tests/test_web_websocket.py index d9eeda3d1d2..e45c96bf70f 100644 --- a/tests/test_web_websocket.py +++ b/tests/test_web_websocket.py @@ -67,6 +67,12 @@ async def test_nonstarted_pong() -> None: await ws.pong() +async def test_nonstarted_send_frame() -> None: + ws = WebSocketResponse() + with pytest.raises(RuntimeError): + await ws.send_frame(b"string", WSMsgType.TEXT) + + async def test_nonstarted_send_str() -> None: ws = WebSocketResponse() with pytest.raises(RuntimeError): @@ -277,6 +283,18 @@ async def test_send_json_closed(make_request) -> None: await ws.send_json({"type": "json"}) +async def test_send_frame_closed(make_request) -> None: + req = make_request("GET", "/") + ws = WebSocketResponse() + await ws.prepare(req) + assert ws._reader is not None + ws._reader.feed_data(WS_CLOSED_MESSAGE) + await ws.close() + + with pytest.raises(ConnectionError): + await ws.send_frame(b'{"type": "json"}', WSMsgType.TEXT) + + async def test_ping_closed(make_request) -> None: req = make_request("GET", "/") ws = WebSocketResponse() @@ -536,16 +554,18 @@ async def test_send_with_per_message_deflate(make_request, mocker) -> None: req = make_request("GET", "/") ws = WebSocketResponse() await 
ws.prepare(req) - writer_send = ws._writer.send = make_mocked_coro() + with mock.patch.object(ws._writer, "send_frame", autospec=True, spec_set=True) as m: + await ws.send_str("string", compress=15) + m.assert_called_with(b"string", WSMsgType.TEXT, compress=15) - await ws.send_str("string", compress=15) - writer_send.assert_called_with("string", binary=False, compress=15) + await ws.send_bytes(b"bytes", compress=0) + m.assert_called_with(b"bytes", WSMsgType.BINARY, compress=0) - await ws.send_bytes(b"bytes", compress=0) - writer_send.assert_called_with(b"bytes", binary=True, compress=0) + await ws.send_json("[{}]", compress=9) + m.assert_called_with(b'"[{}]"', WSMsgType.TEXT, compress=9) - await ws.send_json("[{}]", compress=9) - writer_send.assert_called_with('"[{}]"', binary=False, compress=9) + await ws.send_frame(b"[{}]", WSMsgType.TEXT, compress=9) + m.assert_called_with(b"[{}]", WSMsgType.TEXT, compress=9) async def test_no_transfer_encoding_header(make_request, mocker) -> None: diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index 5770eee87dc..607ab6d7de3 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -376,7 +376,7 @@ async def handler(request): ws: web.WebSocketResponse = await client.ws_connect("/", protocols=("eggs", "bar")) - await ws._writer._send_frame(b"", WSMsgType.CLOSE) + await ws._writer.send_frame(b"", WSMsgType.CLOSE) msg = await ws.receive() assert msg.type == WSMsgType.CLOSE diff --git a/tests/test_websocket_writer.py b/tests/test_websocket_writer.py index 8dbbc815fb7..e1766b72b12 100644 --- a/tests/test_websocket_writer.py +++ b/tests/test_websocket_writer.py @@ -5,7 +5,7 @@ import pytest -from aiohttp import DataQueue, WSMessage +from aiohttp import DataQueue, WSMessage, WSMsgType from aiohttp.http import WebSocketReader, WebSocketWriter from aiohttp.test_utils import make_mocked_coro @@ -39,23 +39,23 @@ async def test_ping(writer) -> None: 
writer.transport.write.assert_called_with(b"\x89\x00") -async def test_send_text(writer) -> None: - await writer.send(b"text") - writer.transport.write.assert_called_with(b"\x81\x04text") +async def test_send_text(writer: WebSocketWriter) -> None: + await writer.send_frame(b"text", WSMsgType.TEXT) + writer.transport.write.assert_called_with(b"\x81\x04text") # type: ignore[attr-defined] -async def test_send_binary(writer) -> None: - await writer.send("binary", True) - writer.transport.write.assert_called_with(b"\x82\x06binary") +async def test_send_binary(writer: WebSocketWriter) -> None: + await writer.send_frame(b"binary", WSMsgType.BINARY) + writer.transport.write.assert_called_with(b"\x82\x06binary") # type: ignore[attr-defined] -async def test_send_binary_long(writer) -> None: - await writer.send(b"b" * 127, True) - assert writer.transport.write.call_args[0][0].startswith(b"\x82~\x00\x7fb") +async def test_send_binary_long(writer: WebSocketWriter) -> None: + await writer.send_frame(b"b" * 127, WSMsgType.BINARY) + assert writer.transport.write.call_args[0][0].startswith(b"\x82~\x00\x7fb") # type: ignore[attr-defined] -async def test_send_binary_very_long(writer) -> None: - await writer.send(b"b" * 65537, True) +async def test_send_binary_very_long(writer: WebSocketWriter) -> None: + await writer.send_frame(b"b" * 65537, WSMsgType.BINARY) assert ( writer.transport.write.call_args_list[0][0][0] == b"\x82\x7f\x00\x00\x00\x00\x00\x01\x00\x01" @@ -79,34 +79,34 @@ async def test_send_text_masked(protocol, transport) -> None: writer = WebSocketWriter( protocol, transport, use_mask=True, random=random.Random(123) ) - await writer.send(b"text") - writer.transport.write.assert_called_with(b"\x81\x84\rg\xb3fy\x02\xcb\x12") + await writer.send_frame(b"text", WSMsgType.TEXT) + writer.transport.write.assert_called_with(b"\x81\x84\rg\xb3fy\x02\xcb\x12") # type: ignore[attr-defined] async def test_send_compress_text(protocol, transport) -> None: writer = 
WebSocketWriter(protocol, transport, compress=15) - await writer.send(b"text") - writer.transport.write.assert_called_with(b"\xc1\x06*I\xad(\x01\x00") - await writer.send(b"text") - writer.transport.write.assert_called_with(b"\xc1\x05*\x01b\x00\x00") + await writer.send_frame(b"text", WSMsgType.TEXT) + writer.transport.write.assert_called_with(b"\xc1\x06*I\xad(\x01\x00") # type: ignore[attr-defined] + await writer.send_frame(b"text", WSMsgType.TEXT) + writer.transport.write.assert_called_with(b"\xc1\x05*\x01b\x00\x00") # type: ignore[attr-defined] async def test_send_compress_text_notakeover(protocol, transport) -> None: writer = WebSocketWriter(protocol, transport, compress=15, notakeover=True) - await writer.send(b"text") - writer.transport.write.assert_called_with(b"\xc1\x06*I\xad(\x01\x00") - await writer.send(b"text") - writer.transport.write.assert_called_with(b"\xc1\x06*I\xad(\x01\x00") + await writer.send_frame(b"text", WSMsgType.TEXT) + writer.transport.write.assert_called_with(b"\xc1\x06*I\xad(\x01\x00") # type: ignore[attr-defined] + await writer.send_frame(b"text", WSMsgType.TEXT) + writer.transport.write.assert_called_with(b"\xc1\x06*I\xad(\x01\x00") # type: ignore[attr-defined] async def test_send_compress_text_per_message(protocol, transport) -> None: writer = WebSocketWriter(protocol, transport) - await writer.send(b"text", compress=15) - writer.transport.write.assert_called_with(b"\xc1\x06*I\xad(\x01\x00") - await writer.send(b"text") - writer.transport.write.assert_called_with(b"\x81\x04text") - await writer.send(b"text", compress=15) - writer.transport.write.assert_called_with(b"\xc1\x06*I\xad(\x01\x00") + await writer.send_frame(b"text", WSMsgType.TEXT, compress=15) + writer.transport.write.assert_called_with(b"\xc1\x06*I\xad(\x01\x00") # type: ignore[attr-defined] + await writer.send_frame(b"text", WSMsgType.TEXT) + writer.transport.write.assert_called_with(b"\x81\x04text") # type: ignore[attr-defined] + await writer.send_frame(b"text", 
WSMsgType.TEXT, compress=15) + writer.transport.write.assert_called_with(b"\xc1\x06*I\xad(\x01\x00") # type: ignore[attr-defined] @pytest.mark.parametrize( @@ -152,7 +152,7 @@ async def test_concurrent_messages( point = payload_point_generator(count) payload = bytes((point,)) * point payloads.append(payload) - writers.append(writer.send(payload, binary=True)) + writers.append(writer.send_frame(payload, WSMsgType.BINARY)) await asyncio.gather(*writers) for call in writer.transport.write.call_args_list: From 69461fe5e64407f13dbce0d7c04895e9c5264e94 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 30 Sep 2024 11:00:12 +0000 Subject: [PATCH 0677/1511] Bump pyproject-hooks from 1.1.0 to 1.2.0 (#9352) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pyproject-hooks](https://github.com/pypa/pyproject-hooks) from 1.1.0 to 1.2.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/pyproject-hooks/blob/main/docs/changelog.rst">pyproject-hooks's changelog</a>.</em></p> <blockquote> <h1>Changelog</h1> <h2>v1.2</h2> <ul> <li>Improve interoperability with <code>importlib.metadata</code>, fixing a regression in setuptools compatibility in 1.1 (<a href="https://redirect.github.com/pypa/pyproject-hooks/issues/199">#199</a>).</li> <li>Clean up the <code>_in_process</code> directory inside the package from <code>sys.path</code> before imporing the backend (<a href="https://redirect.github.com/pypa/pyproject-hooks/issues/193">#193</a>).</li> </ul> <h2>v1.1</h2> <ul> <li>Add type annotations to the public API.</li> <li>More careful handling of the <code>backend-path</code> key from <code>pyproject.toml</code>. Previous versions would load the backend and then check that it was loaded from the specified path; the new version only loads it from the specified path. 
The <code>BackendInvalid</code> exception is now a synonym for :exc:<code>BackendUnavailable</code>, and code should move to using the latter name.</li> </ul> <h2>v1.0</h2> <ul> <li>Rename package to <code>pyproject_hooks</code> (from <code>pep517</code>).</li> <li>Remove deprecated modules (<code>.build</code>, <code>.check</code> and <code>.envbuild</code>). Use the <code>build <https://pypa-build.readthedocs.io/en/stable/></code>_ project instead for this higher-level functionality of setting up a temporary build environment.</li> <li>Require Python 3.7 or above.</li> <li>Use <code>tomllib</code> from the standard library on Python 3.11. <code>pyproject_hooks</code> now has no external dependencies when installed in Python 3.11.</li> <li>Avoid chaining exceptions when using the fallback implementation for :meth:<code>.prepare_metadata_for_build_wheel</code>.</li> <li>Fix propagating error message for :exc:<code>.BackendInvalid</code> errors.</li> </ul> <h2>v0.13</h2> <ul> <li>Remove support for end-of-life Pythons. Now requires Python3.6+.</li> <li>Remove support for <code>toml</code> package. Now requires <code>tomli</code>.</li> <li>Rely on preferred "files" API on Python 3.9 and later (<a href="https://redirect.github.com/pypa/pyproject-hooks/issues/140">#140</a>).</li> </ul> <h2>v0.12</h2> <ul> <li>Add method for pip to check if build_editable hook is supported. This is a private API for now.</li> </ul> <p>v0.11.1</p> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/pyproject-hooks/commit/4b7c6d113fb89b755d762a88712c8a6873cddd47"><code>4b7c6d1</code></a> Merge pull request <a href="https://redirect.github.com/pypa/pyproject-hooks/issues/205">#205</a> from pypa/prepare-1.2</li> <li><a href="https://github.com/pypa/pyproject-hooks/commit/102747081953a43bcbb9304e6319de3bf2166bfc"><code>1027470</code></a> Bump version: 1.1.0 → 1.2.0</li> <li><a href="https://github.com/pypa/pyproject-hooks/commit/dabe52eac18ef677af262657c97a7689b20fa909"><code>dabe52e</code></a> Prepare release notes for 1.2</li> <li><a href="https://github.com/pypa/pyproject-hooks/commit/3df91751cb87c01b1fd2584a37ca3c186f126a48"><code>3df9175</code></a> Merge pull request <a href="https://redirect.github.com/pypa/pyproject-hooks/issues/204">#204</a> from pypa/fix/setuptools-71</li> <li><a href="https://github.com/pypa/pyproject-hooks/commit/9efaa97258b63108750539e440789a5cada90fd8"><code>9efaa97</code></a> Relax test for build requirements from setuptools</li> <li><a href="https://github.com/pypa/pyproject-hooks/commit/6b57973c3a8ef919dffecac8d15fd608fc51a271"><code>6b57973</code></a> Merge pull request <a href="https://redirect.github.com/pypa/pyproject-hooks/issues/199">#199</a> from abravalheri/issue-192-take2</li> <li><a href="https://github.com/pypa/pyproject-hooks/commit/9988edd23921cdf74bed88f7ae1397b1c440f385"><code>9988edd</code></a> Merge pull request <a href="https://redirect.github.com/pypa/pyproject-hooks/issues/196">#196</a> from pypa/sdist-includes</li> <li><a href="https://github.com/pypa/pyproject-hooks/commit/dba181736a41503f81ed73434143db7537494c76"><code>dba1817</code></a> Always use importlib.metadata</li> <li><a href="https://github.com/pypa/pyproject-hooks/commit/d68f4d94477f0c20f900b1520ef1a1193e9d4f9d"><code>d68f4d9</code></a> Remove unused dev-dependency</li> <li><a 
href="https://github.com/pypa/pyproject-hooks/commit/455b77f9a89f3bfded5925ccc15e17d20c9b99cd"><code>455b77f</code></a> Account for importlib_metadata in Python<3.7</li> <li>Additional commits viewable in <a href="https://github.com/pypa/pyproject-hooks/compare/v1.1.0...v1.2.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pyproject-hooks&package-manager=pip&previous-version=1.1.0&new-version=1.2.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index b1991f84a6b..13245e16e0b 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -166,7 +166,7 @@ pyjwt==2.9.0 # via # gidgethub # pyjwt -pyproject-hooks==1.1.0 +pyproject-hooks==1.2.0 # via # build # pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index 7ef54a516d8..746b1b498bc 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -161,7 +161,7 @@ pyjwt==2.8.0 # via # gidgethub # pyjwt -pyproject-hooks==1.1.0 +pyproject-hooks==1.2.0 # via # build # pip-tools From bb90ffa051927c3ba59ea730024496161e7ad040 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 30 Sep 2024 11:20:08 +0000 Subject: [PATCH 0678/1511] Bump yarl from 1.13.0 to 1.13.1 (#9355) Bumps [yarl](https://github.com/aio-libs/yarl) from 1.13.0 to 1.13.1. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/yarl/releases">yarl's releases</a>.</em></p> <blockquote> <h2>1.13.1</h2> <h2>Miscellaneous internal changes</h2> <ul> <li> <p>Improved performance of calling :py:meth:<code>~yarl.URL.build</code> with <code>authority</code> -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/yarl/issues/1163">#1163</a>.</p> </li> </ul> <hr /> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/yarl/blob/master/CHANGES.rst">yarl's changelog</a>.</em></p> <blockquote> <h1>1.13.1</h1> <p><em>(2024-09-27)</em></p> <h2>Miscellaneous internal changes</h2> <ul> <li> <p>Improved performance of calling :py:meth:<code>~yarl.URL.build</code> with <code>authority</code> -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1163</code>.</p> </li> </ul> <hr /> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/yarl/commit/4a773911993bacf08ae6aa3137429f0004355773"><code>4a77391</code></a> Release 1.13.1</li> <li><a href="https://github.com/aio-libs/yarl/commit/07b2e84dff919060fe74c154a7ae4206fe01e814"><code>07b2e84</code></a> Improve performance of building URLs with authority (<a href="https://redirect.github.com/aio-libs/yarl/issues/1163">#1163</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/38ef0f95d25c240e17963971a41ed85b0b0c4e31"><code>38ef0f9</code></a> Increment version to 1.13.1.dev0 (<a href="https://redirect.github.com/aio-libs/yarl/issues/1162">#1162</a>)</li> <li>See full diff in <a href="https://github.com/aio-libs/yarl/compare/v1.13.0...v1.13.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=yarl&package-manager=pip&previous-version=1.13.0&new-version=1.13.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 5e33e096224..f25226e637c 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -40,5 +40,5 @@ typing-extensions==4.12.2 # via multidict uvloop==0.21.0b1 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in -yarl==1.13.0 +yarl==1.13.1 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 13245e16e0b..36715f43c8b 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -287,7 +287,7 @@ webcolors==24.8.0 # via blockdiag wheel==0.44.0 # via pip-tools -yarl==1.13.0 +yarl==1.13.1 # via -r requirements/runtime-deps.in zipp==3.20.2 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 746b1b498bc..441b03af509 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -279,7 +279,7 @@ webcolors==24.8.0 # via 
blockdiag wheel==0.44.0 # via pip-tools -yarl==1.13.0 +yarl==1.13.1 # via -r requirements/runtime-deps.in zipp==3.20.2 # via diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 62e5eacb4fc..d28e3292fa1 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -34,5 +34,5 @@ pycparser==2.22 # via cffi typing-extensions==4.12.2 # via multidict -yarl==1.13.0 +yarl==1.13.1 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 11ce369747b..d8df504b9c9 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -137,5 +137,5 @@ uvloop==0.21.0b1 ; platform_system != "Windows" and implementation_name == "cpyt # via -r requirements/base.in wait-for-it==2.2.2 # via -r requirements/test.in -yarl==1.13.0 +yarl==1.13.1 # via -r requirements/runtime-deps.in From f5b2b0783fce72edaafe8fff4ba7a50d59c67fe3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 30 Sep 2024 11:27:15 +0000 Subject: [PATCH 0679/1511] Bump virtualenv from 20.26.5 to 20.26.6 (#9356) Bumps [virtualenv](https://github.com/pypa/virtualenv) from 20.26.5 to 20.26.6. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/virtualenv/blob/main/docs/changelog.rst">virtualenv's changelog</a>.</em></p> <blockquote> <h2>v20.26.6 (2024-09-27)</h2> <p>Bugfixes - 20.26.6</p> <pre><code>- Properly quote string placeholders in activation script templates to mitigate potential command injection - by :user:`y5c4l3`. 
(:issue:`2768`) </code></pre> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/virtualenv/commit/ec04726d065372ffad9920998aef1ce41252a61d"><code>ec04726</code></a> release 20.26.6</li> <li><a href="https://github.com/pypa/virtualenv/commit/86dddeda7c991f8529e1995bbff280fb7b761972"><code>86ddded</code></a> Fix <a href="https://redirect.github.com/pypa/virtualenv/issues/2768">#2768</a>: Quote template strings in activation scripts (<a href="https://redirect.github.com/pypa/virtualenv/issues/2771">#2771</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/6bb3f6226c18d69bb6cfa3475b6d46dd463bb530"><code>6bb3f62</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/pypa/virtualenv/issues/2769">#2769</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/220d49c2e3ade2ed24f5712ab5a23895cde2e04c"><code>220d49c</code></a> Bump pypa/gh-action-pypi-publish from 1.10.1 to 1.10.2 (<a href="https://redirect.github.com/pypa/virtualenv/issues/2767">#2767</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/cf340c83c2828a92def78c77b3e037a2baa4d557"><code>cf340c8</code></a> Merge pull request <a href="https://redirect.github.com/pypa/virtualenv/issues/2766">#2766</a> from pypa/release-20.26.5</li> <li>See full diff in <a href="https://github.com/pypa/virtualenv/compare/20.26.5...20.26.6">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=virtualenv&package-manager=pip&previous-version=20.26.5&new-version=20.26.6)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 36715f43c8b..83dd056b3b6 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -279,7 +279,7 @@ uvloop==0.21.0b1 ; platform_system != "Windows" # via # -r 
requirements/base.in # -r requirements/lint.in -virtualenv==20.26.5 +virtualenv==20.26.6 # via pre-commit wait-for-it==2.2.2 # via -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 441b03af509..e2a81d30973 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -271,7 +271,7 @@ uvloop==0.21.0b1 ; platform_system != "Windows" and implementation_name == "cpyt # via # -r requirements/base.in # -r requirements/lint.in -virtualenv==20.26.5 +virtualenv==20.26.6 # via pre-commit wait-for-it==2.2.2 # via -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 299b2b09f4a..663feea390c 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -119,5 +119,5 @@ urllib3==2.2.3 # via requests uvloop==0.21.0b1 ; platform_system != "Windows" # via -r requirements/lint.in -virtualenv==20.26.5 +virtualenv==20.26.6 # via pre-commit From b4a8083b14dd15ca84a155d0fe7a602d045edb28 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 30 Sep 2024 23:32:09 +0000 Subject: [PATCH 0680/1511] [PR #9358/a50ca6ce backport][3.10] Remove unnecessary generator expression to generate the connection key (#9361) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/client_reqrep.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index f732b70c20a..d51964a9c4d 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -374,7 +374,7 @@ def ssl(self) -> Union["SSLContext", bool, Fingerprint]: def connection_key(self) -> ConnectionKey: proxy_headers = self.proxy_headers if proxy_headers: - h: Optional[int] = hash(tuple((k, v) for k, v in proxy_headers.items())) + h: Optional[int] = hash(tuple(proxy_headers.items())) else: h = None return ConnectionKey( From 354153e9b706f38f7f673d7c49a71a775f36d063 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 30 Sep 2024 23:38:51 +0000 Subject: [PATCH 0681/1511] [PR #9358/a50ca6ce backport][3.11] Remove unnecessary generator expression to generate the connection key (#9362) Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/client_reqrep.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 7de45b67007..293c745b349 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -368,7 +368,7 @@ def ssl(self) -> Union["SSLContext", bool, Fingerprint]: def connection_key(self) -> ConnectionKey: proxy_headers = self.proxy_headers if proxy_headers: - h: Optional[int] = hash(tuple((k, v) for k, v in proxy_headers.items())) + h: Optional[int] = hash(tuple(proxy_headers.items())) else: h = None return ConnectionKey( From 22a12cc2e2ef289d9e96fd87dfc17272177e52ac Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Tue, 1 Oct 2024 01:13:35 +0100 Subject: [PATCH 0682/1511] =?UTF-8?q?Fix=20issue=206652:=20Raise=20`aiohtt?= =?UTF-8?q?p.ServerFingerprintMismatch`=20exception=20o=E2=80=A6=20(#9363)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit …… (#6653) (cherry picked from 
commit e3b1011f2146ad0faa4c3d3c29f26b73e1400564) Co-authored-by: Gang Ji <62988402+gangj@users.noreply.github.com> --- CHANGES/6652.bugfix | 1 + CONTRIBUTORS.txt | 1 + aiohttp/connector.py | 10 ++++ tests/test_proxy.py | 106 ++++++++++++++++++++++++++++++++++++++++++- 4 files changed, 116 insertions(+), 2 deletions(-) create mode 100644 CHANGES/6652.bugfix diff --git a/CHANGES/6652.bugfix b/CHANGES/6652.bugfix new file mode 100644 index 00000000000..4ce1f678792 --- /dev/null +++ b/CHANGES/6652.bugfix @@ -0,0 +1 @@ +Raise `aiohttp.ServerFingerprintMismatch` exception on client-side if request through http proxy with mismatching server fingerprint digest: `aiohttp.ClientSession(headers=headers, connector=TCPConnector(ssl=aiohttp.Fingerprint(mismatch_digest), trust_env=True).request(...)`. diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 92e1666fbc6..96403c2aec4 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -127,6 +127,7 @@ Franek Magiera Frederik Gladhorn Frederik Peter Aalund Gabriel Tremblay +Gang Ji Gary Wilson Jr. 
Gennady Andreyev Georges Dubus diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 6bc3ee54cdf..13c1a0cdc48 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -1217,6 +1217,16 @@ async def _start_tls_connection( # chance to do this: underlying_transport.close() raise + if isinstance(tls_transport, asyncio.Transport): + fingerprint = self._get_fingerprint(req) + if fingerprint: + try: + fingerprint.check(tls_transport) + except ServerFingerprintMismatch: + tls_transport.close() + if not self._cleanup_closed_disabled: + self._cleanup_closed_transports.append(tls_transport) + raise except cert_errors as exc: raise ClientConnectorCertificateError(req.connection_key, exc) from exc except ssl_errors as exc: diff --git a/tests/test_proxy.py b/tests/test_proxy.py index 4fa5e932098..c98ae7c2653 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -11,7 +11,7 @@ from yarl import URL import aiohttp -from aiohttp.client_reqrep import ClientRequest, ClientResponse +from aiohttp.client_reqrep import ClientRequest, ClientResponse, Fingerprint from aiohttp.connector import _SSL_CONTEXT_VERIFIED from aiohttp.helpers import TimerNoop from aiohttp.test_utils import make_mocked_coro @@ -384,7 +384,109 @@ async def make_conn(): autospec=True, spec_set=True, ) - def test_https_connect(self, start_connection: Any, ClientRequestMock: Any) -> None: + def test_https_connect_fingerprint_mismatch( + self, start_connection: mock.Mock, ClientRequestMock: mock.Mock + ) -> None: + async def make_conn() -> aiohttp.TCPConnector: + return aiohttp.TCPConnector(enable_cleanup_closed=cleanup) + + for cleanup in (True, False): + with self.subTest(cleanup=cleanup): + proxy_req = ClientRequest( + "GET", URL("http://proxy.example.com"), loop=self.loop + ) + ClientRequestMock.return_value = proxy_req + + class TransportMock(asyncio.Transport): + def close(self) -> None: + pass + + proxy_resp = ClientResponse( + "get", + URL("http://proxy.example.com"), + 
request_info=mock.Mock(), + writer=mock.Mock(), + continue100=None, + timer=TimerNoop(), + traces=[], + loop=self.loop, + session=mock.Mock(), + ) + fingerprint_mock = mock.Mock(spec=Fingerprint, auto_spec=True) + fingerprint_mock.check.side_effect = aiohttp.ServerFingerprintMismatch( + b"exp", b"got", "example.com", 8080 + ) + with mock.patch.object( + proxy_req, + "send", + autospec=True, + spec_set=True, + return_value=proxy_resp, + ), mock.patch.object( + proxy_resp, + "start", + autospec=True, + spec_set=True, + return_value=mock.Mock(status=200), + ): + connector = self.loop.run_until_complete(make_conn()) + host = [ + { + "hostname": "hostname", + "host": "127.0.0.1", + "port": 80, + "family": socket.AF_INET, + "proto": 0, + "flags": 0, + } + ] + with mock.patch.object( + connector, + "_resolve_host", + autospec=True, + spec_set=True, + return_value=host, + ), mock.patch.object( + connector, + "_get_fingerprint", + autospec=True, + spec_set=True, + return_value=fingerprint_mock, + ), mock.patch.object( # Called on connection to http://proxy.example.com + self.loop, + "create_connection", + autospec=True, + spec_set=True, + return_value=(mock.Mock(), mock.Mock()), + ), mock.patch.object( # Called on connection to https://www.python.org + self.loop, + "start_tls", + autospec=True, + spec_set=True, + return_value=TransportMock(), + ): + req = ClientRequest( + "GET", + URL("https://www.python.org"), + proxy=URL("http://proxy.example.com"), + loop=self.loop, + ) + with self.assertRaises(aiohttp.ServerFingerprintMismatch): + self.loop.run_until_complete( + connector._create_connection( + req, [], aiohttp.ClientTimeout() + ) + ) + + @mock.patch("aiohttp.connector.ClientRequest") + @mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + autospec=True, + spec_set=True, + ) + def test_https_connect( + self, start_connection: mock.Mock, ClientRequestMock: mock.Mock + ) -> None: proxy_req = ClientRequest( "GET", URL("http://proxy.example.com"), 
loop=self.loop ) From dc080fd95c2b1216e11cf874b7ae20c014b74ac9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 1 Oct 2024 13:26:11 +0000 Subject: [PATCH 0683/1511] Bump aiohappyeyeballs from 2.4.2 to 2.4.3 (#9370) Bumps [aiohappyeyeballs](https://github.com/aio-libs/aiohappyeyeballs) from 2.4.2 to 2.4.3. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/aiohappyeyeballs/releases">aiohappyeyeballs's releases</a>.</em></p> <blockquote> <h1>v2.4.3 (2024-09-30)</h1> <h2>Fix</h2> <ul> <li> <p>fix: rewrite staggered_race to be race safe (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/101">#101</a>)</p> </li> <li> <p>fix: re-raise RuntimeError when uvloop raises RuntimeError during connect (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/105">#105</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/c8f1fa93d698f216f84de7074a6282777fbf0439"><code>c8f1fa9</code></a>)</p> </li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/aiohappyeyeballs/blob/main/CHANGELOG.md">aiohappyeyeballs's changelog</a>.</em></p> <blockquote> <h2>v2.4.3 (2024-09-30)</h2> <h3>Fix</h3> <ul> <li>Rewrite staggered_race to be race safe (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/101">#101</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/9db617a982ee27994bf13c805f9c4f054f05de47"><code>9db617a</code></a>)</li> <li>Re-raise runtimeerror when uvloop raises runtimeerror during connect (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/105">#105</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/c8f1fa93d698f216f84de7074a6282777fbf0439"><code>c8f1fa9</code></a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a 
href="https://github.com/aio-libs/aiohappyeyeballs/commit/e3519bbebf2069eee0aff0dfde50689c742ba97f"><code>e3519bb</code></a> 2.4.3</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/9db617a982ee27994bf13c805f9c4f054f05de47"><code>9db617a</code></a> fix: rewrite staggered_race to be race safe (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/101">#101</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/c8f1fa93d698f216f84de7074a6282777fbf0439"><code>c8f1fa9</code></a> fix: re-raise RuntimeError when uvloop raises RuntimeError during connect (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/105">#105</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/b075f25b0766c4f2051dfd7eba3d9158ae3307fd"><code>b075f25</code></a> chore: switch license classifier to PSF-2.0 (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/104">#104</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/f134b0079017fc2bf8531989d9874c082528b0db"><code>f134b00</code></a> chore: use ruff for formatting instead of black (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/103">#103</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/b16a1f3fc8c50547348de5840e67ebe7fc337274"><code>b16a1f3</code></a> chore(pre-commit.ci): pre-commit autoupdate (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/102">#102</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/ec948d8e5d240cabe3f9ed2ea92722f4dc2a0759"><code>ec948d8</code></a> chore: add coverage for cancellation (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/98">#98</a>)</li> <li>See full diff in <a href="https://github.com/aio-libs/aiohappyeyeballs/compare/v2.4.2...v2.4.3">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=aiohappyeyeballs&package-manager=pip&previous-version=2.4.2&new-version=2.4.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index f25226e637c..eeca7016a96 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.4.2 +aiohappyeyeballs==2.4.3 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 83dd056b3b6..ab86471e38c 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via # -r requirements/lint.in # -r requirements/runtime-deps.in -aiohappyeyeballs==2.4.2 +aiohappyeyeballs==2.4.3 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.7 # via -r requirements/doc.in diff --git a/requirements/dev.txt 
b/requirements/dev.txt index e2a81d30973..8e9c0f98d43 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via # -r requirements/lint.in # -r requirements/runtime-deps.in -aiohappyeyeballs==2.4.2 +aiohappyeyeballs==2.4.3 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.7 # via -r requirements/doc.in diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index d28e3292fa1..4656f681d2f 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.4.2 +aiohappyeyeballs==2.4.3 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index d8df504b9c9..b2f69e70728 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.4.2 +aiohappyeyeballs==2.4.3 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in From 70190541ae7ff9d9d495fb68edcbdf1433fe9b10 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Tue, 1 Oct 2024 11:20:57 -0500 Subject: [PATCH 0684/1511] [PR #9371/c25099c backport][3.10] Reduce indent in connector tests (#9373) --- tests/test_connector.py | 24 ++++++++++++++++++------ 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/tests/test_connector.py b/tests/test_connector.py index 9f9dbe66c28..2e43573db4e 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -733,9 +733,15 @@ def get_extra_info(param): assert False - conn._loop.create_connection = create_connection - - with mock.patch( + with mock.patch.object( + conn, "_resolve_host", autospec=True, spec_set=True, side_effect=_resolve_host + ), mock.patch.object( + conn._loop, + "create_connection", + autospec=True, + spec_set=True, + side_effect=create_connection, + ), mock.patch( "aiohttp.connector.aiohappyeyeballs.start_connection", start_connection ): established_connection = await conn.connect(req, [], ClientTimeout()) @@ -885,9 +891,15 @@ async def create_connection(*args, **kwargs): pr = create_mocked_conn(loop) return tr, pr - conn._loop.create_connection = create_connection - - with mock.patch( + with mock.patch.object( + conn, "_resolve_host", autospec=True, spec_set=True, side_effect=_resolve_host + ), mock.patch.object( + conn._loop, + "create_connection", + autospec=True, + spec_set=True, + side_effect=create_connection, + ), mock.patch( "aiohttp.connector.aiohappyeyeballs.start_connection", start_connection ): established_connection = await conn.connect(req, [], ClientTimeout()) From 224345ee8d5b5dc48826d56b1329b024d48741a6 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Tue, 1 Oct 2024 11:28:44 -0500 Subject: [PATCH 0685/1511] [PR #9371/c25099c backport][3.11] Reduce indent in connector tests (#9374) --- tests/test_connector.py | 24 ++++++++++++++++++------ 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/tests/test_connector.py b/tests/test_connector.py index bbe77f2a705..f28545b08e9 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -728,9 +728,15 @@ def get_extra_info(param): assert False - conn._loop.create_connection = create_connection - - with mock.patch( + with mock.patch.object( + conn, "_resolve_host", autospec=True, spec_set=True, side_effect=_resolve_host + ), mock.patch.object( + conn._loop, + "create_connection", + autospec=True, + spec_set=True, + side_effect=create_connection, + ), mock.patch( "aiohttp.connector.aiohappyeyeballs.start_connection", start_connection ): established_connection = await conn.connect(req, [], ClientTimeout()) @@ -880,9 +886,15 @@ async def create_connection(*args, **kwargs): pr = create_mocked_conn(loop) return tr, pr - conn._loop.create_connection = create_connection - - with mock.patch( + with mock.patch.object( + conn, "_resolve_host", autospec=True, spec_set=True, side_effect=_resolve_host + ), mock.patch.object( + conn._loop, + "create_connection", + autospec=True, + spec_set=True, + side_effect=create_connection, + ), mock.patch( "aiohttp.connector.aiohappyeyeballs.start_connection", start_connection ): established_connection = await conn.connect(req, [], ClientTimeout()) From 7a89f20bb29e731378b8fa501d9278307c142585 Mon Sep 17 00:00:00 2001 From: Cycloctane <Cycloctane@outlook.com> Date: Wed, 2 Oct 2024 02:54:50 +0800 Subject: [PATCH 0686/1511] [PR #9359/a9ac5e2 backport][3.11] replace blockdiag with graphviz in docs (#9375) --- .readthedocs.yml | 2 + CHANGES/9359.contrib.rst | 2 + docs/conf.py | 2 +- docs/tracing_reference.rst | 140 +++++++++++++++++++++++--------- requirements/broken-projects.in | 1 - 
requirements/constraints.txt | 18 +--- requirements/dev.txt | 18 +--- requirements/doc-spelling.txt | 23 +----- requirements/doc.in | 3 - requirements/doc.txt | 17 +--- requirements/test.in | 1 - 11 files changed, 112 insertions(+), 115 deletions(-) create mode 100644 CHANGES/9359.contrib.rst delete mode 100644 requirements/broken-projects.in diff --git a/.readthedocs.yml b/.readthedocs.yml index 1b66ee7c0e4..b3edaf4b8ea 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -14,6 +14,8 @@ build: os: ubuntu-24.04 tools: python: "3.11" + apt_packages: + - graphviz jobs: post_create_environment: diff --git a/CHANGES/9359.contrib.rst b/CHANGES/9359.contrib.rst new file mode 100644 index 00000000000..cff763e8b09 --- /dev/null +++ b/CHANGES/9359.contrib.rst @@ -0,0 +1,2 @@ +Changed diagram images generator from ``blockdiag`` to ``GraphViz``. +Generating documentation now requires the GraphViz executable to be included in $PATH or sphinx build configuration. diff --git a/docs/conf.py b/docs/conf.py index 23ac3e426ec..5cbf398e6a9 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -55,10 +55,10 @@ extensions = [ # stdlib-party extensions: "sphinx.ext.extlinks", + "sphinx.ext.graphviz", "sphinx.ext.intersphinx", "sphinx.ext.viewcode", # Third-party extensions: - "sphinxcontrib.blockdiag", "sphinxcontrib.towncrier.ext", # provides `towncrier-draft-entries` directive ] diff --git a/docs/tracing_reference.rst b/docs/tracing_reference.rst index 91fc31c33b9..5c60eba9c5c 100644 --- a/docs/tracing_reference.rst +++ b/docs/tracing_reference.rst @@ -21,23 +21,20 @@ A request goes through the following stages and corresponding fallbacks. Overview ^^^^^^^^ -.. blockdiag:: - :desctable: +.. 
graphviz:: + digraph { - blockdiag { - orientation = portrait; + start[shape=point, xlabel="start", width="0.1"]; + redirect[shape=box]; + end[shape=point, xlabel="end ", width="0.1"]; + exception[shape=oval]; - start[shape=beginpoint, description="on_request_start"]; - redirect[description="on_request_redirect"]; - end[shape=endpoint, description="on_request_end"]; - exception[shape=flowchart.terminator, description="on_request_exception"]; - - acquire_connection[description="Connection acquiring"]; - headers_received; - headers_sent[description="on_request_headers_sent"]; - chunk_sent[description="on_request_chunk_sent"]; - chunk_received[description="on_response_chunk_received"]; + acquire_connection[shape=box]; + headers_received[shape=box]; + headers_sent[shape=box]; + chunk_sent[shape=box]; + chunk_received[shape=box]; start -> acquire_connection; acquire_connection -> headers_sent; @@ -57,28 +54,48 @@ Overview } +.. list-table:: + :header-rows: 1 + + * - Name + - Description + * - start + - on_request_start + * - redirect + - on_request_redirect + * - acquire_connection + - Connection acquiring + * - headers_received + - + * - exception + - on_request_exception + * - end + - on_request_end + * - headers_sent + - on_request_headers_sent + * - chunk_sent + - on_request_chunk_sent + * - chunk_received + - on_response_chunk_received Connection acquiring ^^^^^^^^^^^^^^^^^^^^ -.. blockdiag:: - :desctable: - - blockdiag { - orientation = portrait; +.. 
graphviz:: - begin[shape=beginpoint]; - end[shape=endpoint]; - exception[shape=flowchart.terminator, description="Exception raised"]; + digraph { - queued_start[description="on_connection_queued_start"]; - queued_end[description="on_connection_queued_end"]; - create_start[description="on_connection_create_start"]; - create_end[description="on_connection_create_end"]; - reuseconn[description="on_connection_reuseconn"]; + begin[shape=point, xlabel="begin", width="0.1"]; + end[shape=point, xlabel="end ", width="0.1"]; + exception[shape=oval]; - resolve_dns[description="DNS resolving"]; - sock_connect[description="Connection establishment"]; + queued_start[shape=box]; + queued_end[shape=box]; + create_start[shape=box]; + create_end[shape=box]; + reuseconn[shape=box]; + resolve_dns[shape=box]; + sock_connect[shape=box]; begin -> reuseconn; begin -> create_start; @@ -95,23 +112,47 @@ Connection acquiring } +.. list-table:: + :header-rows: 1 + + * - Name + - Description + * - begin + - + * - end + - + * - queued_start + - on_connection_queued_start + * - create_start + - on_connection_create_start + * - reuseconn + - on_connection_reuseconn + * - queued_end + - on_connection_queued_end + * - create_end + - on_connection_create_end + * - exception + - Exception raised + * - resolve_dns + - DNS resolving + * - sock_connect + - Connection establishment + DNS resolving ^^^^^^^^^^^^^ -.. blockdiag:: - :desctable: +.. 
graphviz:: - blockdiag { - orientation = portrait; + digraph { - begin[shape=beginpoint]; - end[shape=endpoint]; - exception[shape=flowchart.terminator, description="Exception raised"]; + begin[shape=point, xlabel="begin", width="0.1"]; + end[shape=point, xlabel="end", width="0.1"]; + exception[shape=oval]; - resolve_start[description="on_dns_resolvehost_start"]; - resolve_end[description="on_dns_resolvehost_end"]; - cache_hit[description="on_dns_cache_hit"]; - cache_miss[description="on_dns_cache_miss"]; + resolve_start[shape=box]; + resolve_end[shape=box]; + cache_hit[shape=box]; + cache_miss[shape=box]; begin -> cache_hit -> end; begin -> cache_miss -> resolve_start; @@ -120,6 +161,25 @@ DNS resolving } +.. list-table:: + :header-rows: 1 + + * - Name + - Description + * - begin + - + * - end + - + * - exception + - Exception raised + * - resolve_end + - on_dns_resolvehost_end + * - resolve_start + - on_dns_resolvehost_start + * - cache_hit + - on_dns_cache_hit + * - cache_miss + - on_dns_cache_miss Classes ------- diff --git a/requirements/broken-projects.in b/requirements/broken-projects.in deleted file mode 100644 index 02d95f8b948..00000000000 --- a/requirements/broken-projects.in +++ /dev/null @@ -1 +0,0 @@ -Pillow < 10 # https://github.com/blockdiag/sphinxcontrib-blockdiag/issues/26 diff --git a/requirements/constraints.txt b/requirements/constraints.txt index ab86471e38c..eee37c04f4b 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -28,8 +28,6 @@ attrs==24.2.0 # via -r requirements/runtime-deps.in babel==2.16.0 # via sphinx -blockdiag==3.0.0 - # via sphinxcontrib-blockdiag brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in build==1.2.2 @@ -80,8 +78,6 @@ frozenlist==1.4.1 # via # -r requirements/runtime-deps.in # aiosignal -funcparserlib==1.0.1 - # via blockdiag gidgethub==5.3.0 # via cherry-picker gunicorn==23.0.0 @@ -134,10 +130,6 @@ packaging==24.1 # gunicorn # pytest # sphinx 
-pillow==9.5.0 - # via - # -c requirements/broken-projects.in - # blockdiag pip-tools==7.4.1 # via -r requirements/dev.in platformdirs==4.3.6 @@ -163,9 +155,7 @@ pygments==2.18.0 # rich # sphinx pyjwt==2.9.0 - # via - # gidgethub - # pyjwt + # via gidgethub pyproject-hooks==1.2.0 # via # build @@ -216,13 +206,10 @@ snowballstemmer==2.2.0 sphinx==7.1.2 # via # -r requirements/doc.in - # sphinxcontrib-blockdiag # sphinxcontrib-spelling # sphinxcontrib-towncrier sphinxcontrib-applehelp==1.0.4 # via sphinx -sphinxcontrib-blockdiag==3.0.0 - # via -r requirements/doc.in sphinxcontrib-devhelp==1.0.2 # via sphinx sphinxcontrib-htmlhelp==2.0.1 @@ -283,8 +270,6 @@ virtualenv==20.26.6 # via pre-commit wait-for-it==2.2.2 # via -r requirements/test.in -webcolors==24.8.0 - # via blockdiag wheel==0.44.0 # via pip-tools yarl==1.13.1 @@ -299,6 +284,5 @@ pip==24.2 # via pip-tools setuptools==75.1.0 # via - # blockdiag # incremental # pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index 8e9c0f98d43..49c9142aaf6 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -28,8 +28,6 @@ attrs==24.2.0 # via -r requirements/runtime-deps.in babel==2.16.0 # via sphinx -blockdiag==3.0.0 - # via sphinxcontrib-blockdiag brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in build==1.2.2 @@ -78,8 +76,6 @@ frozenlist==1.4.1 # via # -r requirements/runtime-deps.in # aiosignal -funcparserlib==1.0.1 - # via blockdiag gidgethub==5.3.0 # via cherry-picker gunicorn==23.0.0 @@ -131,10 +127,6 @@ packaging==24.1 # gunicorn # pytest # sphinx -pillow==9.5.0 - # via - # -c requirements/broken-projects.in - # blockdiag pip-tools==7.4.1 # via -r requirements/dev.in platformdirs==4.3.6 @@ -158,9 +150,7 @@ pygments==2.18.0 # rich # sphinx pyjwt==2.8.0 - # via - # gidgethub - # pyjwt + # via gidgethub pyproject-hooks==1.2.0 # via # build @@ -211,12 +201,9 @@ snowballstemmer==2.2.0 sphinx==7.1.2 # via # -r requirements/doc.in - # 
sphinxcontrib-blockdiag # sphinxcontrib-towncrier sphinxcontrib-applehelp==1.0.4 # via sphinx -sphinxcontrib-blockdiag==3.0.0 - # via -r requirements/doc.in sphinxcontrib-devhelp==1.0.2 # via sphinx sphinxcontrib-htmlhelp==2.0.1 @@ -275,8 +262,6 @@ virtualenv==20.26.6 # via pre-commit wait-for-it==2.2.2 # via -r requirements/test.in -webcolors==24.8.0 - # via blockdiag wheel==0.44.0 # via pip-tools yarl==1.13.1 @@ -291,6 +276,5 @@ pip==24.2 # via pip-tools setuptools==75.1.0 # via - # blockdiag # incremental # pip-tools diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index fae36c2f105..030d6cadc5d 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -1,8 +1,8 @@ # -# This file is autogenerated by pip-compile with python 3.8 -# To update, run: +# This file is autogenerated by pip-compile with Python 3.8 +# by the following command: # -# pip-compile --allow-unsafe --output-file=requirements/doc-spelling.txt --resolver=backtracking --strip-extras requirements/doc-spelling.in +# pip-compile --allow-unsafe --output-file=requirements/doc-spelling.txt --strip-extras requirements/doc-spelling.in # aiohttp-theme==0.1.7 # via -r requirements/doc.in @@ -10,8 +10,6 @@ alabaster==0.7.13 # via sphinx babel==2.16.0 # via sphinx -blockdiag==3.0.0 - # via sphinxcontrib-blockdiag certifi==2024.8.30 # via requests charset-normalizer==3.3.2 @@ -20,8 +18,6 @@ click==8.1.7 # via towncrier docutils==0.20.1 # via sphinx -funcparserlib==1.0.1 - # via blockdiag idna==3.4 # via requests imagesize==1.4.1 @@ -40,10 +36,6 @@ markupsafe==2.1.5 # via jinja2 packaging==24.1 # via sphinx -pillow==9.5.0 - # via - # -c requirements/broken-projects.in - # blockdiag pyenchant==3.2.2 # via sphinxcontrib-spelling pygments==2.18.0 @@ -57,13 +49,10 @@ snowballstemmer==2.2.0 sphinx==7.1.2 # via # -r requirements/doc.in - # sphinxcontrib-blockdiag # sphinxcontrib-spelling # sphinxcontrib-towncrier sphinxcontrib-applehelp==1.0.4 # via sphinx 
-sphinxcontrib-blockdiag==3.0.0 - # via -r requirements/doc.in sphinxcontrib-devhelp==1.0.2 # via sphinx sphinxcontrib-htmlhelp==2.0.1 @@ -88,8 +77,6 @@ towncrier==23.11.0 # sphinxcontrib-towncrier urllib3==2.2.3 # via requests -webcolors==24.8.0 - # via blockdiag zipp==3.20.2 # via # importlib-metadata @@ -97,6 +84,4 @@ zipp==3.20.2 # The following packages are considered to be unsafe in a requirements file: setuptools==75.1.0 - # via - # blockdiag - # incremental + # via incremental diff --git a/requirements/doc.in b/requirements/doc.in index 1e0b6568724..15017b083d3 100644 --- a/requirements/doc.in +++ b/requirements/doc.in @@ -1,7 +1,4 @@ --c broken-projects.in - aiohttp-theme sphinx -sphinxcontrib-blockdiag sphinxcontrib-towncrier towncrier diff --git a/requirements/doc.txt b/requirements/doc.txt index e696c59b1a6..50746e26f8f 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -10,8 +10,6 @@ alabaster==0.7.13 # via sphinx babel==2.16.0 # via sphinx -blockdiag==3.0.0 - # via sphinxcontrib-blockdiag certifi==2024.8.30 # via requests charset-normalizer==3.3.2 @@ -20,8 +18,6 @@ click==8.1.7 # via towncrier docutils==0.20.1 # via sphinx -funcparserlib==1.0.1 - # via blockdiag idna==3.4 # via requests imagesize==1.4.1 @@ -40,10 +36,6 @@ markupsafe==2.1.5 # via jinja2 packaging==24.1 # via sphinx -pillow==9.5.0 - # via - # -c requirements/broken-projects.in - # blockdiag pygments==2.18.0 # via sphinx pytz==2024.2 @@ -55,12 +47,9 @@ snowballstemmer==2.2.0 sphinx==7.1.2 # via # -r requirements/doc.in - # sphinxcontrib-blockdiag # sphinxcontrib-towncrier sphinxcontrib-applehelp==1.0.4 # via sphinx -sphinxcontrib-blockdiag==3.0.0 - # via -r requirements/doc.in sphinxcontrib-devhelp==1.0.2 # via sphinx sphinxcontrib-htmlhelp==2.0.1 @@ -83,8 +72,6 @@ towncrier==23.11.0 # sphinxcontrib-towncrier urllib3==2.2.3 # via requests -webcolors==24.8.0 - # via blockdiag zipp==3.20.2 # via # importlib-metadata @@ -92,6 +79,4 @@ zipp==3.20.2 # The following packages are 
considered to be unsafe in a requirements file: setuptools==75.1.0 - # via - # blockdiag - # incremental + # via incremental diff --git a/requirements/test.in b/requirements/test.in index 686cd6dbf2e..a88b54f3532 100644 --- a/requirements/test.in +++ b/requirements/test.in @@ -1,4 +1,3 @@ --c broken-projects.in -r base.in coverage From c95998e2641009fab2426b7b7596b07f271435a4 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 1 Oct 2024 18:55:03 +0000 Subject: [PATCH 0687/1511] [PR #9368/02d8dba9 backport][3.11] Avoid using the proxy headers in the ConnectionKey if no proxy is in use (#9379) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/9368.bugfix.rst | 3 +++ aiohttp/client_reqrep.py | 8 +++++++- tests/test_client_request.py | 27 +++++++++++++++++++++++++++ 3 files changed, 37 insertions(+), 1 deletion(-) create mode 100644 CHANGES/9368.bugfix.rst diff --git a/CHANGES/9368.bugfix.rst b/CHANGES/9368.bugfix.rst new file mode 100644 index 00000000000..fb2f90c1d4d --- /dev/null +++ b/CHANGES/9368.bugfix.rst @@ -0,0 +1,3 @@ +Fixed proxy headers being used in the ``ConnectionKey`` hash when proxy was being used -- by :user:`bdraco`. + +If default headers are used, they are also used for proxy headers. This could have led to creating connections that were not needed when one was already available. 
diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 293c745b349..d1c83c151ce 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -594,10 +594,16 @@ def update_proxy( proxy_auth: Optional[BasicAuth], proxy_headers: Optional[LooseHeaders], ) -> None: + self.proxy = proxy + if proxy is None: + self.proxy_auth = None + self.proxy_headers = None + return + if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth): raise ValueError("proxy_auth must be None or BasicAuth() tuple") - self.proxy = proxy self.proxy_auth = proxy_auth + if proxy_headers is not None and not isinstance( proxy_headers, (MultiDict, MultiDictProxy) ): diff --git a/tests/test_client_request.py b/tests/test_client_request.py index f2eff019504..d6e8b823bb6 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -1460,3 +1460,30 @@ def test_basicauth_from_empty_netrc( """Test that no Authorization header is sent when netrc is empty""" req = make_request("get", "http://example.com", trust_env=True) assert hdrs.AUTHORIZATION not in req.headers + + +async def test_connection_key_with_proxy() -> None: + """Verify the proxy headers are included in the ConnectionKey when a proxy is used.""" + proxy = URL("http://proxy.example.com") + req = ClientRequest( + "GET", + URL("http://example.com"), + proxy=proxy, + proxy_headers={"X-Proxy": "true"}, + loop=asyncio.get_running_loop(), + ) + assert req.connection_key.proxy_headers_hash is not None + await req.close() + + +async def test_connection_key_without_proxy() -> None: + """Verify the proxy headers are not included in the ConnectionKey when a proxy is used.""" + # If proxy is unspecified, proxy_headers should be ignored + req = ClientRequest( + "GET", + URL("http://example.com"), + proxy_headers={"X-Proxy": "true"}, + loop=asyncio.get_running_loop(), + ) + assert req.connection_key.proxy_headers_hash is None + await req.close() From 0a74b540ddd5c2314347053f22bfea3f76af4cc5 Mon Sep 17 00:00:00 
2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 1 Oct 2024 18:55:03 +0000 Subject: [PATCH 0688/1511] [PR #9367/b612127d backport][3.10] Speed up handling auth in urls (#9380) Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/client_reqrep.py | 5 ++--- aiohttp/helpers.py | 13 ++++++++----- tests/test_helpers.py | 6 ++++++ 3 files changed, 16 insertions(+), 8 deletions(-) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index d51964a9c4d..f3f91ffd5b8 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -409,9 +409,8 @@ def update_host(self, url: URL) -> None: raise InvalidURL(url) # basic auth info - username, password = url.user, url.password - if username or password: - self.auth = helpers.BasicAuth(username or "", password or "") + if url.raw_user or url.raw_password: + self.auth = helpers.BasicAuth(url.user or "", url.password or "") def update_version(self, version: Union[http.HttpVersion, str]) -> None: """Convert request version to two elements tuple. diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index ee2a91cec46..070b04f8d82 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -163,7 +163,9 @@ def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth" """Create BasicAuth from url.""" if not isinstance(url, URL): raise TypeError("url should be yarl.URL instance") - if url.user is None and url.password is None: + # Check raw_user and raw_password first as yarl is likely + # to already have these values parsed from the netloc in the cache. 
+ if url.raw_user is None and url.raw_password is None: return None return cls(url.user or "", url.password or "", encoding=encoding) @@ -174,11 +176,12 @@ def encode(self) -> str: def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]: - auth = BasicAuth.from_url(url) - if auth is None: + """Remove user and password from URL if present and return BasicAuth object.""" + # Check raw_user and raw_password first as yarl is likely + # to already have these values parsed from the netloc in the cache. + if url.raw_user is None and url.raw_password is None: return url, None - else: - return url.with_user(None), auth + return url.with_user(None), BasicAuth(url.user or "", url.password or "") def netrc_from_env() -> Optional[netrc.netrc]: diff --git a/tests/test_helpers.py b/tests/test_helpers.py index f79f9bebe09..6c752ce6d89 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -197,6 +197,12 @@ def test_basic_auth_no_user_from_url() -> None: assert auth.password == "pass" +def test_basic_auth_no_auth_from_url() -> None: + url = URL("http://example.com") + auth = helpers.BasicAuth.from_url(url) + assert auth is None + + def test_basic_auth_from_not_url() -> None: with pytest.raises(TypeError): helpers.BasicAuth.from_url("http://user:pass@example.com") From 9f8721aba72381008eecebdffbcc82bde009fe44 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 1 Oct 2024 18:55:17 +0000 Subject: [PATCH 0689/1511] [PR #9367/b612127d backport][3.11] Speed up handling auth in urls (#9381) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/client_reqrep.py | 5 ++--- aiohttp/helpers.py | 13 ++++++++----- tests/test_helpers.py | 6 ++++++ 3 files changed, 16 insertions(+), 8 deletions(-) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index d1c83c151ce..6a0f22d73c6 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -403,9 +403,8 @@ def update_host(self, url: URL) -> None: raise InvalidURL(url) # basic auth info - username, password = url.user, url.password - if username or password: - self.auth = helpers.BasicAuth(username or "", password or "") + if url.raw_user or url.raw_password: + self.auth = helpers.BasicAuth(url.user or "", url.password or "") def update_version(self, version: Union[http.HttpVersion, str]) -> None: """Convert request version to two elements tuple. diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index 097f711a1da..13a531d5cab 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -163,7 +163,9 @@ def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth" """Create BasicAuth from url.""" if not isinstance(url, URL): raise TypeError("url should be yarl.URL instance") - if url.user is None and url.password is None: + # Check raw_user and raw_password first as yarl is likely + # to already have these values parsed from the netloc in the cache. + if url.raw_user is None and url.raw_password is None: return None return cls(url.user or "", url.password or "", encoding=encoding) @@ -174,11 +176,12 @@ def encode(self) -> str: def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]: - auth = BasicAuth.from_url(url) - if auth is None: + """Remove user and password from URL if present and return BasicAuth object.""" + # Check raw_user and raw_password first as yarl is likely + # to already have these values parsed from the netloc in the cache. 
+ if url.raw_user is None and url.raw_password is None: return url, None - else: - return url.with_user(None), auth + return url.with_user(None), BasicAuth(url.user or "", url.password or "") def netrc_from_env() -> Optional[netrc.netrc]: diff --git a/tests/test_helpers.py b/tests/test_helpers.py index a9e31d13249..6f45ceca0b9 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -197,6 +197,12 @@ def test_basic_auth_no_user_from_url() -> None: assert auth.password == "pass" +def test_basic_auth_no_auth_from_url() -> None: + url = URL("http://example.com") + auth = helpers.BasicAuth.from_url(url) + assert auth is None + + def test_basic_auth_from_not_url() -> None: with pytest.raises(TypeError): helpers.BasicAuth.from_url("http://user:pass@example.com") From 9c539c3f01212f68e4acb7217fbceba4f77b25c7 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 1 Oct 2024 18:55:58 +0000 Subject: [PATCH 0690/1511] [PR #9366/43deadb2 backport][3.11] Small speed up to update_headers (#9382) Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/client_reqrep.py | 58 +++++++++++++++++++++++++++------------- 1 file changed, 39 insertions(+), 19 deletions(-) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 6a0f22d73c6..815318a07e5 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -425,25 +425,45 @@ def update_headers(self, headers: Optional[LooseHeaders]) -> None: """Update request headers.""" self.headers: CIMultiDict[str] = CIMultiDict() - # add host - netloc = self.url.host_subcomponent - assert netloc is not None - # See https://github.com/aio-libs/aiohttp/issues/3636. 
- netloc = netloc.rstrip(".") - if self.url.port is not None and not self.url.is_default_port(): - netloc += ":" + str(self.url.port) - self.headers[hdrs.HOST] = netloc - - if headers: - if isinstance(headers, (dict, MultiDictProxy, MultiDict)): - headers = headers.items() - - for key, value in headers: # type: ignore[misc] - # A special case for Host header - if key.lower() == "host": - self.headers[key] = value - else: - self.headers.add(key, value) + # Build the host header + host = self.url.host_subcomponent + + # host_subcomponent is None when the URL is a relative URL. + # but we know we do not have a relative URL here. + assert host is not None + + if host[-1] == ".": + # Remove all trailing dots from the netloc as while + # they are valid FQDNs in DNS, TLS validation fails. + # See https://github.com/aio-libs/aiohttp/issues/3636. + # To avoid string manipulation we only call rstrip if + # the last character is a dot. + host = host.rstrip(".") + + # If explicit port is not None, it means that the port was + # explicitly specified in the URL. In this case we check + # if its not the default port for the scheme and add it to + # the host header. We check explicit_port first because + # yarl caches explicit_port and its likely to already be + # in the cache and non-default port URLs are far less common. 
+ explicit_port = self.url.explicit_port + if explicit_port is not None and not self.url.is_default_port(): + host = f"{host}:{explicit_port}" + + self.headers[hdrs.HOST] = host + + if not headers: + return + + if isinstance(headers, (dict, MultiDictProxy, MultiDict)): + headers = headers.items() + + for key, value in headers: # type: ignore[misc] + # A special case for Host header + if key.lower() == "host": + self.headers[key] = value + else: + self.headers.add(key, value) def update_auto_headers(self, skip_auto_headers: Optional[Iterable[str]]) -> None: if skip_auto_headers is not None: From ac48753f6423a2fe4d8c0d0fe73773c1a7af7e15 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 1 Oct 2024 13:58:30 -0500 Subject: [PATCH 0691/1511] [PR #9372/0416d28 backport][3.11] Only prepare proxy headers for a request if a proxy is set (#9377) --- aiohttp/client.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/aiohttp/client.py b/aiohttp/client.py index c893b06bb11..6c129c3ad64 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -515,7 +515,6 @@ async def _request( # Merge with default headers and transform to CIMultiDict headers = self._prepare_headers(headers) - proxy_headers = self._prepare_headers(proxy_headers) try: url = self._build_url(str_or_url) @@ -531,7 +530,10 @@ async def _request( for i in skip_auto_headers: skip_headers.add(istr(i)) - if proxy is not None: + if proxy is None: + proxy_headers = None + else: + proxy_headers = self._prepare_headers(proxy_headers) try: proxy = URL(proxy) except ValueError as e: From 57ce46c12bf084d2e7c1df8eb39ac1b68dabbd64 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Tue, 1 Oct 2024 14:10:29 -0500 Subject: [PATCH 0692/1511] [PR #9372/0416d28 backport][3.10] Only prepare proxy headers for a request if a proxy is set (#9376) --- aiohttp/client.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/aiohttp/client.py b/aiohttp/client.py index e50d216cf5a..5c83099258a 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -510,7 +510,6 @@ async def _request( # Merge with default headers and transform to CIMultiDict headers = self._prepare_headers(headers) - proxy_headers = self._prepare_headers(proxy_headers) try: url = self._build_url(str_or_url) @@ -526,7 +525,10 @@ async def _request( for i in skip_auto_headers: skip_headers.add(istr(i)) - if proxy is not None: + if proxy is None: + proxy_headers = None + else: + proxy_headers = self._prepare_headers(proxy_headers) try: proxy = URL(proxy) except ValueError as e: From fbbe4ed98378c7ec5c6bd648b86e3edfe831c5fe Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 1 Oct 2024 14:51:52 -0500 Subject: [PATCH 0693/1511] [PR #9365/d684195b backport][3.11] Speed up the ConnectionKey (#9384) --- CHANGES/9365.breaking.rst | 1 + aiohttp/client_reqrep.py | 24 +++++++++++++++--------- aiohttp/connector.py | 5 ++--- 3 files changed, 18 insertions(+), 12 deletions(-) create mode 100644 CHANGES/9365.breaking.rst diff --git a/CHANGES/9365.breaking.rst b/CHANGES/9365.breaking.rst new file mode 100644 index 00000000000..f0224170f07 --- /dev/null +++ b/CHANGES/9365.breaking.rst @@ -0,0 +1 @@ +Changed ``ClientRequest.connection_key`` to be a `NamedTuple` to improve client performance -- by :user:`bdraco`. 
diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 815318a07e5..ed45ffbe271 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -18,6 +18,7 @@ Iterable, List, Mapping, + NamedTuple, Optional, Tuple, Type, @@ -208,8 +209,13 @@ def _merge_ssl_params( return ssl -@attr.s(auto_attribs=True, slots=True, frozen=True, cache_hash=True) -class ConnectionKey: +_SSL_SCHEMES = frozenset(("https", "wss")) + + +# ConnectionKey is a NamedTuple because it is used as a key in a dict +# and a set in the connector. Since a NamedTuple is a tuple it uses +# the fast native tuple __hash__ and __eq__ implementation in CPython. +class ConnectionKey(NamedTuple): # the key should contain an information about used proxy / TLS # to prevent reusing wrong connections from a pool host: str @@ -358,7 +364,7 @@ def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None: writer.add_done_callback(self.__reset_writer) def is_ssl(self) -> bool: - return self.url.scheme in ("https", "wss") + return self.url.scheme in _SSL_SCHEMES @property def ssl(self) -> Union["SSLContext", bool, Fingerprint]: @@ -366,16 +372,16 @@ def ssl(self) -> Union["SSLContext", bool, Fingerprint]: @property def connection_key(self) -> ConnectionKey: - proxy_headers = self.proxy_headers - if proxy_headers: + if proxy_headers := self.proxy_headers: h: Optional[int] = hash(tuple(proxy_headers.items())) else: h = None + url = self.url return ConnectionKey( - self.host, - self.port, - self.is_ssl(), - self.ssl, + url.raw_host or "", + url.port, + url.scheme in _SSL_SCHEMES, + self._ssl, self.proxy, self.proxy_auth, h, diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 13c1a0cdc48..31d3c6df083 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -32,7 +32,6 @@ ) import aiohappyeyeballs -import attr from . 
import hdrs, helpers from .abc import AbstractResolver, ResolveResult @@ -1401,8 +1400,8 @@ async def _create_proxy_connection( # asyncio handles this perfectly proxy_req.method = hdrs.METH_CONNECT proxy_req.url = req.url - key = attr.evolve( - req.connection_key, proxy=None, proxy_auth=None, proxy_headers_hash=None + key = req.connection_key._replace( + proxy=None, proxy_auth=None, proxy_headers_hash=None ) conn = Connection(self, key, proto, self._loop) proxy_resp = await proxy_req.send(conn) From 8e395a1190a25f8c5ad3bf93556f6e8ea6928e4d Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 1 Oct 2024 15:10:48 -0500 Subject: [PATCH 0694/1511] [PR #9366/43deadb2 backport][3.10] Small speed up to update_headers (#9383) --- aiohttp/client_reqrep.py | 63 ++++++++++++++++++++++++++-------------- 1 file changed, 41 insertions(+), 22 deletions(-) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index f3f91ffd5b8..1f3ea6b8500 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -431,30 +431,49 @@ def update_headers(self, headers: Optional[LooseHeaders]) -> None: """Update request headers.""" self.headers: CIMultiDict[str] = CIMultiDict() - # add host + # Build the host header if _YARL_SUPPORTS_HOST_SUBCOMPONENT: - netloc = self.url.host_subcomponent - assert netloc is not None + host = self.url.host_subcomponent + # host_subcomponent is None when the URL is a relative URL. + # but we know we do not have a relative URL here. + assert host is not None else: - netloc = cast(str, self.url.raw_host) - if helpers.is_ipv6_address(netloc): - netloc = f"[{netloc}]" - # See https://github.com/aio-libs/aiohttp/issues/3636. 
- netloc = netloc.rstrip(".") - if self.url.port is not None and not self.url.is_default_port(): - netloc += ":" + str(self.url.port) - self.headers[hdrs.HOST] = netloc - - if headers: - if isinstance(headers, (dict, MultiDictProxy, MultiDict)): - headers = headers.items() - - for key, value in headers: # type: ignore[misc] - # A special case for Host header - if key.lower() == "host": - self.headers[key] = value - else: - self.headers.add(key, value) + host = cast(str, self.url.raw_host) + if helpers.is_ipv6_address(host): + host = f"[{host}]" + + if host[-1] == ".": + # Remove all trailing dots from the netloc as while + # they are valid FQDNs in DNS, TLS validation fails. + # See https://github.com/aio-libs/aiohttp/issues/3636. + # To avoid string manipulation we only call rstrip if + # the last character is a dot. + host = host.rstrip(".") + + # If explicit port is not None, it means that the port was + # explicitly specified in the URL. In this case we check + # if its not the default port for the scheme and add it to + # the host header. We check explicit_port first because + # yarl caches explicit_port and its likely to already be + # in the cache and non-default port URLs are far less common. 
+ explicit_port = self.url.explicit_port + if explicit_port is not None and not self.url.is_default_port(): + host = f"{host}:{explicit_port}" + + self.headers[hdrs.HOST] = host + + if not headers: + return + + if isinstance(headers, (dict, MultiDictProxy, MultiDict)): + headers = headers.items() + + for key, value in headers: # type: ignore[misc] + # A special case for Host header + if key.lower() == "host": + self.headers[key] = value + else: + self.headers.add(key, value) def update_auto_headers(self, skip_auto_headers: Optional[Iterable[str]]) -> None: if skip_auto_headers is not None: From e238777c2a1e99e5de6e57972a31488ee8f03e37 Mon Sep 17 00:00:00 2001 From: meshya <mohali4h@gmail.com> Date: Wed, 2 Oct 2024 00:49:00 +0330 Subject: [PATCH 0695/1511] [PR #9207/970c5d9 backport][3.11] add proxy and proxy_auth variables to ClientSession.__init__ (#9331) Co-authored-by: J. Nick Koston <nick@koston.org> Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Fixes #123'). --> --- CHANGES/9207.feature.rst | 1 + CONTRIBUTORS.txt | 1 + aiohttp/client.py | 12 ++++++++ docs/client_advanced.rst | 7 +++++ tests/test_client_session.py | 53 ++++++++++++++++++++++++++++++++++-- 5 files changed, 72 insertions(+), 2 deletions(-) create mode 100644 CHANGES/9207.feature.rst diff --git a/CHANGES/9207.feature.rst b/CHANGES/9207.feature.rst new file mode 100644 index 00000000000..d9ac55c8520 --- /dev/null +++ b/CHANGES/9207.feature.rst @@ -0,0 +1 @@ +Added ``proxy`` and ``proxy_auth`` parameters to ``ClientSession`` -- by :user:`meshya`. 
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 96403c2aec4..4bc8f5337fd 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -237,6 +237,7 @@ Matthieu Hauglustaine Matthieu Rigal Matvey Tingaev Meet Mangukiya +Meshya Michael Ihnatenko Michał Górny Mikhail Burshteyn diff --git a/aiohttp/client.py b/aiohttp/client.py index 6c129c3ad64..a5c57bb25ac 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -252,6 +252,8 @@ class ClientSession: "_max_line_size", "_max_field_size", "_resolve_charset", + "_default_proxy", + "_default_proxy_auth", ] ) @@ -266,6 +268,8 @@ def __init__( loop: Optional[asyncio.AbstractEventLoop] = None, cookies: Optional[LooseCookies] = None, headers: Optional[LooseHeaders] = None, + proxy: Optional[StrOrURL] = None, + proxy_auth: Optional[BasicAuth] = None, skip_auto_headers: Optional[Iterable[str]] = None, auth: Optional[BasicAuth] = None, json_serialize: JSONEncoder = json.dumps, @@ -396,6 +400,9 @@ def __init__( self._resolve_charset = fallback_charset_resolver + self._default_proxy = proxy + self._default_proxy_auth = proxy_auth + def __init_subclass__(cls: Type["ClientSession"]) -> None: warnings.warn( "Inheritance class {} from ClientSession " @@ -530,6 +537,11 @@ async def _request( for i in skip_auto_headers: skip_headers.add(istr(i)) + if proxy is None: + proxy = self._default_proxy + if proxy_auth is None: + proxy_auth = self._default_proxy_auth + if proxy is None: proxy_headers = None else: diff --git a/docs/client_advanced.rst b/docs/client_advanced.rst index 524b0877450..8b27351c882 100644 --- a/docs/client_advanced.rst +++ b/docs/client_advanced.rst @@ -567,6 +567,13 @@ Authentication credentials can be passed in proxy URL:: session.get("http://python.org", proxy="http://user:pass@some.proxy.com") +And you may set default proxy:: + + proxy_auth = aiohttp.BasicAuth('user', 'pass') + async with aiohttp.ClientSession(proxy="http://proxy.com", proxy_auth=proxy_auth) as session: + async with 
session.get("http://python.org") as resp: + print(resp.status) + Contrary to the ``requests`` library, it won't read environment variables by default. But you can do so by passing ``trust_env=True`` into :class:`aiohttp.ClientSession` diff --git a/tests/test_client_session.py b/tests/test_client_session.py index 86f3a1b6c6e..89a06466767 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -659,8 +659,57 @@ def test_proxy_str(session, params) -> None: ] -async def test_request_tracing(loop, aiohttp_client) -> None: - async def handler(request): +async def test_default_proxy(loop: asyncio.AbstractEventLoop) -> None: + proxy_url = URL("http://proxy.example.com") + proxy_auth = mock.Mock() + proxy_url2 = URL("http://proxy.example2.com") + proxy_auth2 = mock.Mock() + + class OnCall(Exception): + pass + + request_class_mock = mock.Mock(side_effect=OnCall()) + session = ClientSession( + proxy=proxy_url, proxy_auth=proxy_auth, request_class=request_class_mock + ) + + assert session._default_proxy == proxy_url, "`ClientSession._default_proxy` not set" + assert ( + session._default_proxy_auth == proxy_auth + ), "`ClientSession._default_proxy_auth` not set" + + with pytest.raises(OnCall): + await session.get( + "http://example.com", + ) + + assert request_class_mock.called, "request class not called" + assert ( + request_class_mock.call_args[1].get("proxy") == proxy_url + ), "`ClientSession._request` uses default proxy not one used in ClientSession.get" + assert ( + request_class_mock.call_args[1].get("proxy_auth") == proxy_auth + ), "`ClientSession._request` uses default proxy_auth not one used in ClientSession.get" + + request_class_mock.reset_mock() + with pytest.raises(OnCall): + await session.get( + "http://example.com", proxy=proxy_url2, proxy_auth=proxy_auth2 + ) + + assert request_class_mock.called, "request class not called" + assert ( + request_class_mock.call_args[1].get("proxy") == proxy_url2 + ), "`ClientSession._request` uses default 
proxy not one used in ClientSession.get" + assert ( + request_class_mock.call_args[1].get("proxy_auth") == proxy_auth2 + ), "`ClientSession._request` uses default proxy_auth not one used in ClientSession.get" + + await session.close() + + +async def test_request_tracing(loop: asyncio.AbstractEventLoop, aiohttp_client) -> None: + async def handler(request: web.Request) -> web.Response: return web.json_response({"ok": True}) app = web.Application() From 72b90b01f0ac05e3e72b63a8f0c4df2931544f2d Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 1 Oct 2024 21:36:04 +0000 Subject: [PATCH 0696/1511] [PR #9385/8a583ed1 backport][3.11] Fix missing text in #9368 changelog message (#9387) Co-authored-by: J. Nick Koston <nick@koston.org> Fix missing text in #9368 changelog message --- CHANGES/9368.bugfix.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGES/9368.bugfix.rst b/CHANGES/9368.bugfix.rst index fb2f90c1d4d..7a9d8c7087e 100644 --- a/CHANGES/9368.bugfix.rst +++ b/CHANGES/9368.bugfix.rst @@ -1,3 +1,3 @@ -Fixed proxy headers being used in the ``ConnectionKey`` hash when proxy was being used -- by :user:`bdraco`. +Fixed proxy headers being used in the ``ConnectionKey`` hash when a proxy was not being used -- by :user:`bdraco`. If default headers are used, they are also used for proxy headers. This could have led to creating connections that were not needed when one was already available. From 456cf5e1265951993c8bb878ba6b49cb79fdf434 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 1 Oct 2024 22:08:46 +0000 Subject: [PATCH 0697/1511] [PR #9386/803d818d backport][3.10] Small speed up to starting client requests (#9388) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/client_reqrep.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 1f3ea6b8500..3648e9cbb95 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -308,7 +308,7 @@ def __init__( if params: url = url.extend_query(params) self.original_url = url - self.url = url.with_fragment(None) + self.url = url.with_fragment(None) if url.raw_fragment else url self.method = method.upper() self.chunked = chunked self.compress = compress @@ -611,7 +611,10 @@ def update_body_from_data(self, body: Any) -> None: def update_expect_continue(self, expect: bool = False) -> None: if expect: self.headers[hdrs.EXPECT] = "100-continue" - elif self.headers.get(hdrs.EXPECT, "").lower() == "100-continue": + elif ( + hdrs.EXPECT in self.headers + and self.headers[hdrs.EXPECT].lower() == "100-continue" + ): expect = True if expect: @@ -862,7 +865,7 @@ def __init__( self.cookies = SimpleCookie() self._real_url = url - self._url = url.with_fragment(None) + self._url = url.with_fragment(None) if url.raw_fragment else url self._body: Optional[bytes] = None self._writer = writer self._continue = continue100 # None by default From 8ab799a2489bf5914960c5046072b5f1d2909685 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 1 Oct 2024 22:08:55 +0000 Subject: [PATCH 0698/1511] [PR #9386/803d818d backport][3.11] Small speed up to starting client requests (#9389) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/client_reqrep.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index ed45ffbe271..c68dfcbd1bd 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -308,7 +308,7 @@ def __init__( if params: url = url.extend_query(params) self.original_url = url - self.url = url.with_fragment(None) + self.url = url.with_fragment(None) if url.raw_fragment else url self.method = method.upper() self.chunked = chunked self.compress = compress @@ -607,7 +607,10 @@ def update_body_from_data(self, body: Any) -> None: def update_expect_continue(self, expect: bool = False) -> None: if expect: self.headers[hdrs.EXPECT] = "100-continue" - elif self.headers.get(hdrs.EXPECT, "").lower() == "100-continue": + elif ( + hdrs.EXPECT in self.headers + and self.headers[hdrs.EXPECT].lower() == "100-continue" + ): expect = True if expect: @@ -858,7 +861,7 @@ def __init__( self.cookies = SimpleCookie() self._real_url = url - self._url = url.with_fragment(None) + self._url = url.with_fragment(None) if url.raw_fragment else url self._body: Optional[bytes] = None self._writer = writer self._continue = continue100 # None by default From 6198a56e67a65a4bc68d1661a1c4cb9201d455cd Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 1 Oct 2024 18:45:14 -0500 Subject: [PATCH 0699/1511] [PR #9368/02d8dba9 backport][3.10] Avoid using the proxy headers in the ConnectionKey if no proxy is in use (#9378) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/9368.bugfix.rst | 3 +++ aiohttp/client_reqrep.py | 8 +++++++- tests/test_client_request.py | 27 +++++++++++++++++++++++++++ 3 files changed, 37 insertions(+), 1 deletion(-) create mode 100644 CHANGES/9368.bugfix.rst diff --git a/CHANGES/9368.bugfix.rst b/CHANGES/9368.bugfix.rst new file mode 100644 index 00000000000..7a9d8c7087e --- /dev/null +++ b/CHANGES/9368.bugfix.rst @@ -0,0 +1,3 @@ +Fixed proxy headers being used in the ``ConnectionKey`` hash when a proxy was not being used -- by :user:`bdraco`. + +If default headers are used, they are also used for proxy headers. This could have led to creating connections that were not needed when one was already available. diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 3648e9cbb95..6ebc696988a 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -626,10 +626,16 @@ def update_proxy( proxy_auth: Optional[BasicAuth], proxy_headers: Optional[LooseHeaders], ) -> None: + self.proxy = proxy + if proxy is None: + self.proxy_auth = None + self.proxy_headers = None + return + if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth): raise ValueError("proxy_auth must be None or BasicAuth() tuple") - self.proxy = proxy self.proxy_auth = proxy_auth + if proxy_headers is not None and not isinstance( proxy_headers, (MultiDict, MultiDictProxy) ): diff --git a/tests/test_client_request.py b/tests/test_client_request.py index 7853b541fc9..c9d61bf1fb7 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -1467,3 +1467,30 @@ def test_basicauth_from_empty_netrc( """Test that no Authorization header is sent when netrc is empty""" req = make_request("get", "http://example.com", trust_env=True) assert hdrs.AUTHORIZATION not in req.headers + + +async def test_connection_key_with_proxy() -> None: + """Verify the proxy headers are included in the ConnectionKey when a proxy is used.""" + proxy = URL("http://proxy.example.com") + req = 
ClientRequest( + "GET", + URL("http://example.com"), + proxy=proxy, + proxy_headers={"X-Proxy": "true"}, + loop=asyncio.get_running_loop(), + ) + assert req.connection_key.proxy_headers_hash is not None + await req.close() + + +async def test_connection_key_without_proxy() -> None: + """Verify the proxy headers are not included in the ConnectionKey when a proxy is used.""" + # If proxy is unspecified, proxy_headers should be ignored + req = ClientRequest( + "GET", + URL("http://example.com"), + proxy_headers={"X-Proxy": "true"}, + loop=asyncio.get_running_loop(), + ) + assert req.connection_key.proxy_headers_hash is None + await req.close() From b5e2b0ba14cdbd9ce2f56b5eda6f0ba8d9403b57 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 1 Oct 2024 20:24:49 -0500 Subject: [PATCH 0700/1511] [PR #7368/8a8913b backport][3.10] Fixed failure to try next host after single-host connection timeout (#9390) Co-authored-by: Sam Bull <git@sambull.org> Co-authored-by: pre-commit-ci[bot] Co-authored-by: J. Nick Koston <nick@koston.org> Co-authored-by: Brett Higgins <brett.higgins@gmail.com> --- CHANGES/7342.breaking.rst | 3 + CONTRIBUTORS.txt | 1 + aiohttp/client.py | 2 +- aiohttp/connector.py | 2 +- docs/client_quickstart.rst | 3 +- docs/client_reference.rst | 8 ++- tests/test_client_session.py | 4 +- tests/test_connector.py | 115 ++++++++++++++++++++++++++++++++++- 8 files changed, 130 insertions(+), 8 deletions(-) create mode 100644 CHANGES/7342.breaking.rst diff --git a/CHANGES/7342.breaking.rst b/CHANGES/7342.breaking.rst new file mode 100644 index 00000000000..1fa511c4c97 --- /dev/null +++ b/CHANGES/7342.breaking.rst @@ -0,0 +1,3 @@ +Fixed failure to try next host after single-host connection timeout -- by :user:`brettdh`. + +The default client :class:`aiohttp.ClientTimeout` params has changed to include a ``sock_connect`` timeout of 30 seconds so that this correct behavior happens by default. 
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index c318f7cc669..52cb1d59ff3 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -60,6 +60,7 @@ Bob Haddleton Boris Feld Boyi Chen Brett Cannon +Brett Higgins Brian Bouterse Brian C. Lane Brian Muller diff --git a/aiohttp/client.py b/aiohttp/client.py index 5c83099258a..7f42e4b8d4d 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -208,7 +208,7 @@ class ClientTimeout: # 5 Minute default read timeout -DEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60) +DEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60, sock_connect=30) # https://www.rfc-editor.org/rfc/rfc9110#section-9.2.2 IDEMPOTENT_METHODS = frozenset({"GET", "HEAD", "OPTIONS", "TRACE", "PUT", "DELETE"}) diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 81d49083837..1c1283190d4 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -1318,7 +1318,7 @@ async def _create_direct_connection( req=req, client_error=client_error, ) - except ClientConnectorError as exc: + except (ClientConnectorError, asyncio.TimeoutError) as exc: last_exc = exc aiohappyeyeballs.pop_addr_infos_interleave(addr_infos, self._interleave) continue diff --git a/docs/client_quickstart.rst b/docs/client_quickstart.rst index 51b2ca1ec6d..f99339cf4a6 100644 --- a/docs/client_quickstart.rst +++ b/docs/client_quickstart.rst @@ -417,7 +417,8 @@ Timeouts Timeout settings are stored in :class:`ClientTimeout` data structure. By default *aiohttp* uses a *total* 300 seconds (5min) timeout, it means that the -whole operation should finish in 5 minutes. +whole operation should finish in 5 minutes. In order to allow time for DNS fallback, +the default ``sock_connect`` timeout is 30 seconds. 
The value could be overridden by *timeout* parameter for the session (specified in seconds):: diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 7379743ae02..1b582932523 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -164,10 +164,14 @@ The client session supports the context manager protocol for self closing. overwrite it on a per-request basis. :param timeout: a :class:`ClientTimeout` settings structure, 300 seconds (5min) - total timeout by default. + total timeout, 30 seconds socket connect timeout by default. .. versionadded:: 3.3 + .. versionchanged:: 3.10.9 + + The default value for the ``sock_connect`` timeout has been changed to 30 seconds. + :param bool auto_decompress: Automatically decompress response body (``True`` by default). .. versionadded:: 2.3 @@ -897,7 +901,7 @@ certification chaining. .. versionadded:: 3.7 :param timeout: a :class:`ClientTimeout` settings structure, 300 seconds (5min) - total timeout by default. + total timeout, 30 seconds socket connect timeout by default. :param loop: :ref:`event loop<asyncio-event-loop>` used for processing HTTP requests. 
diff --git a/tests/test_client_session.py b/tests/test_client_session.py index 86f3a1b6c6e..dac05ae3eb9 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -913,7 +913,9 @@ async def test_client_session_timeout_args(loop) -> None: with pytest.warns(DeprecationWarning): session2 = ClientSession(loop=loop, read_timeout=20 * 60, conn_timeout=30 * 60) - assert session2._timeout == client.ClientTimeout(total=20 * 60, connect=30 * 60) + assert session2._timeout == client.ClientTimeout( + total=20 * 60, connect=30 * 60, sock_connect=client.DEFAULT_TIMEOUT.sock_connect + ) with pytest.raises(ValueError): ClientSession( diff --git a/tests/test_connector.py b/tests/test_connector.py index 2e43573db4e..a21dd872993 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -10,7 +10,7 @@ from collections import deque from concurrent import futures from contextlib import closing, suppress -from typing import Any, List, Literal, Optional +from typing import Any, List, Literal, Optional, Sequence, Tuple from unittest import mock import pytest @@ -25,6 +25,7 @@ connector as connector_module, web, ) +from aiohttp.client_proto import ResponseHandler from aiohttp.client_reqrep import ConnectionKey from aiohttp.connector import ( _SSL_CONTEXT_UNVERIFIED, @@ -34,6 +35,7 @@ _DNSCacheTable, ) from aiohttp.locks import EventResultOrError +from aiohttp.resolver import ResolveResult from aiohttp.test_utils import make_mocked_coro, unused_port from aiohttp.tracing import Trace @@ -970,7 +972,116 @@ async def create_connection(*args, **kwargs): established_connection.close() -async def test_tcp_connector_resolve_host(loop: Any) -> None: +@pytest.mark.parametrize( + ("request_url"), + [ + ("http://mocked.host"), + ("https://mocked.host"), + ], +) +async def test_tcp_connector_multiple_hosts_one_timeout( + loop: asyncio.AbstractEventLoop, + request_url: str, +) -> None: + conn = aiohttp.TCPConnector() + + ip1 = "192.168.1.1" + ip2 = "192.168.1.2" + ips = 
[ip1, ip2] + ips_tried = [] + ips_success = [] + timeout_error = False + connected = False + + req = ClientRequest( + "GET", + URL(request_url), + loop=loop, + ) + + async def _resolve_host( + host: str, port: int, traces: object = None + ) -> List[ResolveResult]: + return [ + { + "hostname": host, + "host": ip, + "port": port, + "family": socket.AF_INET6 if ":" in ip else socket.AF_INET, + "proto": 0, + "flags": socket.AI_NUMERICHOST, + } + for ip in ips + ] + + async def start_connection( + addr_infos: Sequence[AddrInfoType], + *, + interleave: Optional[int] = None, + **kwargs: object, + ) -> socket.socket: + nonlocal timeout_error + + addr_info = addr_infos[0] + addr_info_addr = addr_info[-1] + + ip = addr_info_addr[0] + ips_tried.append(ip) + + if ip == ip1: + timeout_error = True + raise asyncio.TimeoutError + + if ip == ip2: + mock_socket = mock.create_autospec( + socket.socket, spec_set=True, instance=True + ) + mock_socket.getpeername.return_value = addr_info_addr + return mock_socket # type: ignore[no-any-return] + + assert False + + async def create_connection( + *args: object, sock: Optional[socket.socket] = None, **kwargs: object + ) -> Tuple[ResponseHandler, ResponseHandler]: + nonlocal connected + + assert isinstance(sock, socket.socket) + addr_info = sock.getpeername() + ip = addr_info[0] + ips_success.append(ip) + connected = True + + # Close the socket since we are not actually connecting + # and we don't want to leak it. 
+ sock.close() + tr = create_mocked_conn(loop) + pr = create_mocked_conn(loop) + return tr, pr + + with mock.patch.object( + conn, "_resolve_host", autospec=True, spec_set=True, side_effect=_resolve_host + ), mock.patch.object( + conn._loop, + "create_connection", + autospec=True, + spec_set=True, + side_effect=create_connection, + ), mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", start_connection + ): + established_connection = await conn.connect(req, [], ClientTimeout()) + + assert ips_tried == ips + assert ips_success == [ip2] + + assert timeout_error + assert connected + + established_connection.close() + + +async def test_tcp_connector_resolve_host(loop: asyncio.AbstractEventLoop) -> None: conn = aiohttp.TCPConnector(use_dns_cache=True) res = await conn._resolve_host("localhost", 8080) From 71fe3bea1d7d54a9cbab54c579d8a26572557837 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 1 Oct 2024 20:25:03 -0500 Subject: [PATCH 0701/1511] [PR #7368/8a8913b backport][3.11] Fixed failure to try next host after single-host connection timeout (#9391) Co-authored-by: Sam Bull <git@sambull.org> Co-authored-by: pre-commit-ci[bot] Co-authored-by: J. Nick Koston <nick@koston.org> Co-authored-by: Brett Higgins <brett.higgins@gmail.com> --- CHANGES/7342.breaking.rst | 3 + CONTRIBUTORS.txt | 1 + aiohttp/client.py | 2 +- aiohttp/connector.py | 2 +- docs/client_quickstart.rst | 3 +- docs/client_reference.rst | 8 ++- tests/test_client_session.py | 4 +- tests/test_connector.py | 115 ++++++++++++++++++++++++++++++++++- 8 files changed, 130 insertions(+), 8 deletions(-) create mode 100644 CHANGES/7342.breaking.rst diff --git a/CHANGES/7342.breaking.rst b/CHANGES/7342.breaking.rst new file mode 100644 index 00000000000..1fa511c4c97 --- /dev/null +++ b/CHANGES/7342.breaking.rst @@ -0,0 +1,3 @@ +Fixed failure to try next host after single-host connection timeout -- by :user:`brettdh`. 
+ +The default client :class:`aiohttp.ClientTimeout` params has changed to include a ``sock_connect`` timeout of 30 seconds so that this correct behavior happens by default. diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 4bc8f5337fd..b195486e76b 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -60,6 +60,7 @@ Bob Haddleton Boris Feld Boyi Chen Brett Cannon +Brett Higgins Brian Bouterse Brian C. Lane Brian Muller diff --git a/aiohttp/client.py b/aiohttp/client.py index a5c57bb25ac..c4e4740102f 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -213,7 +213,7 @@ class ClientTimeout: # 5 Minute default read timeout -DEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60) +DEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60, sock_connect=30) # https://www.rfc-editor.org/rfc/rfc9110#section-9.2.2 IDEMPOTENT_METHODS = frozenset({"GET", "HEAD", "OPTIONS", "TRACE", "PUT", "DELETE"}) diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 31d3c6df083..5947fdc6953 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -1327,7 +1327,7 @@ async def _create_direct_connection( req=req, client_error=client_error, ) - except ClientConnectorError as exc: + except (ClientConnectorError, asyncio.TimeoutError) as exc: last_exc = exc aiohappyeyeballs.pop_addr_infos_interleave(addr_infos, self._interleave) continue diff --git a/docs/client_quickstart.rst b/docs/client_quickstart.rst index 51b2ca1ec6d..f99339cf4a6 100644 --- a/docs/client_quickstart.rst +++ b/docs/client_quickstart.rst @@ -417,7 +417,8 @@ Timeouts Timeout settings are stored in :class:`ClientTimeout` data structure. By default *aiohttp* uses a *total* 300 seconds (5min) timeout, it means that the -whole operation should finish in 5 minutes. +whole operation should finish in 5 minutes. In order to allow time for DNS fallback, +the default ``sock_connect`` timeout is 30 seconds. 
The value could be overridden by *timeout* parameter for the session (specified in seconds):: diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 05325045ee2..8495ecd9d8e 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -166,10 +166,14 @@ The client session supports the context manager protocol for self closing. overwrite it on a per-request basis. :param timeout: a :class:`ClientTimeout` settings structure, 300 seconds (5min) - total timeout by default. + total timeout, 30 seconds socket connect timeout by default. .. versionadded:: 3.3 + .. versionchanged:: 3.10.9 + + The default value for the ``sock_connect`` timeout has been changed to 30 seconds. + :param bool auto_decompress: Automatically decompress response body (``True`` by default). .. versionadded:: 2.3 @@ -898,7 +902,7 @@ certification chaining. .. versionadded:: 3.7 :param timeout: a :class:`ClientTimeout` settings structure, 300 seconds (5min) - total timeout by default. + total timeout, 30 seconds socket connect timeout by default. :param loop: :ref:`event loop<asyncio-event-loop>` used for processing HTTP requests. 
diff --git a/tests/test_client_session.py b/tests/test_client_session.py index 89a06466767..aa5824283b2 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -962,7 +962,9 @@ async def test_client_session_timeout_args(loop) -> None: with pytest.warns(DeprecationWarning): session2 = ClientSession(loop=loop, read_timeout=20 * 60, conn_timeout=30 * 60) - assert session2._timeout == client.ClientTimeout(total=20 * 60, connect=30 * 60) + assert session2._timeout == client.ClientTimeout( + total=20 * 60, connect=30 * 60, sock_connect=client.DEFAULT_TIMEOUT.sock_connect + ) with pytest.raises(ValueError): ClientSession( diff --git a/tests/test_connector.py b/tests/test_connector.py index f28545b08e9..bad4e4a2f6e 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -10,7 +10,7 @@ from collections import deque from concurrent import futures from contextlib import closing, suppress -from typing import Any, List, Literal, Optional +from typing import Any, List, Literal, Optional, Sequence, Tuple from unittest import mock import pytest @@ -20,6 +20,7 @@ import aiohttp from aiohttp import client, connector as connector_module, web from aiohttp.client import ClientRequest, ClientTimeout +from aiohttp.client_proto import ResponseHandler from aiohttp.client_reqrep import ConnectionKey from aiohttp.connector import ( _SSL_CONTEXT_UNVERIFIED, @@ -29,6 +30,7 @@ _DNSCacheTable, ) from aiohttp.locks import EventResultOrError +from aiohttp.resolver import ResolveResult from aiohttp.test_utils import make_mocked_coro, unused_port from aiohttp.tracing import Trace @@ -965,7 +967,116 @@ async def create_connection(*args, **kwargs): established_connection.close() -async def test_tcp_connector_resolve_host(loop: Any) -> None: +@pytest.mark.parametrize( + ("request_url"), + [ + ("http://mocked.host"), + ("https://mocked.host"), + ], +) +async def test_tcp_connector_multiple_hosts_one_timeout( + loop: asyncio.AbstractEventLoop, + request_url: str, +) 
-> None: + conn = aiohttp.TCPConnector() + + ip1 = "192.168.1.1" + ip2 = "192.168.1.2" + ips = [ip1, ip2] + ips_tried = [] + ips_success = [] + timeout_error = False + connected = False + + req = ClientRequest( + "GET", + URL(request_url), + loop=loop, + ) + + async def _resolve_host( + host: str, port: int, traces: object = None + ) -> List[ResolveResult]: + return [ + { + "hostname": host, + "host": ip, + "port": port, + "family": socket.AF_INET6 if ":" in ip else socket.AF_INET, + "proto": 0, + "flags": socket.AI_NUMERICHOST, + } + for ip in ips + ] + + async def start_connection( + addr_infos: Sequence[AddrInfoType], + *, + interleave: Optional[int] = None, + **kwargs: object, + ) -> socket.socket: + nonlocal timeout_error + + addr_info = addr_infos[0] + addr_info_addr = addr_info[-1] + + ip = addr_info_addr[0] + ips_tried.append(ip) + + if ip == ip1: + timeout_error = True + raise asyncio.TimeoutError + + if ip == ip2: + mock_socket = mock.create_autospec( + socket.socket, spec_set=True, instance=True + ) + mock_socket.getpeername.return_value = addr_info_addr + return mock_socket # type: ignore[no-any-return] + + assert False + + async def create_connection( + *args: object, sock: Optional[socket.socket] = None, **kwargs: object + ) -> Tuple[ResponseHandler, ResponseHandler]: + nonlocal connected + + assert isinstance(sock, socket.socket) + addr_info = sock.getpeername() + ip = addr_info[0] + ips_success.append(ip) + connected = True + + # Close the socket since we are not actually connecting + # and we don't want to leak it. 
+ sock.close() + tr = create_mocked_conn(loop) + pr = create_mocked_conn(loop) + return tr, pr + + with mock.patch.object( + conn, "_resolve_host", autospec=True, spec_set=True, side_effect=_resolve_host + ), mock.patch.object( + conn._loop, + "create_connection", + autospec=True, + spec_set=True, + side_effect=create_connection, + ), mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", start_connection + ): + established_connection = await conn.connect(req, [], ClientTimeout()) + + assert ips_tried == ips + assert ips_success == [ip2] + + assert timeout_error + assert connected + + established_connection.close() + + +async def test_tcp_connector_resolve_host(loop: asyncio.AbstractEventLoop) -> None: conn = aiohttp.TCPConnector(use_dns_cache=True) res = await conn._resolve_host("localhost", 8080) From de344228361f1275d309a13ba65f510846f89fef Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 2 Oct 2024 11:20:31 +0000 Subject: [PATCH 0702/1511] Bump rich from 13.8.1 to 13.9.1 (#9393) Bumps [rich](https://github.com/Textualize/rich) from 13.8.1 to 13.9.1. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/Textualize/rich/releases">rich's releases</a>.</em></p> <blockquote> <h2>Hotfix for dependency issue</h2> <h2>[13.9.1] - 2024-10-01</h2> <h3>Fixed</h3> <ul> <li>Fixed typing_extensions dependency</li> </ul> <h2>The so long Python 3.7 release</h2> <p>This version adds support for fine-grained information in tracebacks. In other words, it will highlight columns in tracebacks (for supported Python versions). Here's an example:</p> <!-- raw HTML omitted --> <p>This version also <strong>drops support for Python 3.7</strong>, which has long since reached its EOL. 
If you are stuck on Python3.7 for any reason, you will not be able to upgrade to this version, but nothing should break.</p> <p>See below for other changes in this release.</p> <h2>[13.9.0] - 2024-10-01</h2> <h3>Changed</h3> <ul> <li>Dropped support for Python3.7 <a href="https://redirect.github.com/Textualize/rich/pull/3509">Textualize/rich#3509</a></li> <li>Rich will display tracebacks with finely grained error locations on python 3.11+ <a href="https://redirect.github.com/Textualize/rich/pull/3486">Textualize/rich#3486</a></li> </ul> <h3>Fixed</h3> <ul> <li>Fixed issue with Segment._split_cells <a href="https://redirect.github.com/Textualize/rich/pull/3506">Textualize/rich#3506</a></li> <li>Fix auto detection of terminal size on Windows <a href="https://redirect.github.com/Textualize/rich/pull/2916">Textualize/rich#2916</a></li> <li><code>Text.style</code> now respected in Panel title/subtitle <a href="https://redirect.github.com/Textualize/rich/pull/3509">Textualize/rich#3509</a></li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/Textualize/rich/blob/master/CHANGELOG.md">rich's changelog</a>.</em></p> <blockquote> <h2>[13.9.1] - 2024-10-01</h2> <h3>Fixed</h3> <ul> <li>Fixed typing_extensions dependency</li> </ul> <h2>[13.9.0] - 2024-10-01</h2> <h3>Changed</h3> <ul> <li>Dropped support for Python3.7 <a href="https://redirect.github.com/Textualize/rich/pull/3509">Textualize/rich#3509</a></li> <li>Rich will display tracebacks with finely grained error locations on python 3.11+ <a href="https://redirect.github.com/Textualize/rich/pull/3486">Textualize/rich#3486</a></li> </ul> <h3>Fixed</h3> <ul> <li>Fixed issue with Segment._split_cells <a href="https://redirect.github.com/Textualize/rich/pull/3506">Textualize/rich#3506</a></li> <li>Fix auto detection of terminal size on Windows <a href="https://redirect.github.com/Textualize/rich/pull/2916">Textualize/rich#2916</a></li> 
<li><code>Text.style</code> now respected in Panel title/subtitle <a href="https://redirect.github.com/Textualize/rich/pull/3509">Textualize/rich#3509</a></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/Textualize/rich/commit/5ba9cb56e68dcdf2db50c214d8f1412c8a50761b"><code>5ba9cb5</code></a> Merge pull request <a href="https://redirect.github.com/Textualize/rich/issues/3513">#3513</a> from Textualize/typing-extensions-fix</li> <li><a href="https://github.com/Textualize/rich/commit/2e98b726ba555b9202a156371973c31a825e07eb"><code>2e98b72</code></a> depenency fix</li> <li><a href="https://github.com/Textualize/rich/commit/36f3ca645dd225ef5d9d836e1e3c480dc3f68ff0"><code>36f3ca6</code></a> changelog</li> <li><a href="https://github.com/Textualize/rich/commit/92abd703b34cc8cdcd581b5128ad5eab89a9c9e5"><code>92abd70</code></a> Merge pull request <a href="https://redirect.github.com/Textualize/rich/issues/3509">#3509</a> from Textualize/panel-title-style</li> <li><a href="https://github.com/Textualize/rich/commit/7db6b63222f1b7270e8119633dd3fdad0070551e"><code>7db6b63</code></a> version bump</li> <li><a href="https://github.com/Textualize/rich/commit/d14139000752526fe0b20100ad32292a287280bd"><code>d141390</code></a> skip lockfiles in codespell</li> <li><a href="https://github.com/Textualize/rich/commit/0c77fb744ea15db06e1e80b0c2ca25dbc0360641"><code>0c77fb7</code></a> typing fix</li> <li><a href="https://github.com/Textualize/rich/commit/ff52f68b78f48553d2a8bf8eb2fbff92a3145ac7"><code>ff52f68</code></a> typing tweak</li> <li><a href="https://github.com/Textualize/rich/commit/fd7b32603cf0bcb4e5b631202134b4d77ed121fc"><code>fd7b326</code></a> test fix</li> <li><a href="https://github.com/Textualize/rich/commit/3936debf0c98d2773b99ade2ded7998be9615195"><code>3936deb</code></a> changelog</li> <li>Additional commits viewable in <a href="https://github.com/Textualize/rich/compare/v13.8.1...v13.9.1">compare 
view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=rich&package-manager=pip&previous-version=13.8.1&new-version=13.9.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 6 ++++-- requirements/dev.txt | 6 ++++-- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 10 insertions(+), 6 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index eee37c04f4b..103bf70add8 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -155,7 +155,9 @@ pygments==2.18.0 # rich # sphinx pyjwt==2.9.0 - # via gidgethub + # via + # gidgethub + # pyjwt pyproject-hooks==1.2.0 # via # build @@ -191,7 +193,7 @@ requests==2.32.3 # cherry-picker # python-on-whales # sphinx -rich==13.8.1 +rich==13.9.1 # via typer setuptools-git==1.2 # via -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 49c9142aaf6..4fcb003345b 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -150,7 +150,9 @@ pygments==2.18.0 # rich # sphinx pyjwt==2.8.0 - # via gidgethub + # via + # gidgethub + # pyjwt pyproject-hooks==1.2.0 # via # build @@ -186,7 +188,7 @@ requests==2.32.3 # cherry-picker # python-on-whales # sphinx -rich==13.8.1 +rich==13.9.1 # via typer setuptools-git==1.2 # via -r 
requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 663feea390c..6f5e5efa876 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -86,7 +86,7 @@ pyyaml==6.0.2 # via pre-commit requests==2.32.3 # via python-on-whales -rich==13.8.1 +rich==13.9.1 # via typer shellingham==1.5.4 # via typer diff --git a/requirements/test.txt b/requirements/test.txt index b2f69e70728..6193ff40379 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -102,7 +102,7 @@ regex==2024.9.11 # via re-assert requests==2.32.3 # via python-on-whales -rich==13.8.1 +rich==13.9.1 # via typer setuptools-git==1.2 # via -r requirements/test.in From 10c4854989d79e50bcfb01f1a1e1e0859287efed Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 3 Oct 2024 11:05:22 +0000 Subject: [PATCH 0703/1511] Bump pypa/cibuildwheel from 2.21.1 to 2.21.2 (#9399) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.21.1 to 2.21.2. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pypa/cibuildwheel/releases">pypa/cibuildwheel's releases</a>.</em></p> <blockquote> <h2>v2.21.2</h2> <ul> <li>✨ Adds support for building 32-bit armv7l wheels on musllinux. On a Linux system with emulation set up, set <a href="https://cibuildwheel.pypa.io/en/stable/options/#archs">CIBW_ARCHS</a> to <code>armv7l</code> to try it out if you're interested! 
(<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2017">#2017</a>)</li> <li>🐛 Fix Linux Podman builds on some systems (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2016">#2016</a>)</li> <li>✨ Adds official support for running on Python 3.13 (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2026">#2026</a>)</li> <li>🛠 Update CPython 3.13 to 3.13.0rc3 (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2029">#2029</a>)</li> </ul> <p>Note: the default <a href="https://cibuildwheel.pypa.io/en/stable/options/#linux-image">manylinux image</a> is <strong>scheduled to change</strong> from <code>manylinux2014</code> to <code>manylinux_2_28</code> in a cibuildwheel release on or after <strong>6th May 2025</strong> - you can set the value now to avoid getting upgraded if you want. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1992">#1992</a>)</p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md">pypa/cibuildwheel's changelog</a>.</em></p> <blockquote> <h3>v2.21.2</h3> <p><em>2 October 2024</em></p> <ul> <li>✨ Adds support for building 32-bit armv7l wheels on musllinux. On a Linux system with emulation set up, set <a href="https://cibuildwheel.pypa.io/en/stable/options/#archs">CIBW_ARCHS</a> to <code>armv7l</code> on Linux to try it out if you're interested! 
(<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2017">#2017</a>)</li> <li>🐛 Fix Linux Podman builds on some systems (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2016">#2016</a>)</li> <li>✨ Adds official support for running on Python 3.13 (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2026">#2026</a>)</li> <li>🛠 Update CPython 3.13 to 3.13.0rc3 (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2029">#2029</a>)</li> </ul> <p>Note: the default <a href="https://cibuildwheel.pypa.io/en/stable/options/#linux-image">manylinux image</a> is <strong>scheduled to change</strong> from <code>manylinux2014</code> to <code>manylinux_2_28</code> in a cibuildwheel release on or after <strong>6th May 2025</strong> - you can set the value now to avoid getting upgraded if you want. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1992">#1992</a>)</p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/cibuildwheel/commit/f1859528322d7b29d4493ee241a167807661dfb4"><code>f185952</code></a> Bump version: v2.21.2</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/d5c6a83aeb038f0a0c0e2a17630988119f345bcf"><code>d5c6a83</code></a> ci: update job slightly (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2028">#2028</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/2e8e511106b207a45ea1a4f0fe4c50422144400a"><code>2e8e511</code></a> [Bot] Update dependencies (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2029">#2029</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/ec591cdf473735b21f20a533f57ef1ce352bfa9a"><code>ec591cd</code></a> Add note about the planned change of the manylinux default (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1992">#1992</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/ee7fa96f6d4c81347cbdab3e6afcedc882a03433"><code>ee7fa96</code></a> 
chore: remove a couple of things pylint is unhappy about (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2027">#2027</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/d625d18b91de85cefdba12b52e40ccf6adf56981"><code>d625d18</code></a> ci: add 3.13 classifier (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2026">#2026</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/895eae340a2488903e75e88d9d5bf6975d51b75f"><code>895eae3</code></a> [Bot] Update dependencies (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2025">#2025</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/9fad66fcfb699a9f2ee7fcadbe3631a6cc409cb1"><code>9fad66f</code></a> feat: add musllinux armv7l (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2017">#2017</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/dfd01af9aa15f73df4504d14086db64ac0dce436"><code>dfd01af</code></a> fix: more reliably validate Podman API version (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2016">#2016</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/735e88deba4b3758af8ecca429a076c1a9f684bb"><code>735e88d</code></a> [Bot] Update dependencies (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2022">#2022</a>)</li> <li>Additional commits viewable in <a href="https://github.com/pypa/cibuildwheel/compare/v2.21.1...v2.21.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pypa/cibuildwheel&package-manager=github_actions&previous-version=2.21.1&new-version=2.21.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 0c32a97f647..6a7f4d42ef1 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -354,7 +354,7 @@ jobs: run: | make cythonize - name: Build wheels - uses: pypa/cibuildwheel@v2.21.1 + uses: pypa/cibuildwheel@v2.21.2 env: CIBW_ARCHS_MACOS: 
x86_64 arm64 universal2 - uses: actions/upload-artifact@v3 From a82aa64b8edec5e67ea4f182869de7186729310f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 3 Oct 2024 11:15:29 +0000 Subject: [PATCH 0704/1511] Bump tomli from 2.0.1 to 2.0.2 (#9400) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [tomli](https://github.com/hukkin/tomli) from 2.0.1 to 2.0.2. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/hukkin/tomli/blob/master/CHANGELOG.md">tomli's changelog</a>.</em></p> <blockquote> <h2>2.0.2</h2> <ul> <li>Removed <ul> <li>Python 3.7 support</li> </ul> </li> <li>Improved <ul> <li>Make <code>loads</code> raise <code>TypeError</code> not <code>AttributeError</code> on bad input types that do not have the <code>replace</code> attribute. Improve error message when <code>bytes</code> is received.</li> </ul> </li> <li>Type annotations <ul> <li>Type annotate <code>load</code> input as <code>typing.IO[bytes]</code> (previously <code>typing.BinaryIO</code>).</li> </ul> </li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/hukkin/tomli/commit/3ec6775b118f2ae030d5b12c90939c7f65668e7e"><code>3ec6775</code></a> Bump version: 2.0.1 → 2.0.2</li> <li><a href="https://github.com/hukkin/tomli/commit/1dcd317c62e905655090f3e18f57c93860086904"><code>1dcd317</code></a> Add v2.0.2 changelog</li> <li><a href="https://github.com/hukkin/tomli/commit/c94ee6904bb93b84364be502fe219e849d5f9120"><code>c94ee69</code></a> Fix GitHub Actions badge</li> <li><a href="https://github.com/hukkin/tomli/commit/4e245a4bbbefed99e550e196095ea65c851cf31d"><code>4e245a4</code></a> <code>tomli.loads</code>: Raise TypeError not AttributeError. 
Improve message (<a href="https://redirect.github.com/hukkin/tomli/issues/229">#229</a>)</li> <li><a href="https://github.com/hukkin/tomli/commit/facdab0f5aacc5eb223753c42604d5de7bdaee9d"><code>facdab0</code></a> Update pre-commit. Remove docformatter</li> <li><a href="https://github.com/hukkin/tomli/commit/a6138675bcca68eea5b8abec7c2ec06d57f965a0"><code>a613867</code></a> Use sys.version_info in compatibility layer (<a href="https://redirect.github.com/hukkin/tomli/issues/220">#220</a>)</li> <li><a href="https://github.com/hukkin/tomli/commit/39eff9b9cbdad9f0dab96b84f7b518f2b87e1866"><code>39eff9b</code></a> Add support for Python 3.12, drop EOL 3.7 (<a href="https://redirect.github.com/hukkin/tomli/issues/224">#224</a>)</li> <li><a href="https://github.com/hukkin/tomli/commit/0054e60840060499c67c6c4115d5d60abaa51ca2"><code>0054e60</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/hukkin/tomli/issues/208">#208</a>)</li> <li><a href="https://github.com/hukkin/tomli/commit/1bd3345f97cba795d7e6075956815c0a52151ed0"><code>1bd3345</code></a> Test against Python 3.12-dev</li> <li><a href="https://github.com/hukkin/tomli/commit/5646e6923d895725aad7ecfa32be19861812d1fc"><code>5646e69</code></a> Type annotate as <code>IO[bytes]</code>, not <code>BinaryIO</code></li> <li>Additional commits viewable in <a href="https://github.com/hukkin/tomli/compare/2.0.1...2.0.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=tomli&package-manager=pip&previous-version=2.0.1&new-version=2.0.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 103bf70add8..77bcdf3fa4d 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ 
-226,7 +226,7 @@ sphinxcontrib-spelling==8.0.0 ; platform_system != "Windows" # via -r requirements/doc-spelling.in sphinxcontrib-towncrier==0.4.0a0 # via -r requirements/doc.in -tomli==2.0.1 +tomli==2.0.2 # via # build # cherry-picker diff --git a/requirements/dev.txt b/requirements/dev.txt index 4fcb003345b..5852032638c 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -218,7 +218,7 @@ sphinxcontrib-serializinghtml==1.1.5 # via sphinx sphinxcontrib-towncrier==0.4.0a0 # via -r requirements/doc.in -tomli==2.0.1 +tomli==2.0.2 # via # build # cherry-picker diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 030d6cadc5d..4f33e55ec4e 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -67,7 +67,7 @@ sphinxcontrib-spelling==8.0.0 ; platform_system != "Windows" # via -r requirements/doc-spelling.in sphinxcontrib-towncrier==0.4.0a0 # via -r requirements/doc.in -tomli==2.0.1 +tomli==2.0.2 # via # incremental # towncrier diff --git a/requirements/doc.txt b/requirements/doc.txt index 50746e26f8f..1aa20f4daec 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -62,7 +62,7 @@ sphinxcontrib-serializinghtml==1.1.5 # via sphinx sphinxcontrib-towncrier==0.4.0a0 # via -r requirements/doc.in -tomli==2.0.1 +tomli==2.0.2 # via # incremental # towncrier diff --git a/requirements/lint.txt b/requirements/lint.txt index 6f5e5efa876..e2e096113e2 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -94,7 +94,7 @@ six==1.16.0 # via python-dateutil slotscheck==0.19.0 # via -r requirements/lint.in -tomli==2.0.1 +tomli==2.0.2 # via # mypy # pytest diff --git a/requirements/test.txt b/requirements/test.txt index 6193ff40379..15642eb7a87 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -110,7 +110,7 @@ shellingham==1.5.4 # via typer six==1.16.0 # via python-dateutil -tomli==2.0.1 +tomli==2.0.2 # via # coverage # mypy From e1320b7e01ccbf39e3a8eb5b556b6b2a93dfb8b7 Mon Sep 17 00:00:00 
2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 3 Oct 2024 13:57:11 +0100 Subject: [PATCH 0705/1511] [PR #9398/3f43bd1b backport][3.10] Widen `trace_request_ctx` type (#9403) **This is a backport of PR #9398 as merged into master (3f43bd1b7d2b2630c2567d3620eaf886ad9e5184).** --- CHANGES/9397.bugfix.rst | 3 +++ aiohttp/client.py | 4 ++-- 2 files changed, 5 insertions(+), 2 deletions(-) create mode 100644 CHANGES/9397.bugfix.rst diff --git a/CHANGES/9397.bugfix.rst b/CHANGES/9397.bugfix.rst new file mode 100644 index 00000000000..ff5a235d07e --- /dev/null +++ b/CHANGES/9397.bugfix.rst @@ -0,0 +1,3 @@ +Widened the type of the ``trace_request_ctx`` parameter of +:meth:`ClientSession.request() <aiohttp.ClientSession.request>` and friends +-- by :user:`layday`. diff --git a/aiohttp/client.py b/aiohttp/client.py index 7f42e4b8d4d..596d94bd8bf 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -178,7 +178,7 @@ class _RequestOptions(TypedDict, total=False): ssl: Union[SSLContext, bool, Fingerprint] server_hostname: Union[str, None] proxy_headers: Union[LooseHeaders, None] - trace_request_ctx: Union[Mapping[str, str], None] + trace_request_ctx: Union[Mapping[str, Any], None] read_bufsize: Union[int, None] auto_decompress: Union[bool, None] max_line_size: Union[int, None] @@ -477,7 +477,7 @@ async def _request( ssl: Union[SSLContext, bool, Fingerprint] = True, server_hostname: Optional[str] = None, proxy_headers: Optional[LooseHeaders] = None, - trace_request_ctx: Optional[Mapping[str, str]] = None, + trace_request_ctx: Optional[Mapping[str, Any]] = None, read_bufsize: Optional[int] = None, auto_decompress: Optional[bool] = None, max_line_size: Optional[int] = None, From 4c674729afa8229dc7d14c038d0d39bf899e0888 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 3 Oct 2024 13:57:45 +0100 Subject: [PATCH 0706/1511] [PR #9398/3f43bd1b backport][3.11] Widen 
`trace_request_ctx` type (#9404) **This is a backport of PR #9398 as merged into master (3f43bd1b7d2b2630c2567d3620eaf886ad9e5184).** Co-authored-by: layday <layday@protonmail.com> --- CHANGES/9397.bugfix.rst | 3 +++ aiohttp/client.py | 4 ++-- 2 files changed, 5 insertions(+), 2 deletions(-) create mode 100644 CHANGES/9397.bugfix.rst diff --git a/CHANGES/9397.bugfix.rst b/CHANGES/9397.bugfix.rst new file mode 100644 index 00000000000..ff5a235d07e --- /dev/null +++ b/CHANGES/9397.bugfix.rst @@ -0,0 +1,3 @@ +Widened the type of the ``trace_request_ctx`` parameter of +:meth:`ClientSession.request() <aiohttp.ClientSession.request>` and friends +-- by :user:`layday`. diff --git a/aiohttp/client.py b/aiohttp/client.py index c4e4740102f..3407c010263 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -183,7 +183,7 @@ class _RequestOptions(TypedDict, total=False): ssl: Union[SSLContext, bool, Fingerprint] server_hostname: Union[str, None] proxy_headers: Union[LooseHeaders, None] - trace_request_ctx: Union[Mapping[str, str], None] + trace_request_ctx: Union[Mapping[str, Any], None] read_bufsize: Union[int, None] auto_decompress: Union[bool, None] max_line_size: Union[int, None] @@ -489,7 +489,7 @@ async def _request( ssl: Union[SSLContext, bool, Fingerprint] = True, server_hostname: Optional[str] = None, proxy_headers: Optional[LooseHeaders] = None, - trace_request_ctx: Optional[Mapping[str, str]] = None, + trace_request_ctx: Optional[Mapping[str, Any]] = None, read_bufsize: Optional[int] = None, auto_decompress: Optional[bool] = None, max_line_size: Optional[int] = None, From d93c78f81fcacf8f13c7de7ab41b4a6a147125dc Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Thu, 3 Oct 2024 17:59:19 -0500 Subject: [PATCH 0707/1511] [PR #9405/b96b01b backport][3.11] Only create the connection closed exception once (#9409) --- aiohttp/client_reqrep.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index c68dfcbd1bd..7664ab15201 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -819,6 +819,9 @@ async def _on_headers_request_sent( await trace.send_request_headers(method, url, headers) +_CONNECTION_CLOSED_EXCEPTION = ClientConnectionError("Connection closed") + + class ClientResponse(HeadersMixin): # Some of these attributes are None when created, @@ -1178,7 +1181,7 @@ def _cleanup_writer(self) -> None: def _notify_content(self) -> None: content = self.content if content and content.exception() is None: - set_exception(content, ClientConnectionError("Connection closed")) + set_exception(content, _CONNECTION_CLOSED_EXCEPTION) self._released = True async def wait_for_close(self) -> None: From 978ed7440cd29f9b46beafe31a21c5356f684caf Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Thu, 3 Oct 2024 17:59:30 -0500 Subject: [PATCH 0708/1511] [PR #9406/24b0e6f backport][3.10] Add __slots__ to timer helpers (#9411) Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/9406.misc.rst | 1 + aiohttp/helpers.py | 10 ++++++++++ pyproject.toml | 5 +++++ 3 files changed, 16 insertions(+) create mode 100644 CHANGES/9406.misc.rst diff --git a/CHANGES/9406.misc.rst b/CHANGES/9406.misc.rst new file mode 100644 index 00000000000..0a0f7e78677 --- /dev/null +++ b/CHANGES/9406.misc.rst @@ -0,0 +1 @@ +Reduced memory required for timer objects created during the client request lifecycle -- by :user:`bdraco`. 
diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index 070b04f8d82..1ea6a56db46 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -617,6 +617,8 @@ def calculate_timeout_when( class TimeoutHandle: """Timeout handle""" + __slots__ = ("_timeout", "_loop", "_ceil_threshold", "_callbacks") + def __init__( self, loop: asyncio.AbstractEventLoop, @@ -665,11 +667,17 @@ def __call__(self) -> None: class BaseTimerContext(ContextManager["BaseTimerContext"]): + + __slots__ = () + def assert_timeout(self) -> None: """Raise TimeoutError if timeout has been exceeded.""" class TimerNoop(BaseTimerContext): + + __slots__ = () + def __enter__(self) -> BaseTimerContext: return self @@ -685,6 +693,8 @@ def __exit__( class TimerContext(BaseTimerContext): """Low resolution timeout context manager""" + __slots__ = ("_loop", "_tasks", "_cancelled", "_cancelling") + def __init__(self, loop: asyncio.AbstractEventLoop) -> None: self._loop = loop self._tasks: List[asyncio.Task[Any]] = [] diff --git a/pyproject.toml b/pyproject.toml index 85d7c87eb34..33962686919 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -82,3 +82,8 @@ skip = "pp*" [tool.codespell] skip = '.git,*.pdf,*.svg,Makefile,CONTRIBUTORS.txt,venvs,_build' ignore-words-list = 'te' + +[tool.slotscheck] +# TODO(3.13): Remove aiohttp.helpers once https://github.com/python/cpython/pull/106771 +# is available in all supported cpython versions +exclude-modules = "(^aiohttp\\.helpers)" From 08ada3ec4f6b0645483ed07388d643600b70b37d Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Thu, 3 Oct 2024 17:59:34 -0500 Subject: [PATCH 0709/1511] [PR #9405/b96b01b backport][3.10] Only create the connection closed exception once (#9410) --- aiohttp/client_reqrep.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 6ebc696988a..d536c0a1ca4 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -829,6 +829,9 @@ async def _on_headers_request_sent( await trace.send_request_headers(method, url, headers) +_CONNECTION_CLOSED_EXCEPTION = ClientConnectionError("Connection closed") + + class ClientResponse(HeadersMixin): # Some of these attributes are None when created, @@ -1188,7 +1191,7 @@ def _cleanup_writer(self) -> None: def _notify_content(self) -> None: content = self.content if content and content.exception() is None: - set_exception(content, ClientConnectionError("Connection closed")) + set_exception(content, _CONNECTION_CLOSED_EXCEPTION) self._released = True async def wait_for_close(self) -> None: From 0a294bd73954b66e4970be74aa60aa9356d86fa7 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Thu, 3 Oct 2024 18:31:01 -0500 Subject: [PATCH 0710/1511] [PR #9406/24b0e6f backport][3.11] Add __slots__ to timer helpers (#9412) Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/9406.misc.rst | 1 + aiohttp/helpers.py | 10 ++++++++++ pyproject.toml | 5 +++++ 3 files changed, 16 insertions(+) create mode 100644 CHANGES/9406.misc.rst diff --git a/CHANGES/9406.misc.rst b/CHANGES/9406.misc.rst new file mode 100644 index 00000000000..0a0f7e78677 --- /dev/null +++ b/CHANGES/9406.misc.rst @@ -0,0 +1 @@ +Reduced memory required for timer objects created during the client request lifecycle -- by :user:`bdraco`. 
diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index 13a531d5cab..bc626fd939e 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -581,6 +581,8 @@ def calculate_timeout_when( class TimeoutHandle: """Timeout handle""" + __slots__ = ("_timeout", "_loop", "_ceil_threshold", "_callbacks") + def __init__( self, loop: asyncio.AbstractEventLoop, @@ -629,11 +631,17 @@ def __call__(self) -> None: class BaseTimerContext(ContextManager["BaseTimerContext"]): + + __slots__ = () + def assert_timeout(self) -> None: """Raise TimeoutError if timeout has been exceeded.""" class TimerNoop(BaseTimerContext): + + __slots__ = () + def __enter__(self) -> BaseTimerContext: return self @@ -649,6 +657,8 @@ def __exit__( class TimerContext(BaseTimerContext): """Low resolution timeout context manager""" + __slots__ = ("_loop", "_tasks", "_cancelled", "_cancelling") + def __init__(self, loop: asyncio.AbstractEventLoop) -> None: self._loop = loop self._tasks: List[asyncio.Task[Any]] = [] diff --git a/pyproject.toml b/pyproject.toml index 85d7c87eb34..33962686919 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -82,3 +82,8 @@ skip = "pp*" [tool.codespell] skip = '.git,*.pdf,*.svg,Makefile,CONTRIBUTORS.txt,venvs,_build' ignore-words-list = 'te' + +[tool.slotscheck] +# TODO(3.13): Remove aiohttp.helpers once https://github.com/python/cpython/pull/106771 +# is available in all supported cpython versions +exclude-modules = "(^aiohttp\\.helpers)" From 0dbfa7f8275642098a6b62d4054a607c49dab74d Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 3 Oct 2024 18:44:42 -0500 Subject: [PATCH 0711/1511] [PR #9407/e653b281 backport][3.11] Add __slots__ to stream classes (#9408) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/9407.misc.rst | 1 + aiohttp/streams.py | 34 ++++++++++++++++++++++++++++++- tests/test_flowcontrol_streams.py | 8 ++------ 3 files changed, 36 insertions(+), 7 deletions(-) create mode 100644 CHANGES/9407.misc.rst diff --git a/CHANGES/9407.misc.rst b/CHANGES/9407.misc.rst new file mode 100644 index 00000000000..d2a4e1e3ae3 --- /dev/null +++ b/CHANGES/9407.misc.rst @@ -0,0 +1 @@ +Reduced memory required for stream objects created during the client request lifecycle -- by :user:`bdraco`. diff --git a/aiohttp/streams.py b/aiohttp/streams.py index 1ed78ce5db0..6b805973754 100644 --- a/aiohttp/streams.py +++ b/aiohttp/streams.py @@ -39,6 +39,9 @@ class EofStream(Exception): class AsyncStreamIterator(Generic[_T]): + + __slots__ = ("read_func",) + def __init__(self, read_func: Callable[[], Awaitable[_T]]) -> None: self.read_func = read_func @@ -56,6 +59,9 @@ async def __anext__(self) -> _T: class ChunkTupleAsyncStreamIterator: + + __slots__ = ("_stream",) + def __init__(self, stream: "StreamReader") -> None: self._stream = stream @@ -70,6 +76,9 @@ async def __anext__(self) -> Tuple[bytes, bool]: class AsyncStreamReaderMixin: + + __slots__ = () + def __aiter__(self) -> AsyncStreamIterator[bytes]: return AsyncStreamIterator(self.readline) # type: ignore[attr-defined] @@ -104,7 +113,25 @@ class StreamReader(AsyncStreamReaderMixin): """ - total_bytes = 0 + __slots__ = ( + "_protocol", + "_low_water", + "_high_water", + "_loop", + "_size", + "_cursor", + "_http_chunk_splits", + "_buffer", + "_buffer_offset", + "_eof", + "_waiter", + "_eof_waiter", + "_exception", + "_timer", + "_eof_callbacks", + "_eof_counter", + "total_bytes", + ) def __init__( self, @@ -131,6 +158,8 @@ def __init__( self._exception: Optional[BaseException] = None self._timer = TimerNoop() if timer is None else timer self._eof_callbacks: List[Callable[[], None]] = [] + self._eof_counter = 0 + self.total_bytes = 0 def __repr__(self) -> str: info = 
[self.__class__.__name__] @@ -517,6 +546,9 @@ def _read_nowait(self, n: int) -> bytes: class EmptyStreamReader(StreamReader): # lgtm [py/missing-call-to-init] + + __slots__ = ("_read_eof_chunk",) + def __init__(self) -> None: self._read_eof_chunk = False diff --git a/tests/test_flowcontrol_streams.py b/tests/test_flowcontrol_streams.py index f9cce43bf4b..70e9b9b012c 100644 --- a/tests/test_flowcontrol_streams.py +++ b/tests/test_flowcontrol_streams.py @@ -12,16 +12,12 @@ def protocol(): @pytest.fixture def stream(loop, protocol): - out = streams.StreamReader(protocol, limit=1, loop=loop) - out._allow_pause = True - return out + return streams.StreamReader(protocol, limit=1, loop=loop) @pytest.fixture def buffer(loop, protocol): - out = streams.FlowControlDataQueue(protocol, limit=1, loop=loop) - out._allow_pause = True - return out + return streams.FlowControlDataQueue(protocol, limit=1, loop=loop) class TestFlowControlStreamReader: From b77943288424e37ed592984cfdaea2cc452d73c8 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Fri, 4 Oct 2024 11:28:20 -0500 Subject: [PATCH 0712/1511] Release 3.10.9 (#9415) --- CHANGES.rst | 64 +++++++++++++++++++++++++++++++++++++++ CHANGES/7342.breaking.rst | 3 -- CHANGES/9342.misc.rst | 1 - CHANGES/9368.bugfix.rst | 3 -- CHANGES/9397.bugfix.rst | 3 -- CHANGES/9406.misc.rst | 1 - aiohttp/__init__.py | 2 +- 7 files changed, 65 insertions(+), 12 deletions(-) delete mode 100644 CHANGES/7342.breaking.rst delete mode 100644 CHANGES/9342.misc.rst delete mode 100644 CHANGES/9368.bugfix.rst delete mode 100644 CHANGES/9397.bugfix.rst delete mode 100644 CHANGES/9406.misc.rst diff --git a/CHANGES.rst b/CHANGES.rst index 0cf93a5887c..71edb3798fc 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,70 @@ .. towncrier release notes start +3.10.9 (2024-10-04) +=================== + +Bug fixes +--------- + +- Fixed proxy headers being used in the ``ConnectionKey`` hash when a proxy was not being used -- by :user:`bdraco`. 
+ + If default headers are used, they are also used for proxy headers. This could have led to creating connections that were not needed when one was already available. + + + *Related issues and pull requests on GitHub:* + :issue:`9368`. + + + +- Widened the type of the ``trace_request_ctx`` parameter of + :meth:`ClientSession.request() <aiohttp.ClientSession.request>` and friends + -- by :user:`layday`. + + + *Related issues and pull requests on GitHub:* + :issue:`9397`. + + + + +Removals and backward incompatible breaking changes +--------------------------------------------------- + +- Fixed failure to try next host after single-host connection timeout -- by :user:`brettdh`. + + The default client :class:`aiohttp.ClientTimeout` params has changed to include a ``sock_connect`` timeout of 30 seconds so that this correct behavior happens by default. + + + *Related issues and pull requests on GitHub:* + :issue:`7342`. + + + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of resolving hosts with Python 3.12+ -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9342`. + + + +- Reduced memory required for timer objects created during the client request lifecycle -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9406`. + + + + +---- + + 3.10.8 (2024-09-28) =================== diff --git a/CHANGES/7342.breaking.rst b/CHANGES/7342.breaking.rst deleted file mode 100644 index 1fa511c4c97..00000000000 --- a/CHANGES/7342.breaking.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fixed failure to try next host after single-host connection timeout -- by :user:`brettdh`. - -The default client :class:`aiohttp.ClientTimeout` params has changed to include a ``sock_connect`` timeout of 30 seconds so that this correct behavior happens by default. 
diff --git a/CHANGES/9342.misc.rst b/CHANGES/9342.misc.rst deleted file mode 100644 index 379e52dfc90..00000000000 --- a/CHANGES/9342.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performance of resolving hosts with Python 3.12+ -- by :user:`bdraco`. diff --git a/CHANGES/9368.bugfix.rst b/CHANGES/9368.bugfix.rst deleted file mode 100644 index 7a9d8c7087e..00000000000 --- a/CHANGES/9368.bugfix.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fixed proxy headers being used in the ``ConnectionKey`` hash when a proxy was not being used -- by :user:`bdraco`. - -If default headers are used, they are also used for proxy headers. This could have led to creating connections that were not needed when one was already available. diff --git a/CHANGES/9397.bugfix.rst b/CHANGES/9397.bugfix.rst deleted file mode 100644 index ff5a235d07e..00000000000 --- a/CHANGES/9397.bugfix.rst +++ /dev/null @@ -1,3 +0,0 @@ -Widened the type of the ``trace_request_ctx`` parameter of -:meth:`ClientSession.request() <aiohttp.ClientSession.request>` and friends --- by :user:`layday`. diff --git a/CHANGES/9406.misc.rst b/CHANGES/9406.misc.rst deleted file mode 100644 index 0a0f7e78677..00000000000 --- a/CHANGES/9406.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Reduced memory required for timer objects created during the client request lifecycle -- by :user:`bdraco`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 491cfded254..a65b3987222 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.10.9.dev0" +__version__ = "3.10.9" from typing import TYPE_CHECKING, Tuple From 3ea557ab73934c238a699796955b648fcca2b6a7 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Fri, 4 Oct 2024 13:41:46 -0500 Subject: [PATCH 0713/1511] Increment version to 3.10.10.dev0 --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index a65b3987222..b65dd45000b 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.10.9" +__version__ = "3.10.10.dev0" from typing import TYPE_CHECKING, Tuple From cd54f116627a5f3586370a5bde54dca0b85d56b9 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Fri, 4 Oct 2024 14:15:21 -0500 Subject: [PATCH 0714/1511] [3.11] Fix duplicate cancelling check in TimerContext due to merge conflict resolution error (#9417) --- aiohttp/helpers.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index bc626fd939e..81b79792dea 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -681,12 +681,6 @@ def __enter__(self) -> BaseTimerContext: # raise asyncio.TimeoutError or let the cancellation propagate self._cancelling = task.cancelling() - if sys.version_info >= (3, 11): - # Remember if the task was already cancelling - # so when we __exit__ we can decide if we should - # raise asyncio.TimeoutError or let the cancellation propagate - self._cancelling = task.cancelling() - if self._cancelled: raise asyncio.TimeoutError from None From 29edef9521f78c2cad37aef7bdaa4bb19ab74ed9 Mon Sep 17 00:00:00 2001 From: Shubh Agarwal <shubhagarwa8888@gmail.com> Date: Mon, 7 Oct 2024 05:01:40 +0530 Subject: [PATCH 0715/1511] Disable retry_persistent_connection in tests --- CHANGES/9141.misc.rst | 2 ++ CONTRIBUTORS.txt | 1 + aiohttp/client.py | 8 ++++++-- aiohttp/test_utils.py | 1 + tests/test_test_utils.py | 22 ++++++++++++++++++++++ 5 files changed, 32 insertions(+), 2 deletions(-) create mode 100644 CHANGES/9141.misc.rst diff --git a/CHANGES/9141.misc.rst b/CHANGES/9141.misc.rst new file mode 100644 index 00000000000..d23439fa742 
--- /dev/null +++ b/CHANGES/9141.misc.rst @@ -0,0 +1,2 @@ +Disabled automatic retries of failed requests in :class:`aiohttp.test_utils.TestClient`'s client session +(which could potentially hide errors in tests) -- by :user:`ShubhAgarwal-dev`. diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index b195486e76b..e34aab90cf5 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -300,6 +300,7 @@ Sergey Skripnick Serhii Charykov Serhii Kostel Serhiy Storchaka +Shubh Agarwal Simon Kennedy Sin-Woo Bang Stanislas Plum diff --git a/aiohttp/client.py b/aiohttp/client.py index 3407c010263..6d6660e306a 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -230,7 +230,6 @@ class ClientSession: "_base_url", "_source_traceback", "_connector", - "requote_redirect_url", "_loop", "_cookie_jar", "_connector_owner", @@ -254,6 +253,8 @@ class ClientSession: "_resolve_charset", "_default_proxy", "_default_proxy_auth", + "_retry_connection", + "requote_redirect_url", ] ) @@ -402,6 +403,7 @@ def __init__( self._default_proxy = proxy self._default_proxy_auth = proxy_auth + self._retry_connection: bool = True def __init_subclass__(cls: Type["ClientSession"]) -> None: warnings.warn( @@ -593,7 +595,9 @@ async def _request( try: with timer: # https://www.rfc-editor.org/rfc/rfc9112.html#name-retrying-requests - retry_persistent_connection = method in IDEMPOTENT_METHODS + retry_persistent_connection = ( + self._retry_connection and method in IDEMPOTENT_METHODS + ) while True: url, auth_from_url = strip_auth_from_url(url) if not url.raw_host: diff --git a/aiohttp/test_utils.py b/aiohttp/test_utils.py index a85662b9fb2..be6e9b3353e 100644 --- a/aiohttp/test_utils.py +++ b/aiohttp/test_utils.py @@ -313,6 +313,7 @@ def __init__( if cookie_jar is None: cookie_jar = aiohttp.CookieJar(unsafe=True, loop=loop) self._session = ClientSession(loop=loop, cookie_jar=cookie_jar, **kwargs) + self._session._retry_connection = False self._closed = False self._responses: List[ClientResponse] = [] 
self._websockets: List[ClientWebSocketResponse] = [] diff --git a/tests/test_test_utils.py b/tests/test_test_utils.py index 70d74fb69f0..241e7e8cc64 100644 --- a/tests/test_test_utils.py +++ b/tests/test_test_utils.py @@ -11,6 +11,7 @@ import aiohttp from aiohttp import web +from aiohttp.pytest_plugin import AiohttpClient from aiohttp.test_utils import ( AioHTTPTestCase, RawTestServer as _RawTestServer, @@ -334,6 +335,27 @@ def test_noop(self) -> None: result.stdout.fnmatch_lines(["*RuntimeError*"]) +async def test_disable_retry_persistent_connection( + aiohttp_client: AiohttpClient, +) -> None: + num_requests = 0 + + async def handler(request: web.Request) -> web.Response: + nonlocal num_requests + + num_requests += 1 + request.protocol.force_close() + return web.Response() + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + with pytest.raises(aiohttp.ServerDisconnectedError): + await client.get("/") + + assert num_requests == 1 + + async def test_server_context_manager(app, loop) -> None: async with TestServer(app, loop=loop) as server: async with aiohttp.ClientSession(loop=loop) as client: From 47d25e895b8908d39dcd329d9e95992d94cca27c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Oct 2024 11:22:01 +0000 Subject: [PATCH 0716/1511] Bump actions/cache from 4.0.2 to 4.1.0 (#9425) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/cache](https://github.com/actions/cache) from 4.0.2 to 4.1.0. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/actions/cache/releases">actions/cache's releases</a>.</em></p> <blockquote> <h2>v4.1.0</h2> <h2>What's Changed</h2> <ul> <li>Fix cache-hit output when cache missed by <a href="https://github.com/fchimpan"><code>@​fchimpan</code></a> in <a href="https://redirect.github.com/actions/cache/pull/1404">actions/cache#1404</a></li> <li>Deprecate <code>save-always</code> input by <a href="https://github.com/joshmgross"><code>@​joshmgross</code></a> in <a href="https://redirect.github.com/actions/cache/pull/1452">actions/cache#1452</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/ottlinger"><code>@​ottlinger</code></a> made their first contribution in <a href="https://redirect.github.com/actions/cache/pull/1437">actions/cache#1437</a></li> <li><a href="https://github.com/Olegt0rr"><code>@​Olegt0rr</code></a> made their first contribution in <a href="https://redirect.github.com/actions/cache/pull/1377">actions/cache#1377</a></li> <li><a href="https://github.com/fchimpan"><code>@​fchimpan</code></a> made their first contribution in <a href="https://redirect.github.com/actions/cache/pull/1404">actions/cache#1404</a></li> <li><a href="https://github.com/x612skm"><code>@​x612skm</code></a> made their first contribution in <a href="https://redirect.github.com/actions/cache/pull/1434">actions/cache#1434</a></li> <li><a href="https://github.com/todgru"><code>@​todgru</code></a> made their first contribution in <a href="https://redirect.github.com/actions/cache/pull/1311">actions/cache#1311</a></li> <li><a href="https://github.com/Jcambass"><code>@​Jcambass</code></a> made their first contribution in <a href="https://redirect.github.com/actions/cache/pull/1463">actions/cache#1463</a></li> <li><a href="https://github.com/mackey0225"><code>@​mackey0225</code></a> made their first contribution in <a 
href="https://redirect.github.com/actions/cache/pull/1462">actions/cache#1462</a></li> <li><a href="https://github.com/quatquatt"><code>@​quatquatt</code></a> made their first contribution in <a href="https://redirect.github.com/actions/cache/pull/1445">actions/cache#1445</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/actions/cache/compare/v4.0.2...v4.1.0">https://github.com/actions/cache/compare/v4.0.2...v4.1.0</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/actions/cache/blob/main/RELEASES.md">actions/cache's changelog</a>.</em></p> <blockquote> <h3>4.1.0</h3> <ul> <li>Ensure <code>cache-hit</code> output is set when a cache is missed - <a href="https://redirect.github.com/actions/cache/pull/1404">#1404</a></li> <li>Deprecate <code>save-always</code> input - <a href="https://redirect.github.com/actions/cache/pull/1452">#1452</a></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/actions/cache/commit/2cdf405574d6ef1f33a1d12acccd3ae82f47b3f2"><code>2cdf405</code></a> Prepare <code>4.1.0</code> release (<a href="https://redirect.github.com/actions/cache/issues/1464">#1464</a>)</li> <li><a href="https://github.com/actions/cache/commit/a11fb02296c06498a496a240dc672c5bdf85c574"><code>a11fb02</code></a> restore action's README now references v4 instead of v3 (<a href="https://redirect.github.com/actions/cache/issues/1445">#1445</a>)</li> <li><a href="https://github.com/actions/cache/commit/cf7a75e7b9330700f4a055e401fe624394469d0f"><code>cf7a75e</code></a> Fix typo: depening -> depending (<a href="https://redirect.github.com/actions/cache/issues/1462">#1462</a>)</li> <li><a href="https://github.com/actions/cache/commit/c74ca4022c9c3055a63985d9a25f9a7cc1ffc5d2"><code>c74ca40</code></a> Deprecate <code>save-always</code> input (<a href="https://redirect.github.com/actions/cache/issues/1452">#1452</a>)</li> <li><a 
href="https://github.com/actions/cache/commit/f8a7ab490b91e20065f92e4ff28bc4b9474b83ca"><code>f8a7ab4</code></a> Merge pull request <a href="https://redirect.github.com/actions/cache/issues/1463">#1463</a> from actions/Jcambass-patch-1</li> <li><a href="https://github.com/actions/cache/commit/45b7be0774ee094895ecce56182ca96e60b360c9"><code>45b7be0</code></a> Add workflow file for publishing releases to immutable action package</li> <li><a href="https://github.com/actions/cache/commit/81382a721fc89d96eca335d0c3ba33144b2baa9d"><code>81382a7</code></a> Merge pull request <a href="https://redirect.github.com/actions/cache/issues/1311">#1311</a> from todgru/todgru/v4-documentation-update</li> <li><a href="https://github.com/actions/cache/commit/c4ee99a3bdb9b3eeaeccc57bffd49a5641203371"><code>c4ee99a</code></a> Merge branch 'main' into todgru/v4-documentation-update</li> <li><a href="https://github.com/actions/cache/commit/57b8e405f0f6efe89131ba09709ce4bc33291a51"><code>57b8e40</code></a> Clarify that the <code>restore-keys</code> input is a string in the docs (<a href="https://redirect.github.com/actions/cache/issues/1434">#1434</a>)</li> <li><a href="https://github.com/actions/cache/commit/40c3b67b2955d93d83b27ed164edd0756bc24049"><code>40c3b67</code></a> Fix cache-hit output when cache missed (<a href="https://redirect.github.com/actions/cache/issues/1404">#1404</a>)</li> <li>Additional commits viewable in <a href="https://github.com/actions/cache/compare/v4.0.2...v4.1.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/cache&package-manager=github_actions&previous-version=4.0.2&new-version=4.1.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. 
You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 6a7f4d42ef1..fb93e15a1fa 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -47,7 +47,7 @@ jobs: with: python-version: 3.11 - name: Cache PyPI - uses: 
actions/cache@v4.0.2 + uses: actions/cache@v4.1.0 with: key: pip-lint-${{ hashFiles('requirements/*.txt') }} path: ~/.cache/pip @@ -99,7 +99,7 @@ jobs: with: submodules: true - name: Cache llhttp generated files - uses: actions/cache@v4.0.2 + uses: actions/cache@v4.1.0 id: cache with: key: llhttp-${{ hashFiles('vendor/llhttp/package*.json', 'vendor/llhttp/src/**/*') }} @@ -163,7 +163,7 @@ jobs: echo "dir=$(pip cache dir)" >> "${GITHUB_OUTPUT}" shell: bash - name: Cache PyPI - uses: actions/cache@v4.0.2 + uses: actions/cache@v4.1.0 with: key: pip-ci-${{ runner.os }}-${{ matrix.pyver }}-${{ matrix.no-extensions }}-${{ hashFiles('requirements/*.txt') }} path: ${{ steps.pip-cache.outputs.dir }} From 17f8cab4b4e27e508d2ad058c817d96ef394b854 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Oct 2024 11:32:39 +0000 Subject: [PATCH 0717/1511] Bump build from 1.2.2 to 1.2.2.post1 (#9427) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [build](https://github.com/pypa/build) from 1.2.2 to 1.2.2.post1. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pypa/build/releases">build's releases</a>.</em></p> <blockquote> <h2>1.2.2.post1</h2> <!-- raw HTML omitted --> <p>This release only makes metadata (Python 3.13 classifier), docs, and test suite changes.</p> <h2>What's Changed</h2> <ul> <li>ci: add Python 3.13 by <a href="https://github.com/henryiii"><code>@​henryiii</code></a> in <a href="https://redirect.github.com/pypa/build/pull/815">pypa/build#815</a></li> <li>docs: mention conda-forge name in README by <a href="https://github.com/henryiii"><code>@​henryiii</code></a> in <a href="https://redirect.github.com/pypa/build/pull/816">pypa/build#816</a></li> <li>docs: add a missing ` in README by <a href="https://github.com/SigureMo"><code>@​SigureMo</code></a> in <a href="https://redirect.github.com/pypa/build/pull/817">pypa/build#817</a></li> <li>tests: fix under pyproject-hooks 1.2 by <a href="https://github.com/layday"><code>@​layday</code></a> in <a href="https://redirect.github.com/pypa/build/pull/824">pypa/build#824</a></li> <li>ci: add PyPI attestations by <a href="https://github.com/henryiii"><code>@​henryiii</code></a> in <a href="https://redirect.github.com/pypa/build/pull/821">pypa/build#821</a></li> <li>chore: 1.2.2.post1 by <a href="https://github.com/henryiii"><code>@​henryiii</code></a> in <a href="https://redirect.github.com/pypa/build/pull/820">pypa/build#820</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/SigureMo"><code>@​SigureMo</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/build/pull/817">pypa/build#817</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pypa/build/compare/1.2.2...1.2.2.post1">https://github.com/pypa/build/compare/1.2.2...1.2.2.post1</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a 
href="https://github.com/pypa/build/blob/main/CHANGELOG.rst">build's changelog</a>.</em></p> <blockquote> <p>+++++++++ Changelog +++++++++</p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/build/commit/2f667024a90718da24c5bdfdb264944436adf82e"><code>2f66702</code></a> chore: 1.2.2.post1 (<a href="https://redirect.github.com/pypa/build/issues/820">#820</a>)</li> <li><a href="https://github.com/pypa/build/commit/0580c6d125fd5479dae3dde36923bfd9b5220a37"><code>0580c6d</code></a> ci: add PyPI attestations (<a href="https://redirect.github.com/pypa/build/issues/821">#821</a>)</li> <li><a href="https://github.com/pypa/build/commit/e0e911cc895ca22559be2b80b04be27e33220b87"><code>e0e911c</code></a> tests: fix under pyproject-hooks 1.2</li> <li><a href="https://github.com/pypa/build/commit/a73ecbdf16d8a8abb44cbbe95e9ab5f8f2a7c9b9"><code>a73ecbd</code></a> pre-commit: bump repositories</li> <li><a href="https://github.com/pypa/build/commit/56b350439e54d164aed89f251dc39eb7536c0b71"><code>56b3504</code></a> pre-commit: bump repositories (<a href="https://redirect.github.com/pypa/build/issues/819">#819</a>)</li> <li><a href="https://github.com/pypa/build/commit/481ca546a5c9f50f255d245fb75d841f2e2e0d4b"><code>481ca54</code></a> pre-commit: bump repositories (<a href="https://redirect.github.com/pypa/build/issues/818">#818</a>)</li> <li><a href="https://github.com/pypa/build/commit/025836ae620e22d017396f7712237b8423b1f5c1"><code>025836a</code></a> docs: add a missing ` in README (<a href="https://redirect.github.com/pypa/build/issues/817">#817</a>)</li> <li><a href="https://github.com/pypa/build/commit/ae373408f0d4541e9ec8ce711b640ad2faddce4e"><code>ae37340</code></a> docs: mention conda-forge name in README (<a href="https://redirect.github.com/pypa/build/issues/816">#816</a>)</li> <li><a href="https://github.com/pypa/build/commit/f81aac058003f6df7414b19e45c848c8b7ed7c75"><code>f81aac0</code></a> ci: add Python 3.13 (<a 
href="https://redirect.github.com/pypa/build/issues/815">#815</a>)</li> <li>See full diff in <a href="https://github.com/pypa/build/compare/1.2.2...1.2.2.post1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=build&package-manager=pip&previous-version=1.2.2&new-version=1.2.2.post1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 77bcdf3fa4d..fde83a1fb34 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -30,7 +30,7 @@ babel==2.16.0 # via sphinx brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in -build==1.2.2 +build==1.2.2.post1 # via pip-tools certifi==2024.8.30 # via requests diff --git a/requirements/dev.txt b/requirements/dev.txt index 5852032638c..01db9213d9b 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -30,7 +30,7 @@ babel==2.16.0 # via sphinx brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in -build==1.2.2 +build==1.2.2.post1 # via pip-tools certifi==2024.8.30 # via requests From ae9d7680be4e60713b79ea928aab9465ae2a35b3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 8 Oct 2024 11:22:08 +0000 Subject: [PATCH 0718/1511] Bump yarl from 1.13.1 to 1.14.0 (#9431) 
MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [yarl](https://github.com/aio-libs/yarl) from 1.13.1 to 1.14.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/yarl/releases">yarl's releases</a>.</em></p> <blockquote> <h2>1.14.0</h2> <h2>Packaging updates and notes for downstreams</h2> <ul> <li> <p>Switched to using the :mod:<code>propcache <propcache.api></code> package for property caching -- by :user:<code>bdraco</code>.</p> <p>The :mod:<code>propcache <propcache.api></code> package is derived from the property caching code in :mod:<code>yarl</code> and has been broken out to avoid maintaining it for multiple projects.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/yarl/issues/1169">#1169</a>.</p> </li> </ul> <h2>Contributor-facing changes</h2> <ul> <li> <p>Started testing with Hypothesis -- by :user:<code>webknjaz</code> and :user:<code>bdraco</code>.</p> <p>Special thanks to :user:<code>Zac-HD</code> for helping us get started with this framework.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/yarl/issues/860">#860</a>.</p> </li> </ul> <h2>Miscellaneous internal changes</h2> <ul> <li> <p>Improved performance of :py:meth:<code>~yarl.URL.is_default_port</code> when no explicit port is set -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/yarl/issues/1168">#1168</a>.</p> </li> <li> <p>Improved performance of converting :class:<code>~yarl.URL</code> to a string when no explicit port is set -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/yarl/issues/1170">#1170</a>.</p> </li> <li> <p>Improved performance of the :py:meth:<code>~yarl.URL.origin</code> method -- by 
:user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/yarl/issues/1175">#1175</a>.</p> </li> <li> <p>Improved performance of encoding hosts -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/yarl/issues/1176">#1176</a>.</p> </li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/yarl/blob/master/CHANGES.rst">yarl's changelog</a>.</em></p> <blockquote> <h1>1.14.0</h1> <p><em>(2024-10-08)</em></p> <h2>Packaging updates and notes for downstreams</h2> <ul> <li> <p>Switched to using the :mod:<code>propcache <propcache.api></code> package for property caching -- by :user:<code>bdraco</code>.</p> <p>The :mod:<code>propcache <propcache.api></code> package is derived from the property caching code in :mod:<code>yarl</code> and has been broken out to avoid maintaining it for multiple projects.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1169</code>.</p> </li> </ul> <h2>Contributor-facing changes</h2> <ul> <li> <p>Started testing with Hypothesis -- by :user:<code>webknjaz</code> and :user:<code>bdraco</code>.</p> <p>Special thanks to :user:<code>Zac-HD</code> for helping us get started with this framework.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>860</code>.</p> </li> </ul> <h2>Miscellaneous internal changes</h2> <ul> <li> <p>Improved performance of :py:meth:<code>~yarl.URL.is_default_port</code> when no explicit port is set -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1168</code>.</p> </li> <li> <p>Improved performance of converting :class:<code>~yarl.URL</code> to a string when no explicit port is set -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and 
pull requests on GitHub:</em> :issue:<code>1170</code>.</p> </li> <li> <p>Improved performance of the :py:meth:<code>~yarl.URL.origin</code> method -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1175</code>.</p> </li> <li> <p>Improved performance of encoding hosts -- by :user:<code>bdraco</code>.</p> </li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/yarl/commit/5a4f23dc6198d51a8ac5e6d8e1123c27a3c8d593"><code>5a4f23d</code></a> Release 1.14.0 (<a href="https://redirect.github.com/aio-libs/yarl/issues/1180">#1180</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/4f35eda33e289d9ad8de2653e204ea89ed1e8749"><code>4f35eda</code></a> Migrate to using propcache for property caching (<a href="https://redirect.github.com/aio-libs/yarl/issues/1169">#1169</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/31c9feebf6edadef339c9c9706889d530f9b5f7e"><code>31c9fee</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/aio-libs/yarl/issues/1179">#1179</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/2f12eceb804b4889aace32ea761ddaaf291efb1a"><code>2f12ece</code></a> Bump pre-commit from 3.8.0 to 4.0.0 (<a href="https://redirect.github.com/aio-libs/yarl/issues/1178">#1178</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/ba2c6f7f7fe69a356e8da9e64d2a20ce15321b27"><code>ba2c6f7</code></a> 🧪 Integrate Hypothesis in tests (<a href="https://redirect.github.com/aio-libs/yarl/issues/860">#860</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/755ba9c235c964a1d8ba820e3b3ca68c210bdd66"><code>755ba9c</code></a> Cleanup some unnecessary internal property accesses (<a href="https://redirect.github.com/aio-libs/yarl/issues/1177">#1177</a>)</li> <li><a 
href="https://github.com/aio-libs/yarl/commit/d2892dadc1ba97626f3d89656374eef5b5598d7e"><code>d2892da</code></a> Improve performance of _encode_host (<a href="https://redirect.github.com/aio-libs/yarl/issues/1176">#1176</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/cf080332160558c948a00cae9ce43d1d24ccabb0"><code>cf08033</code></a> Improve performance of building the origin (<a href="https://redirect.github.com/aio-libs/yarl/issues/1175">#1175</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/f81ac5340a63b8c8359da995754b86b1d29045db"><code>f81ac53</code></a> Remove reference to alpine linux from readme (<a href="https://redirect.github.com/aio-libs/yarl/issues/1172">#1172</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/79d53eb2625dddcaa6d4d4a0041aa5c339aac08b"><code>79d53eb</code></a> Improve performance of converting URL to a string (<a href="https://redirect.github.com/aio-libs/yarl/issues/1170">#1170</a>)</li> <li>Additional commits viewable in <a href="https://github.com/aio-libs/yarl/compare/v1.13.1...v1.14.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=yarl&package-manager=pip&previous-version=1.13.1&new-version=1.14.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 4 +++- requirements/constraints.txt | 4 +++- requirements/dev.txt | 4 +++- requirements/runtime-deps.txt | 4 +++- requirements/test.txt | 4 +++- 5 files changed, 15 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index eeca7016a96..a1cf4b90f88 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -32,6 +32,8 @@ multidict==6.1.0 # yarl 
packaging==24.1 # via gunicorn +propcache==0.2.0 + # via yarl pycares==4.4.0 # via aiodns pycparser==2.22 @@ -40,5 +42,5 @@ typing-extensions==4.12.2 # via multidict uvloop==0.21.0b1 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in -yarl==1.13.1 +yarl==1.14.0 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index fde83a1fb34..fdc0e0d037c 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -138,6 +138,8 @@ pluggy==1.5.0 # via pytest pre-commit==3.5.0 # via -r requirements/lint.in +propcache==0.2.0 + # via yarl proxy-py==2.4.8 # via -r requirements/test.in pycares==4.4.0 @@ -274,7 +276,7 @@ wait-for-it==2.2.2 # via -r requirements/test.in wheel==0.44.0 # via pip-tools -yarl==1.13.1 +yarl==1.14.0 # via -r requirements/runtime-deps.in zipp==3.20.2 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 01db9213d9b..d8ddb53546f 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -135,6 +135,8 @@ pluggy==1.5.0 # via pytest pre-commit==3.5.0 # via -r requirements/lint.in +propcache==0.2.0 + # via yarl proxy-py==2.4.8 # via -r requirements/test.in pycares==4.4.0 @@ -266,7 +268,7 @@ wait-for-it==2.2.2 # via -r requirements/test.in wheel==0.44.0 # via pip-tools -yarl==1.13.1 +yarl==1.14.0 # via -r requirements/runtime-deps.in zipp==3.20.2 # via diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 4656f681d2f..ed709d2dbe6 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -28,11 +28,13 @@ multidict==6.1.0 # via # -r requirements/runtime-deps.in # yarl +propcache==0.2.0 + # via yarl pycares==4.4.0 # via aiodns pycparser==2.22 # via cffi typing-extensions==4.12.2 # via multidict -yarl==1.13.1 +yarl==1.14.0 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 15642eb7a87..ed3701d2989 100644 --- 
a/requirements/test.txt +++ b/requirements/test.txt @@ -71,6 +71,8 @@ packaging==24.1 # pytest pluggy==1.5.0 # via pytest +propcache==0.2.0 + # via yarl proxy-py==2.4.8 # via -r requirements/test.in pycares==4.4.0 @@ -137,5 +139,5 @@ uvloop==0.21.0b1 ; platform_system != "Windows" and implementation_name == "cpyt # via -r requirements/base.in wait-for-it==2.2.2 # via -r requirements/test.in -yarl==1.13.1 +yarl==1.14.0 # via -r requirements/runtime-deps.in From 366dc4f804c2046bf5597f96e6bebaa78dd4ef0d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 8 Oct 2024 11:47:14 +0000 Subject: [PATCH 0719/1511] Bump rich from 13.9.1 to 13.9.2 (#9432) Bumps [rich](https://github.com/Textualize/rich) from 13.9.1 to 13.9.2. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/Textualize/rich/releases">rich's releases</a>.</em></p> <blockquote> <h2>The Splitting segments Release</h2> <p>A hotfix for highlighting in the table, and a fix for <code>Segment.split_cells</code></p> <h2>[13.9.2] - 2024-10-04</h2> <h3>Fixed</h3> <ul> <li>Fixed <code>Table</code> columns not highlighting when added by <code>add_row</code> <a href="https://redirect.github.com/Textualize/rich/issues/3517">Textualize/rich#3517</a></li> <li>Fixed an issue with Segment.split_cells reported in Textual <a href="https://redirect.github.com/Textualize/textual/issues/5090">Textualize/textual#5090</a></li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/Textualize/rich/blob/master/CHANGELOG.md">rich's changelog</a>.</em></p> <blockquote> <h2>[13.9.2] - 2024-10-04</h2> <h3>Fixed</h3> <ul> <li>Fixed <code>Table</code> columns not highlighting when added by <code>add_row</code> <a href="https://redirect.github.com/Textualize/rich/issues/3517">Textualize/rich#3517</a></li> <li>Fixed an issue with Segment.split_cells reported in Textual <a 
href="https://redirect.github.com/Textualize/textual/issues/5090">Textualize/textual#5090</a></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/Textualize/rich/commit/0f2f51b872d14588de3a65968f5cdde6fb5694a3"><code>0f2f51b</code></a> Merge pull request <a href="https://redirect.github.com/Textualize/rich/issues/3521">#3521</a> from Textualize/splitcells-fix</li> <li><a href="https://github.com/Textualize/rich/commit/8b84ee998ab51967a7649b47b11412717e89db7e"><code>8b84ee9</code></a> Merge pull request <a href="https://redirect.github.com/Textualize/rich/issues/3514">#3514</a> from mdmintz/complete-the-3.7-drop</li> <li><a href="https://github.com/Textualize/rich/commit/661ae8dfacb04b317f0c50689ddd9b1d2f19e926"><code>661ae8d</code></a> version bump</li> <li><a href="https://github.com/Textualize/rich/commit/834d1785f4ff422dcc9cc94984225f1d2ff8f527"><code>834d178</code></a> tests</li> <li><a href="https://github.com/Textualize/rich/commit/babf74a7eafb0a989efd88dcfb969cfa2922a58d"><code>babf74a</code></a> more tests</li> <li><a href="https://github.com/Textualize/rich/commit/4f40703e4fa01a749b306b2161a425a314b85606"><code>4f40703</code></a> fix for split cells</li> <li><a href="https://github.com/Textualize/rich/commit/66074922edabeac684434b4d87ee994fb97b0627"><code>6607492</code></a> Merge pull request <a href="https://redirect.github.com/Textualize/rich/issues/3518">#3518</a> from TomJGooding/fix-table-highlight-columns-added-b...</li> <li><a href="https://github.com/Textualize/rich/commit/e732952eb53e62bfe936192dd84fb5c1d4d5d4ee"><code>e732952</code></a> Merge pull request <a href="https://redirect.github.com/Textualize/rich/issues/3519">#3519</a> from TomJGooding/docs-table-add-column-highlight-option</li> <li><a href="https://github.com/Textualize/rich/commit/0176befbbab4b5515e72fad924540e247fc591cb"><code>0176bef</code></a> docs(table): add column highlight option</li> <li><a 
href="https://github.com/Textualize/rich/commit/16b3830408df98db41967774e59175a1f919ce25"><code>16b3830</code></a> fix(table): highlight columns added by add_row</li> <li>Additional commits viewable in <a href="https://github.com/Textualize/rich/compare/v13.9.1...v13.9.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=rich&package-manager=pip&previous-version=13.9.1&new-version=13.9.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index fdc0e0d037c..0ca3180f80e 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -195,7 +195,7 @@ requests==2.32.3 # cherry-picker # python-on-whales # sphinx -rich==13.9.1 +rich==13.9.2 # via typer setuptools-git==1.2 # via -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index d8ddb53546f..e5037b0d6d5 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -190,7 +190,7 @@ requests==2.32.3 # cherry-picker # python-on-whales # sphinx -rich==13.9.1 +rich==13.9.2 # via typer setuptools-git==1.2 # via -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index e2e096113e2..ff3ac5de48e 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -86,7 +86,7 @@ pyyaml==6.0.2 # via pre-commit requests==2.32.3 # via python-on-whales -rich==13.9.1 +rich==13.9.2 # via typer shellingham==1.5.4 # via 
typer diff --git a/requirements/test.txt b/requirements/test.txt index ed3701d2989..5b6517cc5da 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -104,7 +104,7 @@ regex==2024.9.11 # via re-assert requests==2.32.3 # via python-on-whales -rich==13.9.1 +rich==13.9.2 # via typer setuptools-git==1.2 # via -r requirements/test.in From 0acab44bcad9e8084836048477df796f8957d2a6 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 8 Oct 2024 14:43:54 +0200 Subject: [PATCH 0720/1511] [PR #9394/e9edd04 backport][3.11] Migrate to using propcache for property caching (#9434) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Sviatoslav Sydorenko (Святослав Сидоренко) --- .gitignore | 2 -- CHANGES/9394.packaging.rst | 6 ++++ aiohttp/_helpers.pyi | 6 ---- aiohttp/_helpers.pyx | 35 --------------------- aiohttp/helpers.py | 50 ++---------------------------- docs/conf.py | 1 + requirements/base.txt | 4 ++- requirements/constraints.txt | 4 ++- requirements/dev.txt | 4 ++- requirements/runtime-deps.in | 1 + requirements/runtime-deps.txt | 4 ++- setup.cfg | 1 + setup.py | 1 - tests/test_helpers.py | 57 ----------------------------------- 14 files changed, 23 insertions(+), 153 deletions(-) create mode 100644 CHANGES/9394.packaging.rst delete mode 100644 aiohttp/_helpers.pyi delete mode 100644 aiohttp/_helpers.pyx diff --git a/.gitignore b/.gitignore index 7d38dd91998..62770ddc80a 100644 --- a/.gitignore +++ b/.gitignore @@ -37,8 +37,6 @@ aiohttp/_find_header.c aiohttp/_headers.html aiohttp/_headers.pxi -aiohttp/_helpers.c -aiohttp/_helpers.html aiohttp/_http_parser.c aiohttp/_http_parser.html aiohttp/_http_writer.c diff --git a/CHANGES/9394.packaging.rst b/CHANGES/9394.packaging.rst new file mode 100644 index 00000000000..456ac0f52c8 --- /dev/null +++ b/CHANGES/9394.packaging.rst @@ -0,0 +1,6 @@ +Switched to using the :mod:`propcache <propcache.api>` package for property caching +-- by 
:user:`bdraco`. + +The :mod:`propcache <propcache.api>` package is derived from the property caching +code in :mod:`yarl` and has been broken out to avoid maintaining it for multiple +projects. diff --git a/aiohttp/_helpers.pyi b/aiohttp/_helpers.pyi deleted file mode 100644 index 1e358937024..00000000000 --- a/aiohttp/_helpers.pyi +++ /dev/null @@ -1,6 +0,0 @@ -from typing import Any - -class reify: - def __init__(self, wrapped: Any) -> None: ... - def __get__(self, inst: Any, owner: Any) -> Any: ... - def __set__(self, inst: Any, value: Any) -> None: ... diff --git a/aiohttp/_helpers.pyx b/aiohttp/_helpers.pyx deleted file mode 100644 index 5f089225dc8..00000000000 --- a/aiohttp/_helpers.pyx +++ /dev/null @@ -1,35 +0,0 @@ - -cdef _sentinel = object() - -cdef class reify: - """Use as a class method decorator. It operates almost exactly like - the Python `@property` decorator, but it puts the result of the - method it decorates into the instance dict after the first call, - effectively replacing the function it decorates with an instance - variable. It is, in Python parlance, a data descriptor. - - """ - - cdef object wrapped - cdef object name - - def __init__(self, wrapped): - self.wrapped = wrapped - self.name = wrapped.__name__ - - @property - def __doc__(self): - return self.wrapped.__doc__ - - def __get__(self, inst, owner): - if inst is None: - return self - cdef dict cache = inst._cache - val = cache.get(self.name, _sentinel) - if val is _sentinel: - val = self.wrapped(inst) - cache[self.name] = val - return val - - def __set__(self, inst, value): - raise AttributeError("reified property is read-only") diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index 81b79792dea..895aa3916f6 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -47,6 +47,7 @@ import attr from multidict import MultiDict, MultiDictProxy, MultiMapping +from propcache.api import under_cached_property as reify from yarl import URL from . 
import hdrs @@ -57,7 +58,7 @@ else: import async_timeout -__all__ = ("BasicAuth", "ChainMapProxy", "ETag") +__all__ = ("BasicAuth", "ChainMapProxy", "ETag", "reify") IS_MACOS = platform.system() == "Darwin" IS_WINDOWS = platform.system() == "Windows" @@ -425,53 +426,6 @@ def content_disposition_header( return value -class _TSelf(Protocol, Generic[_T]): - _cache: Dict[str, _T] - - -class reify(Generic[_T]): - """Use as a class method decorator. - - It operates almost exactly like - the Python `@property` decorator, but it puts the result of the - method it decorates into the instance dict after the first call, - effectively replacing the function it decorates with an instance - variable. It is, in Python parlance, a data descriptor. - """ - - def __init__(self, wrapped: Callable[..., _T]) -> None: - self.wrapped = wrapped - self.__doc__ = wrapped.__doc__ - self.name = wrapped.__name__ - - def __get__(self, inst: _TSelf[_T], owner: Optional[Type[Any]] = None) -> _T: - try: - try: - return inst._cache[self.name] - except KeyError: - val = self.wrapped(inst) - inst._cache[self.name] = val - return val - except AttributeError: - if inst is None: - return self - raise - - def __set__(self, inst: _TSelf[_T], value: _T) -> None: - raise AttributeError("reified property is read-only") - - -reify_py = reify - -try: - from ._helpers import reify as reify_c - - if not NO_EXTENSIONS: - reify = reify_c # type: ignore[misc,assignment] -except ImportError: - pass - - def is_ip_address(host: Optional[str]) -> bool: """Check if host looks like an IP Address. 
diff --git a/docs/conf.py b/docs/conf.py index 5cbf398e6a9..f60c8ffcf8c 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -75,6 +75,7 @@ "pytest": ("http://docs.pytest.org/en/latest/", None), "python": ("http://docs.python.org/3", None), "multidict": ("https://multidict.readthedocs.io/en/stable/", None), + "propcache": ("https://propcache.aio-libs.org/en/stable", None), "yarl": ("https://yarl.readthedocs.io/en/stable/", None), "aiosignal": ("https://aiosignal.readthedocs.io/en/stable/", None), "aiohttpjinja2": ("https://aiohttp-jinja2.readthedocs.io/en/stable/", None), diff --git a/requirements/base.txt b/requirements/base.txt index a1cf4b90f88..734e543c5f9 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -33,7 +33,9 @@ multidict==6.1.0 packaging==24.1 # via gunicorn propcache==0.2.0 - # via yarl + # via + # -r requirements/runtime-deps.in + # yarl pycares==4.4.0 # via aiodns pycparser==2.22 diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 0ca3180f80e..554c3af5118 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -139,7 +139,9 @@ pluggy==1.5.0 pre-commit==3.5.0 # via -r requirements/lint.in propcache==0.2.0 - # via yarl + # via + # -r requirements/runtime-deps.in + # yarl proxy-py==2.4.8 # via -r requirements/test.in pycares==4.4.0 diff --git a/requirements/dev.txt b/requirements/dev.txt index e5037b0d6d5..26924314e4e 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -136,7 +136,9 @@ pluggy==1.5.0 pre-commit==3.5.0 # via -r requirements/lint.in propcache==0.2.0 - # via yarl + # via + # -r requirements/runtime-deps.in + # yarl proxy-py==2.4.8 # via -r requirements/test.in pycares==4.4.0 diff --git a/requirements/runtime-deps.in b/requirements/runtime-deps.in index 4b8ab98dd08..7af9fb50246 100644 --- a/requirements/runtime-deps.in +++ b/requirements/runtime-deps.in @@ -9,4 +9,5 @@ Brotli; platform_python_implementation == 'CPython' brotlicffi; platform_python_implementation != 
'CPython' frozenlist >= 1.1.1 multidict >=4.5, < 7.0 +propcache >= 0.2.0 yarl >= 1.13.0, < 2.0 diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index ed709d2dbe6..13d1cfac572 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -29,7 +29,9 @@ multidict==6.1.0 # -r requirements/runtime-deps.in # yarl propcache==0.2.0 - # via yarl + # via + # -r requirements/runtime-deps.in + # yarl pycares==4.4.0 # via aiodns pycparser==2.22 diff --git a/setup.cfg b/setup.cfg index 781fc4ca40f..a26d472b22a 100644 --- a/setup.cfg +++ b/setup.cfg @@ -55,6 +55,7 @@ install_requires = attrs >= 17.3.0 frozenlist >= 1.1.1 multidict >=4.5, < 7.0 + propcache >= 0.2.0 yarl >= 1.13.0, < 2.0 [options.exclude_package_data] diff --git a/setup.py b/setup.py index 808f539d259..3a90ae2e20a 100644 --- a/setup.py +++ b/setup.py @@ -40,7 +40,6 @@ define_macros=[("LLHTTP_STRICT_MODE", 0)], include_dirs=["vendor/llhttp/build"], ), - Extension("aiohttp._helpers", ["aiohttp/_helpers.c"]), Extension("aiohttp._http_writer", ["aiohttp/_http_writer.c"]), ] diff --git a/tests/test_helpers.py b/tests/test_helpers.py index 6f45ceca0b9..1aba1aae3bd 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -2,7 +2,6 @@ import base64 import datetime import gc -import platform import sys import weakref from math import ceil, modf @@ -22,9 +21,6 @@ should_remove_content_length, ) -IS_PYPY = platform.python_implementation() == "PyPy" - - # ------------------- parse_mimetype ---------------------------------- @@ -208,59 +204,6 @@ def test_basic_auth_from_not_url() -> None: helpers.BasicAuth.from_url("http://user:pass@example.com") -class ReifyMixin: - reify = NotImplemented - - def test_reify(self) -> None: - class A: - def __init__(self): - self._cache = {} - - @self.reify - def prop(self): - return 1 - - a = A() - assert 1 == a.prop - - def test_reify_class(self) -> None: - class A: - def __init__(self): - self._cache = {} - - @self.reify - def 
prop(self): - """Docstring.""" - return 1 - - assert isinstance(A.prop, self.reify) - assert "Docstring." == A.prop.__doc__ - - def test_reify_assignment(self) -> None: - class A: - def __init__(self): - self._cache = {} - - @self.reify - def prop(self): - return 1 - - a = A() - - with pytest.raises(AttributeError): - a.prop = 123 - - -class TestPyReify(ReifyMixin): - reify = helpers.reify_py - - -if not helpers.NO_EXTENSIONS and not IS_PYPY and hasattr(helpers, "reify_c"): - - class TestCReify(ReifyMixin): - reify = helpers.reify_c - - # ----------------------------------- is_ip_address() ---------------------- From a580f6a811d929b0ca87199e0a8e35d2f7c3e752 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 9 Oct 2024 10:46:54 +0000 Subject: [PATCH 0721/1511] Bump actions/cache from 4.1.0 to 4.1.1 (#9439) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/cache](https://github.com/actions/cache) from 4.1.0 to 4.1.1. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/actions/cache/releases">actions/cache's releases</a>.</em></p> <blockquote> <h2>v4.1.1</h2> <h2>What's Changed</h2> <ul> <li>Restore original behavior of <code>cache-hit</code> output by <a href="https://github.com/joshmgross"><code>@​joshmgross</code></a> in <a href="https://redirect.github.com/actions/cache/pull/1467">actions/cache#1467</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/actions/cache/compare/v4.1.0...v4.1.1">https://github.com/actions/cache/compare/v4.1.0...v4.1.1</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/actions/cache/blob/main/RELEASES.md">actions/cache's changelog</a>.</em></p> <blockquote> <h3>4.1.1</h3> <ul> <li>Restore original behavior of <code>cache-hit</code> output - <a href="https://redirect.github.com/actions/cache/pull/1467">#1467</a></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/actions/cache/commit/3624ceb22c1c5a301c8db4169662070a689d9ea8"><code>3624ceb</code></a> Restore original behavior of <code>cache-hit</code> output (<a href="https://redirect.github.com/actions/cache/issues/1467">#1467</a>)</li> <li>See full diff in <a href="https://github.com/actions/cache/compare/v4.1.0...v4.1.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/cache&package-manager=github_actions&previous-version=4.1.0&new-version=4.1.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index fb93e15a1fa..db8bd718763 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -47,7 +47,7 @@ jobs: with: python-version: 3.11 - name: Cache PyPI - uses: actions/cache@v4.1.0 + uses: actions/cache@v4.1.1 with: key: pip-lint-${{ 
hashFiles('requirements/*.txt') }} path: ~/.cache/pip @@ -99,7 +99,7 @@ jobs: with: submodules: true - name: Cache llhttp generated files - uses: actions/cache@v4.1.0 + uses: actions/cache@v4.1.1 id: cache with: key: llhttp-${{ hashFiles('vendor/llhttp/package*.json', 'vendor/llhttp/src/**/*') }} @@ -163,7 +163,7 @@ jobs: echo "dir=$(pip cache dir)" >> "${GITHUB_OUTPUT}" shell: bash - name: Cache PyPI - uses: actions/cache@v4.1.0 + uses: actions/cache@v4.1.1 with: key: pip-ci-${{ runner.os }}-${{ matrix.pyver }}-${{ matrix.no-extensions }}-${{ hashFiles('requirements/*.txt') }} path: ${{ steps.pip-cache.outputs.dir }} From 1fdaf71097d2885f8614a9595b14a7fb55ad604e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 9 Oct 2024 11:06:17 +0000 Subject: [PATCH 0722/1511] Bump charset-normalizer from 3.3.2 to 3.4.0 (#9441) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [charset-normalizer](https://github.com/Ousret/charset_normalizer) from 3.3.2 to 3.4.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/Ousret/charset_normalizer/releases">charset-normalizer's releases</a>.</em></p> <blockquote> <h2>Version 3.4.0</h2> <h2>🚀 charset-normalizer is raising awareness around HTTP/2, and HTTP/3!</h2> <p>Did you know that Internet Explorer 11 shipped with an optional HTTP/2 support back in 2013? also libcurl did ship it in 2014[...] All of this while our community is still struggling to make a firm advancement in HTTP clients. Now, many of you use Requests as the defacto http client, now, and for many years now, Requests has been frozen. Being left in a vegetative state and not evolving, this blocked millions of developers from using more advanced features.</p> <p>We promptly invite Python developers to look at the drop-in replacement for Requests, <a href="https://github.com/jawah/niquests">namely Niquests</a>. 
It leverage charset-normalizer in a better way! Check it out, you will be positively surprised! Don't wait another decade.</p> <p>We are thankful to <a href="https://github.com/microsoft"><code>@​microsoft</code></a> and involved parties for funding our work through the Microsoft FOSS Fund program.</p> <h2><a href="https://github.com/Ousret/charset_normalizer/compare/3.3.2...3.4.0">3.4.0</a> (2024-10-08)</h2> <h3>Added</h3> <ul> <li>Argument <code>--no-preemptive</code> in the CLI to prevent the detector to search for hints.</li> <li>Support for Python 3.13 (<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/512">#512</a>)</li> </ul> <h3>Fixed</h3> <ul> <li>Relax the TypeError exception thrown when trying to compare a CharsetMatch with anything else than a CharsetMatch.</li> <li>Improved the general reliability of the detector based on user feedbacks. (<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/520">#520</a>) (<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/509">#509</a>) (<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/498">#498</a>) (<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/407">#407</a>) (<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/537">#537</a>)</li> <li>Declared charset in content (preemptive detection) not changed when converting to utf-8 bytes. 
(<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/381">#381</a>)</li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/jawah/charset_normalizer/blob/master/CHANGELOG.md">charset-normalizer's changelog</a>.</em></p> <blockquote> <h2><a href="https://github.com/Ousret/charset_normalizer/compare/3.3.2...3.4.0">3.4.0</a> (2024-10-08)</h2> <h3>Added</h3> <ul> <li>Argument <code>--no-preemptive</code> in the CLI to prevent the detector to search for hints.</li> <li>Support for Python 3.13 (<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/512">#512</a>)</li> </ul> <h3>Fixed</h3> <ul> <li>Relax the TypeError exception thrown when trying to compare a CharsetMatch with anything else than a CharsetMatch.</li> <li>Improved the general reliability of the detector based on user feedbacks. (<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/520">#520</a>) (<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/509">#509</a>) (<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/498">#498</a>) (<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/407">#407</a>) (<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/537">#537</a>)</li> <li>Declared charset in content (preemptive detection) not changed when converting to utf-8 bytes. 
(<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/381">#381</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/jawah/charset_normalizer/commit/f3118e3b5132b34e4a888d8d6f7199aee4e85274"><code>f3118e3</code></a> :wrench: change download/upload artifact version to last working version</li> <li><a href="https://github.com/jawah/charset_normalizer/commit/33e67e8303f65dfc70fc89a0521b64cd4be8e527"><code>33e67e8</code></a> :wrench: set compile-generator in generator_generic_slsa3 action</li> <li><a href="https://github.com/jawah/charset_normalizer/commit/73dd24ca0ca28e23b254af4f5aa7767b8993eacc"><code>73dd24c</code></a> :wrench: add explicit build deps to setuptools</li> <li><a href="https://github.com/jawah/charset_normalizer/commit/78f1e9ba677c2c8887e7658b7566c6a31ce55648"><code>78f1e9b</code></a> :wrench: attempt to fix cd.yml *3</li> <li><a href="https://github.com/jawah/charset_normalizer/commit/56ae70201bba3116bb2f09ec380fb70c07bf4db1"><code>56ae702</code></a> :wrench: attempt to fix cd.yml *2</li> <li><a href="https://github.com/jawah/charset_normalizer/commit/9720055dde3c146db76c7195f3c474df6212191e"><code>9720055</code></a> :wrench: attempt to fix cd.yml (macos part)</li> <li><a href="https://github.com/jawah/charset_normalizer/commit/1e10d06e26bacaab3b513e601037889d00ae54ad"><code>1e10d06</code></a> Update CHANGELOG.md</li> <li><a href="https://github.com/jawah/charset_normalizer/commit/36c103a599dd8da8e68762d32fd87a264de3ec47"><code>36c103a</code></a> :bookmark: Release 3.4.0 (<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/545">#545</a>)</li> <li><a href="https://github.com/jawah/charset_normalizer/commit/7658dfcfa537f9cdce873fb94b545859ab2f1d5e"><code>7658dfc</code></a> :arrow_up: Bump github/codeql-action from 3.26.11 to 3.26.12 (<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/544">#544</a>)</li> <li><a 
href="https://github.com/jawah/charset_normalizer/commit/ca2535d8cc575fc7ecc144d6ab253216fcbc36fc"><code>ca2535d</code></a> :arrow_up: Bump github/codeql-action from 3.26.9 to 3.26.11 (<a href="https://redirect.github.com/Ousret/charset_normalizer/issues/542">#542</a>)</li> <li>Additional commits viewable in <a href="https://github.com/Ousret/charset_normalizer/compare/3.3.2...3.4.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=charset-normalizer&package-manager=pip&previous-version=3.3.2&new-version=3.4.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 6 ++++-- 6 files changed, 9 insertions(+), 7 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 554c3af5118..e541a4c8459 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -40,7 +40,7 @@ cffi==1.17.1 # pycares cfgv==3.4.0 # via pre-commit -charset-normalizer==3.3.2 +charset-normalizer==3.4.0 # via requests cherry-picker==2.2.0 # via -r requirements/dev.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 26924314e4e..4ec9895d0f0 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -40,7 +40,7 @@ cffi==1.17.1 # pycares cfgv==3.4.0 # via pre-commit -charset-normalizer==3.3.2 +charset-normalizer==3.4.0 # via requests cherry-picker==2.2.0 # via -r requirements/dev.in diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 4f33e55ec4e..fe962ac233c 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -12,7 +12,7 @@ 
babel==2.16.0 # via sphinx certifi==2024.8.30 # via requests -charset-normalizer==3.3.2 +charset-normalizer==3.4.0 # via requests click==8.1.7 # via towncrier diff --git a/requirements/doc.txt b/requirements/doc.txt index 1aa20f4daec..63e05289b02 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -12,7 +12,7 @@ babel==2.16.0 # via sphinx certifi==2024.8.30 # via requests -charset-normalizer==3.3.2 +charset-normalizer==3.4.0 # via requests click==8.1.7 # via towncrier diff --git a/requirements/lint.txt b/requirements/lint.txt index ff3ac5de48e..76bb506ca88 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -20,7 +20,7 @@ cffi==1.17.1 # pycares cfgv==3.4.0 # via pre-commit -charset-normalizer==3.3.2 +charset-normalizer==3.4.0 # via requests click==8.1.7 # via diff --git a/requirements/test.txt b/requirements/test.txt index 5b6517cc5da..934c105b910 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -24,7 +24,7 @@ cffi==1.17.1 # via # cryptography # pycares -charset-normalizer==3.3.2 +charset-normalizer==3.4.0 # via requests click==8.1.7 # via @@ -72,7 +72,9 @@ packaging==24.1 pluggy==1.5.0 # via pytest propcache==0.2.0 - # via yarl + # via + # -r requirements/runtime-deps.in + # yarl proxy-py==2.4.8 # via -r requirements/test.in pycares==4.4.0 From 497c35e3e5d814f84ef3807e8484dcac2c56fc6a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 10 Oct 2024 11:39:09 +0000 Subject: [PATCH 0723/1511] Bump distlib from 0.3.8 to 0.3.9 (#9446) Bumps [distlib](https://github.com/pypa/distlib) from 0.3.8 to 0.3.9. 
<details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/distlib/blob/master/CHANGES.rst">distlib's changelog</a>.</em></p> <blockquote> <p>0.3.9</p> <pre><code> Released: 2024-10-09 <ul> <li> <p>scripts</p> <ul> <li> <p>Merge <a href="https://redirect.github.com/pypa/distlib/issues/215">#215</a>: preload script wrappers on Windows to assist with a pip issue (thanks, Paul Moore).</p> </li> <li> <p>Fix <a href="https://redirect.github.com/pypa/distlib/issues/220">#220</a>: Remove duplicated newline in shebang of windows launcher (thanks. A2uria).</p> </li> <li> <p>Fix <a href="https://redirect.github.com/pypa/distlib/issues/230">#230</a>: Add handling for cross-compilation environments (thanks, Russell Keith-Magee).</p> </li> </ul> </li> <li> <p>util</p> <ul> <li>Fix <a href="https://redirect.github.com/pypa/distlib/issues/224">#224</a>: Do not use the absolute path to cache wheel extensions (thanks, Stewart Miles).</li> </ul> </li> <li> <p>wheel</p> <ul> <li> <p>Fix <a href="https://redirect.github.com/pypa/distlib/issues/222">#222</a>: Support mounting wheels that use extensions without an EXTENSIONS file (thanks, Stewart Miles).</p> </li> <li> <p>Fix <a href="https://redirect.github.com/pypa/distlib/issues/225">#225</a>: Add support for wheel compatibility with the limited API (thanks, Stewart Miles). 
</code></pre></p> </li> </ul> </li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/distlib/commit/fab584e358203ef0f4a22b9a00e3ed0e309665a6"><code>fab584e</code></a> Changes for 0.3.9.</li> <li><a href="https://github.com/pypa/distlib/commit/148fa95d7d9f102dcfe64a3b40f0acc3e855d7e7"><code>148fa95</code></a> Remove duplicated newline in shebang of windows launcher (<a href="https://redirect.github.com/pypa/distlib/issues/221">#221</a>)</li> <li><a href="https://github.com/pypa/distlib/commit/d1f40d6325ec1751104ed290d8c3d4bcf426940a"><code>d1f40d6</code></a> Fix whitespace at end of file.</li> <li><a href="https://github.com/pypa/distlib/commit/2c4d2fa5747628a320369ed97b0b5f8033c56dbe"><code>2c4d2fa</code></a> Add support for wheel compatibility with the limited API. (<a href="https://redirect.github.com/pypa/distlib/issues/228">#228</a>)</li> <li><a href="https://github.com/pypa/distlib/commit/fac84c7eda5b28af36c3139bb565659a9ed80aec"><code>fac84c7</code></a> Do not use the absolute path to cache wheel extensions. (<a href="https://redirect.github.com/pypa/distlib/issues/226">#226</a>)</li> <li><a href="https://github.com/pypa/distlib/commit/6fbadf153dbe0fe217915e9293b08e400a3c91d8"><code>6fbadf1</code></a> Add wheel with a C extension to test mounting (<a href="https://redirect.github.com/pypa/distlib/issues/229">#229</a>)</li> <li><a href="https://github.com/pypa/distlib/commit/a16423f447c76440ecaaeff4b52acb1c62e16541"><code>a16423f</code></a> Add handling for cross-compilation environments. 
(<a href="https://redirect.github.com/pypa/distlib/issues/231">#231</a>)</li> <li><a href="https://github.com/pypa/distlib/commit/cbd4ae13495383d146e59ede0ce079253effcf40"><code>cbd4ae1</code></a> Upgrade codecov workflow to v4 (<a href="https://redirect.github.com/pypa/distlib/issues/232">#232</a>)</li> <li><a href="https://github.com/pypa/distlib/commit/52350139e15046ae46d327807a7a528bdc8dacef"><code>5235013</code></a> Ignore .vscode project files. (<a href="https://redirect.github.com/pypa/distlib/issues/223">#223</a>)</li> <li><a href="https://github.com/pypa/distlib/commit/888c48b56886b03398646be1217508830427bd75"><code>888c48b</code></a> Preload script wrappers at import time (<a href="https://redirect.github.com/pypa/distlib/issues/215">#215</a>)</li> <li>Additional commits viewable in <a href="https://github.com/pypa/distlib/compare/0.3.8...0.3.9">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=distlib&package-manager=pip&previous-version=0.3.8&new-version=0.3.9)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index e541a4c8459..9f1281cd31a 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -62,7 +62,7 @@ cryptography==43.0.1 # trustme cython==3.0.11 # via -r requirements/cython.in 
-distlib==0.3.8 +distlib==0.3.9 # via virtualenv docutils==0.20.1 # via sphinx diff --git a/requirements/dev.txt b/requirements/dev.txt index 4ec9895d0f0..68a12a54400 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -60,7 +60,7 @@ cryptography==43.0.1 # via # pyjwt # trustme -distlib==0.3.8 +distlib==0.3.9 # via virtualenv docutils==0.20.1 # via sphinx diff --git a/requirements/lint.txt b/requirements/lint.txt index 76bb506ca88..f073c29cdfe 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -28,7 +28,7 @@ click==8.1.7 # typer cryptography==43.0.1 # via trustme -distlib==0.3.8 +distlib==0.3.9 # via virtualenv exceptiongroup==1.2.2 # via pytest From 2fdda382ec7eeb086bb2f5916b07c8cab38cde48 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 10 Oct 2024 07:35:14 -0500 Subject: [PATCH 0724/1511] Bump pypa/cibuildwheel from 2.21.2 to 2.21.3 (#9440) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index db8bd718763..2ee3ee7d8bf 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -354,7 +354,7 @@ jobs: run: | make cythonize - name: Build wheels - uses: pypa/cibuildwheel@v2.21.2 + uses: pypa/cibuildwheel@v2.21.3 env: CIBW_ARCHS_MACOS: x86_64 arm64 universal2 - uses: actions/upload-artifact@v3 From bc9e5d6676906e4bef10fcb499d5f233ed2c394e Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 10 Oct 2024 13:44:02 +0000 Subject: [PATCH 0725/1511] [PR #9448/93e87c2e backport][3.10] Improve performance of fetching the content-length for web responses (#9449) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/helpers.py | 6 +----- aiohttp/web_response.py | 2 +- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index 1ea6a56db46..6ee70786cfb 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -814,11 +814,7 @@ def charset(self) -> Optional[str]: def content_length(self) -> Optional[int]: """The value of Content-Length HTTP header.""" content_length = self._headers.get(hdrs.CONTENT_LENGTH) - - if content_length is not None: - return int(content_length) - else: - return None + return None if content_length is None else int(content_length) def set_result(fut: "asyncio.Future[_T]", result: _T) -> None: diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 4d5095a4fea..2036a8d088b 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -709,7 +709,7 @@ def content_length(self) -> Optional[int]: return None if hdrs.CONTENT_LENGTH in self._headers: - return super().content_length + return int(self._headers[hdrs.CONTENT_LENGTH]) if self._compressed_body is not None: # Return length of the compressed body From 7d4b03c3501637cac6c40d4580d721bb52da0383 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 10 Oct 2024 14:27:58 +0000 Subject: [PATCH 0726/1511] [PR #9448/93e87c2e backport][3.11] Improve performance of fetching the content-length for web responses (#9450) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/helpers.py | 6 +----- aiohttp/web_response.py | 2 +- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index 895aa3916f6..0435123a1a1 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -730,11 +730,7 @@ def charset(self) -> Optional[str]: def content_length(self) -> Optional[int]: """The value of Content-Length HTTP header.""" content_length = self._headers.get(hdrs.CONTENT_LENGTH) - - if content_length is not None: - return int(content_length) - else: - return None + return None if content_length is None else int(content_length) def set_result(fut: "asyncio.Future[_T]", result: _T) -> None: diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 5c0a3be1d21..fda2137be67 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -726,7 +726,7 @@ def content_length(self) -> Optional[int]: return None if hdrs.CONTENT_LENGTH in self._headers: - return super().content_length + return int(self._headers[hdrs.CONTENT_LENGTH]) if self._compressed_body is not None: # Return length of the compressed body From ba9b33e63a18841cdb7cf5f912d99ccbea91dee4 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Thu, 10 Oct 2024 09:35:22 -0500 Subject: [PATCH 0727/1511] [PR #9451/216e082 backport][3.10] Fix AsyncResolver swallowing the error message (#9452) --- CHANGES/9451.bugfix.rst | 1 + aiohttp/resolver.py | 4 ++-- tests/test_resolver.py | 30 +++++++++++++++++++++++++++++- 3 files changed, 32 insertions(+), 3 deletions(-) create mode 100644 CHANGES/9451.bugfix.rst diff --git a/CHANGES/9451.bugfix.rst b/CHANGES/9451.bugfix.rst new file mode 100644 index 00000000000..2adcbc66273 --- /dev/null +++ b/CHANGES/9451.bugfix.rst @@ -0,0 +1 @@ +Fixed error messages from :py:class:`~aiohttp.resolver.AsyncResolver` being swallowed -- by :user:`bdraco`. 
diff --git a/aiohttp/resolver.py b/aiohttp/resolver.py index 6283ec2b8d5..385ae21abf5 100644 --- a/aiohttp/resolver.py +++ b/aiohttp/resolver.py @@ -111,7 +111,7 @@ async def resolve( ) except aiodns.error.DNSError as exc: msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed" - raise OSError(msg) from exc + raise OSError(None, msg) from exc hosts: List[ResolveResult] = [] for node in resp.nodes: address: Union[Tuple[bytes, int], Tuple[bytes, int, int, int]] = node.addr @@ -145,7 +145,7 @@ async def resolve( ) if not hosts: - raise OSError("DNS lookup failed") + raise OSError(None, "DNS lookup failed") return hosts diff --git a/tests/test_resolver.py b/tests/test_resolver.py index 8b2ea620037..6322bcfca64 100644 --- a/tests/test_resolver.py +++ b/tests/test_resolver.py @@ -339,7 +339,35 @@ async def test_async_resolver_query_ipv6_positive_lookup(loop) -> None: mock().query.assert_called_with("www.python.org", "AAAA") -async def test_async_resolver_aiodns_not_present(loop, monkeypatch) -> None: +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +async def test_async_resolver_error_messages_passed( + loop: asyncio.AbstractEventLoop, +) -> None: + """Ensure error messages are passed through from aiodns.""" + with patch("aiodns.DNSResolver", autospec=True, spec_set=True) as mock: + mock().getaddrinfo.side_effect = aiodns.error.DNSError(1, "Test error message") + resolver = AsyncResolver() + with pytest.raises(OSError, match="Test error message") as excinfo: + await resolver.resolve("x.org") + + assert excinfo.value.strerror == "Test error message" + + +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +async def test_async_resolver_error_messages_passed_no_hosts( + loop: asyncio.AbstractEventLoop, +) -> None: + """Ensure error messages are passed through from aiodns.""" + with patch("aiodns.DNSResolver", autospec=True, spec_set=True) as mock: + mock().getaddrinfo.return_value = fake_aiodns_getaddrinfo_ipv6_result([]) 
+ resolver = AsyncResolver() + with pytest.raises(OSError, match="DNS lookup failed") as excinfo: + await resolver.resolve("x.org") + + assert excinfo.value.strerror == "DNS lookup failed" + + +async def test_async_resolver_aiodns_not_present(loop: Any, monkeypatch: Any) -> None: monkeypatch.setattr("aiohttp.resolver.aiodns", None) with pytest.raises(RuntimeError): AsyncResolver(loop=loop) From 42d30d52b774357a80badc20d40e83ce4c165b67 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Thu, 10 Oct 2024 09:44:16 -0500 Subject: [PATCH 0728/1511] [PR #9451/216e082 backport][3.11] Fix AsyncResolver swallowing the error message (#9453) --- CHANGES/9451.bugfix.rst | 1 + aiohttp/resolver.py | 4 ++-- tests/test_resolver.py | 30 +++++++++++++++++++++++++++++- 3 files changed, 32 insertions(+), 3 deletions(-) create mode 100644 CHANGES/9451.bugfix.rst diff --git a/CHANGES/9451.bugfix.rst b/CHANGES/9451.bugfix.rst new file mode 100644 index 00000000000..2adcbc66273 --- /dev/null +++ b/CHANGES/9451.bugfix.rst @@ -0,0 +1 @@ +Fixed error messages from :py:class:`~aiohttp.resolver.AsyncResolver` being swallowed -- by :user:`bdraco`. 
diff --git a/aiohttp/resolver.py b/aiohttp/resolver.py index 4f15e84071d..5240bfbd8d8 100644 --- a/aiohttp/resolver.py +++ b/aiohttp/resolver.py @@ -109,7 +109,7 @@ async def resolve( ) except aiodns.error.DNSError as exc: msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed" - raise OSError(msg) from exc + raise OSError(None, msg) from exc hosts: List[ResolveResult] = [] for node in resp.nodes: address: Union[Tuple[bytes, int], Tuple[bytes, int, int, int]] = node.addr @@ -143,7 +143,7 @@ async def resolve( ) if not hosts: - raise OSError("DNS lookup failed") + raise OSError(None, "DNS lookup failed") return hosts diff --git a/tests/test_resolver.py b/tests/test_resolver.py index e0e843f4782..842618c3053 100644 --- a/tests/test_resolver.py +++ b/tests/test_resolver.py @@ -332,7 +332,35 @@ async def test_async_resolver_query_ipv6_positive_lookup(loop) -> None: mock().query.assert_called_with("www.python.org", "AAAA") -async def test_async_resolver_aiodns_not_present(loop, monkeypatch) -> None: +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +async def test_async_resolver_error_messages_passed( + loop: asyncio.AbstractEventLoop, +) -> None: + """Ensure error messages are passed through from aiodns.""" + with patch("aiodns.DNSResolver", autospec=True, spec_set=True) as mock: + mock().getaddrinfo.side_effect = aiodns.error.DNSError(1, "Test error message") + resolver = AsyncResolver() + with pytest.raises(OSError, match="Test error message") as excinfo: + await resolver.resolve("x.org") + + assert excinfo.value.strerror == "Test error message" + + +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +async def test_async_resolver_error_messages_passed_no_hosts( + loop: asyncio.AbstractEventLoop, +) -> None: + """Ensure error messages are passed through from aiodns.""" + with patch("aiodns.DNSResolver", autospec=True, spec_set=True) as mock: + mock().getaddrinfo.return_value = fake_aiodns_getaddrinfo_ipv6_result([]) 
+ resolver = AsyncResolver() + with pytest.raises(OSError, match="DNS lookup failed") as excinfo: + await resolver.resolve("x.org") + + assert excinfo.value.strerror == "DNS lookup failed" + + +async def test_async_resolver_aiodns_not_present(loop: Any, monkeypatch: Any) -> None: monkeypatch.setattr("aiohttp.resolver.aiodns", None) with pytest.raises(RuntimeError): AsyncResolver(loop=loop) From dfaafac078d3f5ee5902933709f7264e9b1ffd70 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Thu, 10 Oct 2024 10:46:50 -0500 Subject: [PATCH 0729/1511] [3.11] Fix AsyncResolver query fallback swallowing the error message (#9455) --- CHANGES/9455.bugfix.rst | 1 + aiohttp/resolver.py | 4 ++-- tests/test_resolver.py | 30 ++++++++++++++++++++++++++++++ 3 files changed, 33 insertions(+), 2 deletions(-) create mode 120000 CHANGES/9455.bugfix.rst diff --git a/CHANGES/9455.bugfix.rst b/CHANGES/9455.bugfix.rst new file mode 120000 index 00000000000..da8457a1de6 --- /dev/null +++ b/CHANGES/9455.bugfix.rst @@ -0,0 +1 @@ +9451.bugfix.rst \ No newline at end of file diff --git a/aiohttp/resolver.py b/aiohttp/resolver.py index 5240bfbd8d8..a988b0bf47f 100644 --- a/aiohttp/resolver.py +++ b/aiohttp/resolver.py @@ -159,7 +159,7 @@ async def _resolve_with_query( resp = await self._resolver.query(host, qtype) except aiodns.error.DNSError as exc: msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed" - raise OSError(msg) from exc + raise OSError(None, msg) from exc hosts = [] for rr in resp: @@ -175,7 +175,7 @@ async def _resolve_with_query( ) if not hosts: - raise OSError("DNS lookup failed") + raise OSError(None, "DNS lookup failed") return hosts diff --git a/tests/test_resolver.py b/tests/test_resolver.py index 842618c3053..b4606067079 100644 --- a/tests/test_resolver.py +++ b/tests/test_resolver.py @@ -332,6 +332,36 @@ async def test_async_resolver_query_ipv6_positive_lookup(loop) -> None: mock().query.assert_called_with("www.python.org", "AAAA") 
+@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +async def test_async_resolver_query_fallback_error_messages_passed( + loop: asyncio.AbstractEventLoop, +) -> None: + """Ensure error messages are passed through from aiodns with query fallback.""" + with patch("aiodns.DNSResolver", autospec=True, spec_set=True) as mock: + del mock().gethostbyname + mock().query.side_effect = aiodns.error.DNSError(1, "Test error message") + resolver = AsyncResolver() + with pytest.raises(OSError, match="Test error message") as excinfo: + await resolver.resolve("x.org") + + assert excinfo.value.strerror == "Test error message" + + +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +async def test_async_resolver_query_fallback_error_messages_passed_no_hosts( + loop: asyncio.AbstractEventLoop, +) -> None: + """Ensure error messages are passed through from aiodns with query fallback.""" + with patch("aiodns.DNSResolver", autospec=True, spec_set=True) as mock: + del mock().gethostbyname + mock().query.return_value = fake_query_result([]) + resolver = AsyncResolver() + with pytest.raises(OSError, match="DNS lookup failed") as excinfo: + await resolver.resolve("x.org") + + assert excinfo.value.strerror == "DNS lookup failed" + + @pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") async def test_async_resolver_error_messages_passed( loop: asyncio.AbstractEventLoop, From ee87a049be63887c0a5d9f4e6008b4e47460f16c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 10 Oct 2024 11:21:57 -0500 Subject: [PATCH 0730/1511] [PR #9455/dfaafac0 backport][3.10] Fix AsyncResolver query fallback swallowing the error message (#9456) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/9455.bugfix.rst | 1 + aiohttp/resolver.py | 4 ++-- tests/test_resolver.py | 30 ++++++++++++++++++++++++++++++ 3 files changed, 33 insertions(+), 2 deletions(-) create mode 120000 CHANGES/9455.bugfix.rst diff --git a/CHANGES/9455.bugfix.rst b/CHANGES/9455.bugfix.rst new file mode 120000 index 00000000000..da8457a1de6 --- /dev/null +++ b/CHANGES/9455.bugfix.rst @@ -0,0 +1 @@ +9451.bugfix.rst \ No newline at end of file diff --git a/aiohttp/resolver.py b/aiohttp/resolver.py index 385ae21abf5..c01a46fbb01 100644 --- a/aiohttp/resolver.py +++ b/aiohttp/resolver.py @@ -161,7 +161,7 @@ async def _resolve_with_query( resp = await self._resolver.query(host, qtype) except aiodns.error.DNSError as exc: msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed" - raise OSError(msg) from exc + raise OSError(None, msg) from exc hosts = [] for rr in resp: @@ -177,7 +177,7 @@ async def _resolve_with_query( ) if not hosts: - raise OSError("DNS lookup failed") + raise OSError(None, "DNS lookup failed") return hosts diff --git a/tests/test_resolver.py b/tests/test_resolver.py index 6322bcfca64..f8fba501add 100644 --- a/tests/test_resolver.py +++ b/tests/test_resolver.py @@ -339,6 +339,36 @@ async def test_async_resolver_query_ipv6_positive_lookup(loop) -> None: mock().query.assert_called_with("www.python.org", "AAAA") +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +async def test_async_resolver_query_fallback_error_messages_passed( + loop: asyncio.AbstractEventLoop, +) -> None: + """Ensure error messages are passed through from aiodns with query fallback.""" + with patch("aiodns.DNSResolver", autospec=True, spec_set=True) as mock: + del mock().gethostbyname + mock().query.side_effect = aiodns.error.DNSError(1, "Test error message") + resolver = AsyncResolver() + with pytest.raises(OSError, match="Test error message") as excinfo: + await resolver.resolve("x.org") + + assert excinfo.value.strerror == "Test error 
message" + + +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +async def test_async_resolver_query_fallback_error_messages_passed_no_hosts( + loop: asyncio.AbstractEventLoop, +) -> None: + """Ensure error messages are passed through from aiodns with query fallback.""" + with patch("aiodns.DNSResolver", autospec=True, spec_set=True) as mock: + del mock().gethostbyname + mock().query.return_value = fake_query_result([]) + resolver = AsyncResolver() + with pytest.raises(OSError, match="DNS lookup failed") as excinfo: + await resolver.resolve("x.org") + + assert excinfo.value.strerror == "DNS lookup failed" + + @pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") async def test_async_resolver_error_messages_passed( loop: asyncio.AbstractEventLoop, From cdf3dcabeb9d80a46a239710b2d889657a932a54 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Thu, 10 Oct 2024 12:40:12 -0500 Subject: [PATCH 0731/1511] [PR #9454/b20908e backport][3.10] Simplify DNS throttle implementation (#9457) --- CHANGES/9454.misc.rst | 1 + aiohttp/connector.py | 96 +++++++----- aiohttp/locks.py | 41 ------ tests/test_connector.py | 317 ++++++++++++++++++++++++++++++++++++++-- tests/test_locks.py | 54 ------- 5 files changed, 368 insertions(+), 141 deletions(-) create mode 100644 CHANGES/9454.misc.rst delete mode 100644 aiohttp/locks.py delete mode 100644 tests/test_locks.py diff --git a/CHANGES/9454.misc.rst b/CHANGES/9454.misc.rst new file mode 100644 index 00000000000..5c842590512 --- /dev/null +++ b/CHANGES/9454.misc.rst @@ -0,0 +1 @@ +Simplified DNS resolution throttling code to reduce chance of race conditions -- by :user:`bdraco`. 
diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 1c1283190d4..6e3c9e18db8 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -9,7 +9,7 @@ from contextlib import suppress from http import HTTPStatus from http.cookies import SimpleCookie -from itertools import cycle, islice +from itertools import chain, cycle, islice from time import monotonic from types import TracebackType from typing import ( @@ -50,8 +50,14 @@ ) from .client_proto import ResponseHandler from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params -from .helpers import ceil_timeout, is_ip_address, noop, sentinel -from .locks import EventResultOrError +from .helpers import ( + ceil_timeout, + is_ip_address, + noop, + sentinel, + set_exception, + set_result, +) from .resolver import DefaultResolver try: @@ -840,7 +846,9 @@ def __init__( self._use_dns_cache = use_dns_cache self._cached_hosts = _DNSCacheTable(ttl=ttl_dns_cache) - self._throttle_dns_events: Dict[Tuple[str, int], EventResultOrError] = {} + self._throttle_dns_futures: Dict[ + Tuple[str, int], Set["asyncio.Future[None]"] + ] = {} self._family = family self._local_addr_infos = aiohappyeyeballs.addr_to_addr_infos(local_addr) self._happy_eyeballs_delay = happy_eyeballs_delay @@ -849,8 +857,8 @@ def __init__( def close(self) -> Awaitable[None]: """Close all ongoing DNS calls.""" - for ev in self._throttle_dns_events.values(): - ev.cancel() + for fut in chain.from_iterable(self._throttle_dns_futures.values()): + fut.cancel() for t in self._resolve_host_tasks: t.cancel() @@ -918,18 +926,35 @@ async def _resolve_host( await trace.send_dns_cache_hit(host) return result + futures: Set["asyncio.Future[None]"] # # If multiple connectors are resolving the same host, we wait # for the first one to resolve and then use the result for all of them. 
- # We use a throttle event to ensure that we only resolve the host once + # We use a throttle to ensure that we only resolve the host once # and then use the result for all the waiters. # + if key in self._throttle_dns_futures: + # get futures early, before any await (#4014) + futures = self._throttle_dns_futures[key] + future: asyncio.Future[None] = self._loop.create_future() + futures.add(future) + if traces: + for trace in traces: + await trace.send_dns_cache_hit(host) + try: + await future + finally: + futures.discard(future) + return self._cached_hosts.next_addrs(key) + + # update dict early, before any await (#4014) + self._throttle_dns_futures[key] = futures = set() # In this case we need to create a task to ensure that we can shield # the task from cancellation as cancelling this lookup should not cancel # the underlying lookup or else the cancel event will get broadcast to # all the waiters across all connections. # - coro = self._resolve_host_with_throttle(key, host, port, traces) + coro = self._resolve_host_with_throttle(key, host, port, futures, traces) loop = asyncio.get_running_loop() if sys.version_info >= (3, 12): # Optimization for Python 3.12, try to send immediately @@ -957,42 +982,39 @@ async def _resolve_host_with_throttle( key: Tuple[str, int], host: str, port: int, + futures: Set["asyncio.Future[None]"], traces: Optional[Sequence["Trace"]], ) -> List[ResolveResult]: - """Resolve host with a dns events throttle.""" - if key in self._throttle_dns_events: - # get event early, before any await (#4014) - event = self._throttle_dns_events[key] + """Resolve host and set result for all waiters. + + This method must be run in a task and shielded from cancellation + to avoid cancelling the underlying lookup. 
+ """ + if traces: + for trace in traces: + await trace.send_dns_cache_miss(host) + try: if traces: for trace in traces: - await trace.send_dns_cache_hit(host) - await event.wait() - else: - # update dict early, before any await (#4014) - self._throttle_dns_events[key] = EventResultOrError(self._loop) + await trace.send_dns_resolvehost_start(host) + + addrs = await self._resolver.resolve(host, port, family=self._family) if traces: for trace in traces: - await trace.send_dns_cache_miss(host) - try: - - if traces: - for trace in traces: - await trace.send_dns_resolvehost_start(host) - - addrs = await self._resolver.resolve(host, port, family=self._family) - if traces: - for trace in traces: - await trace.send_dns_resolvehost_end(host) + await trace.send_dns_resolvehost_end(host) - self._cached_hosts.add(key, addrs) - self._throttle_dns_events[key].set() - except BaseException as e: - # any DNS exception, independently of the implementation - # is set for the waiters to raise the same exception. - self._throttle_dns_events[key].set(exc=e) - raise - finally: - self._throttle_dns_events.pop(key) + self._cached_hosts.add(key, addrs) + for fut in futures: + set_result(fut, None) + except BaseException as e: + # any DNS exception is set for the waiters to raise the same exception. + # This coro is always run in task that is shielded from cancellation so + # we should never be propagating cancellation here. + for fut in futures: + set_exception(fut, e) + raise + finally: + self._throttle_dns_futures.pop(key) return self._cached_hosts.next_addrs(key) diff --git a/aiohttp/locks.py b/aiohttp/locks.py deleted file mode 100644 index de2dc83d09d..00000000000 --- a/aiohttp/locks.py +++ /dev/null @@ -1,41 +0,0 @@ -import asyncio -import collections -from typing import Any, Deque, Optional - - -class EventResultOrError: - """Event asyncio lock helper class. - - Wraps the Event asyncio lock allowing either to awake the - locked Tasks without any error or raising an exception. 
- - thanks to @vorpalsmith for the simple design. - """ - - def __init__(self, loop: asyncio.AbstractEventLoop) -> None: - self._loop = loop - self._exc: Optional[BaseException] = None - self._event = asyncio.Event() - self._waiters: Deque[asyncio.Future[Any]] = collections.deque() - - def set(self, exc: Optional[BaseException] = None) -> None: - self._exc = exc - self._event.set() - - async def wait(self) -> Any: - waiter = self._loop.create_task(self._event.wait()) - self._waiters.append(waiter) - try: - val = await waiter - finally: - self._waiters.remove(waiter) - - if self._exc is not None: - raise self._exc - - return val - - def cancel(self) -> None: - """Cancel all waiters""" - for waiter in self._waiters: - waiter.cancel() diff --git a/tests/test_connector.py b/tests/test_connector.py index a21dd872993..94eeb3ca85b 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -34,7 +34,6 @@ TCPConnector, _DNSCacheTable, ) -from aiohttp.locks import EventResultOrError from aiohttp.resolver import ResolveResult from aiohttp.test_utils import make_mocked_coro, unused_port from aiohttp.tracing import Trace @@ -1105,6 +1104,7 @@ def dns_response(loop): async def coro(): # simulates a network operation await asyncio.sleep(0) + await asyncio.sleep(0) return ["127.0.0.1"] return coro @@ -1766,8 +1766,8 @@ async def test_close_cancels_cleanup_handle(loop) -> None: async def test_close_cancels_resolve_host(loop: asyncio.AbstractEventLoop) -> None: cancelled = False - async def delay_resolve_host(*args: object) -> None: - """Delay _resolve_host() task in order to test cancellation.""" + async def delay_resolve(*args: object, **kwargs: object) -> None: + """Delay resolve() task in order to test cancellation.""" nonlocal cancelled try: await asyncio.sleep(10) @@ -1779,7 +1779,7 @@ async def delay_resolve_host(*args: object) -> None: req = ClientRequest( "GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock() ) - with mock.patch.object(conn, 
"_resolve_host_with_throttle", delay_resolve_host): + with mock.patch.object(conn._resolver, "resolve", delay_resolve): t = asyncio.create_task(conn.connect(req, [], ClientTimeout())) # Let it create the internal task await asyncio.sleep(0) @@ -1797,6 +1797,301 @@ async def delay_resolve_host(*args: object) -> None: await t +async def test_multiple_dns_resolution_requests_success( + loop: asyncio.AbstractEventLoop, +) -> None: + """Verify that multiple DNS resolution requests are handled correctly.""" + + async def delay_resolve(*args: object, **kwargs: object) -> List[ResolveResult]: + """Delayed resolve() task.""" + for _ in range(3): + await asyncio.sleep(0) + return [ + { + "hostname": "localhost", + "host": "127.0.0.1", + "port": 80, + "family": socket.AF_INET, + "proto": 0, + "flags": socket.AI_NUMERICHOST, + }, + ] + + conn = aiohttp.TCPConnector(force_close=True) + req = ClientRequest( + "GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock() + ) + with mock.patch.object(conn._resolver, "resolve", delay_resolve), mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + side_effect=OSError(1, "Forced connection to fail"), + ): + task1 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + + # Let it create the internal task + await asyncio.sleep(0) + # Let that task start running + await asyncio.sleep(0) + + # Ensure the task is running + assert len(conn._resolve_host_tasks) == 1 + + task2 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + task3 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + + with pytest.raises( + aiohttp.ClientConnectorError, match="Forced connection to fail" + ): + await task1 + + # Verify the the task is finished + assert len(conn._resolve_host_tasks) == 0 + + with pytest.raises( + aiohttp.ClientConnectorError, match="Forced connection to fail" + ): + await task2 + with pytest.raises( + aiohttp.ClientConnectorError, match="Forced connection to fail" + ): + await task3 
+ + +async def test_multiple_dns_resolution_requests_failure( + loop: asyncio.AbstractEventLoop, +) -> None: + """Verify that DNS resolution failure for multiple requests is handled correctly.""" + + async def delay_resolve(*args: object, **kwargs: object) -> List[ResolveResult]: + """Delayed resolve() task.""" + for _ in range(3): + await asyncio.sleep(0) + raise OSError(None, "DNS Resolution mock failure") + + conn = aiohttp.TCPConnector(force_close=True) + req = ClientRequest( + "GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock() + ) + with mock.patch.object(conn._resolver, "resolve", delay_resolve), mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + side_effect=OSError(1, "Forced connection to fail"), + ): + task1 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + + # Let it create the internal task + await asyncio.sleep(0) + # Let that task start running + await asyncio.sleep(0) + + # Ensure the task is running + assert len(conn._resolve_host_tasks) == 1 + + task2 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + task3 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + + with pytest.raises( + aiohttp.ClientConnectorError, match="DNS Resolution mock failure" + ): + await task1 + + # Verify the the task is finished + assert len(conn._resolve_host_tasks) == 0 + + with pytest.raises( + aiohttp.ClientConnectorError, match="DNS Resolution mock failure" + ): + await task2 + with pytest.raises( + aiohttp.ClientConnectorError, match="DNS Resolution mock failure" + ): + await task3 + + +async def test_multiple_dns_resolution_requests_cancelled( + loop: asyncio.AbstractEventLoop, +) -> None: + """Verify that DNS resolution cancellation does not affect other tasks.""" + + async def delay_resolve(*args: object, **kwargs: object) -> List[ResolveResult]: + """Delayed resolve() task.""" + for _ in range(3): + await asyncio.sleep(0) + raise OSError(None, "DNS Resolution mock failure") + + conn = 
aiohttp.TCPConnector(force_close=True) + req = ClientRequest( + "GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock() + ) + with mock.patch.object(conn._resolver, "resolve", delay_resolve), mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + side_effect=OSError(1, "Forced connection to fail"), + ): + task1 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + + # Let it create the internal task + await asyncio.sleep(0) + # Let that task start running + await asyncio.sleep(0) + + # Ensure the task is running + assert len(conn._resolve_host_tasks) == 1 + + task2 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + task3 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + + task1.cancel() + with pytest.raises(asyncio.CancelledError): + await task1 + + with pytest.raises( + aiohttp.ClientConnectorError, match="DNS Resolution mock failure" + ): + await task2 + with pytest.raises( + aiohttp.ClientConnectorError, match="DNS Resolution mock failure" + ): + await task3 + + # Verify the the task is finished + assert len(conn._resolve_host_tasks) == 0 + + +async def test_multiple_dns_resolution_requests_first_cancelled( + loop: asyncio.AbstractEventLoop, +) -> None: + """Verify that first DNS resolution cancellation does not make other resolutions fail.""" + + async def delay_resolve(*args: object, **kwargs: object) -> List[ResolveResult]: + """Delayed resolve() task.""" + for _ in range(3): + await asyncio.sleep(0) + return [ + { + "hostname": "localhost", + "host": "127.0.0.1", + "port": 80, + "family": socket.AF_INET, + "proto": 0, + "flags": socket.AI_NUMERICHOST, + }, + ] + + conn = aiohttp.TCPConnector(force_close=True) + req = ClientRequest( + "GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock() + ) + with mock.patch.object(conn._resolver, "resolve", delay_resolve), mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + side_effect=OSError(1, "Forced connection to 
fail"), + ): + task1 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + + # Let it create the internal task + await asyncio.sleep(0) + # Let that task start running + await asyncio.sleep(0) + + # Ensure the task is running + assert len(conn._resolve_host_tasks) == 1 + + task2 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + task3 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + + task1.cancel() + with pytest.raises(asyncio.CancelledError): + await task1 + + # The second and third tasks should still make the connection + # even if the first one is cancelled + with pytest.raises( + aiohttp.ClientConnectorError, match="Forced connection to fail" + ): + await task2 + with pytest.raises( + aiohttp.ClientConnectorError, match="Forced connection to fail" + ): + await task3 + + # Verify the the task is finished + assert len(conn._resolve_host_tasks) == 0 + + +async def test_multiple_dns_resolution_requests_first_fails_second_successful( + loop: asyncio.AbstractEventLoop, +) -> None: + """Verify that first DNS resolution fails the first time and is successful the second time.""" + attempt = 0 + + async def delay_resolve(*args: object, **kwargs: object) -> List[ResolveResult]: + """Delayed resolve() task.""" + nonlocal attempt + for _ in range(3): + await asyncio.sleep(0) + attempt += 1 + if attempt == 1: + raise OSError(None, "DNS Resolution mock failure") + return [ + { + "hostname": "localhost", + "host": "127.0.0.1", + "port": 80, + "family": socket.AF_INET, + "proto": 0, + "flags": socket.AI_NUMERICHOST, + }, + ] + + conn = aiohttp.TCPConnector(force_close=True) + req = ClientRequest( + "GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock() + ) + with mock.patch.object(conn._resolver, "resolve", delay_resolve), mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + side_effect=OSError(1, "Forced connection to fail"), + ): + task1 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + + 
# Let it create the internal task + await asyncio.sleep(0) + # Let that task start running + await asyncio.sleep(0) + + # Ensure the task is running + assert len(conn._resolve_host_tasks) == 1 + + task2 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + + with pytest.raises( + aiohttp.ClientConnectorError, match="DNS Resolution mock failure" + ): + await task1 + + assert len(conn._resolve_host_tasks) == 0 + # The second task should also get the dns resolution failure + with pytest.raises( + aiohttp.ClientConnectorError, match="DNS Resolution mock failure" + ): + await task2 + + # The third task is created after the resolution finished so + # it should try again and succeed + task3 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + # Let it create the internal task + await asyncio.sleep(0) + # Let that task start running + await asyncio.sleep(0) + + # Ensure the task is running + assert len(conn._resolve_host_tasks) == 1 + + with pytest.raises( + aiohttp.ClientConnectorError, match="Forced connection to fail" + ): + await task3 + + # Verify the the task is finished + assert len(conn._resolve_host_tasks) == 0 + + async def test_close_abort_closed_transports(loop: asyncio.AbstractEventLoop) -> None: tr = mock.Mock() @@ -2762,14 +3057,18 @@ async def test_connector_throttle_trace_race(loop): key = ("", 0) token = object() - class DummyTracer: - async def send_dns_cache_hit(self, *args, **kwargs): - event = connector._throttle_dns_events.pop(key) - event.set() + class DummyTracer(Trace): + def __init__(self) -> None: + """Dummy""" + + async def send_dns_cache_hit(self, *args: object, **kwargs: object) -> None: + futures = connector._throttle_dns_futures.pop(key) + for fut in futures: + fut.set_result(None) connector._cached_hosts.add(key, [token]) connector = TCPConnector() - connector._throttle_dns_events[key] = EventResultOrError(loop) + connector._throttle_dns_futures[key] = set() traces = [DummyTracer()] assert await 
connector._resolve_host("", 0, traces) == [token] diff --git a/tests/test_locks.py b/tests/test_locks.py deleted file mode 100644 index 5f434eace97..00000000000 --- a/tests/test_locks.py +++ /dev/null @@ -1,54 +0,0 @@ -# Tests of custom aiohttp locks implementations -import asyncio - -import pytest - -from aiohttp.locks import EventResultOrError - - -class TestEventResultOrError: - async def test_set_exception(self, loop) -> None: - ev = EventResultOrError(loop=loop) - - async def c(): - try: - await ev.wait() - except Exception as e: - return e - return 1 - - t = loop.create_task(c()) - await asyncio.sleep(0) - e = Exception() - ev.set(exc=e) - assert (await t) == e - - async def test_set(self, loop) -> None: - ev = EventResultOrError(loop=loop) - - async def c(): - await ev.wait() - return 1 - - t = loop.create_task(c()) - await asyncio.sleep(0) - ev.set() - assert (await t) == 1 - - async def test_cancel_waiters(self, loop) -> None: - ev = EventResultOrError(loop=loop) - - async def c(): - await ev.wait() - - t1 = loop.create_task(c()) - t2 = loop.create_task(c()) - await asyncio.sleep(0) - ev.cancel() - ev.set() - - with pytest.raises(asyncio.CancelledError): - await t1 - - with pytest.raises(asyncio.CancelledError): - await t2 From f48f2100dbda6de522c056513b665c3fa1b22a9c Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Thu, 10 Oct 2024 12:56:10 -0500 Subject: [PATCH 0732/1511] [PR #9454/b20908e backport][3.11] Simplify DNS throttle implementation (#9458) --- CHANGES/9454.misc.rst | 1 + aiohttp/connector.py | 96 +++++++----- aiohttp/locks.py | 41 ------ tests/test_connector.py | 317 ++++++++++++++++++++++++++++++++++++++-- tests/test_locks.py | 54 ------- 5 files changed, 368 insertions(+), 141 deletions(-) create mode 100644 CHANGES/9454.misc.rst delete mode 100644 aiohttp/locks.py delete mode 100644 tests/test_locks.py diff --git a/CHANGES/9454.misc.rst b/CHANGES/9454.misc.rst new file mode 100644 index 00000000000..5c842590512 --- /dev/null +++ b/CHANGES/9454.misc.rst @@ -0,0 +1 @@ +Simplified DNS resolution throttling code to reduce chance of race conditions -- by :user:`bdraco`. diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 5947fdc6953..da503bded53 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -9,7 +9,7 @@ from contextlib import suppress from http import HTTPStatus from http.cookies import SimpleCookie -from itertools import cycle, islice +from itertools import chain, cycle, islice from time import monotonic from types import TracebackType from typing import ( @@ -49,8 +49,14 @@ ) from .client_proto import ResponseHandler from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params -from .helpers import ceil_timeout, is_ip_address, noop, sentinel -from .locks import EventResultOrError +from .helpers import ( + ceil_timeout, + is_ip_address, + noop, + sentinel, + set_exception, + set_result, +) from .resolver import DefaultResolver try: @@ -839,7 +845,9 @@ def __init__( self._use_dns_cache = use_dns_cache self._cached_hosts = _DNSCacheTable(ttl=ttl_dns_cache) - self._throttle_dns_events: Dict[Tuple[str, int], EventResultOrError] = {} + self._throttle_dns_futures: Dict[ + Tuple[str, int], Set["asyncio.Future[None]"] + ] = {} self._family = family self._local_addr_infos = 
aiohappyeyeballs.addr_to_addr_infos(local_addr) self._happy_eyeballs_delay = happy_eyeballs_delay @@ -848,8 +856,8 @@ def __init__( def close(self) -> Awaitable[None]: """Close all ongoing DNS calls.""" - for ev in self._throttle_dns_events.values(): - ev.cancel() + for fut in chain.from_iterable(self._throttle_dns_futures.values()): + fut.cancel() for t in self._resolve_host_tasks: t.cancel() @@ -917,18 +925,35 @@ async def _resolve_host( await trace.send_dns_cache_hit(host) return result + futures: Set["asyncio.Future[None]"] # # If multiple connectors are resolving the same host, we wait # for the first one to resolve and then use the result for all of them. - # We use a throttle event to ensure that we only resolve the host once + # We use a throttle to ensure that we only resolve the host once # and then use the result for all the waiters. # + if key in self._throttle_dns_futures: + # get futures early, before any await (#4014) + futures = self._throttle_dns_futures[key] + future: asyncio.Future[None] = self._loop.create_future() + futures.add(future) + if traces: + for trace in traces: + await trace.send_dns_cache_hit(host) + try: + await future + finally: + futures.discard(future) + return self._cached_hosts.next_addrs(key) + + # update dict early, before any await (#4014) + self._throttle_dns_futures[key] = futures = set() # In this case we need to create a task to ensure that we can shield # the task from cancellation as cancelling this lookup should not cancel # the underlying lookup or else the cancel event will get broadcast to # all the waiters across all connections. 
# - coro = self._resolve_host_with_throttle(key, host, port, traces) + coro = self._resolve_host_with_throttle(key, host, port, futures, traces) loop = asyncio.get_running_loop() if sys.version_info >= (3, 12): # Optimization for Python 3.12, try to send immediately @@ -956,42 +981,39 @@ async def _resolve_host_with_throttle( key: Tuple[str, int], host: str, port: int, + futures: Set["asyncio.Future[None]"], traces: Optional[Sequence["Trace"]], ) -> List[ResolveResult]: - """Resolve host with a dns events throttle.""" - if key in self._throttle_dns_events: - # get event early, before any await (#4014) - event = self._throttle_dns_events[key] + """Resolve host and set result for all waiters. + + This method must be run in a task and shielded from cancellation + to avoid cancelling the underlying lookup. + """ + if traces: + for trace in traces: + await trace.send_dns_cache_miss(host) + try: if traces: for trace in traces: - await trace.send_dns_cache_hit(host) - await event.wait() - else: - # update dict early, before any await (#4014) - self._throttle_dns_events[key] = EventResultOrError(self._loop) + await trace.send_dns_resolvehost_start(host) + + addrs = await self._resolver.resolve(host, port, family=self._family) if traces: for trace in traces: - await trace.send_dns_cache_miss(host) - try: - - if traces: - for trace in traces: - await trace.send_dns_resolvehost_start(host) - - addrs = await self._resolver.resolve(host, port, family=self._family) - if traces: - for trace in traces: - await trace.send_dns_resolvehost_end(host) + await trace.send_dns_resolvehost_end(host) - self._cached_hosts.add(key, addrs) - self._throttle_dns_events[key].set() - except BaseException as e: - # any DNS exception, independently of the implementation - # is set for the waiters to raise the same exception. 
- self._throttle_dns_events[key].set(exc=e) - raise - finally: - self._throttle_dns_events.pop(key) + self._cached_hosts.add(key, addrs) + for fut in futures: + set_result(fut, None) + except BaseException as e: + # any DNS exception is set for the waiters to raise the same exception. + # This coro is always run in task that is shielded from cancellation so + # we should never be propagating cancellation here. + for fut in futures: + set_exception(fut, e) + raise + finally: + self._throttle_dns_futures.pop(key) return self._cached_hosts.next_addrs(key) diff --git a/aiohttp/locks.py b/aiohttp/locks.py deleted file mode 100644 index de2dc83d09d..00000000000 --- a/aiohttp/locks.py +++ /dev/null @@ -1,41 +0,0 @@ -import asyncio -import collections -from typing import Any, Deque, Optional - - -class EventResultOrError: - """Event asyncio lock helper class. - - Wraps the Event asyncio lock allowing either to awake the - locked Tasks without any error or raising an exception. - - thanks to @vorpalsmith for the simple design. 
- """ - - def __init__(self, loop: asyncio.AbstractEventLoop) -> None: - self._loop = loop - self._exc: Optional[BaseException] = None - self._event = asyncio.Event() - self._waiters: Deque[asyncio.Future[Any]] = collections.deque() - - def set(self, exc: Optional[BaseException] = None) -> None: - self._exc = exc - self._event.set() - - async def wait(self) -> Any: - waiter = self._loop.create_task(self._event.wait()) - self._waiters.append(waiter) - try: - val = await waiter - finally: - self._waiters.remove(waiter) - - if self._exc is not None: - raise self._exc - - return val - - def cancel(self) -> None: - """Cancel all waiters""" - for waiter in self._waiters: - waiter.cancel() diff --git a/tests/test_connector.py b/tests/test_connector.py index bad4e4a2f6e..74713b74acd 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -29,7 +29,6 @@ TCPConnector, _DNSCacheTable, ) -from aiohttp.locks import EventResultOrError from aiohttp.resolver import ResolveResult from aiohttp.test_utils import make_mocked_coro, unused_port from aiohttp.tracing import Trace @@ -1100,6 +1099,7 @@ def dns_response(loop): async def coro(): # simulates a network operation await asyncio.sleep(0) + await asyncio.sleep(0) return ["127.0.0.1"] return coro @@ -1761,8 +1761,8 @@ async def test_close_cancels_cleanup_handle(loop) -> None: async def test_close_cancels_resolve_host(loop: asyncio.AbstractEventLoop) -> None: cancelled = False - async def delay_resolve_host(*args: object) -> None: - """Delay _resolve_host() task in order to test cancellation.""" + async def delay_resolve(*args: object, **kwargs: object) -> None: + """Delay resolve() task in order to test cancellation.""" nonlocal cancelled try: await asyncio.sleep(10) @@ -1774,7 +1774,7 @@ async def delay_resolve_host(*args: object) -> None: req = ClientRequest( "GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock() ) - with mock.patch.object(conn, "_resolve_host_with_throttle", delay_resolve_host): + 
with mock.patch.object(conn._resolver, "resolve", delay_resolve): t = asyncio.create_task(conn.connect(req, [], ClientTimeout())) # Let it create the internal task await asyncio.sleep(0) @@ -1792,6 +1792,301 @@ async def delay_resolve_host(*args: object) -> None: await t +async def test_multiple_dns_resolution_requests_success( + loop: asyncio.AbstractEventLoop, +) -> None: + """Verify that multiple DNS resolution requests are handled correctly.""" + + async def delay_resolve(*args: object, **kwargs: object) -> List[ResolveResult]: + """Delayed resolve() task.""" + for _ in range(3): + await asyncio.sleep(0) + return [ + { + "hostname": "localhost", + "host": "127.0.0.1", + "port": 80, + "family": socket.AF_INET, + "proto": 0, + "flags": socket.AI_NUMERICHOST, + }, + ] + + conn = aiohttp.TCPConnector(force_close=True) + req = ClientRequest( + "GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock() + ) + with mock.patch.object(conn._resolver, "resolve", delay_resolve), mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + side_effect=OSError(1, "Forced connection to fail"), + ): + task1 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + + # Let it create the internal task + await asyncio.sleep(0) + # Let that task start running + await asyncio.sleep(0) + + # Ensure the task is running + assert len(conn._resolve_host_tasks) == 1 + + task2 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + task3 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + + with pytest.raises( + aiohttp.ClientConnectorError, match="Forced connection to fail" + ): + await task1 + + # Verify the the task is finished + assert len(conn._resolve_host_tasks) == 0 + + with pytest.raises( + aiohttp.ClientConnectorError, match="Forced connection to fail" + ): + await task2 + with pytest.raises( + aiohttp.ClientConnectorError, match="Forced connection to fail" + ): + await task3 + + +async def 
test_multiple_dns_resolution_requests_failure( + loop: asyncio.AbstractEventLoop, +) -> None: + """Verify that DNS resolution failure for multiple requests is handled correctly.""" + + async def delay_resolve(*args: object, **kwargs: object) -> List[ResolveResult]: + """Delayed resolve() task.""" + for _ in range(3): + await asyncio.sleep(0) + raise OSError(None, "DNS Resolution mock failure") + + conn = aiohttp.TCPConnector(force_close=True) + req = ClientRequest( + "GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock() + ) + with mock.patch.object(conn._resolver, "resolve", delay_resolve), mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + side_effect=OSError(1, "Forced connection to fail"), + ): + task1 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + + # Let it create the internal task + await asyncio.sleep(0) + # Let that task start running + await asyncio.sleep(0) + + # Ensure the task is running + assert len(conn._resolve_host_tasks) == 1 + + task2 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + task3 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + + with pytest.raises( + aiohttp.ClientConnectorError, match="DNS Resolution mock failure" + ): + await task1 + + # Verify the the task is finished + assert len(conn._resolve_host_tasks) == 0 + + with pytest.raises( + aiohttp.ClientConnectorError, match="DNS Resolution mock failure" + ): + await task2 + with pytest.raises( + aiohttp.ClientConnectorError, match="DNS Resolution mock failure" + ): + await task3 + + +async def test_multiple_dns_resolution_requests_cancelled( + loop: asyncio.AbstractEventLoop, +) -> None: + """Verify that DNS resolution cancellation does not affect other tasks.""" + + async def delay_resolve(*args: object, **kwargs: object) -> List[ResolveResult]: + """Delayed resolve() task.""" + for _ in range(3): + await asyncio.sleep(0) + raise OSError(None, "DNS Resolution mock failure") + + conn = 
aiohttp.TCPConnector(force_close=True) + req = ClientRequest( + "GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock() + ) + with mock.patch.object(conn._resolver, "resolve", delay_resolve), mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + side_effect=OSError(1, "Forced connection to fail"), + ): + task1 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + + # Let it create the internal task + await asyncio.sleep(0) + # Let that task start running + await asyncio.sleep(0) + + # Ensure the task is running + assert len(conn._resolve_host_tasks) == 1 + + task2 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + task3 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + + task1.cancel() + with pytest.raises(asyncio.CancelledError): + await task1 + + with pytest.raises( + aiohttp.ClientConnectorError, match="DNS Resolution mock failure" + ): + await task2 + with pytest.raises( + aiohttp.ClientConnectorError, match="DNS Resolution mock failure" + ): + await task3 + + # Verify the the task is finished + assert len(conn._resolve_host_tasks) == 0 + + +async def test_multiple_dns_resolution_requests_first_cancelled( + loop: asyncio.AbstractEventLoop, +) -> None: + """Verify that first DNS resolution cancellation does not make other resolutions fail.""" + + async def delay_resolve(*args: object, **kwargs: object) -> List[ResolveResult]: + """Delayed resolve() task.""" + for _ in range(3): + await asyncio.sleep(0) + return [ + { + "hostname": "localhost", + "host": "127.0.0.1", + "port": 80, + "family": socket.AF_INET, + "proto": 0, + "flags": socket.AI_NUMERICHOST, + }, + ] + + conn = aiohttp.TCPConnector(force_close=True) + req = ClientRequest( + "GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock() + ) + with mock.patch.object(conn._resolver, "resolve", delay_resolve), mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + side_effect=OSError(1, "Forced connection to 
fail"), + ): + task1 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + + # Let it create the internal task + await asyncio.sleep(0) + # Let that task start running + await asyncio.sleep(0) + + # Ensure the task is running + assert len(conn._resolve_host_tasks) == 1 + + task2 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + task3 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + + task1.cancel() + with pytest.raises(asyncio.CancelledError): + await task1 + + # The second and third tasks should still make the connection + # even if the first one is cancelled + with pytest.raises( + aiohttp.ClientConnectorError, match="Forced connection to fail" + ): + await task2 + with pytest.raises( + aiohttp.ClientConnectorError, match="Forced connection to fail" + ): + await task3 + + # Verify the the task is finished + assert len(conn._resolve_host_tasks) == 0 + + +async def test_multiple_dns_resolution_requests_first_fails_second_successful( + loop: asyncio.AbstractEventLoop, +) -> None: + """Verify that first DNS resolution fails the first time and is successful the second time.""" + attempt = 0 + + async def delay_resolve(*args: object, **kwargs: object) -> List[ResolveResult]: + """Delayed resolve() task.""" + nonlocal attempt + for _ in range(3): + await asyncio.sleep(0) + attempt += 1 + if attempt == 1: + raise OSError(None, "DNS Resolution mock failure") + return [ + { + "hostname": "localhost", + "host": "127.0.0.1", + "port": 80, + "family": socket.AF_INET, + "proto": 0, + "flags": socket.AI_NUMERICHOST, + }, + ] + + conn = aiohttp.TCPConnector(force_close=True) + req = ClientRequest( + "GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock() + ) + with mock.patch.object(conn._resolver, "resolve", delay_resolve), mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + side_effect=OSError(1, "Forced connection to fail"), + ): + task1 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + + 
# Let it create the internal task + await asyncio.sleep(0) + # Let that task start running + await asyncio.sleep(0) + + # Ensure the task is running + assert len(conn._resolve_host_tasks) == 1 + + task2 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + + with pytest.raises( + aiohttp.ClientConnectorError, match="DNS Resolution mock failure" + ): + await task1 + + assert len(conn._resolve_host_tasks) == 0 + # The second task should also get the dns resolution failure + with pytest.raises( + aiohttp.ClientConnectorError, match="DNS Resolution mock failure" + ): + await task2 + + # The third task is created after the resolution finished so + # it should try again and succeed + task3 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + # Let it create the internal task + await asyncio.sleep(0) + # Let that task start running + await asyncio.sleep(0) + + # Ensure the task is running + assert len(conn._resolve_host_tasks) == 1 + + with pytest.raises( + aiohttp.ClientConnectorError, match="Forced connection to fail" + ): + await task3 + + # Verify the the task is finished + assert len(conn._resolve_host_tasks) == 0 + + async def test_close_abort_closed_transports(loop: asyncio.AbstractEventLoop) -> None: tr = mock.Mock() @@ -2757,14 +3052,18 @@ async def test_connector_throttle_trace_race(loop): key = ("", 0) token = object() - class DummyTracer: - async def send_dns_cache_hit(self, *args, **kwargs): - event = connector._throttle_dns_events.pop(key) - event.set() + class DummyTracer(Trace): + def __init__(self) -> None: + """Dummy""" + + async def send_dns_cache_hit(self, *args: object, **kwargs: object) -> None: + futures = connector._throttle_dns_futures.pop(key) + for fut in futures: + fut.set_result(None) connector._cached_hosts.add(key, [token]) connector = TCPConnector() - connector._throttle_dns_events[key] = EventResultOrError(loop) + connector._throttle_dns_futures[key] = set() traces = [DummyTracer()] assert await 
connector._resolve_host("", 0, traces) == [token] diff --git a/tests/test_locks.py b/tests/test_locks.py deleted file mode 100644 index 5f434eace97..00000000000 --- a/tests/test_locks.py +++ /dev/null @@ -1,54 +0,0 @@ -# Tests of custom aiohttp locks implementations -import asyncio - -import pytest - -from aiohttp.locks import EventResultOrError - - -class TestEventResultOrError: - async def test_set_exception(self, loop) -> None: - ev = EventResultOrError(loop=loop) - - async def c(): - try: - await ev.wait() - except Exception as e: - return e - return 1 - - t = loop.create_task(c()) - await asyncio.sleep(0) - e = Exception() - ev.set(exc=e) - assert (await t) == e - - async def test_set(self, loop) -> None: - ev = EventResultOrError(loop=loop) - - async def c(): - await ev.wait() - return 1 - - t = loop.create_task(c()) - await asyncio.sleep(0) - ev.set() - assert (await t) == 1 - - async def test_cancel_waiters(self, loop) -> None: - ev = EventResultOrError(loop=loop) - - async def c(): - await ev.wait() - - t1 = loop.create_task(c()) - t2 = loop.create_task(c()) - await asyncio.sleep(0) - ev.cancel() - ev.set() - - with pytest.raises(asyncio.CancelledError): - await t1 - - with pytest.raises(asyncio.CancelledError): - await t2 From 8b424c8eb0e13941602be0a317579f227fe1ec35 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Thu, 10 Oct 2024 13:57:23 -0500 Subject: [PATCH 0733/1511] [PR #8456/b09d7cc backport][3.10] Add ClientConnectorDNSError for differentiating DNS errors from others (#9459) Co-authored-by: J. 
Nick Koston <nick@koston.org> Co-authored-by: Marcus Stojcevich <129109254+mstojcevich-cisco@users.noreply.github.com> --- CHANGES/8455.feature.rst | 1 + CONTRIBUTORS.txt | 1 + aiohttp/__init__.py | 2 ++ aiohttp/client.py | 2 ++ aiohttp/client_exceptions.py | 9 +++++++++ aiohttp/connector.py | 3 ++- docs/client_reference.rst | 8 ++++++++ tests/test_client_functional.py | 33 ++++++++++++++++++++++++++++++++- 8 files changed, 57 insertions(+), 2 deletions(-) create mode 100644 CHANGES/8455.feature.rst diff --git a/CHANGES/8455.feature.rst b/CHANGES/8455.feature.rst new file mode 100644 index 00000000000..267e5243afa --- /dev/null +++ b/CHANGES/8455.feature.rst @@ -0,0 +1 @@ +Added :exc:`aiohttp.ClientConnectorDNSError` for differentiating DNS resolution errors from other connector errors -- by :user:`mstojcevich`. diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 52cb1d59ff3..3fb6686c322 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -221,6 +221,7 @@ Manuel Miranda Marat Sharafutdinov Marc Mueller Marco Paolini +Marcus Stojcevich Mariano Anaya Mariusz Masztalerczuk Marko Kohtala diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index b65dd45000b..246a9202b4e 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -8,6 +8,7 @@ ClientConnectionError, ClientConnectionResetError, ClientConnectorCertificateError, + ClientConnectorDNSError, ClientConnectorError, ClientConnectorSSLError, ClientError, @@ -127,6 +128,7 @@ "ClientConnectionError", "ClientConnectionResetError", "ClientConnectorCertificateError", + "ClientConnectorDNSError", "ClientConnectorError", "ClientConnectorSSLError", "ClientError", diff --git a/aiohttp/client.py b/aiohttp/client.py index 596d94bd8bf..93dec00a49c 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -42,6 +42,7 @@ ClientConnectionError, ClientConnectionResetError, ClientConnectorCertificateError, + ClientConnectorDNSError, ClientConnectorError, ClientConnectorSSLError, ClientError, @@ -105,6 +106,7 @@ 
"ClientConnectionError", "ClientConnectionResetError", "ClientConnectorCertificateError", + "ClientConnectorDNSError", "ClientConnectorError", "ClientConnectorSSLError", "ClientError", diff --git a/aiohttp/client_exceptions.py b/aiohttp/client_exceptions.py index 94991c42477..2cf6cf88328 100644 --- a/aiohttp/client_exceptions.py +++ b/aiohttp/client_exceptions.py @@ -30,6 +30,7 @@ "ClientConnectorError", "ClientProxyConnectionError", "ClientSSLError", + "ClientConnectorDNSError", "ClientConnectorSSLError", "ClientConnectorCertificateError", "ConnectionTimeoutError", @@ -206,6 +207,14 @@ def __str__(self) -> str: __reduce__ = BaseException.__reduce__ +class ClientConnectorDNSError(ClientConnectorError): + """DNS resolution failed during client connection. + + Raised in :class:`aiohttp.connector.TCPConnector` if + DNS resolution fails. + """ + + class ClientProxyConnectionError(ClientConnectorError): """Proxy connection error. diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 6e3c9e18db8..1bdd14b7e25 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -39,6 +39,7 @@ from .client_exceptions import ( ClientConnectionError, ClientConnectorCertificateError, + ClientConnectorDNSError, ClientConnectorError, ClientConnectorSSLError, ClientHttpProxyError, @@ -1319,7 +1320,7 @@ async def _create_direct_connection( raise # in case of proxy it is not ClientProxyConnectionError # it is problem of resolving proxy ip itself - raise ClientConnectorError(req.connection_key, exc) from exc + raise ClientConnectorDNSError(req.connection_key, exc) from exc last_exc: Optional[Exception] = None addr_infos = self._convert_hosts_to_addr_infos(hosts) diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 1b582932523..c48e87e14cb 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -2228,6 +2228,12 @@ Connection errors Derived from :exc:`ClientOSError` +.. class:: ClientConnectorDNSError + + DNS resolution error. 
+ + Derived from :exc:`ClientConnectorError` + .. class:: ClientProxyConnectionError Derived from :exc:`ClientConnectorError` @@ -2309,6 +2315,8 @@ Hierarchy of exceptions * :exc:`ClientProxyConnectionError` + * :exc:`ClientConnectorDNSError` + * :exc:`ClientSSLError` * :exc:`ClientConnectorCertificateError` diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index f1b9c89ff97..30ceebddc97 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -3116,7 +3116,38 @@ async def test_aiohttp_request_ctx_manager_not_found() -> None: assert False, "never executed" # pragma: no cover -async def test_yield_from_in_session_request(aiohttp_client) -> None: +async def test_raising_client_connector_dns_error_on_dns_failure() -> None: + """Verify that the exception raised when a DNS lookup fails is specific to DNS.""" + with mock.patch( + "aiohttp.connector.TCPConnector._resolve_host", autospec=True, spec_set=True + ) as mock_resolve_host: + mock_resolve_host.side_effect = OSError(None, "DNS lookup failed") + with pytest.raises(aiohttp.ClientConnectorDNSError, match="DNS lookup failed"): + async with aiohttp.request("GET", "http://wrong-dns-name.com"): + assert False, "never executed" + + +async def test_aiohttp_request_coroutine(aiohttp_server: AiohttpServer) -> None: + async def handler(request: web.Request) -> web.Response: + return web.Response() + + app = web.Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app) + + not_an_awaitable = aiohttp.request("GET", server.make_url("/")) + with pytest.raises( + TypeError, + match="^object _SessionRequestContextManager " + "can't be used in 'await' expression$", + ): + await not_an_awaitable # type: ignore[misc] + + await not_an_awaitable._coro # coroutine 'ClientSession._request' was never awaited + await server.close() + + +async def test_yield_from_in_session_request(aiohttp_client: AiohttpClient) -> None: # a test for backward compatibility 
with yield from syntax async def handler(request): return web.Response() From c8d8917bd16a628a719427cc8b0376f573d9748c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Thu, 10 Oct 2024 14:05:16 -0500 Subject: [PATCH 0734/1511] [PR #8456/b09d7cc backport][3.11] Add ClientConnectorDNSError for differentiating DNS errors from others (#9460) Co-authored-by: J. Nick Koston <nick@koston.org> Co-authored-by: Marcus Stojcevich <129109254+mstojcevich-cisco@users.noreply.github.com> --- CHANGES/8455.feature.rst | 1 + CONTRIBUTORS.txt | 1 + aiohttp/__init__.py | 2 ++ aiohttp/client.py | 2 ++ aiohttp/client_exceptions.py | 9 +++++++++ aiohttp/connector.py | 3 ++- docs/client_reference.rst | 8 ++++++++ tests/test_client_functional.py | 33 ++++++++++++++++++++++++++++++++- 8 files changed, 57 insertions(+), 2 deletions(-) create mode 100644 CHANGES/8455.feature.rst diff --git a/CHANGES/8455.feature.rst b/CHANGES/8455.feature.rst new file mode 100644 index 00000000000..267e5243afa --- /dev/null +++ b/CHANGES/8455.feature.rst @@ -0,0 +1 @@ +Added :exc:`aiohttp.ClientConnectorDNSError` for differentiating DNS resolution errors from other connector errors -- by :user:`mstojcevich`. 
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index e34aab90cf5..ef0d7d81429 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -223,6 +223,7 @@ Manuel Miranda Marat Sharafutdinov Marc Mueller Marco Paolini +Marcus Stojcevich Mariano Anaya Mariusz Masztalerczuk Marko Kohtala diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index c5f13c6dc49..a08e5406900 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -8,6 +8,7 @@ ClientConnectionError, ClientConnectionResetError, ClientConnectorCertificateError, + ClientConnectorDNSError, ClientConnectorError, ClientConnectorSSLError, ClientError, @@ -128,6 +129,7 @@ "ClientConnectionError", "ClientConnectionResetError", "ClientConnectorCertificateError", + "ClientConnectorDNSError", "ClientConnectorError", "ClientConnectorSSLError", "ClientError", diff --git a/aiohttp/client.py b/aiohttp/client.py index 6d6660e306a..1be2629dcd9 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -42,6 +42,7 @@ ClientConnectionError, ClientConnectionResetError, ClientConnectorCertificateError, + ClientConnectorDNSError, ClientConnectorError, ClientConnectorSSLError, ClientError, @@ -109,6 +110,7 @@ "ClientConnectionError", "ClientConnectionResetError", "ClientConnectorCertificateError", + "ClientConnectorDNSError", "ClientConnectorError", "ClientConnectorSSLError", "ClientError", diff --git a/aiohttp/client_exceptions.py b/aiohttp/client_exceptions.py index 94991c42477..2cf6cf88328 100644 --- a/aiohttp/client_exceptions.py +++ b/aiohttp/client_exceptions.py @@ -30,6 +30,7 @@ "ClientConnectorError", "ClientProxyConnectionError", "ClientSSLError", + "ClientConnectorDNSError", "ClientConnectorSSLError", "ClientConnectorCertificateError", "ConnectionTimeoutError", @@ -206,6 +207,14 @@ def __str__(self) -> str: __reduce__ = BaseException.__reduce__ +class ClientConnectorDNSError(ClientConnectorError): + """DNS resolution failed during client connection. 
+ + Raised in :class:`aiohttp.connector.TCPConnector` if + DNS resolution fails. + """ + + class ClientProxyConnectionError(ClientConnectorError): """Proxy connection error. diff --git a/aiohttp/connector.py b/aiohttp/connector.py index da503bded53..1be9d6d1201 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -38,6 +38,7 @@ from .client_exceptions import ( ClientConnectionError, ClientConnectorCertificateError, + ClientConnectorDNSError, ClientConnectorError, ClientConnectorSSLError, ClientHttpProxyError, @@ -1328,7 +1329,7 @@ async def _create_direct_connection( raise # in case of proxy it is not ClientProxyConnectionError # it is problem of resolving proxy ip itself - raise ClientConnectorError(req.connection_key, exc) from exc + raise ClientConnectorDNSError(req.connection_key, exc) from exc last_exc: Optional[Exception] = None addr_infos = self._convert_hosts_to_addr_infos(hosts) diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 8495ecd9d8e..06b580b6338 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -2272,6 +2272,12 @@ Connection errors Derived from :exc:`ClientOSError` +.. class:: ClientConnectorDNSError + + DNS resolution error. + + Derived from :exc:`ClientConnectorError` + .. 
class:: ClientProxyConnectionError Derived from :exc:`ClientConnectorError` @@ -2353,6 +2359,8 @@ Hierarchy of exceptions * :exc:`ClientProxyConnectionError` + * :exc:`ClientConnectorDNSError` + * :exc:`ClientSSLError` * :exc:`ClientConnectorCertificateError` diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index d39addc29a1..80cf56f8118 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -3248,7 +3248,38 @@ async def test_aiohttp_request_ctx_manager_not_found() -> None: assert False, "never executed" # pragma: no cover -async def test_yield_from_in_session_request(aiohttp_client) -> None: +async def test_raising_client_connector_dns_error_on_dns_failure() -> None: + """Verify that the exception raised when a DNS lookup fails is specific to DNS.""" + with mock.patch( + "aiohttp.connector.TCPConnector._resolve_host", autospec=True, spec_set=True + ) as mock_resolve_host: + mock_resolve_host.side_effect = OSError(None, "DNS lookup failed") + with pytest.raises(aiohttp.ClientConnectorDNSError, match="DNS lookup failed"): + async with aiohttp.request("GET", "http://wrong-dns-name.com"): + assert False, "never executed" + + +async def test_aiohttp_request_coroutine(aiohttp_server: AiohttpServer) -> None: + async def handler(request: web.Request) -> web.Response: + return web.Response() + + app = web.Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app) + + not_an_awaitable = aiohttp.request("GET", server.make_url("/")) + with pytest.raises( + TypeError, + match="^object _SessionRequestContextManager " + "can't be used in 'await' expression$", + ): + await not_an_awaitable # type: ignore[misc] + + await not_an_awaitable._coro # coroutine 'ClientSession._request' was never awaited + await server.close() + + +async def test_yield_from_in_session_request(aiohttp_client: AiohttpClient) -> None: # a test for backward compatibility with yield from syntax async def handler(request): 
return web.Response() From 47475c5b74768e516fee3c9546917e8688caf998 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Thu, 10 Oct 2024 15:43:46 -0500 Subject: [PATCH 0735/1511] Release 3.10.10 (#9462) --- CHANGES.rst | 42 ++++++++++++++++++++++++++++++++++++++++ CHANGES/8455.feature.rst | 1 - CHANGES/9451.bugfix.rst | 1 - CHANGES/9454.misc.rst | 1 - CHANGES/9455.bugfix.rst | 1 - aiohttp/__init__.py | 2 +- 6 files changed, 43 insertions(+), 5 deletions(-) delete mode 100644 CHANGES/8455.feature.rst delete mode 100644 CHANGES/9451.bugfix.rst delete mode 100644 CHANGES/9454.misc.rst delete mode 120000 CHANGES/9455.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index 71edb3798fc..0245204fe5e 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,48 @@ .. towncrier release notes start +3.10.10 (2024-10-10) +==================== + +Bug fixes +--------- + +- Fixed error messages from :py:class:`~aiohttp.resolver.AsyncResolver` being swallowed -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9451`, :issue:`9455`. + + + + +Features +-------- + +- Added :exc:`aiohttp.ClientConnectorDNSError` for differentiating DNS resolution errors from other connector errors -- by :user:`mstojcevich`. + + + *Related issues and pull requests on GitHub:* + :issue:`8455`. + + + + +Miscellaneous internal changes +------------------------------ + +- Simplified DNS resolution throttling code to reduce chance of race conditions -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9454`. + + + + +---- + + 3.10.9 (2024-10-04) =================== diff --git a/CHANGES/8455.feature.rst b/CHANGES/8455.feature.rst deleted file mode 100644 index 267e5243afa..00000000000 --- a/CHANGES/8455.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Added :exc:`aiohttp.ClientConnectorDNSError` for differentiating DNS resolution errors from other connector errors -- by :user:`mstojcevich`. 
diff --git a/CHANGES/9451.bugfix.rst b/CHANGES/9451.bugfix.rst deleted file mode 100644 index 2adcbc66273..00000000000 --- a/CHANGES/9451.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed error messages from :py:class:`~aiohttp.resolver.AsyncResolver` being swallowed -- by :user:`bdraco`. diff --git a/CHANGES/9454.misc.rst b/CHANGES/9454.misc.rst deleted file mode 100644 index 5c842590512..00000000000 --- a/CHANGES/9454.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Simplified DNS resolution throttling code to reduce chance of race conditions -- by :user:`bdraco`. diff --git a/CHANGES/9455.bugfix.rst b/CHANGES/9455.bugfix.rst deleted file mode 120000 index da8457a1de6..00000000000 --- a/CHANGES/9455.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -9451.bugfix.rst \ No newline at end of file diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 246a9202b4e..2be76a4db4a 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.10.10.dev0" +__version__ = "3.10.10" from typing import TYPE_CHECKING, Tuple From b443019eceab7799f78f0ba83054e2d145c68d05 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Thu, 10 Oct 2024 16:59:44 -0500 Subject: [PATCH 0736/1511] Increment version to 3.10.11.dev0 --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 2be76a4db4a..3c08b041af9 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.10.10" +__version__ = "3.10.11.dev0" from typing import TYPE_CHECKING, Tuple From 4a4ac43aa2b62e419bc232664a0df67798391d65 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Fri, 11 Oct 2024 09:57:44 -0500 Subject: [PATCH 0737/1511] [PR #9466/03851d1 backport][3.11] Improve performance of default auth (#9468) --- CHANGES/9466.feature.rst | 1 + aiohttp/client.py | 13 ++++++++++--- 2 files changed, 11 insertions(+), 3 deletions(-) create mode 120000 CHANGES/9466.feature.rst diff --git a/CHANGES/9466.feature.rst b/CHANGES/9466.feature.rst new file mode 120000 index 00000000000..a54874e90e0 --- /dev/null +++ b/CHANGES/9466.feature.rst @@ -0,0 +1 @@ +8966.feature.rst \ No newline at end of file diff --git a/aiohttp/client.py b/aiohttp/client.py index 1be2629dcd9..53ef459df0e 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -230,6 +230,7 @@ class ClientSession: ATTRS = frozenset( [ "_base_url", + "_base_url_origin", "_source_traceback", "_connector", "_loop", @@ -309,10 +310,12 @@ def __init__( if base_url is None or isinstance(base_url, URL): self._base_url: Optional[URL] = base_url + self._base_url_origin = None if base_url is None else base_url.origin() else: self._base_url = URL(base_url) + self._base_url_origin = self._base_url.origin() assert ( - self._base_url.origin() == self._base_url + self._base_url_origin == self._base_url ), "Only absolute URLs without path part are supported" if timeout is sentinel or timeout is None: @@ -620,8 +623,12 @@ async def _request( if auth is None: auth = auth_from_url - if auth is None and ( - not self._base_url or self._base_url.origin() == url.origin() + if ( + auth is None + and self._default_auth + and ( + not self._base_url or self._base_url_origin == url.origin() + ) ): auth = self._default_auth # It would be confusing if we support explicit From b2e794c07bf2d8d62bb9ba727eb1abee959b2475 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 11 Oct 2024 15:11:32 +0000 Subject: [PATCH 0738/1511] [PR #9465/8a4707ed backport][3.11] Small cleanups to pending changelog messages (#9469) Co-authored-by: 
J. Nick Koston <nick@koston.org> --- CHANGES/9079.misc.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGES/9079.misc.rst b/CHANGES/9079.misc.rst index db20492c9f8..fb6b84e7f22 100644 --- a/CHANGES/9079.misc.rst +++ b/CHANGES/9079.misc.rst @@ -1 +1 @@ -Increase minimum yarl version to 1.11.0 -- by :user:`bdraco`. +Increased minimum yarl version to 1.11.0 -- by :user:`bdraco`. From bbf7da337959fbcb9b20538e3fee8cc4b6360356 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 11 Oct 2024 19:07:32 +0000 Subject: [PATCH 0739/1511] [PR #9470/5bded303 backport][3.10] Avoid calling update_cookies when there are no cookies (#9471) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/9470.misc.rst | 1 + aiohttp/client.py | 5 +++-- 2 files changed, 4 insertions(+), 2 deletions(-) create mode 100644 CHANGES/9470.misc.rst diff --git a/CHANGES/9470.misc.rst b/CHANGES/9470.misc.rst new file mode 100644 index 00000000000..c363a0f8cfe --- /dev/null +++ b/CHANGES/9470.misc.rst @@ -0,0 +1 @@ +Improved performance of the client request lifecycle when there are no cookies -- by :user:`bdraco`. 
diff --git a/aiohttp/client.py b/aiohttp/client.py index 93dec00a49c..a46c26537dd 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -356,7 +356,7 @@ def __init__( cookie_jar = CookieJar(loop=loop) self._cookie_jar = cookie_jar - if cookies is not None: + if cookies: self._cookie_jar.update_cookies(cookies) self._connector = connector @@ -706,7 +706,8 @@ async def _request( raise raise ClientOSError(*exc.args) from exc - self._cookie_jar.update_cookies(resp.cookies, resp.url) + if cookies := resp.cookies: + self._cookie_jar.update_cookies(cookies, resp.url) # redirects if resp.status in (301, 302, 303, 307, 308) and allow_redirects: From 935b335f09f39e806654ba0e096d345e80d9d554 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 11 Oct 2024 19:07:40 +0000 Subject: [PATCH 0740/1511] [PR #9470/5bded303 backport][3.11] Avoid calling update_cookies when there are no cookies (#9472) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/9470.misc.rst | 1 + aiohttp/client.py | 5 +++-- 2 files changed, 4 insertions(+), 2 deletions(-) create mode 100644 CHANGES/9470.misc.rst diff --git a/CHANGES/9470.misc.rst b/CHANGES/9470.misc.rst new file mode 100644 index 00000000000..c363a0f8cfe --- /dev/null +++ b/CHANGES/9470.misc.rst @@ -0,0 +1 @@ +Improved performance of the client request lifecycle when there are no cookies -- by :user:`bdraco`. 
diff --git a/aiohttp/client.py b/aiohttp/client.py index 53ef459df0e..c3025cd5ca6 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -369,7 +369,7 @@ def __init__( cookie_jar = CookieJar(loop=loop) self._cookie_jar = cookie_jar - if cookies is not None: + if cookies: self._cookie_jar.update_cookies(cookies) self._connector = connector @@ -737,7 +737,8 @@ async def _request( raise raise ClientOSError(*exc.args) from exc - self._cookie_jar.update_cookies(resp.cookies, resp.url) + if cookies := resp.cookies: + self._cookie_jar.update_cookies(cookies, resp.url) # redirects if resp.status in (301, 302, 303, 307, 308) and allow_redirects: From c8bfde8de091d07e7d0376ba0dd58c4034abd45e Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 13 Oct 2024 19:03:11 +0000 Subject: [PATCH 0741/1511] [PR #9477/ce33b695 backport][3.10] Speed up matching the host header when updating client headers (#9478) Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/client_reqrep.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index d536c0a1ca4..c063c56314e 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -3,6 +3,7 @@ import contextlib import functools import io +import itertools import re import sys import traceback @@ -244,6 +245,9 @@ class ClientRequest: } POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT} ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE}) + _HOST_STRINGS = frozenset( + map("".join, itertools.product(*zip("host".upper(), "host".lower()))) + ) DEFAULT_HEADERS = { hdrs.ACCEPT: "*/*", @@ -470,7 +474,7 @@ def update_headers(self, headers: Optional[LooseHeaders]) -> None: for key, value in headers: # type: ignore[misc] # A special case for Host header - if key.lower() == "host": + if key in self._HOST_STRINGS: self.headers[key] = value else: self.headers.add(key, value) From 
02b399ba32ae058098ace7b2c386f3b17ffcfb0c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 13 Oct 2024 19:11:03 +0000 Subject: [PATCH 0742/1511] [PR #9477/ce33b695 backport][3.11] Speed up matching the host header when updating client headers (#9479) Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/client_reqrep.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 7664ab15201..ac43e8b9437 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -3,6 +3,7 @@ import contextlib import functools import io +import itertools import re import sys import traceback @@ -244,6 +245,9 @@ class ClientRequest: } POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT} ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE}) + _HOST_STRINGS = frozenset( + map("".join, itertools.product(*zip("host".upper(), "host".lower()))) + ) DEFAULT_HEADERS = { hdrs.ACCEPT: "*/*", @@ -466,7 +470,7 @@ def update_headers(self, headers: Optional[LooseHeaders]) -> None: for key, value in headers: # type: ignore[misc] # A special case for Host header - if key.lower() == "host": + if key in self._HOST_STRINGS: self.headers[key] = value else: self.headers.add(key, value) From fe61180b7646c37967376ff37c6b8c4462187fc1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 14 Oct 2024 11:23:25 +0000 Subject: [PATCH 0743/1511] Bump yarl from 1.14.0 to 1.15.2 (#9483) Bumps [yarl](https://github.com/aio-libs/yarl) from 1.14.0 to 1.15.2. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/yarl/releases">yarl's releases</a>.</em></p> <blockquote> <h2>1.15.2</h2> <h2>Miscellaneous internal changes</h2> <ul> <li> <p>Improved performance of converting :class:<code>~yarl.URL</code> to a string -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/yarl/issues/1234">#1234</a>.</p> </li> <li> <p>Improved performance of :py:meth:<code>~yarl.URL.joinpath</code> -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/yarl/issues/1248">#1248</a>, <a href="https://redirect.github.com/aio-libs/yarl/issues/1250">#1250</a>.</p> </li> <li> <p>Improved performance of constructing query strings from :class:<code>~multidict.MultiDict</code> -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/yarl/issues/1256">#1256</a>.</p> </li> <li> <p>Improved performance of constructing query strings with <code>int</code> values -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/yarl/issues/1259">#1259</a>.</p> </li> </ul> <hr /> <h2>1.15.1</h2> <h2>Miscellaneous internal changes</h2> <ul> <li> <p>Improved performance of calling :py:meth:<code>~yarl.URL.build</code> -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/yarl/issues/1222">#1222</a>.</p> </li> <li> <p>Improved performance of all :class:<code>~yarl.URL</code> methods that create new :class:<code>~yarl.URL</code> objects -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/yarl/issues/1226">#1226</a>.</p> </li> <li> 
<p>Improved performance of :class:<code>~yarl.URL</code> methods that modify the network location -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/yarl/issues/1229">#1229</a>.</p> </li> </ul> <hr /> <h2>1.15.0</h2> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/yarl/blob/master/CHANGES.rst">yarl's changelog</a>.</em></p> <blockquote> <h1>1.15.2</h1> <p><em>(2024-10-13)</em></p> <h2>Miscellaneous internal changes</h2> <ul> <li> <p>Improved performance of converting :class:<code>~yarl.URL</code> to a string -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1234</code>.</p> </li> <li> <p>Improved performance of :py:meth:<code>~yarl.URL.joinpath</code> -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1248</code>, :issue:<code>1250</code>.</p> </li> <li> <p>Improved performance of constructing query strings from :class:<code>~multidict.MultiDict</code> -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1256</code>.</p> </li> <li> <p>Improved performance of constructing query strings with <code>int</code> values -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1259</code>.</p> </li> </ul> <hr /> <h1>1.15.1</h1> <p><em>(2024-10-12)</em></p> <h2>Miscellaneous internal changes</h2> <ul> <li> <p>Improved performance of calling :py:meth:<code>~yarl.URL.build</code> -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1222</code>.</p> </li> <li> <p>Improved performance of all :class:<code>~yarl.URL</code> methods that create new :class:<code>~yarl.URL</code> objects -- by 
:user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em></p> </li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/yarl/commit/33294bf084d2dde1ac1e8133b0125e1f142a8274"><code>33294bf</code></a> Release 1.15.2 (<a href="https://redirect.github.com/aio-libs/yarl/issues/1260">#1260</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/4782275b8e59531b8bf59bae0949bf51e459a24f"><code>4782275</code></a> Improve performance of constructing query strings with <code>int</code> values (<a href="https://redirect.github.com/aio-libs/yarl/issues/1259">#1259</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/5d6ff6ce4b574a910c9a6438f001adb52f22f511"><code>5d6ff6c</code></a> Optimize building query strings with MultiDict (<a href="https://redirect.github.com/aio-libs/yarl/issues/1256">#1256</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/642b5e254f2da344957795108829b90c20bc77ed"><code>642b5e2</code></a> Add benchmarks for query strings with int values (<a href="https://redirect.github.com/aio-libs/yarl/issues/1257">#1257</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/ef4b4984dc670e597f8825d6bdd4caea5d107701"><code>ef4b498</code></a> Avoid some string copy in extend_query (<a href="https://redirect.github.com/aio-libs/yarl/issues/1251">#1251</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/5c56ba2795a4ee84f4b34c93b56e96e53e2f5f3b"><code>5c56ba2</code></a> Check val.netloc instead of self.absolute internally (<a href="https://redirect.github.com/aio-libs/yarl/issues/1252">#1252</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/91c4d9272ae0218d9ed04c2de6e50026059bc490"><code>91c4d92</code></a> Add additional benchmarks for common cases (<a href="https://redirect.github.com/aio-libs/yarl/issues/1254">#1254</a>)</li> <li><a 
href="https://github.com/aio-libs/yarl/commit/e1a4d51e363cab08c8eb0ac194bd61d0cd3862db"><code>e1a4d51</code></a> Add some additional extend_query benchmarks for common cases (<a href="https://redirect.github.com/aio-libs/yarl/issues/1253">#1253</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/a5de0c03bf40f81b4bd0740eeb9019500b2d53f7"><code>a5de0c0</code></a> Avoid copying list to reverse segments when making children (<a href="https://redirect.github.com/aio-libs/yarl/issues/1250">#1250</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/404b542db329157521c597da98a7ccf7b343b4d1"><code>404b542</code></a> Avoid normalizing child paths when there are no dots in the path (<a href="https://redirect.github.com/aio-libs/yarl/issues/1248">#1248</a>)</li> <li>Additional commits viewable in <a href="https://github.com/aio-libs/yarl/compare/v1.14.0...v1.15.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=yarl&package-manager=pip&previous-version=1.14.0&new-version=1.15.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 734e543c5f9..fd196e2823b 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -44,5 +44,5 @@ typing-extensions==4.12.2 # via multidict 
uvloop==0.21.0b1 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in -yarl==1.14.0 +yarl==1.15.2 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 9f1281cd31a..2e6e323be46 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -278,7 +278,7 @@ wait-for-it==2.2.2 # via -r requirements/test.in wheel==0.44.0 # via pip-tools -yarl==1.14.0 +yarl==1.15.2 # via -r requirements/runtime-deps.in zipp==3.20.2 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 68a12a54400..0ab423b9ab4 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -270,7 +270,7 @@ wait-for-it==2.2.2 # via -r requirements/test.in wheel==0.44.0 # via pip-tools -yarl==1.14.0 +yarl==1.15.2 # via -r requirements/runtime-deps.in zipp==3.20.2 # via diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 13d1cfac572..1524b728bba 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -38,5 +38,5 @@ pycparser==2.22 # via cffi typing-extensions==4.12.2 # via multidict -yarl==1.14.0 +yarl==1.15.2 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 934c105b910..26e1f2dbcb3 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -141,5 +141,5 @@ uvloop==0.21.0b1 ; platform_system != "Windows" and implementation_name == "cpyt # via -r requirements/base.in wait-for-it==2.2.2 # via -r requirements/test.in -yarl==1.14.0 +yarl==1.15.2 # via -r requirements/runtime-deps.in From f5b1cb4f1f9f5040983822ccca9930d9f78ec1ee Mon Sep 17 00:00:00 2001 From: Cycloctane <Cycloctane@outlook.com> Date: Tue, 15 Oct 2024 01:19:44 +0800 Subject: [PATCH 0744/1511] [PR #9312/7d1056b backport][3.11] Replace aioredis with valkey in examples (#9476) Backport from PR #9312 (commit https://github.com/aio-libs/aiohttp/commit/7d1056b3ca5552621d562ac23df08ea564de7071) --- 
.github/codeql.yml | 4 +++- examples/background_tasks.py | 45 +++++++++++++++++------------------- requirements/constraints.txt | 7 +++--- requirements/dev.txt | 7 +++--- requirements/lint.in | 2 +- requirements/lint.txt | 7 +++--- 6 files changed, 34 insertions(+), 38 deletions(-) diff --git a/.github/codeql.yml b/.github/codeql.yml index dc4bcc4a4dc..bfb3f6feab6 100644 --- a/.github/codeql.yml +++ b/.github/codeql.yml @@ -1,3 +1,5 @@ query-filters: - exclude: - id: py/unsafe-cyclic-import + id: + - py/ineffectual-statement + - py/unsafe-cyclic-import diff --git a/examples/background_tasks.py b/examples/background_tasks.py index e9510c13fd7..4c026b81d01 100755 --- a/examples/background_tasks.py +++ b/examples/background_tasks.py @@ -1,13 +1,14 @@ #!/usr/bin/env python3 """Example of aiohttp.web.Application.on_startup signal handler""" import asyncio -from typing import List +from contextlib import suppress +from typing import AsyncIterator, List -import aioredis +import valkey.asyncio as valkey from aiohttp import web -redis_listener = web.AppKey("redis_listener", asyncio.Task[None]) +valkey_listener = web.AppKey("valkey_listener", asyncio.Task[None]) websockets = web.AppKey("websockets", List[web.WebSocketResponse]) @@ -29,32 +30,29 @@ async def on_shutdown(app: web.Application) -> None: await ws.close(code=999, message=b"Server shutdown") -async def listen_to_redis(app): - try: - sub = await aioredis.Redis(host="localhost", port=6379) - ch, *_ = await sub.subscribe("news") - async for msg in ch.iter(encoding="utf-8"): +async def listen_to_valkey(app: web.Application) -> None: + r = valkey.Valkey(host="localhost", port=6379, decode_responses=True) + channel = "news" + async with r.pubsub() as sub: + await sub.subscribe(channel) + async for msg in sub.listen(): + if msg["type"] != "message": + continue # Forward message to all connected websockets: for ws in app[websockets]: - await ws.send_str(f"{ch.name}: {msg}") - print(f"message in {ch.name}: {msg}") - except 
asyncio.CancelledError: - pass - finally: - print("Cancel Redis listener: close connection...") - await sub.unsubscribe(ch.name) - await sub.quit() - print("Redis connection closed.") + await ws.send_str(f"{channel}: {msg}") + print(f"message in {channel}: {msg}") -async def start_background_tasks(app: web.Application) -> None: - app[redis_listener] = asyncio.create_task(listen_to_redis(app)) +async def background_tasks(app: web.Application) -> AsyncIterator[None]: + app[valkey_listener] = asyncio.create_task(listen_to_valkey(app)) + yield -async def cleanup_background_tasks(app): print("cleanup background tasks...") - app[redis_listener].cancel() - await app[redis_listener] + app[valkey_listener].cancel() + with suppress(asyncio.CancelledError): + await app[valkey_listener] def init(): @@ -62,8 +60,7 @@ def init(): l: List[web.WebSocketResponse] = [] app[websockets] = l app.router.add_get("/news", websocket_handler) - app.on_startup.append(start_background_tasks) - app.on_cleanup.append(cleanup_background_tasks) + app.cleanup_ctx.append(background_tasks) app.on_shutdown.append(on_shutdown) return app diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 2e6e323be46..6f3260e84f0 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -12,8 +12,6 @@ aiohappyeyeballs==2.4.3 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.7 # via -r requirements/doc.in -aioredis==2.0.1 - # via -r requirements/lint.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in alabaster==0.7.13 @@ -23,7 +21,7 @@ annotated-types==0.7.0 async-timeout==4.0.3 ; python_version < "3.11" # via # -r requirements/runtime-deps.in - # aioredis + # valkey attrs==24.2.0 # via -r requirements/runtime-deps.in babel==2.16.0 @@ -255,7 +253,6 @@ typer==0.12.5 # via python-on-whales typing-extensions==4.12.2 # via - # aioredis # annotated-types # multidict # mypy @@ -272,6 +269,8 @@ uvloop==0.21.0b1 ; platform_system != "Windows" # via # -r 
requirements/base.in # -r requirements/lint.in +valkey==6.0.2 + # via -r requirements/lint.in virtualenv==20.26.6 # via pre-commit wait-for-it==2.2.2 diff --git a/requirements/dev.txt b/requirements/dev.txt index 0ab423b9ab4..30bb3f60a95 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -12,8 +12,6 @@ aiohappyeyeballs==2.4.3 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.7 # via -r requirements/doc.in -aioredis==2.0.1 - # via -r requirements/lint.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in alabaster==0.7.13 @@ -23,7 +21,7 @@ annotated-types==0.7.0 async-timeout==4.0.3 ; python_version < "3.11" # via # -r requirements/runtime-deps.in - # aioredis + # valkey attrs==24.2.0 # via -r requirements/runtime-deps.in babel==2.16.0 @@ -247,7 +245,6 @@ typer==0.12.5 # via python-on-whales typing-extensions==4.12.2 # via - # aioredis # annotated-types # multidict # mypy @@ -264,6 +261,8 @@ uvloop==0.21.0b1 ; platform_system != "Windows" and implementation_name == "cpyt # via # -r requirements/base.in # -r requirements/lint.in +valkey==6.0.2 + # via -r requirements/lint.in virtualenv==20.26.6 # via pre-commit wait-for-it==2.2.2 diff --git a/requirements/lint.in b/requirements/lint.in index 0d46809a083..06d22fc737a 100644 --- a/requirements/lint.in +++ b/requirements/lint.in @@ -1,5 +1,4 @@ aiodns -aioredis freezegun mypy; implementation_name == "cpython" pre-commit @@ -9,3 +8,4 @@ python-on-whales slotscheck trustme uvloop; platform_system != "Windows" +valkey diff --git a/requirements/lint.txt b/requirements/lint.txt index f073c29cdfe..46ec86f3730 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -6,12 +6,10 @@ # aiodns==3.2.0 # via -r requirements/lint.in -aioredis==2.0.1 - # via -r requirements/lint.in annotated-types==0.7.0 # via pydantic async-timeout==4.0.3 - # via aioredis + # via valkey certifi==2024.8.30 # via requests cffi==1.17.1 @@ -107,7 +105,6 @@ typer==0.12.5 # via python-on-whales typing-extensions==4.12.2 # 
via - # aioredis # annotated-types # mypy # pydantic @@ -119,5 +116,7 @@ urllib3==2.2.3 # via requests uvloop==0.21.0b1 ; platform_system != "Windows" # via -r requirements/lint.in +valkey==6.0.2 + # via -r requirements/lint.in virtualenv==20.26.6 # via pre-commit From a9d7c65ebd80f62e55eaa20700d7fd8c239a8f78 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 15 Oct 2024 10:51:50 +0000 Subject: [PATCH 0745/1511] Bump uvloop from 0.21.0b1 to 0.21.0 (#9487) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [uvloop](https://github.com/MagicStack/uvloop) from 0.21.0b1 to 0.21.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/MagicStack/uvloop/releases">uvloop's releases</a>.</em></p> <blockquote> <h2>v0.21.0</h2> <h1>Changes</h1> <ul> <li>Add cleanup_socket param on create_unix_server() (<a href="https://redirect.github.com/MagicStack/uvloop/issues/623">#623</a>) (by <a href="https://github.com/fantix"><code>@​fantix</code></a> in d6114d2)</li> </ul> <h1>Fixes</h1> <ul> <li> <p>Use cythonized SO_REUSEPORT rather than the unwrapped native one. 
(<a href="https://redirect.github.com/MagicStack/uvloop/issues/609">#609</a>) (by <a href="https://github.com/ptribble"><code>@​ptribble</code></a> in 4083a94e for <a href="https://redirect.github.com/MagicStack/uvloop/issues/550">#550</a>)</p> </li> <li> <p>UDP errors should result in protocol.error_received (<a href="https://redirect.github.com/MagicStack/uvloop/issues/601">#601</a>) (by <a href="https://github.com/jensbjorgensen"><code>@​jensbjorgensen</code></a> in 3c3bbeff)</p> </li> <li> <p>Updates for Cython3 (<a href="https://redirect.github.com/MagicStack/uvloop/issues/587">#587</a>) (by <a href="https://github.com/alan-brooks"><code>@​alan-brooks</code></a> in 3fba9fab for <a href="https://redirect.github.com/MagicStack/uvloop/issues/587">#587</a>)</p> </li> <li> <p>Test with Python 3.13 (<a href="https://redirect.github.com/MagicStack/uvloop/issues/610">#610</a>) (by <a href="https://github.com/edgarrmondragon"><code>@​edgarrmondragon</code></a> in fb5a139)</p> </li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/MagicStack/uvloop/commit/2e8feae2a6db57bb611ffa388d28443487f4db94"><code>2e8feae</code></a> bump cibuildwheel version</li> <li><a href="https://github.com/MagicStack/uvloop/commit/40e8a34c8bcee281a115d1ea4149205b849f7724"><code>40e8a34</code></a> uvloop 0.21.0</li> <li>See full diff in <a href="https://github.com/MagicStack/uvloop/compare/v0.21.0beta1...v0.21.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=uvloop&package-manager=pip&previous-version=0.21.0b1&new-version=0.21.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index fd196e2823b..2b3508c6dd7 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -42,7 +42,7 @@ pycparser==2.22 # via cffi 
typing-extensions==4.12.2 # via multidict -uvloop==0.21.0b1 ; platform_system != "Windows" and implementation_name == "cpython" +uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in yarl==1.15.2 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 6f3260e84f0..0b2a8766941 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -265,7 +265,7 @@ uritemplate==4.1.1 # via gidgethub urllib3==2.2.3 # via requests -uvloop==0.21.0b1 ; platform_system != "Windows" +uvloop==0.21.0 ; platform_system != "Windows" # via # -r requirements/base.in # -r requirements/lint.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 30bb3f60a95..85967c4041e 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -257,7 +257,7 @@ uritemplate==4.1.1 # via gidgethub urllib3==2.2.3 # via requests -uvloop==0.21.0b1 ; platform_system != "Windows" and implementation_name == "cpython" +uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpython" # via # -r requirements/base.in # -r requirements/lint.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 46ec86f3730..b1060032662 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -114,7 +114,7 @@ typing-extensions==4.12.2 # typer urllib3==2.2.3 # via requests -uvloop==0.21.0b1 ; platform_system != "Windows" +uvloop==0.21.0 ; platform_system != "Windows" # via -r requirements/lint.in valkey==6.0.2 # via -r requirements/lint.in diff --git a/requirements/test.txt b/requirements/test.txt index 26e1f2dbcb3..9d54a0f3173 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -137,7 +137,7 @@ typing-extensions==4.12.2 # typer urllib3==2.2.3 # via requests -uvloop==0.21.0b1 ; platform_system != "Windows" and implementation_name == "cpython" +uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r 
requirements/base.in wait-for-it==2.2.2 # via -r requirements/test.in From 1e5d5bf7917de4879a9875901ea6db605cbb8111 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 15 Oct 2024 10:52:45 +0000 Subject: [PATCH 0746/1511] Bump proxy-py from 2.4.8 to 2.4.9 (#9488) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [proxy-py](https://github.com/abhinavsingh/proxy.py) from 2.4.8 to 2.4.9. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/abhinavsingh/proxy.py/releases">proxy-py's releases</a>.</em></p> <blockquote> <h2>v2.4.9</h2> <h2>What's Changed</h2> <ul> <li><code>GroutClientBasePlugin</code> and example <code>GroutClientPlugin</code> by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1488">abhinavsingh/proxy.py#1488</a></li> <li><code>GroutClientBasePlugin</code> ability to modify request object by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1489">abhinavsingh/proxy.py#1489</a></li> <li><code>grout</code> host header & dynamic routing doc by <a href="https://github.com/abhinavsingh"><code>@​abhinavsingh</code></a> in <a href="https://redirect.github.com/abhinavsingh/proxy.py/pull/1490">abhinavsingh/proxy.py#1490</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/abhinavsingh/proxy.py/compare/v2.4.8...v2.4.9">https://github.com/abhinavsingh/proxy.py/compare/v2.4.8...v2.4.9</a></p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/9077c16b1b965e0ff27b2e2c23edd4116d477e98"><code>9077c16</code></a> <code>grout</code> host header & dynamic routing doc (<a 
href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1490">#1490</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/79546ff0cd8a191022e7eb1c659cabd5831df1b9"><code>79546ff</code></a> <code>GroutClientBasePlugin</code> return None for route to drop the request</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/16ab675b548af88abf959ad26c5ff27ab7d23154"><code>16ab675</code></a> <code>GroutClientBasePlugin</code> ability to modify request object (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1489">#1489</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/0641864478066df0a918f87103744f06ad0c1217"><code>0641864</code></a> <code>GroutClientBasePlugin</code> and example <code>GroutClientPlugin</code> (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1488">#1488</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/c703edac0b9bc7a7652dd6059a736acb19c4ddf3"><code>c703eda</code></a> Add <code>grout</code> wildcard domains and routing doc</li> <li>See full diff in <a href="https://github.com/abhinavsingh/proxy.py/compare/v2.4.8...v2.4.9">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=proxy-py&package-manager=pip&previous-version=2.4.8&new-version=2.4.9)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 0b2a8766941..45288cc9b92 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -140,7 +140,7 @@ propcache==0.2.0 # via # -r requirements/runtime-deps.in # yarl 
-proxy-py==2.4.8 +proxy-py==2.4.9 # via -r requirements/test.in pycares==4.4.0 # via aiodns diff --git a/requirements/dev.txt b/requirements/dev.txt index 85967c4041e..735576fa485 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -137,7 +137,7 @@ propcache==0.2.0 # via # -r requirements/runtime-deps.in # yarl -proxy-py==2.4.8 +proxy-py==2.4.9 # via -r requirements/test.in pycares==4.4.0 # via aiodns diff --git a/requirements/test.txt b/requirements/test.txt index 9d54a0f3173..af4740a52f2 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -75,7 +75,7 @@ propcache==0.2.0 # via # -r requirements/runtime-deps.in # yarl -proxy-py==2.4.8 +proxy-py==2.4.9 # via -r requirements/test.in pycares==4.4.0 # via aiodns From 56572b3bae4ae9cf13882bc38ced1186d23d10d7 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 16 Oct 2024 06:28:51 +0000 Subject: [PATCH 0747/1511] [PR #9485/da0099dc backport][3.10] Avoid passing client writer to response when already finished (#9492) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/9485.misc.rst | 1 + aiohttp/client_reqrep.py | 26 +++++++++++++------------- 2 files changed, 14 insertions(+), 13 deletions(-) create mode 100644 CHANGES/9485.misc.rst diff --git a/CHANGES/9485.misc.rst b/CHANGES/9485.misc.rst new file mode 100644 index 00000000000..bb0978abd46 --- /dev/null +++ b/CHANGES/9485.misc.rst @@ -0,0 +1 @@ +Improved performance of sending client requests when the writer can finish synchronously -- by :user:`bdraco`. 
diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index c063c56314e..0c29e0d4594 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -355,17 +355,11 @@ def _writer(self) -> Optional["asyncio.Task[None]"]: return self.__writer @_writer.setter - def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None: + def _writer(self, writer: "asyncio.Task[None]") -> None: if self.__writer is not None: self.__writer.remove_done_callback(self.__reset_writer) self.__writer = writer - if writer is None: - return - if writer.done(): - # The writer is already done, so we can reset it immediately. - self.__reset_writer() - else: - writer.add_done_callback(self.__reset_writer) + writer.add_done_callback(self.__reset_writer) def is_ssl(self) -> bool: return self.url.scheme in ("https", "wss") @@ -779,6 +773,7 @@ async def send(self, conn: "Connection") -> "ClientResponse": await writer.write_headers(status_line, self.headers) coro = self.write_bytes(writer, conn) + task: Optional["asyncio.Task[None]"] if sys.version_info >= (3, 12): # Optimization for Python 3.12, try to write # bytes immediately to avoid having to schedule @@ -787,7 +782,11 @@ async def send(self, conn: "Connection") -> "ClientResponse": else: task = self.loop.create_task(coro) - self._writer = task + if task.done(): + task = None + else: + self._writer = task + response_class = self.response_class assert response_class is not None self.response = response_class( @@ -864,7 +863,7 @@ def __init__( method: str, url: URL, *, - writer: "asyncio.Task[None]", + writer: "Optional[asyncio.Task[None]]", continue100: Optional["asyncio.Future[bool]"], timer: BaseTimerContext, request_info: RequestInfo, @@ -880,7 +879,8 @@ def __init__( self._real_url = url self._url = url.with_fragment(None) if url.raw_fragment else url self._body: Optional[bytes] = None - self._writer = writer + if writer is not None: + self._writer = writer self._continue = continue100 # None by default self._closed = 
True self._history: Tuple[ClientResponse, ...] = () @@ -924,8 +924,8 @@ def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None: if writer is None: return if writer.done(): - # The writer is already done, so we can reset it immediately. - self.__reset_writer() + # The writer is already done, so we can clear it immediately. + self.__writer = None else: writer.add_done_callback(self.__reset_writer) From 50ad770c75ed7fcea6f644380d0aeace8ce7185a Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 16 Oct 2024 06:31:50 +0000 Subject: [PATCH 0748/1511] [PR #9485/da0099dc backport][3.11] Avoid passing client writer to response when already finished (#9493) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/9485.misc.rst | 1 + aiohttp/client_reqrep.py | 26 +++++++++++++------------- 2 files changed, 14 insertions(+), 13 deletions(-) create mode 100644 CHANGES/9485.misc.rst diff --git a/CHANGES/9485.misc.rst b/CHANGES/9485.misc.rst new file mode 100644 index 00000000000..bb0978abd46 --- /dev/null +++ b/CHANGES/9485.misc.rst @@ -0,0 +1 @@ +Improved performance of sending client requests when the writer can finish synchronously -- by :user:`bdraco`. diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index ac43e8b9437..46c96086682 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -355,17 +355,11 @@ def _writer(self) -> Optional["asyncio.Task[None]"]: return self.__writer @_writer.setter - def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None: + def _writer(self, writer: "asyncio.Task[None]") -> None: if self.__writer is not None: self.__writer.remove_done_callback(self.__reset_writer) self.__writer = writer - if writer is None: - return - if writer.done(): - # The writer is already done, so we can reset it immediately. 
- self.__reset_writer() - else: - writer.add_done_callback(self.__reset_writer) + writer.add_done_callback(self.__reset_writer) def is_ssl(self) -> bool: return self.url.scheme in _SSL_SCHEMES @@ -769,6 +763,7 @@ async def send(self, conn: "Connection") -> "ClientResponse": await writer.write_headers(status_line, self.headers) coro = self.write_bytes(writer, conn) + task: Optional["asyncio.Task[None]"] if sys.version_info >= (3, 12): # Optimization for Python 3.12, try to write # bytes immediately to avoid having to schedule @@ -777,7 +772,11 @@ async def send(self, conn: "Connection") -> "ClientResponse": else: task = self.loop.create_task(coro) - self._writer = task + if task.done(): + task = None + else: + self._writer = task + response_class = self.response_class assert response_class is not None self.response = response_class( @@ -854,7 +853,7 @@ def __init__( method: str, url: URL, *, - writer: "asyncio.Task[None]", + writer: "Optional[asyncio.Task[None]]", continue100: Optional["asyncio.Future[bool]"], timer: BaseTimerContext, request_info: RequestInfo, @@ -870,7 +869,8 @@ def __init__( self._real_url = url self._url = url.with_fragment(None) if url.raw_fragment else url self._body: Optional[bytes] = None - self._writer = writer + if writer is not None: + self._writer = writer self._continue = continue100 # None by default self._closed = True self._history: Tuple[ClientResponse, ...] = () @@ -914,8 +914,8 @@ def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None: if writer is None: return if writer.done(): - # The writer is already done, so we can reset it immediately. - self.__reset_writer() + # The writer is already done, so we can clear it immediately. 
+ self.__writer = None else: writer.add_done_callback(self.__reset_writer) From cd45a7f3b8632b06343954923ff87309416a0ca3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 16 Oct 2024 11:22:13 +0000 Subject: [PATCH 0749/1511] Bump setuptools from 75.1.0 to 75.2.0 (#9494) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [setuptools](https://github.com/pypa/setuptools) from 75.1.0 to 75.2.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/setuptools/blob/main/NEWS.rst">setuptools's changelog</a>.</em></p> <blockquote> <h1>v75.2.0</h1> <h2>Features</h2> <ul> <li>Made errors when parsing <code>Distribution</code> data more explicit about the expected type (<code>tuple[str, ...] | list[str]</code>) -- by :user:<code>Avasam</code> (<a href="https://redirect.github.com/pypa/setuptools/issues/4578">#4578</a>)</li> </ul> <h2>Bugfixes</h2> <ul> <li>Fix a <code>TypeError</code> when a <code>Distribution</code>'s old included attribute was a <code>tuple</code> -- by :user:<code>Avasam</code> (<a href="https://redirect.github.com/pypa/setuptools/issues/4578">#4578</a>)</li> <li>Add workaround for <code>bdist_wheel --dist-info-dir</code> errors when customisation does not inherit from setuptools. (<a href="https://redirect.github.com/pypa/setuptools/issues/4684">#4684</a>)</li> </ul> <h1>v75.1.1</h1> <h2>Bugfixes</h2> <ul> <li>Re-use pre-existing <code>.dist-info</code> dir when creating wheels via the build backend APIs (PEP 517) and the <code>metadata_directory</code> argument is passed -- by :user:<code>pelson</code>. (<a href="https://redirect.github.com/pypa/setuptools/issues/1825">#1825</a>)</li> <li>Changed <code>egg_info</code> command to avoid adding an empty <code>.egg-info</code> directory while iterating over entry-points. 
This avoids triggering integration problems with <code>importlib.metadata</code>/<code>importlib_metadata</code> (reference: <a href="https://redirect.github.com/pypa/pyproject-hooks/issues/206">pypa/pyproject-hooks#206</a>). (<a href="https://redirect.github.com/pypa/setuptools/issues/4680">#4680</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/setuptools/commit/61a5a03fbf8acc59e6e12144011aa06b85162bda"><code>61a5a03</code></a> Bump version: 75.1.1 → 75.2.0</li> <li><a href="https://github.com/pypa/setuptools/commit/8ad3ea7509e7f0168f86fbf191ae4e9f13229210"><code>8ad3ea7</code></a> Workaround for <code>bdist_wheel.dist_info_dir</code> problems (<a href="https://redirect.github.com/pypa/setuptools/issues/4684">#4684</a>)</li> <li><a href="https://github.com/pypa/setuptools/commit/9af08776190841f022136be4192dfeeafd65406d"><code>9af0877</code></a> Type sequence checks in setuptools/dist.py (<a href="https://redirect.github.com/pypa/setuptools/issues/4578">#4578</a>)</li> <li><a href="https://github.com/pypa/setuptools/commit/0534fde847e0bd0c2214d6821c042c0eb5c0ffc3"><code>0534fde</code></a> Add news fragment</li> <li><a href="https://github.com/pypa/setuptools/commit/50b732a4006f3b84315d4473f7c203e4fe13aed9"><code>50b732a</code></a> Check for more specific error message</li> <li><a href="https://github.com/pypa/setuptools/commit/a663287c9c5f0bfc5e05addfb3a15fea7fc716c3"><code>a663287</code></a> Add pragma for edge-case code path</li> <li><a href="https://github.com/pypa/setuptools/commit/96be735ca2e77b7db876133dfda0b4df3ced4ac0"><code>96be735</code></a> Workaround for bdist_wheel.dist_info_dir problems</li> <li><a href="https://github.com/pypa/setuptools/commit/000a413e2af9c271166cebe6909ad664907887f1"><code>000a413</code></a> Deprecate public access to setuptools.dist.sequence</li> <li><a 
href="https://github.com/pypa/setuptools/commit/00995c1e3d45393931ffb2e326e503819bee1728"><code>00995c1</code></a> Use variable msg instead of tmpl in setuptools/dist</li> <li><a href="https://github.com/pypa/setuptools/commit/d457d0e87889aefe2093cd79ab4d1ee35d3101e7"><code>d457d0e</code></a> Type sequence checks in setuptools/dist.py</li> <li>Additional commits viewable in <a href="https://github.com/pypa/setuptools/compare/v75.1.0...v75.2.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=setuptools&package-manager=pip&previous-version=75.1.0&new-version=75.2.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 45288cc9b92..2d6f93539f8 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -287,7 +287,7 @@ zipp==3.20.2 # The following packages are considered to be unsafe in a requirements file: pip==24.2 # via pip-tools -setuptools==75.1.0 +setuptools==75.2.0 # via # incremental # pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index 735576fa485..b8746a0e8e4 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -279,7 +279,7 @@ zipp==3.20.2 # The following packages are considered to be unsafe in a requirements file: pip==24.2 # via pip-tools -setuptools==75.1.0 +setuptools==75.2.0 # via # incremental # pip-tools diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index fe962ac233c..df393012548 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -83,5 +83,5 @@ zipp==3.20.2 # 
importlib-resources # The following packages are considered to be unsafe in a requirements file: -setuptools==75.1.0 +setuptools==75.2.0 # via incremental diff --git a/requirements/doc.txt b/requirements/doc.txt index 63e05289b02..43b7c6b7e8b 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -78,5 +78,5 @@ zipp==3.20.2 # importlib-resources # The following packages are considered to be unsafe in a requirements file: -setuptools==75.1.0 +setuptools==75.2.0 # via incremental From e4aa5a724acf7a29e55a0fd0c3d0993bf1fa50e4 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Thu, 17 Oct 2024 16:34:07 -1000 Subject: [PATCH 0750/1511] [PR #9498/13dc020 backport][3.11] Improve performance of WebSocketReader (#9502) --- aiohttp/http_websocket.py | 34 ++++++++++++++++++++-------------- 1 file changed, 20 insertions(+), 14 deletions(-) diff --git a/aiohttp/http_websocket.py b/aiohttp/http_websocket.py index 62628e66d78..ad731c496c6 100644 --- a/aiohttp/http_websocket.py +++ b/aiohttp/http_websocket.py @@ -133,8 +133,12 @@ def json(self, *, loads: Callable[[Any], Any] = json.loads) -> Any: return loads(self.data) -WS_CLOSED_MESSAGE = WSMessage(WSMsgType.CLOSED, None, None) -WS_CLOSING_MESSAGE = WSMessage(WSMsgType.CLOSING, None, None) +# Constructing the tuple directly to avoid the overhead of +# the lambda and arg processing since NamedTuples are constructed +# with a run time built lambda +# https://github.com/python/cpython/blob/d83fcf8371f2f33c7797bc8f5423a8bca8c46e5c/Lib/collections/__init__.py#L441 +WS_CLOSED_MESSAGE = tuple.__new__(WSMessage, (WSMsgType.CLOSED, None, None)) +WS_CLOSING_MESSAGE = tuple.__new__(WSMessage, (WSMsgType.CLOSING, None, None)) class WebSocketError(Exception): @@ -411,12 +415,14 @@ def _feed_data(self, data: bytes) -> None: WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" ) from exc - self.queue.feed_data(WSMessage(WSMsgType.TEXT, text, ""), len(text)) + # tuple.__new__ is used to avoid the overhead of the 
lambda + msg = tuple.__new__(WSMessage, (WSMsgType.TEXT, text, "")) + self.queue.feed_data(msg, len(text)) continue - self.queue.feed_data( - WSMessage(WSMsgType.BINARY, payload_merged, ""), len(payload_merged) - ) + # tuple.__new__ is used to avoid the overhead of the lambda + msg = tuple.__new__(WSMessage, (WSMsgType.BINARY, payload_merged, "")) + self.queue.feed_data(msg, len(payload_merged)) elif opcode == WSMsgType.CLOSE: if len(payload) >= 2: close_code = UNPACK_CLOSE_CODE(payload[:2])[0] @@ -431,26 +437,26 @@ def _feed_data(self, data: bytes) -> None: raise WebSocketError( WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" ) from exc - msg = WSMessage(WSMsgType.CLOSE, close_code, close_message) + msg = tuple.__new__( + WSMessage, (WSMsgType.CLOSE, close_code, close_message) + ) elif payload: raise WebSocketError( WSCloseCode.PROTOCOL_ERROR, f"Invalid close frame: {fin} {opcode} {payload!r}", ) else: - msg = WSMessage(WSMsgType.CLOSE, 0, "") + msg = tuple.__new__(WSMessage, (WSMsgType.CLOSE, 0, "")) self.queue.feed_data(msg, 0) elif opcode == WSMsgType.PING: - self.queue.feed_data( - WSMessage(WSMsgType.PING, payload, ""), len(payload) - ) + msg = tuple.__new__(WSMessage, (WSMsgType.PING, payload, "")) + self.queue.feed_data(msg, len(payload)) elif opcode == WSMsgType.PONG: - self.queue.feed_data( - WSMessage(WSMsgType.PONG, payload, ""), len(payload) - ) + msg = tuple.__new__(WSMessage, (WSMsgType.PONG, payload, "")) + self.queue.feed_data(msg, len(payload)) else: raise WebSocketError( From 9fd3aa1290bdf71a50a45828f07516dc7c69a345 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 18 Oct 2024 02:50:06 +0000 Subject: [PATCH 0751/1511] [PR #9499/b45a7dab backport][3.11] Improve performance of creating the ConnectionKey (#9500) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/client_reqrep.py | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 46c96086682..57df4b016e6 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -375,14 +375,17 @@ def connection_key(self) -> ConnectionKey: else: h = None url = self.url - return ConnectionKey( - url.raw_host or "", - url.port, - url.scheme in _SSL_SCHEMES, - self._ssl, - self.proxy, - self.proxy_auth, - h, + return tuple.__new__( + ConnectionKey, + ( + url.raw_host or "", + url.port, + url.scheme in _SSL_SCHEMES, + self._ssl, + self.proxy, + self.proxy_auth, + h, + ), ) @property From 1ebb0345ac8a81e8da15ade98e553c8875917b2c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Thu, 17 Oct 2024 16:59:30 -1000 Subject: [PATCH 0752/1511] [PR #9498/13dc020 backport][3.10] Improve performance of WebSocketReader (#9501) --- aiohttp/http_websocket.py | 34 ++++++++++++++++++++-------------- 1 file changed, 20 insertions(+), 14 deletions(-) diff --git a/aiohttp/http_websocket.py b/aiohttp/http_websocket.py index fb00ebc7d35..3c399351d78 100644 --- a/aiohttp/http_websocket.py +++ b/aiohttp/http_websocket.py @@ -133,8 +133,12 @@ def json(self, *, loads: Callable[[Any], Any] = json.loads) -> Any: return loads(self.data) -WS_CLOSED_MESSAGE = WSMessage(WSMsgType.CLOSED, None, None) -WS_CLOSING_MESSAGE = WSMessage(WSMsgType.CLOSING, None, None) +# Constructing the tuple directly to avoid the overhead of +# the lambda and arg processing since NamedTuples are constructed +# with a run time built lambda +# https://github.com/python/cpython/blob/d83fcf8371f2f33c7797bc8f5423a8bca8c46e5c/Lib/collections/__init__.py#L441 +WS_CLOSED_MESSAGE = tuple.__new__(WSMessage, (WSMsgType.CLOSED, None, None)) +WS_CLOSING_MESSAGE = tuple.__new__(WSMessage, (WSMsgType.CLOSING, None, None)) class WebSocketError(Exception): @@ -411,12 +415,14 @@ def 
_feed_data(self, data: bytes) -> None: WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" ) from exc - self.queue.feed_data(WSMessage(WSMsgType.TEXT, text, ""), len(text)) + # tuple.__new__ is used to avoid the overhead of the lambda + msg = tuple.__new__(WSMessage, (WSMsgType.TEXT, text, "")) + self.queue.feed_data(msg, len(text)) continue - self.queue.feed_data( - WSMessage(WSMsgType.BINARY, payload_merged, ""), len(payload_merged) - ) + # tuple.__new__ is used to avoid the overhead of the lambda + msg = tuple.__new__(WSMessage, (WSMsgType.BINARY, payload_merged, "")) + self.queue.feed_data(msg, len(payload_merged)) elif opcode == WSMsgType.CLOSE: if len(payload) >= 2: close_code = UNPACK_CLOSE_CODE(payload[:2])[0] @@ -431,26 +437,26 @@ def _feed_data(self, data: bytes) -> None: raise WebSocketError( WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" ) from exc - msg = WSMessage(WSMsgType.CLOSE, close_code, close_message) + msg = tuple.__new__( + WSMessage, (WSMsgType.CLOSE, close_code, close_message) + ) elif payload: raise WebSocketError( WSCloseCode.PROTOCOL_ERROR, f"Invalid close frame: {fin} {opcode} {payload!r}", ) else: - msg = WSMessage(WSMsgType.CLOSE, 0, "") + msg = tuple.__new__(WSMessage, (WSMsgType.CLOSE, 0, "")) self.queue.feed_data(msg, 0) elif opcode == WSMsgType.PING: - self.queue.feed_data( - WSMessage(WSMsgType.PING, payload, ""), len(payload) - ) + msg = tuple.__new__(WSMessage, (WSMsgType.PING, payload, "")) + self.queue.feed_data(msg, len(payload)) elif opcode == WSMsgType.PONG: - self.queue.feed_data( - WSMessage(WSMsgType.PONG, payload, ""), len(payload) - ) + msg = tuple.__new__(WSMessage, (WSMsgType.PONG, payload, "")) + self.queue.feed_data(msg, len(payload)) else: raise WebSocketError( From f9cc28aa53b03a1b72a1076161850c4edeb09b08 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 18 Oct 2024 12:52:27 +0000 Subject: [PATCH 0753/1511] Bump virtualenv from 20.26.6 to 
20.27.0 (#9507) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [virtualenv](https://github.com/pypa/virtualenv) from 20.26.6 to 20.27.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pypa/virtualenv/releases">virtualenv's releases</a>.</em></p> <blockquote> <h2>20.27.0</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>release 20.26.6 by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2772">pypa/virtualenv#2772</a></li> <li>docs: fix the documentation typo on Extend Functionality page. by <a href="https://github.com/Mr-Sunglasses"><code>@​Mr-Sunglasses</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2773">pypa/virtualenv#2773</a></li> <li>Fix broken Windows zipapp and drop 3.7 support by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2783">pypa/virtualenv#2783</a></li> <li>Skip $PATH entries we cannot check rather than dying with PermissionError by <a href="https://github.com/hroncok"><code>@​hroncok</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2782">pypa/virtualenv#2782</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/Mr-Sunglasses"><code>@​Mr-Sunglasses</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/virtualenv/pull/2773">pypa/virtualenv#2773</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pypa/virtualenv/compare/20.26.5...20.27.0">https://github.com/pypa/virtualenv/compare/20.26.5...20.27.0</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/virtualenv/blob/main/docs/changelog.rst">virtualenv's changelog</a>.</em></p> <blockquote> <h2>v20.27.0 (2024-10-17)</h2> 
<p>Features - 20.27.0</p> <pre><code>- Drop 3.7 support as the CI environments no longer allow it running - by :user:`gaborbernat`. (:issue:`2758`) <p>Bugfixes - 20.27.0 </code></pre></p> <ul> <li> <p>When a <code>$PATH</code> entry cannot be checked for existence, skip it instead of terminating - by :user:<code>hroncok</code>. (:issue:<code>2782</code>)</p> </li> <li> <p>Upgrade embedded wheels:</p> <ul> <li>setuptools to <code>75.2.0</code> from <code>75.1.0</code></li> <li>Removed pip of <code>24.0</code></li> <li>Removed setuptools of <code>68.0.0</code></li> <li>Removed wheel of <code>0.42.0</code></li> </ul> <ul> <li>by :user:<code>gaborbernat</code>. (:issue:<code>2783</code>)</li> </ul> </li> <li> <p>Fix zipapp is broken on Windows post distlib <code>0.3.9</code> - by :user:<code>gaborbernat</code>. (:issue:<code>2784</code>)</p> </li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/virtualenv/commit/de4465f88bd769858d53f988198ab04bff4b328e"><code>de4465f</code></a> release 20.27.0</li> <li><a href="https://github.com/pypa/virtualenv/commit/6f1605973a90bdc78cfd72560e24e460db103ab9"><code>6f16059</code></a> Skip $PATH entries we cannot check rather than dying with PermissionError (<a href="https://redirect.github.com/pypa/virtualenv/issues/2">#2</a>...</li> <li><a href="https://github.com/pypa/virtualenv/commit/f73a2f385316878d55e45980218d2e3c142aacb9"><code>f73a2f3</code></a> Fix broken Windows zipapp and drop 3.7 support (<a href="https://redirect.github.com/pypa/virtualenv/issues/2783">#2783</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/228b6152c053010b8160a3a13d2d84b10c99cf05"><code>228b615</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/pypa/virtualenv/issues/2781">#2781</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/b56d09248aecf584e985956185afb6745a7daac7"><code>b56d092</code></a> [pre-commit.ci] pre-commit 
autoupdate (<a href="https://redirect.github.com/pypa/virtualenv/issues/2779">#2779</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/a4dff77fbf277686207c22b9a21b51685d54048e"><code>a4dff77</code></a> Bump pypa/gh-action-pypi-publish from 1.10.2 to 1.10.3 (<a href="https://redirect.github.com/pypa/virtualenv/issues/2777">#2777</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/da72caac974b9a7e88d58b73c51a7c611e81ff04"><code>da72caa</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/pypa/virtualenv/issues/2775">#2775</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/349eddcb7fbe525aa70d0a18eff12fc398b1ee1b"><code>349eddc</code></a> docs: fix the documentation typo on Extend Functionality page. (<a href="https://redirect.github.com/pypa/virtualenv/issues/2773">#2773</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/fe8ca142fdafc0fa576ca9261d1a4a4c246e2eba"><code>fe8ca14</code></a> release 20.26.6 (<a href="https://redirect.github.com/pypa/virtualenv/issues/2772">#2772</a>)</li> <li>See full diff in <a href="https://github.com/pypa/virtualenv/compare/20.26.6...20.27.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=virtualenv&package-manager=pip&previous-version=20.26.6&new-version=20.27.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 2d6f93539f8..42b7a1e7477 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -271,7 +271,7 @@ uvloop==0.21.0 ; platform_system != "Windows" # -r requirements/lint.in 
valkey==6.0.2 # via -r requirements/lint.in -virtualenv==20.26.6 +virtualenv==20.27.0 # via pre-commit wait-for-it==2.2.2 # via -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index b8746a0e8e4..2ebf51e10d2 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -263,7 +263,7 @@ uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpytho # -r requirements/lint.in valkey==6.0.2 # via -r requirements/lint.in -virtualenv==20.26.6 +virtualenv==20.27.0 # via pre-commit wait-for-it==2.2.2 # via -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index b1060032662..ca47b5fc5fa 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -118,5 +118,5 @@ uvloop==0.21.0 ; platform_system != "Windows" # via -r requirements/lint.in valkey==6.0.2 # via -r requirements/lint.in -virtualenv==20.26.6 +virtualenv==20.27.0 # via pre-commit From 2c6859171816336467e11f548dfe70d5f69b295e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 21 Oct 2024 11:37:18 +0000 Subject: [PATCH 0754/1511] Bump cryptography from 43.0.1 to 43.0.3 (#9515) Bumps [cryptography](https://github.com/pyca/cryptography) from 43.0.1 to 43.0.3. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst">cryptography's changelog</a>.</em></p> <blockquote> <p>43.0.3 - 2024-10-18</p> <pre><code> * Fixed release metadata for ``cryptography-vectors`` <p>.. _v43-0-2:</p> <p>43.0.2 - 2024-10-18<br /> </code></pre></p> <ul> <li>Fixed compilation when using LibreSSL 4.0.0.</li> </ul> <p>.. 
_v43-0-1:</p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pyca/cryptography/commit/c2afb4ffd2d65cc2e1a28c0af3fb52efb6cc688b"><code>c2afb4f</code></a> Backport metadata fix for vectors (<a href="https://redirect.github.com/pyca/cryptography/issues/11797">#11797</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/9a3cdb5e1353d3e26fc6710ce08bde0c60febfdf"><code>9a3cdb5</code></a> 43.0.2 release: fix libressl 4.0.0 (<a href="https://redirect.github.com/pyca/cryptography/issues/11796">#11796</a>)</li> <li>See full diff in <a href="https://github.com/pyca/cryptography/compare/43.0.1...43.0.3">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=cryptography&package-manager=pip&previous-version=43.0.1&new-version=43.0.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 42b7a1e7477..07e7a0d51da 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -54,7 +54,7 @@ coverage==7.6.1 # via # -r requirements/test.in # pytest-cov -cryptography==43.0.1 +cryptography==43.0.3 # via # pyjwt # trustme diff --git a/requirements/dev.txt b/requirements/dev.txt index 2ebf51e10d2..499f4c9dfab 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -54,7 +54,7 @@ coverage==7.6.1 # via # -r requirements/test.in # pytest-cov -cryptography==43.0.1 +cryptography==43.0.3 # via # pyjwt # trustme diff --git a/requirements/lint.txt b/requirements/lint.txt index ca47b5fc5fa..53600bbf1f5 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -24,7 +24,7 @@ click==8.1.7 # via # slotscheck # typer -cryptography==43.0.1 +cryptography==43.0.3 # via trustme distlib==0.3.9 # via virtualenv diff --git a/requirements/test.txt 
b/requirements/test.txt index af4740a52f2..9ed11fbcfbf 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -34,7 +34,7 @@ coverage==7.6.1 # via # -r requirements/test.in # pytest-cov -cryptography==43.0.1 +cryptography==43.0.3 # via trustme exceptiongroup==1.2.2 # via pytest From d74e51b5cf93eb99a1fb6aa66950a863c7c056ee Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 21 Oct 2024 12:02:44 +0000 Subject: [PATCH 0755/1511] Bump slotscheck from 0.19.0 to 0.19.1 (#9518) Bumps [slotscheck](https://github.com/ariebovenberg/slotscheck) from 0.19.0 to 0.19.1. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/ariebovenberg/slotscheck/releases">slotscheck's releases</a>.</em></p> <blockquote> <h2>0.19.1</h2> <ul> <li>Explicit Python 3.13 support, drop Python 3.8 support</li> <li>Documentation improvements</li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/ariebovenberg/slotscheck/blob/main/CHANGELOG.rst">slotscheck's changelog</a>.</em></p> <blockquote> <h2>0.19.1 (2024-10-19)</h2> <ul> <li>Explicit Python 3.13 support, drop Python 3.8 support</li> <li>Documentation improvements</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/ariebovenberg/slotscheck/commit/17c05034de05d4bf909ed5f4664ef7004a839dc9"><code>17c0503</code></a> Merge pull request <a href="https://redirect.github.com/ariebovenberg/slotscheck/issues/272">#272</a> from ariebovenberg/py313</li> <li><a href="https://github.com/ariebovenberg/slotscheck/commit/2e0ba322054f8baa18abb4172432fbcdd8180f96"><code>2e0ba32</code></a> py313 support, drop py38</li> <li><a href="https://github.com/ariebovenberg/slotscheck/commit/b57c48cdf59a54c96dce072e2cb43f1f7caca6cd"><code>b57c48c</code></a> Merge pull request <a 
href="https://redirect.github.com/ariebovenberg/slotscheck/issues/271">#271</a> from ariebovenberg/docs-fixes</li> <li><a href="https://github.com/ariebovenberg/slotscheck/commit/704fe545eca99c8d6ea565686bdd95f69ebfb37e"><code>704fe54</code></a> migrate docs theme, various docs additions</li> <li><a href="https://github.com/ariebovenberg/slotscheck/commit/8cb173fb16e8d0073b92894cfde22ad11c0e234b"><code>8cb173f</code></a> Merge pull request <a href="https://redirect.github.com/ariebovenberg/slotscheck/issues/268">#268</a> from ariebovenberg/dependabot/pip/mypy-1.12.0</li> <li><a href="https://github.com/ariebovenberg/slotscheck/commit/92ea0c2d8a628a9fd1b0d1ba67faa99a871cca03"><code>92ea0c2</code></a> Bump mypy from 1.11.2 to 1.12.0</li> <li><a href="https://github.com/ariebovenberg/slotscheck/commit/d4ca0056aafdfb1180c6d3bfcb1d45a8786cbbcf"><code>d4ca005</code></a> Merge pull request <a href="https://redirect.github.com/ariebovenberg/slotscheck/issues/265">#265</a> from ariebovenberg/dependabot/pip/tomli-2.0.2</li> <li><a href="https://github.com/ariebovenberg/slotscheck/commit/c664863ca7a4391789ad38ab828c537bb3ae2309"><code>c664863</code></a> Bump tomli from 2.0.1 to 2.0.2</li> <li><a href="https://github.com/ariebovenberg/slotscheck/commit/d823c0a7779337cf606128bd4883b6b36e5e3d16"><code>d823c0a</code></a> Merge pull request <a href="https://redirect.github.com/ariebovenberg/slotscheck/issues/264">#264</a> from ariebovenberg/dependabot/pip/pytest-8.3.3</li> <li><a href="https://github.com/ariebovenberg/slotscheck/commit/bf1083a42fbce0136fbaed6ffd7e038d860ee6fc"><code>bf1083a</code></a> Bump pytest from 8.3.2 to 8.3.3</li> <li>Additional commits viewable in <a href="https://github.com/ariebovenberg/slotscheck/compare/v0.19.0...v0.19.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=slotscheck&package-manager=pip&previous-version=0.19.0&new-version=0.19.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 07e7a0d51da..7bbffe21478 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -203,7 +203,7 @@ shellingham==1.5.4 # via typer six==1.16.0 # via python-dateutil -slotscheck==0.19.0 +slotscheck==0.19.1 # via -r requirements/lint.in snowballstemmer==2.2.0 # via sphinx diff --git a/requirements/dev.txt b/requirements/dev.txt index 499f4c9dfab..055c2ffe42b 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -198,7 +198,7 @@ shellingham==1.5.4 # via typer six==1.16.0 # via python-dateutil -slotscheck==0.19.0 +slotscheck==0.19.1 # via -r requirements/lint.in snowballstemmer==2.2.0 # via sphinx diff --git a/requirements/lint.txt b/requirements/lint.txt index 53600bbf1f5..326e813d5a4 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -90,7 +90,7 @@ shellingham==1.5.4 # via typer six==1.16.0 # via python-dateutil -slotscheck==0.19.0 +slotscheck==0.19.1 # via -r 
requirements/lint.in tomli==2.0.2 # via From db5c4d1315c266c47a1f76d48d4296cbc19c6acf Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 23 Oct 2024 11:08:22 +0000 Subject: [PATCH 0756/1511] Bump actions/cache from 4.1.1 to 4.1.2 (#9525) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/cache](https://github.com/actions/cache) from 4.1.1 to 4.1.2. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/actions/cache/releases">actions/cache's releases</a>.</em></p> <blockquote> <h2>v4.1.2</h2> <h2>What's Changed</h2> <ul> <li>Add Bun example by <a href="https://github.com/idleberg"><code>@​idleberg</code></a> in <a href="https://redirect.github.com/actions/cache/pull/1456">actions/cache#1456</a></li> <li>Revise <code>isGhes</code> logic by <a href="https://github.com/jww3"><code>@​jww3</code></a> in <a href="https://redirect.github.com/actions/cache/pull/1474">actions/cache#1474</a></li> <li>Bump braces from 3.0.2 to 3.0.3 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/actions/cache/pull/1475">actions/cache#1475</a></li> <li>Add dependabot.yml to enable automatic dependency upgrades by <a href="https://github.com/Link"><code>@​Link</code></a>- in <a href="https://redirect.github.com/actions/cache/pull/1476">actions/cache#1476</a></li> <li>Bump actions/checkout from 3 to 4 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/actions/cache/pull/1478">actions/cache#1478</a></li> <li>Bump actions/stale from 3 to 9 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/actions/cache/pull/1479">actions/cache#1479</a></li> <li>Bump github/codeql-action from 2 to 3 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a 
href="https://redirect.github.com/actions/cache/pull/1483">actions/cache#1483</a></li> <li>Bump actions/setup-node from 3 to 4 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/actions/cache/pull/1481">actions/cache#1481</a></li> <li>Prepare <code>4.1.2</code> release by <a href="https://github.com/Link"><code>@​Link</code></a>- in <a href="https://redirect.github.com/actions/cache/pull/1477">actions/cache#1477</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/idleberg"><code>@​idleberg</code></a> made their first contribution in <a href="https://redirect.github.com/actions/cache/pull/1456">actions/cache#1456</a></li> <li><a href="https://github.com/jww3"><code>@​jww3</code></a> made their first contribution in <a href="https://redirect.github.com/actions/cache/pull/1474">actions/cache#1474</a></li> <li><a href="https://github.com/Link"><code>@​Link</code></a>- made their first contribution in <a href="https://redirect.github.com/actions/cache/pull/1476">actions/cache#1476</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/actions/cache/compare/v4...v4.1.2">https://github.com/actions/cache/compare/v4...v4.1.2</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/actions/cache/blob/main/RELEASES.md">actions/cache's changelog</a>.</em></p> <blockquote> <h3>4.1.2</h3> <ul> <li>Add GitHub Enterprise Cloud instances hostname filters to inform API endpoint choices - <a href="https://redirect.github.com/actions/cache/pull/1474">#1474</a></li> <li>Security fix: Bump braces from 3.0.2 to 3.0.3 - <a href="https://redirect.github.com/actions/cache/pull/1475">#1475</a></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/actions/cache/commit/6849a6489940f00c2f30c0fb92c6274307ccb58a"><code>6849a64</code></a> Release 4.1.2 <a 
href="https://redirect.github.com/actions/cache/issues/1477">#1477</a></li> <li><a href="https://github.com/actions/cache/commit/5a1720c49e91718f5d0ce7f154997c93e0f6159a"><code>5a1720c</code></a> Merge branch 'Link-/prep-4.1.2' of <a href="https://github.com/actions/cache">https://github.com/actions/cache</a> into Link...</li> <li><a href="https://github.com/actions/cache/commit/d9fef48d24d529bac46adcc1e42f49649de07ca9"><code>d9fef48</code></a> Merge branch 'main' into Link-/prep-4.1.2</li> <li><a href="https://github.com/actions/cache/commit/a50e8d027b022a55ef85ffa721ebd97d2b22bcda"><code>a50e8d0</code></a> Merge branch 'main' into Link-/prep-4.1.2</li> <li><a href="https://github.com/actions/cache/commit/acc9ae5c1401b6c072a7c89ff4c48ea7e30f4dbf"><code>acc9ae5</code></a> Merge pull request <a href="https://redirect.github.com/actions/cache/issues/1481">#1481</a> from actions/dependabot/github_actions/actions/setup...</li> <li><a href="https://github.com/actions/cache/commit/1ea5f18c31265ef251b3eb51ce8b857be5e7b044"><code>1ea5f18</code></a> Merge branch 'main' into Link-/prep-4.1.2</li> <li><a href="https://github.com/actions/cache/commit/cc679ff3baee71867c711040a5c21af61b262806"><code>cc679ff</code></a> Merge branch 'main' into dependabot/github_actions/actions/setup-node-4</li> <li><a href="https://github.com/actions/cache/commit/366d43d6f8aa1f3e5d28b2c98959d557d78ffa3f"><code>366d43d</code></a> Merge pull request <a href="https://redirect.github.com/actions/cache/issues/1483">#1483</a> from actions/dependabot/github_actions/github/codeql...</li> <li><a href="https://github.com/actions/cache/commit/02bf31969bcc471fd5f91b5896ae17beb0973413"><code>02bf319</code></a> Bump github/codeql-action from 2 to 3</li> <li><a href="https://github.com/actions/cache/commit/6f6220be5af96ba6940aca960265218cae17e8fd"><code>6f6220b</code></a> Merge branch 'main' into dependabot/github_actions/actions/setup-node-4</li> <li>Additional commits viewable in <a 
href="https://github.com/actions/cache/compare/v4.1.1...v4.1.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/cache&package-manager=github_actions&previous-version=4.1.1&new-version=4.1.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 2ee3ee7d8bf..173fb9548eb 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -47,7 +47,7 @@ jobs: with: python-version: 3.11 - name: Cache PyPI - uses: actions/cache@v4.1.1 + uses: actions/cache@v4.1.2 with: key: pip-lint-${{ hashFiles('requirements/*.txt') }} path: ~/.cache/pip @@ -99,7 +99,7 @@ jobs: with: submodules: true - name: Cache llhttp generated files - uses: actions/cache@v4.1.1 + uses: actions/cache@v4.1.2 id: cache with: key: llhttp-${{ hashFiles('vendor/llhttp/package*.json', 'vendor/llhttp/src/**/*') }} @@ -163,7 +163,7 @@ jobs: echo "dir=$(pip cache dir)" >> "${GITHUB_OUTPUT}" shell: bash - name: Cache PyPI - uses: actions/cache@v4.1.1 + uses: actions/cache@v4.1.2 with: key: pip-ci-${{ runner.os }}-${{ matrix.pyver }}-${{ matrix.no-extensions }}-${{ hashFiles('requirements/*.txt') }} path: ${{ steps.pip-cache.outputs.dir }} From 43c9a095de1d55430c6a0d92a2273e094b138946 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 23 Oct 2024 11:17:45 +0000 Subject: [PATCH 0757/1511] Bump rich from 13.9.2 to 13.9.3 (#9526) Bumps [rich](https://github.com/Textualize/rich) from 13.9.2 to 13.9.3. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/Textualize/rich/releases">rich's releases</a>.</em></p> <blockquote> <h2>The irregular expression release</h2> <p>Fix a broken regex that resulted in the slow path being chosen for some operations. This fix should result in notable speedups for some operations, such as wrapping text.</p> <h2>[13.9.3] - 2024-10-22</h2> <h3>Fixed</h3> <ul> <li>Fixed broken regex that may have resulted in poor performance. <a href="https://redirect.github.com/Textualize/rich/pull/3535">Textualize/rich#3535</a></li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/Textualize/rich/blob/master/CHANGELOG.md">rich's changelog</a>.</em></p> <blockquote> <h2>[13.9.3] - 2024-10-22</h2> <h3>Fixed</h3> <ul> <li>Fixed broken regex that may have resulted in poor performance. 
<a href="https://redirect.github.com/Textualize/rich/pull/3535">Textualize/rich#3535</a></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/Textualize/rich/commit/afcc5c5a957c75b325fd7cc45bd70b3ac6413ef4"><code>afcc5c5</code></a> Merge pull request <a href="https://redirect.github.com/Textualize/rich/issues/3535">#3535</a> from Textualize/regex-error</li> <li><a href="https://github.com/Textualize/rich/commit/60f3b615a706949f6ae9734eeaea519573af4522"><code>60f3b61</code></a> changelog</li> <li><a href="https://github.com/Textualize/rich/commit/04db8c2946373fdd3249cb2a6889481a43305aed"><code>04db8c2</code></a> update regex</li> <li><a href="https://github.com/Textualize/rich/commit/b93d3b6d98547b88320a5c82c362d5825bcdca4b"><code>b93d3b6</code></a> test single cell widths</li> <li><a href="https://github.com/Textualize/rich/commit/be42f1b082477f1ec4bdf5d05da7145c78418cda"><code>be42f1b</code></a> test and added box drawing characters</li> <li><a href="https://github.com/Textualize/rich/commit/ad6b8865ff88bcaddb42573befd32ce1c0ae170d"><code>ad6b886</code></a> version bump</li> <li><a href="https://github.com/Textualize/rich/commit/db2e3e89307d30e4096686083f03ef5689572bdf"><code>db2e3e8</code></a> assert cut</li> <li><a href="https://github.com/Textualize/rich/commit/68e1b6386db241d49f7713dae4ba3b59c0d30ba6"><code>68e1b63</code></a> fix regex</li> <li>See full diff in <a href="https://github.com/Textualize/rich/compare/v13.9.2...v13.9.3">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=rich&package-manager=pip&previous-version=13.9.2&new-version=13.9.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. 
You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 7bbffe21478..293b071b00a 100644 --- a/requirements/constraints.txt +++ 
b/requirements/constraints.txt @@ -195,7 +195,7 @@ requests==2.32.3 # cherry-picker # python-on-whales # sphinx -rich==13.9.2 +rich==13.9.3 # via typer setuptools-git==1.2 # via -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 055c2ffe42b..fdde0ed849e 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -190,7 +190,7 @@ requests==2.32.3 # cherry-picker # python-on-whales # sphinx -rich==13.9.2 +rich==13.9.3 # via typer setuptools-git==1.2 # via -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 326e813d5a4..c173456bf1a 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -84,7 +84,7 @@ pyyaml==6.0.2 # via pre-commit requests==2.32.3 # via python-on-whales -rich==13.9.2 +rich==13.9.3 # via typer shellingham==1.5.4 # via typer diff --git a/requirements/test.txt b/requirements/test.txt index 9ed11fbcfbf..ef583b04162 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -106,7 +106,7 @@ regex==2024.9.11 # via re-assert requests==2.32.3 # via python-on-whales -rich==13.9.2 +rich==13.9.3 # via typer setuptools-git==1.2 # via -r requirements/test.in From e2ae49838fd5f7c88d5391d14d59f31e53b6cfa9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 24 Oct 2024 11:43:33 +0000 Subject: [PATCH 0758/1511] Bump frozenlist from 1.4.1 to 1.5.0 (#9532) Bumps [frozenlist](https://github.com/aio-libs/frozenlist) from 1.4.1 to 1.5.0. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/frozenlist/releases">frozenlist's releases</a>.</em></p> <blockquote> <h2>1.5.0</h2> <h2>Bug fixes</h2> <ul> <li> <p>An incorrect signature of the <code>__class_getitem__</code> class method has been fixed, adding a missing <code>class_item</code> argument under Python 3.8 and older.</p> <p>This change also improves the code coverage of this method that was previously missing -- by :user:<code>webknjaz</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/frozenlist/issues/567">#567</a>, <a href="https://redirect.github.com/aio-libs/frozenlist/issues/571">#571</a>.</p> </li> </ul> <h2>Improved documentation</h2> <ul> <li> <p>Rendered issue, PR, and commit links now lead to <code>frozenlist</code>'s repo instead of <code>yarl</code>'s repo.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/frozenlist/issues/573">#573</a>.</p> </li> <li> <p>On the :doc:<code>Contributing docs <contributing/guidelines></code> page, a link to the <code>Towncrier philosophy</code> has been fixed.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/frozenlist/issues/574">#574</a>.</p> </li> </ul> <h2>Packaging updates and notes for downstreams</h2> <ul> <li> <p>A name of a temporary building directory now reflects that it's related to <code>frozenlist</code>, not <code>yarl</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/frozenlist/issues/573">#573</a>.</p> </li> <li> <p>Declared Python 3.13 supported officially in the distribution package metadata.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/frozenlist/issues/595">#595</a>.</p> </li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/frozenlist/blob/master/CHANGES.rst">frozenlist's changelog</a>.</em></p> <blockquote> <h1>1.5.0 (2024-10-22)</h1> <h2>Bug fixes</h2> <ul> <li> <p>An incorrect signature of the <code>__class_getitem__</code> class method has been fixed, adding a missing <code>class_item</code> argument under Python 3.8 and older.</p> <p>This change also improves the code coverage of this method that was previously missing -- by :user:<code>webknjaz</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>567</code>, :issue:<code>571</code>.</p> </li> </ul> <h2>Improved documentation</h2> <ul> <li> <p>Rendered issue, PR, and commit links now lead to <code>frozenlist</code>'s repo instead of <code>yarl</code>'s repo.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>573</code>.</p> </li> <li> <p>On the :doc:<code>Contributing docs <contributing/guidelines></code> page, a link to the <code>Towncrier philosophy</code> has been fixed.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>574</code>.</p> </li> </ul> <h2>Packaging updates and notes for downstreams</h2> <ul> <li> <p>A name of a temporary building directory now reflects that it's related to <code>frozenlist</code>, not <code>yarl</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>573</code>.</p> </li> <li> <p>Declared Python 3.13 supported officially in the distribution package metadata.</p> <p><em>Related issues and pull requests on GitHub:</em></p> </li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/frozenlist/commit/91991e39d37700f4200a11cb24f6318872aac971"><code>91991e3</code></a> Release 1.5.0 (<a href="https://redirect.github.com/aio-libs/frozenlist/issues/612">#612</a>)</li> <li><a href="https://github.com/aio-libs/frozenlist/commit/a0f0c7c475915a50e562f27b71dc10dc979f9aa4"><code>a0f0c7c</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/aio-libs/frozenlist/issues/606">#606</a>)</li> <li><a href="https://github.com/aio-libs/frozenlist/commit/7858bd30a7568f351a11e0d7e06aabd05ce1c5c1"><code>7858bd3</code></a> Increase release timeout (<a href="https://redirect.github.com/aio-libs/frozenlist/issues/611">#611</a>)</li> <li><a href="https://github.com/aio-libs/frozenlist/commit/313bebcece39c6aa8e4e0172127d85468b67cd00"><code>313bebc</code></a> Adjust linter timeout (<a href="https://redirect.github.com/aio-libs/frozenlist/issues/610">#610</a>)</li> <li><a href="https://github.com/aio-libs/frozenlist/commit/663600032033dcf31869da7764909b9903d269c8"><code>6636000</code></a> Add missing FUNDING.yml (<a href="https://redirect.github.com/aio-libs/frozenlist/issues/609">#609</a>)</li> <li><a href="https://github.com/aio-libs/frozenlist/commit/28e79f5f9b801c3f9a9d17eab5e09492c23de946"><code>28e79f5</code></a> Build(deps): Bump actions/cache from 3 to 4 (<a href="https://redirect.github.com/aio-libs/frozenlist/issues/576">#576</a>)</li> <li><a href="https://github.com/aio-libs/frozenlist/commit/57417cea0163734082b72a40aac6499404805ea6"><code>57417ce</code></a> Build(deps): Bump pypa/cibuildwheel from 2.21.2 to 2.21.3 (<a href="https://redirect.github.com/aio-libs/frozenlist/issues/607">#607</a>)</li> <li><a href="https://github.com/aio-libs/frozenlist/commit/2c242d55c265c4f477c34d114e9be8ce5ae5e621"><code>2c242d5</code></a> Build(deps): Bump pypa/cibuildwheel from 2.21.0 to 2.21.2 (<a 
href="https://redirect.github.com/aio-libs/frozenlist/issues/604">#604</a>)</li> <li><a href="https://github.com/aio-libs/frozenlist/commit/b0ddbbf27b470de644ba0637d711a182ea7777d3"><code>b0ddbbf</code></a> Build(deps): Bump pypa/cibuildwheel from 2.20.0 to 2.21.0 (<a href="https://redirect.github.com/aio-libs/frozenlist/issues/600">#600</a>)</li> <li><a href="https://github.com/aio-libs/frozenlist/commit/826e261ff2caaeeabdb7cce0bf72f991f1747aba"><code>826e261</code></a> Build(deps): Bump dependabot/fetch-metadata from 1.6.0 to 2.2.0 (<a href="https://redirect.github.com/aio-libs/frozenlist/issues/593">#593</a>)</li> <li>Additional commits viewable in <a href="https://github.com/aio-libs/frozenlist/compare/v1.4.1...v1.5.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=frozenlist&package-manager=pip&previous-version=1.4.1&new-version=1.5.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 2b3508c6dd7..56438d8ab23 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -18,7 +18,7 @@ brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in cffi==1.17.1 # via pycares -frozenlist==1.4.1 +frozenlist==1.5.0 # via # -r requirements/runtime-deps.in # aiosignal diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 293b071b00a..f5bce7e64a0 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -72,7 +72,7 @@ freezegun==1.5.1 # via # -r requirements/lint.in # -r requirements/test.in -frozenlist==1.4.1 +frozenlist==1.5.0 # via # -r requirements/runtime-deps.in # aiosignal diff --git a/requirements/dev.txt b/requirements/dev.txt index fdde0ed849e..c9bdcec8db5 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -70,7 +70,7 @@ freezegun==1.5.1 # via # 
-r requirements/lint.in # -r requirements/test.in -frozenlist==1.4.1 +frozenlist==1.5.0 # via # -r requirements/runtime-deps.in # aiosignal diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 1524b728bba..7aaddcd942d 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -18,7 +18,7 @@ brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in cffi==1.17.1 # via pycares -frozenlist==1.4.1 +frozenlist==1.5.0 # via # -r requirements/runtime-deps.in # aiosignal diff --git a/requirements/test.txt b/requirements/test.txt index ef583b04162..0128c19af0f 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -40,7 +40,7 @@ exceptiongroup==1.2.2 # via pytest freezegun==1.5.1 # via -r requirements/test.in -frozenlist==1.4.1 +frozenlist==1.5.0 # via # -r requirements/runtime-deps.in # aiosignal From 446a7191b447a010bd732fab3377f6419d0e531e Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 25 Oct 2024 22:18:01 +0000 Subject: [PATCH 0759/1511] [PR #9534/5a79c0f3 backport][3.11] Add codspeed benchmarks (#9536) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- .github/workflows/ci-cd.yml | 41 +++++++++++++++++++++++++ .mypy.ini | 14 +++++++++ requirements/lint.in | 1 + requirements/lint.txt | 8 ++++- requirements/test.in | 1 + requirements/test.txt | 6 ++++ tests/test_benchmarks_client_request.py | 22 +++++++++++++ tests/test_benchmarks_cookiejar.py | 26 ++++++++++++++++ 8 files changed, 118 insertions(+), 1 deletion(-) create mode 100644 tests/test_benchmarks_client_request.py create mode 100644 tests/test_benchmarks_cookiejar.py diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 173fb9548eb..fe266dd7e3b 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -238,6 +238,47 @@ jobs: with: token: ${{ secrets.CODECOV_TOKEN }} + benchmark: + name: Benchmark + needs: gen_llhttp + + runs-on: ubuntu-latest + timeout-minutes: 5 + steps: + - name: Checkout project + uses: actions/checkout@v4 + with: + submodules: true + - name: Setup Python 3.12 + id: python-install + uses: actions/setup-python@v5 + with: + python-version: 3.12 + cache: pip + cache-dependency-path: requirements/*.txt + - name: Update pip, wheel, setuptools, build, twine + run: | + python -m pip install -U pip wheel setuptools build twine + - name: Install dependencies + run: | + python -m pip install -r requirements/test.in -c requirements/test.txt + - name: Restore llhttp generated files + uses: actions/download-artifact@v3 + with: + name: llhttp + path: vendor/llhttp/build/ + - name: Cythonize + run: | + make cythonize + - name: Install self + run: python -m pip install -e . 
+ - name: Run benchmarks + uses: CodSpeedHQ/action@v3 + with: + token: ${{ secrets.CODSPEED_TOKEN }} + run: python -Im pytest --no-cov -vvvvv --codspeed + + check: # This job does nothing and is only used for the branch protection if: always() diff --git a/.mypy.ini b/.mypy.ini index 78001c36e8f..4d8e38d6952 100644 --- a/.mypy.ini +++ b/.mypy.ini @@ -36,3 +36,17 @@ ignore_missing_imports = True [mypy-gunicorn.*] ignore_missing_imports = True + +# Benchmark configuration is because pytest_codspeed is missing +# a py.typed file. Can be removed once the following PR is merged +# and released: +# https://github.com/CodSpeedHQ/pytest-codspeed/pull/53 +[mypy-test_benchmarks_client_request] +disable_error_code = + no-any-unimported, + misc + +[mypy-test_benchmarks_cookiejar] +disable_error_code = + no-any-unimported, + misc diff --git a/requirements/lint.in b/requirements/lint.in index 06d22fc737a..04eea1408da 100644 --- a/requirements/lint.in +++ b/requirements/lint.in @@ -4,6 +4,7 @@ mypy; implementation_name == "cpython" pre-commit pytest pytest-mock +pytest_codspeed python-on-whales slotscheck trustme diff --git a/requirements/lint.txt b/requirements/lint.txt index c173456bf1a..6d15e42d3e0 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -16,6 +16,7 @@ cffi==1.17.1 # via # cryptography # pycares + # pytest-codspeed cfgv==3.4.0 # via pre-commit charset-normalizer==3.4.0 @@ -31,7 +32,9 @@ distlib==0.3.9 exceptiongroup==1.2.2 # via pytest filelock==3.16.1 - # via virtualenv + # via + # pytest-codspeed + # virtualenv freezegun==1.5.1 # via -r requirements/lint.in identify==2.6.1 @@ -73,7 +76,10 @@ pygments==2.18.0 pytest==8.3.3 # via # -r requirements/lint.in + # pytest-codspeed # pytest-mock +pytest-codspeed==2.2.1 + # via -r requirements/lint.in pytest-mock==3.14.0 # via -r requirements/lint.in python-dateutil==2.9.0.post0 diff --git a/requirements/test.in b/requirements/test.in index a88b54f3532..e16e39fe9b3 100644 --- a/requirements/test.in +++ 
b/requirements/test.in @@ -7,6 +7,7 @@ proxy.py >= 2.4.4rc4 pytest pytest-cov pytest-mock +pytest_codspeed python-on-whales re-assert setuptools-git diff --git a/requirements/test.txt b/requirements/test.txt index 0128c19af0f..7364abfd29f 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -24,6 +24,7 @@ cffi==1.17.1 # via # cryptography # pycares + # pytest-codspeed charset-normalizer==3.4.0 # via requests click==8.1.7 @@ -38,6 +39,8 @@ cryptography==43.0.3 # via trustme exceptiongroup==1.2.2 # via pytest +filelock==3.16.1 + # via pytest-codspeed freezegun==1.5.1 # via -r requirements/test.in frozenlist==1.5.0 @@ -90,8 +93,11 @@ pygments==2.18.0 pytest==8.3.3 # via # -r requirements/test.in + # pytest-codspeed # pytest-cov # pytest-mock +pytest-codspeed==2.2.1 + # via -r requirements/test.in pytest-cov==5.0.0 # via -r requirements/test.in pytest-mock==3.14.0 diff --git a/tests/test_benchmarks_client_request.py b/tests/test_benchmarks_client_request.py new file mode 100644 index 00000000000..63c77dfcdc8 --- /dev/null +++ b/tests/test_benchmarks_client_request.py @@ -0,0 +1,22 @@ +"""codspeed benchmarks for client requests.""" + +import asyncio +from http.cookies import Morsel + +from pytest_codspeed import BenchmarkFixture # type: ignore[import-untyped] +from yarl import URL + +from aiohttp.client_reqrep import ClientRequest + + +def test_client_request_update_cookies( + loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture +) -> None: + req = ClientRequest("get", URL("http://python.org"), loop=loop) + morsel: "Morsel[str]" = Morsel() + morsel.set(key="string", val="Another string", coded_val="really") + morsel_cookie = {"str": morsel} + + @benchmark + def _run() -> None: + req.update_cookies(cookies=morsel_cookie) diff --git a/tests/test_benchmarks_cookiejar.py b/tests/test_benchmarks_cookiejar.py new file mode 100644 index 00000000000..508b49f68cb --- /dev/null +++ b/tests/test_benchmarks_cookiejar.py @@ -0,0 +1,26 @@ +"""codspeed benchmarks 
for cookies.""" + +from http.cookies import BaseCookie + +from pytest_codspeed import BenchmarkFixture # type: ignore[import-untyped] +from yarl import URL + +from aiohttp.cookiejar import CookieJar + + +async def test_load_cookies_into_temp_cookiejar(benchmark: BenchmarkFixture) -> None: + """Benchmark for creating a temp CookieJar and filtering by URL. + + This benchmark matches what the client request does when cookies + are passed to the request. + """ + all_cookies: BaseCookie[str] = BaseCookie() + url = URL("http://example.com") + cookies = {"cookie1": "value1", "cookie2": "value2"} + + @benchmark + def _run() -> None: + tmp_cookie_jar = CookieJar() + tmp_cookie_jar.update_cookies(cookies) + req_cookies = tmp_cookie_jar.filter_cookies(url) + all_cookies.load(req_cookies) From e2a231b4727dad7fe53b2e73653e9286cdda0e32 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Fri, 25 Oct 2024 12:31:19 -1000 Subject: [PATCH 0760/1511] [PR #9534/5a79c0f3 backport][3.10] Add codspeed benchmarks (#9537) --- .github/workflows/ci-cd.yml | 41 +++++++++++++++++++++++++ .mypy.ini | 14 +++++++++ requirements/lint.in | 1 + requirements/lint.txt | 20 ++++++++++-- requirements/test.in | 1 + requirements/test.txt | 6 ++++ tests/test_benchmarks_client_request.py | 22 +++++++++++++ tests/test_benchmarks_cookiejar.py | 26 ++++++++++++++++ 8 files changed, 129 insertions(+), 2 deletions(-) create mode 100644 tests/test_benchmarks_client_request.py create mode 100644 tests/test_benchmarks_cookiejar.py diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 4d77978d1e3..61ec5594616 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -240,6 +240,47 @@ jobs: with: token: ${{ secrets.CODECOV_TOKEN }} + benchmark: + name: Benchmark + needs: gen_llhttp + + runs-on: ubuntu-latest + timeout-minutes: 5 + steps: + - name: Checkout project + uses: actions/checkout@v4 + with: + submodules: true + - name: Setup Python 3.12 + id: 
python-install + uses: actions/setup-python@v5 + with: + python-version: 3.12 + cache: pip + cache-dependency-path: requirements/*.txt + - name: Update pip, wheel, setuptools, build, twine + run: | + python -m pip install -U pip wheel setuptools build twine + - name: Install dependencies + run: | + python -m pip install -r requirements/test.in -c requirements/test.txt + - name: Restore llhttp generated files + uses: actions/download-artifact@v3 + with: + name: llhttp + path: vendor/llhttp/build/ + - name: Cythonize + run: | + make cythonize + - name: Install self + run: python -m pip install -e . + - name: Run benchmarks + uses: CodSpeedHQ/action@v3 + with: + token: ${{ secrets.CODSPEED_TOKEN }} + run: python -Im pytest --no-cov -vvvvv --codspeed + + check: # This job does nothing and is only used for the branch protection if: always() diff --git a/.mypy.ini b/.mypy.ini index 78001c36e8f..4d8e38d6952 100644 --- a/.mypy.ini +++ b/.mypy.ini @@ -36,3 +36,17 @@ ignore_missing_imports = True [mypy-gunicorn.*] ignore_missing_imports = True + +# Benchmark configuration is because pytest_codspeed is missing +# a py.typed file. 
Can be removed once the following PR is merged +# and released: +# https://github.com/CodSpeedHQ/pytest-codspeed/pull/53 +[mypy-test_benchmarks_client_request] +disable_error_code = + no-any-unimported, + misc + +[mypy-test_benchmarks_cookiejar] +disable_error_code = + no-any-unimported, + misc diff --git a/requirements/lint.in b/requirements/lint.in index 0d46809a083..2a8ce66330f 100644 --- a/requirements/lint.in +++ b/requirements/lint.in @@ -5,6 +5,7 @@ mypy; implementation_name == "cpython" pre-commit pytest pytest-mock +pytest_codspeed python-on-whales slotscheck trustme diff --git a/requirements/lint.txt b/requirements/lint.txt index 5f1b068cb1c..48c88933fab 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -18,6 +18,7 @@ cffi==1.17.0 # via # cryptography # pycares + # pytest-codspeed cfgv==3.3.1 # via pre-commit charset-normalizer==3.3.2 @@ -26,18 +27,24 @@ click==8.1.6 # via # slotscheck # typer +cryptography==43.0.3 + # via trustme distlib==0.3.7 # via virtualenv exceptiongroup==1.1.2 # via pytest filelock==3.12.2 - # via virtualenv + # via + # pytest-codspeed + # virtualenv freezegun==1.5.1 # via -r requirements/lint.in identify==2.5.26 # via pre-commit idna==3.7 - # via requests + # via + # requests + # trustme iniconfig==2.0.0 # via pytest markdown-it-py==3.0.0 @@ -69,9 +76,16 @@ pydantic-core==2.23.4 pygments==2.17.2 # via rich pytest==8.3.2 + # via + # -r requirements/lint.in + # pytest-codspeed + # pytest-mock +pytest-codspeed==2.2.1 # via -r requirements/lint.in pytest-mock==3.14.0 # via -r requirements/lint.in +python-dateutil==2.9.0.post0 + # via freezegun python-on-whales==0.72.0 # via -r requirements/lint.in pyyaml==6.0.1 @@ -82,6 +96,8 @@ rich==13.7.1 # via typer shellingham==1.5.4 # via typer +six==1.16.0 + # via python-dateutil slotscheck==0.19.0 # via -r requirements/lint.in tomli==2.0.1 diff --git a/requirements/test.in b/requirements/test.in index 686cd6dbf2e..801189ea72c 100644 --- a/requirements/test.in +++ 
b/requirements/test.in @@ -8,6 +8,7 @@ proxy.py >= 2.4.4rc4 pytest pytest-cov pytest-mock +pytest_codspeed python-on-whales re-assert setuptools-git diff --git a/requirements/test.txt b/requirements/test.txt index 8be1e493f2d..c03b33fb228 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -24,6 +24,7 @@ cffi==1.17.0 # via # cryptography # pycares + # pytest-codspeed charset-normalizer==3.2.0 # via requests click==8.1.6 @@ -38,6 +39,8 @@ cryptography==41.0.2 # via trustme exceptiongroup==1.1.2 # via pytest +filelock==3.16.1 + # via pytest-codspeed freezegun==1.5.1 # via -r requirements/test.in frozenlist==1.4.1 @@ -80,8 +83,11 @@ pydantic-core==2.23.4 pytest==8.3.2 # via # -r requirements/test.in + # pytest-codspeed # pytest-cov # pytest-mock +pytest-codspeed==2.2.1 + # via -r requirements/test.in pytest-cov==5.0.0 # via -r requirements/test.in pytest-mock==3.14.0 diff --git a/tests/test_benchmarks_client_request.py b/tests/test_benchmarks_client_request.py new file mode 100644 index 00000000000..63c77dfcdc8 --- /dev/null +++ b/tests/test_benchmarks_client_request.py @@ -0,0 +1,22 @@ +"""codspeed benchmarks for client requests.""" + +import asyncio +from http.cookies import Morsel + +from pytest_codspeed import BenchmarkFixture # type: ignore[import-untyped] +from yarl import URL + +from aiohttp.client_reqrep import ClientRequest + + +def test_client_request_update_cookies( + loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture +) -> None: + req = ClientRequest("get", URL("http://python.org"), loop=loop) + morsel: "Morsel[str]" = Morsel() + morsel.set(key="string", val="Another string", coded_val="really") + morsel_cookie = {"str": morsel} + + @benchmark + def _run() -> None: + req.update_cookies(cookies=morsel_cookie) diff --git a/tests/test_benchmarks_cookiejar.py b/tests/test_benchmarks_cookiejar.py new file mode 100644 index 00000000000..508b49f68cb --- /dev/null +++ b/tests/test_benchmarks_cookiejar.py @@ -0,0 +1,26 @@ +"""codspeed 
benchmarks for cookies.""" + +from http.cookies import BaseCookie + +from pytest_codspeed import BenchmarkFixture # type: ignore[import-untyped] +from yarl import URL + +from aiohttp.cookiejar import CookieJar + + +async def test_load_cookies_into_temp_cookiejar(benchmark: BenchmarkFixture) -> None: + """Benchmark for creating a temp CookieJar and filtering by URL. + + This benchmark matches what the client request does when cookies + are passed to the request. + """ + all_cookies: BaseCookie[str] = BaseCookie() + url = URL("http://example.com") + cookies = {"cookie1": "value1", "cookie2": "value2"} + + @benchmark + def _run() -> None: + tmp_cookie_jar = CookieJar() + tmp_cookie_jar.update_cookies(cookies) + req_cookies = tmp_cookie_jar.filter_cookies(url) + all_cookies.load(req_cookies) From 37ceac7d1f8deb0e512505c4dbac03d8005f37fe Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 25 Oct 2024 23:30:32 +0000 Subject: [PATCH 0761/1511] [PR #9538/7982b599 backport][3.11] Add WebSocket read and send benchmarks for the http writer (#9540) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- .mypy.ini | 5 ++ tests/test_benchmarks_http_websocket.py | 64 +++++++++++++++++++++++++ 2 files changed, 69 insertions(+) create mode 100644 tests/test_benchmarks_http_websocket.py diff --git a/.mypy.ini b/.mypy.ini index 4d8e38d6952..c4bc02d7b94 100644 --- a/.mypy.ini +++ b/.mypy.ini @@ -50,3 +50,8 @@ disable_error_code = disable_error_code = no-any-unimported, misc + +[mypy-test_benchmarks_http_websocket] +disable_error_code = + no-any-unimported, + misc diff --git a/tests/test_benchmarks_http_websocket.py b/tests/test_benchmarks_http_websocket.py new file mode 100644 index 00000000000..a0a80dfd985 --- /dev/null +++ b/tests/test_benchmarks_http_websocket.py @@ -0,0 +1,64 @@ +"""codspeed benchmarks for http websocket.""" + +import asyncio + +from pytest_codspeed import BenchmarkFixture # type: ignore[import-untyped] + +from aiohttp import DataQueue +from aiohttp.base_protocol import BaseProtocol +from aiohttp.http_websocket import ( + WebSocketReader, + WebSocketWriter, + WSMessage, + WSMsgType, +) + + +def test_read_one_hundred_websocket_text_messages( + loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture +) -> None: + queue: DataQueue[WSMessage] = DataQueue(loop=loop) + reader = WebSocketReader(queue, max_msg_size=2**16) + raw_message = ( + b'\x81~\x01!{"id":1,"src":"shellyplugus-c049ef8c30e4","dst":"aios-1453812500' + b'8","result":{"name":null,"id":"shellyplugus-c049ef8c30e4","mac":"C049EF8C30E' + b'4","slot":1,"model":"SNPL-00116US","gen":2,"fw_id":"20231219-133953/1.1.0-g3' + b'4b5d4f","ver":"1.1.0","app":"PlugUS","auth_en":false,"auth_domain":null}}' + ) + feed_data = reader.feed_data + + @benchmark + def _run() -> None: + for _ in range(100): + feed_data(raw_message) + + +def test_send_one_hundred_websocket_text_messages( + loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture +) -> None: + + class MockTransport(asyncio.Transport): + """Mock transport for testing that do no real I/O.""" + + def 
is_closing(self) -> bool: + """Swallow is_closing.""" + return False + + def write(self, data: bytes) -> None: + """Swallow writes.""" + + class MockProtocol(BaseProtocol): + + async def _drain_helper(self) -> None: + """Swallow drain.""" + + writer = WebSocketWriter(MockProtocol(loop=loop), MockTransport()) + raw_message = b"Hello, World!" * 100 + + async def _send_one_hundred_websocket_text_messages() -> None: + for _ in range(100): + await writer.send_frame(raw_message, WSMsgType.TEXT) + + @benchmark + def _run() -> None: + loop.run_until_complete(_send_one_hundred_websocket_text_messages()) From 2b43848c6750e546ac59599090304e8031c7cb99 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 26 Oct 2024 01:57:00 +0000 Subject: [PATCH 0762/1511] [PR #9538/7982b599 backport][3.10] Add WebSocket read and send benchmarks for the http writer (#9539) Co-authored-by: J. Nick Koston <nick@koston.org> --- .mypy.ini | 5 ++ tests/test_benchmarks_http_websocket.py | 64 +++++++++++++++++++++++++ 2 files changed, 69 insertions(+) create mode 100644 tests/test_benchmarks_http_websocket.py diff --git a/.mypy.ini b/.mypy.ini index 4d8e38d6952..c4bc02d7b94 100644 --- a/.mypy.ini +++ b/.mypy.ini @@ -50,3 +50,8 @@ disable_error_code = disable_error_code = no-any-unimported, misc + +[mypy-test_benchmarks_http_websocket] +disable_error_code = + no-any-unimported, + misc diff --git a/tests/test_benchmarks_http_websocket.py b/tests/test_benchmarks_http_websocket.py new file mode 100644 index 00000000000..3502ab4ea73 --- /dev/null +++ b/tests/test_benchmarks_http_websocket.py @@ -0,0 +1,64 @@ +"""codspeed benchmarks for http websocket.""" + +import asyncio + +from pytest_codspeed import BenchmarkFixture # type: ignore[import-untyped] + +from aiohttp import DataQueue +from aiohttp.base_protocol import BaseProtocol +from aiohttp.http_websocket import ( + WebSocketReader, + WebSocketWriter, + WSMessage, + WSMsgType, +) + + +def 
test_read_one_hundred_websocket_text_messages( + loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture +) -> None: + queue: DataQueue[WSMessage] = DataQueue(loop=loop) + reader = WebSocketReader(queue, max_msg_size=2**16) + raw_message = ( + b'\x81~\x01!{"id":1,"src":"shellyplugus-c049ef8c30e4","dst":"aios-1453812500' + b'8","result":{"name":null,"id":"shellyplugus-c049ef8c30e4","mac":"C049EF8C30E' + b'4","slot":1,"model":"SNPL-00116US","gen":2,"fw_id":"20231219-133953/1.1.0-g3' + b'4b5d4f","ver":"1.1.0","app":"PlugUS","auth_en":false,"auth_domain":null}}' + ) + feed_data = reader.feed_data + + @benchmark + def _run() -> None: + for _ in range(100): + feed_data(raw_message) + + +def test_send_one_hundred_websocket_text_messages( + loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture +) -> None: + + class MockTransport(asyncio.Transport): + """Mock transport for testing that do no real I/O.""" + + def is_closing(self) -> bool: + """Swallow is_closing.""" + return False + + def write(self, data: bytes) -> None: + """Swallow writes.""" + + class MockProtocol(BaseProtocol): + + async def _drain_helper(self) -> None: + """Swallow drain.""" + + writer = WebSocketWriter(MockProtocol(loop=loop), MockTransport()) + raw_message = b"Hello, World!" * 100 + + async def _send_one_hundred_websocket_text_messages() -> None: + for _ in range(100): + await writer._send_frame(raw_message, WSMsgType.TEXT) + + @benchmark + def _run() -> None: + loop.run_until_complete(_send_one_hundred_websocket_text_messages()) From 751af18f4a7460fc46abc6d3a42dfbf166c3ef62 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sun, 27 Oct 2024 09:44:43 -1000 Subject: [PATCH 0763/1511] Separate `aiohttp.http_websocket` into multiple files (#9542) (#9545) --- CHANGES/9542.packaging.rst | 1 + Makefile | 11 +- aiohttp/_websocket/__init__.py | 1 + aiohttp/_websocket/helpers.py | 148 ++++ .../{_websocket.pyx => _websocket/mask.pyx} | 0 aiohttp/_websocket/models.py | 84 ++ aiohttp/_websocket/reader.py | 342 ++++++++ aiohttp/_websocket/writer.py | 183 +++++ aiohttp/http_websocket.py | 753 +----------------- setup.py | 2 +- tests/test_websocket_parser.py | 28 +- tests/test_websocket_writer.py | 2 +- 12 files changed, 797 insertions(+), 758 deletions(-) create mode 100644 CHANGES/9542.packaging.rst create mode 100644 aiohttp/_websocket/__init__.py create mode 100644 aiohttp/_websocket/helpers.py rename aiohttp/{_websocket.pyx => _websocket/mask.pyx} (100%) create mode 100644 aiohttp/_websocket/models.py create mode 100644 aiohttp/_websocket/reader.py create mode 100644 aiohttp/_websocket/writer.py diff --git a/CHANGES/9542.packaging.rst b/CHANGES/9542.packaging.rst new file mode 100644 index 00000000000..c77b962994f --- /dev/null +++ b/CHANGES/9542.packaging.rst @@ -0,0 +1 @@ +Separated ``aiohttp.http_websocket`` into multiple files to make it easier to maintain -- by :user:`bdraco`. 
diff --git a/Makefile b/Makefile index 2a40be049ee..be217d1d554 100644 --- a/Makefile +++ b/Makefile @@ -3,10 +3,10 @@ to-hash-one = $(dir $1).hash/$(addsuffix .hash,$(notdir $1)) to-hash = $(foreach fname,$1,$(call to-hash-one,$(fname))) -CYS := $(wildcard aiohttp/*.pyx) $(wildcard aiohttp/*.pyi) $(wildcard aiohttp/*.pxd) -PYXS := $(wildcard aiohttp/*.pyx) -CS := $(wildcard aiohttp/*.c) -PYS := $(wildcard aiohttp/*.py) +CYS := $(wildcard aiohttp/*.pyx) $(wildcard aiohttp/*.pyi) $(wildcard aiohttp/*.pxd) $(wildcard aiohttp/_websocket/*.pyx) $(wildcard aiohttp/_websocket/*.pyi) $(wildcard aiohttp/_websocket/*.pxd) +PYXS := $(wildcard aiohttp/*.pyx) $(wildcard aiohttp/_websocket/*.pyx) +CS := $(wildcard aiohttp/*.c) $(wildcard aiohttp/_websocket/*.c) +PYS := $(wildcard aiohttp/*.py) $(wildcard aiohttp/_websocket/*.py) IN := doc-spelling lint cython dev ALLS := $(sort $(CYS) $(CS) $(PYS) $(REQS)) @@ -60,6 +60,9 @@ aiohttp/_find_header.c: $(call to-hash,aiohttp/hdrs.py ./tools/gen.py) aiohttp/%.c: aiohttp/%.pyx $(call to-hash,$(CYS)) aiohttp/_find_header.c cython -3 -o $@ $< -I aiohttp -Werror +aiohttp/_websocket/%.c: aiohttp/_websocket/%.pyx $(call to-hash,$(CYS)) + cython -3 -o $@ $< -I aiohttp -Werror + vendor/llhttp/node_modules: vendor/llhttp/package.json cd vendor/llhttp; npm ci diff --git a/aiohttp/_websocket/__init__.py b/aiohttp/_websocket/__init__.py new file mode 100644 index 00000000000..836257cc47a --- /dev/null +++ b/aiohttp/_websocket/__init__.py @@ -0,0 +1 @@ +"""WebSocket protocol versions 13 and 8.""" diff --git a/aiohttp/_websocket/helpers.py b/aiohttp/_websocket/helpers.py new file mode 100644 index 00000000000..41273dd3230 --- /dev/null +++ b/aiohttp/_websocket/helpers.py @@ -0,0 +1,148 @@ +"""Helpers for WebSocket protocol versions 13 and 8.""" + +import functools +import re +from struct import Struct +from typing import TYPE_CHECKING, Final, List, Optional, Pattern, Tuple + +from ..helpers import NO_EXTENSIONS +from .models import 
WSHandshakeError + +UNPACK_LEN2 = Struct("!H").unpack_from +UNPACK_LEN3 = Struct("!Q").unpack_from +UNPACK_CLOSE_CODE = Struct("!H").unpack +PACK_LEN1 = Struct("!BB").pack +PACK_LEN2 = Struct("!BBH").pack +PACK_LEN3 = Struct("!BBQ").pack +PACK_CLOSE_CODE = Struct("!H").pack +PACK_RANDBITS = Struct("!L").pack +MSG_SIZE: Final[int] = 2**14 +MASK_LEN: Final[int] = 4 + +WS_KEY: Final[bytes] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11" + + +# Used by _websocket_mask_python +@functools.lru_cache +def _xor_table() -> List[bytes]: + return [bytes(a ^ b for a in range(256)) for b in range(256)] + + +def _websocket_mask_python(mask: bytes, data: bytearray) -> None: + """Websocket masking function. + + `mask` is a `bytes` object of length 4; `data` is a `bytearray` + object of any length. The contents of `data` are masked with `mask`, + as specified in section 5.3 of RFC 6455. + + Note that this function mutates the `data` argument. + + This pure-python implementation may be replaced by an optimized + version when available. 
+ + """ + assert isinstance(data, bytearray), data + assert len(mask) == 4, mask + + if data: + _XOR_TABLE = _xor_table() + a, b, c, d = (_XOR_TABLE[n] for n in mask) + data[::4] = data[::4].translate(a) + data[1::4] = data[1::4].translate(b) + data[2::4] = data[2::4].translate(c) + data[3::4] = data[3::4].translate(d) + + +if TYPE_CHECKING or NO_EXTENSIONS: # pragma: no cover + websocket_mask = _websocket_mask_python +else: + try: + from ._websocket import _websocket_mask_cython # type: ignore[import-not-found] + + websocket_mask = _websocket_mask_cython + except ImportError: # pragma: no cover + websocket_mask = _websocket_mask_python + + +_WS_EXT_RE: Final[Pattern[str]] = re.compile( + r"^(?:;\s*(?:" + r"(server_no_context_takeover)|" + r"(client_no_context_takeover)|" + r"(server_max_window_bits(?:=(\d+))?)|" + r"(client_max_window_bits(?:=(\d+))?)))*$" +) + +_WS_EXT_RE_SPLIT: Final[Pattern[str]] = re.compile(r"permessage-deflate([^,]+)?") + + +def ws_ext_parse(extstr: Optional[str], isserver: bool = False) -> Tuple[int, bool]: + if not extstr: + return 0, False + + compress = 0 + notakeover = False + for ext in _WS_EXT_RE_SPLIT.finditer(extstr): + defext = ext.group(1) + # Return compress = 15 when get `permessage-deflate` + if not defext: + compress = 15 + break + match = _WS_EXT_RE.match(defext) + if match: + compress = 15 + if isserver: + # Server never fail to detect compress handshake. 
+ # Server does not need to send max wbit to client + if match.group(4): + compress = int(match.group(4)) + # Group3 must match if group4 matches + # Compress wbit 8 does not support in zlib + # If compress level not support, + # CONTINUE to next extension + if compress > 15 or compress < 9: + compress = 0 + continue + if match.group(1): + notakeover = True + # Ignore regex group 5 & 6 for client_max_window_bits + break + else: + if match.group(6): + compress = int(match.group(6)) + # Group5 must match if group6 matches + # Compress wbit 8 does not support in zlib + # If compress level not support, + # FAIL the parse progress + if compress > 15 or compress < 9: + raise WSHandshakeError("Invalid window size") + if match.group(2): + notakeover = True + # Ignore regex group 5 & 6 for client_max_window_bits + break + # Return Fail if client side and not match + elif not isserver: + raise WSHandshakeError("Extension for deflate not supported" + ext.group(1)) + + return compress, notakeover + + +def ws_ext_gen( + compress: int = 15, isserver: bool = False, server_notakeover: bool = False +) -> str: + # client_notakeover=False not used for server + # compress wbit 8 does not support in zlib + if compress < 9 or compress > 15: + raise ValueError( + "Compress wbits must between 9 and 15, zlib does not support wbits=8" + ) + enabledext = ["permessage-deflate"] + if not isserver: + enabledext.append("client_max_window_bits") + + if compress < 15: + enabledext.append("server_max_window_bits=" + str(compress)) + if server_notakeover: + enabledext.append("server_no_context_takeover") + # if client_notakeover: + # enabledext.append('client_no_context_takeover') + return "; ".join(enabledext) diff --git a/aiohttp/_websocket.pyx b/aiohttp/_websocket/mask.pyx similarity index 100% rename from aiohttp/_websocket.pyx rename to aiohttp/_websocket/mask.pyx diff --git a/aiohttp/_websocket/models.py b/aiohttp/_websocket/models.py new file mode 100644 index 00000000000..7e89b965295 --- 
/dev/null +++ b/aiohttp/_websocket/models.py @@ -0,0 +1,84 @@ +"""Models for WebSocket protocol versions 13 and 8.""" + +import json +from enum import IntEnum +from typing import Any, Callable, Final, NamedTuple, Optional, cast + +WS_DEFLATE_TRAILING: Final[bytes] = bytes([0x00, 0x00, 0xFF, 0xFF]) + + +class WSCloseCode(IntEnum): + OK = 1000 + GOING_AWAY = 1001 + PROTOCOL_ERROR = 1002 + UNSUPPORTED_DATA = 1003 + ABNORMAL_CLOSURE = 1006 + INVALID_TEXT = 1007 + POLICY_VIOLATION = 1008 + MESSAGE_TOO_BIG = 1009 + MANDATORY_EXTENSION = 1010 + INTERNAL_ERROR = 1011 + SERVICE_RESTART = 1012 + TRY_AGAIN_LATER = 1013 + BAD_GATEWAY = 1014 + + +class WSMsgType(IntEnum): + # websocket spec types + CONTINUATION = 0x0 + TEXT = 0x1 + BINARY = 0x2 + PING = 0x9 + PONG = 0xA + CLOSE = 0x8 + + # aiohttp specific types + CLOSING = 0x100 + CLOSED = 0x101 + ERROR = 0x102 + + text = TEXT + binary = BINARY + ping = PING + pong = PONG + close = CLOSE + closing = CLOSING + closed = CLOSED + error = ERROR + + +class WSMessage(NamedTuple): + type: WSMsgType + # To type correctly, this would need some kind of tagged union for each type. + data: Any + extra: Optional[str] + + def json(self, *, loads: Callable[[Any], Any] = json.loads) -> Any: + """Return parsed JSON data. + + .. 
versionadded:: 0.22 + """ + return loads(self.data) + + +# Constructing the tuple directly to avoid the overhead of +# the lambda and arg processing since NamedTuples are constructed +# with a run time built lambda +# https://github.com/python/cpython/blob/d83fcf8371f2f33c7797bc8f5423a8bca8c46e5c/Lib/collections/__init__.py#L441 +WS_CLOSED_MESSAGE = tuple.__new__(WSMessage, (WSMsgType.CLOSED, None, None)) +WS_CLOSING_MESSAGE = tuple.__new__(WSMessage, (WSMsgType.CLOSING, None, None)) + + +class WebSocketError(Exception): + """WebSocket protocol parser error.""" + + def __init__(self, code: int, message: str) -> None: + self.code = code + super().__init__(code, message) + + def __str__(self) -> str: + return cast(str, self.args[1]) + + +class WSHandshakeError(Exception): + """WebSocket protocol handshake error.""" diff --git a/aiohttp/_websocket/reader.py b/aiohttp/_websocket/reader.py new file mode 100644 index 00000000000..22ae7c1cc0d --- /dev/null +++ b/aiohttp/_websocket/reader.py @@ -0,0 +1,342 @@ +"""Reader for WebSocket protocol versions 13 and 8.""" + +from enum import IntEnum +from typing import Final, List, Optional, Set, Tuple + +from ..compression_utils import ZLibDecompressor +from ..helpers import set_exception +from ..streams import DataQueue +from .helpers import UNPACK_CLOSE_CODE, UNPACK_LEN2, UNPACK_LEN3, websocket_mask +from .models import ( + WS_DEFLATE_TRAILING, + WebSocketError, + WSCloseCode, + WSMessage, + WSMsgType, +) + +MESSAGE_TYPES_WITH_CONTENT: Final = frozenset( + { + WSMsgType.BINARY, + WSMsgType.TEXT, + WSMsgType.CONTINUATION, + } +) + +ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode} + + +class WSParserState(IntEnum): + READ_HEADER = 1 + READ_PAYLOAD_LENGTH = 2 + READ_PAYLOAD_MASK = 3 + READ_PAYLOAD = 4 + + +class WebSocketReader: + def __init__( + self, queue: DataQueue[WSMessage], max_msg_size: int, compress: bool = True + ) -> None: + self.queue = queue + self._max_msg_size = max_msg_size + + self._exc: 
Optional[BaseException] = None + self._partial = bytearray() + self._state = WSParserState.READ_HEADER + + self._opcode: Optional[int] = None + self._frame_fin = False + self._frame_opcode: Optional[int] = None + self._frame_payload = bytearray() + + self._tail: bytes = b"" + self._has_mask = False + self._frame_mask: Optional[bytes] = None + self._payload_length = 0 + self._payload_length_flag = 0 + self._compressed: Optional[bool] = None + self._decompressobj: Optional[ZLibDecompressor] = None + self._compress = compress + + def feed_eof(self) -> None: + self.queue.feed_eof() + + def feed_data(self, data: bytes) -> Tuple[bool, bytes]: + if self._exc: + return True, data + + try: + self._feed_data(data) + except Exception as exc: + self._exc = exc + set_exception(self.queue, exc) + return True, b"" + + return False, b"" + + def _feed_data(self, data: bytes) -> None: + for fin, opcode, payload, compressed in self.parse_frame(data): + if opcode in MESSAGE_TYPES_WITH_CONTENT: + # load text/binary + is_continuation = opcode == WSMsgType.CONTINUATION + if not fin: + # got partial frame payload + if not is_continuation: + self._opcode = opcode + self._partial += payload + if self._max_msg_size and len(self._partial) >= self._max_msg_size: + raise WebSocketError( + WSCloseCode.MESSAGE_TOO_BIG, + "Message size {} exceeds limit {}".format( + len(self._partial), self._max_msg_size + ), + ) + continue + + has_partial = bool(self._partial) + if is_continuation: + if self._opcode is None: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Continuation frame for non started message", + ) + opcode = self._opcode + self._opcode = None + # previous frame was non finished + # we should get continuation opcode + elif has_partial: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "The opcode in non-fin frame is expected " + "to be zero, got {!r}".format(opcode), + ) + + if has_partial: + assembled_payload = self._partial + payload + self._partial.clear() + else: + 
assembled_payload = payload + + if self._max_msg_size and len(assembled_payload) >= self._max_msg_size: + raise WebSocketError( + WSCloseCode.MESSAGE_TOO_BIG, + "Message size {} exceeds limit {}".format( + len(assembled_payload), self._max_msg_size + ), + ) + + # Decompress process must to be done after all packets + # received. + if compressed: + if not self._decompressobj: + self._decompressobj = ZLibDecompressor( + suppress_deflate_header=True + ) + payload_merged = self._decompressobj.decompress_sync( + assembled_payload + WS_DEFLATE_TRAILING, self._max_msg_size + ) + if self._decompressobj.unconsumed_tail: + left = len(self._decompressobj.unconsumed_tail) + raise WebSocketError( + WSCloseCode.MESSAGE_TOO_BIG, + "Decompressed message size {} exceeds limit {}".format( + self._max_msg_size + left, self._max_msg_size + ), + ) + else: + payload_merged = bytes(assembled_payload) + + if opcode == WSMsgType.TEXT: + try: + text = payload_merged.decode("utf-8") + except UnicodeDecodeError as exc: + raise WebSocketError( + WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" + ) from exc + + # tuple.__new__ is used to avoid the overhead of the lambda + msg = tuple.__new__(WSMessage, (WSMsgType.TEXT, text, "")) + self.queue.feed_data(msg, len(text)) + continue + + # tuple.__new__ is used to avoid the overhead of the lambda + msg = tuple.__new__(WSMessage, (WSMsgType.BINARY, payload_merged, "")) + self.queue.feed_data(msg, len(payload_merged)) + elif opcode == WSMsgType.CLOSE: + if len(payload) >= 2: + close_code = UNPACK_CLOSE_CODE(payload[:2])[0] + if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + f"Invalid close code: {close_code}", + ) + try: + close_message = payload[2:].decode("utf-8") + except UnicodeDecodeError as exc: + raise WebSocketError( + WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" + ) from exc + msg = tuple.__new__( + WSMessage, (WSMsgType.CLOSE, close_code, close_message) + ) 
+ elif payload: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + f"Invalid close frame: {fin} {opcode} {payload!r}", + ) + else: + msg = tuple.__new__(WSMessage, (WSMsgType.CLOSE, 0, "")) + + self.queue.feed_data(msg, 0) + + elif opcode == WSMsgType.PING: + msg = tuple.__new__(WSMessage, (WSMsgType.PING, payload, "")) + self.queue.feed_data(msg, len(payload)) + + elif opcode == WSMsgType.PONG: + msg = tuple.__new__(WSMessage, (WSMsgType.PONG, payload, "")) + self.queue.feed_data(msg, len(payload)) + + else: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}" + ) + + def parse_frame( + self, buf: bytes + ) -> List[Tuple[bool, Optional[int], bytearray, Optional[bool]]]: + """Return the next frame from the socket.""" + frames: List[Tuple[bool, Optional[int], bytearray, Optional[bool]]] = [] + if self._tail: + buf, self._tail = self._tail + buf, b"" + + start_pos: int = 0 + buf_length = len(buf) + + while True: + # read header + if self._state is WSParserState.READ_HEADER: + if buf_length - start_pos < 2: + break + data = buf[start_pos : start_pos + 2] + start_pos += 2 + first_byte, second_byte = data + + fin = (first_byte >> 7) & 1 + rsv1 = (first_byte >> 6) & 1 + rsv2 = (first_byte >> 5) & 1 + rsv3 = (first_byte >> 4) & 1 + opcode = first_byte & 0xF + + # frame-fin = %x0 ; more frames of this message follow + # / %x1 ; final frame of this message + # frame-rsv1 = %x0 ; + # 1 bit, MUST be 0 unless negotiated otherwise + # frame-rsv2 = %x0 ; + # 1 bit, MUST be 0 unless negotiated otherwise + # frame-rsv3 = %x0 ; + # 1 bit, MUST be 0 unless negotiated otherwise + # + # Remove rsv1 from this test for deflate development + if rsv2 or rsv3 or (rsv1 and not self._compress): + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Received frame with non-zero reserved bits", + ) + + if opcode > 0x7 and fin == 0: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Received fragmented control frame", + ) + + has_mask = (second_byte 
>> 7) & 1 + length = second_byte & 0x7F + + # Control frames MUST have a payload + # length of 125 bytes or less + if opcode > 0x7 and length > 125: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Control frame payload cannot be larger than 125 bytes", + ) + + # Set compress status if last package is FIN + # OR set compress status if this is first fragment + # Raise error if not first fragment with rsv1 = 0x1 + if self._frame_fin or self._compressed is None: + self._compressed = True if rsv1 else False + elif rsv1: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Received frame with non-zero reserved bits", + ) + + self._frame_fin = bool(fin) + self._frame_opcode = opcode + self._has_mask = bool(has_mask) + self._payload_length_flag = length + self._state = WSParserState.READ_PAYLOAD_LENGTH + + # read payload length + if self._state is WSParserState.READ_PAYLOAD_LENGTH: + length_flag = self._payload_length_flag + if length_flag == 126: + if buf_length - start_pos < 2: + break + data = buf[start_pos : start_pos + 2] + start_pos += 2 + self._payload_length = UNPACK_LEN2(data)[0] + elif length_flag > 126: + if buf_length - start_pos < 8: + break + data = buf[start_pos : start_pos + 8] + start_pos += 8 + self._payload_length = UNPACK_LEN3(data)[0] + else: + self._payload_length = length_flag + + self._state = ( + WSParserState.READ_PAYLOAD_MASK + if self._has_mask + else WSParserState.READ_PAYLOAD + ) + + # read payload mask + if self._state is WSParserState.READ_PAYLOAD_MASK: + if buf_length - start_pos < 4: + break + self._frame_mask = buf[start_pos : start_pos + 4] + start_pos += 4 + self._state = WSParserState.READ_PAYLOAD + + if self._state is WSParserState.READ_PAYLOAD: + length = self._payload_length + payload = self._frame_payload + + chunk_len = buf_length - start_pos + if length >= chunk_len: + self._payload_length = length - chunk_len + payload += buf[start_pos:] + start_pos = buf_length + else: + self._payload_length = 0 + payload += 
buf[start_pos : start_pos + length] + start_pos = start_pos + length + + if self._payload_length != 0: + break + + if self._has_mask: + assert self._frame_mask is not None + websocket_mask(self._frame_mask, payload) + + frames.append( + (self._frame_fin, self._frame_opcode, payload, self._compressed) + ) + self._frame_payload = bytearray() + self._state = WSParserState.READ_HEADER + + self._tail = buf[start_pos:] + + return frames diff --git a/aiohttp/_websocket/writer.py b/aiohttp/_websocket/writer.py new file mode 100644 index 00000000000..04fb22ea3d4 --- /dev/null +++ b/aiohttp/_websocket/writer.py @@ -0,0 +1,183 @@ +"""WebSocket protocol versions 13 and 8.""" + +import asyncio +import random +import zlib +from functools import partial +from typing import Any, Final, Optional, Union + +from ..base_protocol import BaseProtocol +from ..client_exceptions import ClientConnectionResetError +from ..compression_utils import ZLibCompressor +from .helpers import ( + MASK_LEN, + MSG_SIZE, + PACK_CLOSE_CODE, + PACK_LEN1, + PACK_LEN2, + PACK_LEN3, + PACK_RANDBITS, + websocket_mask, +) +from .models import WS_DEFLATE_TRAILING, WSMsgType + +DEFAULT_LIMIT: Final[int] = 2**16 + +# For websockets, keeping latency low is extremely important as implementations +# generally expect to be able to send and receive messages quickly. We use a +# larger chunk size than the default to reduce the number of executor calls +# since the executor is a significant source of latency and overhead when +# the chunks are small. A size of 5KiB was chosen because it is also the +# same value python-zlib-ng choose to use as the threshold to release the GIL. 
+ +WEBSOCKET_MAX_SYNC_CHUNK_SIZE = 5 * 1024 + + +class WebSocketWriter: + def __init__( + self, + protocol: BaseProtocol, + transport: asyncio.Transport, + *, + use_mask: bool = False, + limit: int = DEFAULT_LIMIT, + random: random.Random = random.Random(), + compress: int = 0, + notakeover: bool = False, + ) -> None: + self.protocol = protocol + self.transport = transport + self.use_mask = use_mask + self.get_random_bits = partial(random.getrandbits, 32) + self.compress = compress + self.notakeover = notakeover + self._closing = False + self._limit = limit + self._output_size = 0 + self._compressobj: Any = None # actually compressobj + + async def send_frame( + self, message: bytes, opcode: int, compress: Optional[int] = None + ) -> None: + """Send a frame over the websocket with message as its payload.""" + if self._closing and not (opcode & WSMsgType.CLOSE): + raise ClientConnectionResetError("Cannot write to closing transport") + + # RSV are the reserved bits in the frame header. They are used to + # indicate that the frame is using an extension. + # https://datatracker.ietf.org/doc/html/rfc6455#section-5.2 + rsv = 0 + # Only compress larger packets (disabled) + # Does small packet needs to be compressed? + # if self.compress and opcode < 8 and len(message) > 124: + if (compress or self.compress) and opcode < 8: + # RSV1 (rsv = 0x40) is set for compressed frames + # https://datatracker.ietf.org/doc/html/rfc7692#section-7.2.3.1 + rsv = 0x40 + + if compress: + # Do not set self._compress if compressing is for this frame + compressobj = self._make_compress_obj(compress) + else: # self.compress + if not self._compressobj: + self._compressobj = self._make_compress_obj(self.compress) + compressobj = self._compressobj + + message = await compressobj.compress(message) + # Its critical that we do not return control to the event + # loop until we have finished sending all the compressed + # data. 
Otherwise we could end up mixing compressed frames + # if there are multiple coroutines compressing data. + message += compressobj.flush( + zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH + ) + if message.endswith(WS_DEFLATE_TRAILING): + message = message[:-4] + + msg_length = len(message) + + use_mask = self.use_mask + mask_bit = 0x80 if use_mask else 0 + + # Depending on the message length, the header is assembled differently. + # The first byte is reserved for the opcode and the RSV bits. + first_byte = 0x80 | rsv | opcode + if msg_length < 126: + header = PACK_LEN1(first_byte, msg_length | mask_bit) + header_len = 2 + elif msg_length < (1 << 16): + header = PACK_LEN2(first_byte, 126 | mask_bit, msg_length) + header_len = 4 + else: + header = PACK_LEN3(first_byte, 127 | mask_bit, msg_length) + header_len = 10 + + # https://datatracker.ietf.org/doc/html/rfc6455#section-5.3 + # If we are using a mask, we need to generate it randomly + # and apply it to the message before sending it. A mask is + # a 32-bit value that is applied to the message using a + # bitwise XOR operation. It is used to prevent certain types + # of attacks on the websocket protocol. The mask is only used + # when aiohttp is acting as a client. Servers do not use a mask. + if use_mask: + mask = PACK_RANDBITS(self.get_random_bits()) + message = bytearray(message) + websocket_mask(mask, message) + self._write(header + mask + message) + self._output_size += header_len + MASK_LEN + msg_length + + else: + if msg_length > MSG_SIZE: + self._write(header) + self._write(message) + else: + self._write(header + message) + + self._output_size += header_len + msg_length + + # It is safe to return control to the event loop when using compression + # after this point as we have already sent or buffered all the data. + + # Once we have written output_size up to the limit, we call the + # drain helper which waits for the transport to be ready to accept + # more data. 
This is a flow control mechanism to prevent the buffer + # from growing too large. The drain helper will return right away + # if the writer is not paused. + if self._output_size > self._limit: + self._output_size = 0 + await self.protocol._drain_helper() + + def _make_compress_obj(self, compress: int) -> ZLibCompressor: + return ZLibCompressor( + level=zlib.Z_BEST_SPEED, + wbits=-compress, + max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE, + ) + + def _write(self, data: bytes) -> None: + if self.transport is None or self.transport.is_closing(): + raise ClientConnectionResetError("Cannot write to closing transport") + self.transport.write(data) + + async def pong(self, message: Union[bytes, str] = b"") -> None: + """Send pong message.""" + if isinstance(message, str): + message = message.encode("utf-8") + await self.send_frame(message, WSMsgType.PONG) + + async def ping(self, message: Union[bytes, str] = b"") -> None: + """Send ping message.""" + if isinstance(message, str): + message = message.encode("utf-8") + await self.send_frame(message, WSMsgType.PING) + + async def close(self, code: int = 1000, message: Union[bytes, str] = b"") -> None: + """Close the websocket, sending the specified code and message.""" + if isinstance(message, str): + message = message.encode("utf-8") + try: + await self.send_frame( + PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE + ) + finally: + self._closing = True diff --git a/aiohttp/http_websocket.py b/aiohttp/http_websocket.py index ad731c496c6..92b5f67f0c0 100644 --- a/aiohttp/http_websocket.py +++ b/aiohttp/http_websocket.py @@ -1,34 +1,17 @@ """WebSocket protocol versions 13 and 8.""" -import asyncio -import functools -import json -import random -import re -import sys -import zlib -from enum import IntEnum -from functools import partial -from struct import Struct -from typing import ( - Any, - Callable, - Final, - List, - NamedTuple, - Optional, - Pattern, - Set, - Tuple, - Union, - cast, +from ._websocket.helpers 
import WS_KEY, ws_ext_gen, ws_ext_parse +from ._websocket.models import ( + WS_CLOSED_MESSAGE, + WS_CLOSING_MESSAGE, + WebSocketError, + WSCloseCode, + WSHandshakeError, + WSMessage, + WSMsgType, ) - -from .base_protocol import BaseProtocol -from .client_exceptions import ClientConnectionResetError -from .compression_utils import ZLibCompressor, ZLibDecompressor -from .helpers import NO_EXTENSIONS, set_exception -from .streams import DataQueue +from ._websocket.reader import WebSocketReader +from ._websocket.writer import WebSocketWriter __all__ = ( "WS_CLOSED_MESSAGE", @@ -40,714 +23,8 @@ "WebSocketError", "WSMsgType", "WSCloseCode", + "ws_ext_gen", + "ws_ext_parse", + "WSHandshakeError", + "WSMessage", ) - - -class WSCloseCode(IntEnum): - OK = 1000 - GOING_AWAY = 1001 - PROTOCOL_ERROR = 1002 - UNSUPPORTED_DATA = 1003 - ABNORMAL_CLOSURE = 1006 - INVALID_TEXT = 1007 - POLICY_VIOLATION = 1008 - MESSAGE_TOO_BIG = 1009 - MANDATORY_EXTENSION = 1010 - INTERNAL_ERROR = 1011 - SERVICE_RESTART = 1012 - TRY_AGAIN_LATER = 1013 - BAD_GATEWAY = 1014 - - -ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode} - -# For websockets, keeping latency low is extremely important as implementations -# generally expect to be able to send and receive messages quickly. We use a -# larger chunk size than the default to reduce the number of executor calls -# since the executor is a significant source of latency and overhead when -# the chunks are small. A size of 5KiB was chosen because it is also the -# same value python-zlib-ng choose to use as the threshold to release the GIL. 
- -WEBSOCKET_MAX_SYNC_CHUNK_SIZE = 5 * 1024 - - -class WSMsgType(IntEnum): - # websocket spec types - CONTINUATION = 0x0 - TEXT = 0x1 - BINARY = 0x2 - PING = 0x9 - PONG = 0xA - CLOSE = 0x8 - - # aiohttp specific types - CLOSING = 0x100 - CLOSED = 0x101 - ERROR = 0x102 - - text = TEXT - binary = BINARY - ping = PING - pong = PONG - close = CLOSE - closing = CLOSING - closed = CLOSED - error = ERROR - - -MESSAGE_TYPES_WITH_CONTENT: Final = frozenset( - { - WSMsgType.BINARY, - WSMsgType.TEXT, - WSMsgType.CONTINUATION, - } -) - -WS_KEY: Final[bytes] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11" - - -UNPACK_LEN2 = Struct("!H").unpack_from -UNPACK_LEN3 = Struct("!Q").unpack_from -UNPACK_CLOSE_CODE = Struct("!H").unpack -PACK_LEN1 = Struct("!BB").pack -PACK_LEN2 = Struct("!BBH").pack -PACK_LEN3 = Struct("!BBQ").pack -PACK_CLOSE_CODE = Struct("!H").pack -PACK_RANDBITS = Struct("!L").pack -MSG_SIZE: Final[int] = 2**14 -DEFAULT_LIMIT: Final[int] = 2**16 -MASK_LEN: Final[int] = 4 - - -class WSMessage(NamedTuple): - type: WSMsgType - # To type correctly, this would need some kind of tagged union for each type. - data: Any - extra: Optional[str] - - def json(self, *, loads: Callable[[Any], Any] = json.loads) -> Any: - """Return parsed JSON data. - - .. 
versionadded:: 0.22 - """ - return loads(self.data) - - -# Constructing the tuple directly to avoid the overhead of -# the lambda and arg processing since NamedTuples are constructed -# with a run time built lambda -# https://github.com/python/cpython/blob/d83fcf8371f2f33c7797bc8f5423a8bca8c46e5c/Lib/collections/__init__.py#L441 -WS_CLOSED_MESSAGE = tuple.__new__(WSMessage, (WSMsgType.CLOSED, None, None)) -WS_CLOSING_MESSAGE = tuple.__new__(WSMessage, (WSMsgType.CLOSING, None, None)) - - -class WebSocketError(Exception): - """WebSocket protocol parser error.""" - - def __init__(self, code: int, message: str) -> None: - self.code = code - super().__init__(code, message) - - def __str__(self) -> str: - return cast(str, self.args[1]) - - -class WSHandshakeError(Exception): - """WebSocket protocol handshake error.""" - - -native_byteorder: Final[str] = sys.byteorder - - -# Used by _websocket_mask_python -@functools.lru_cache -def _xor_table() -> List[bytes]: - return [bytes(a ^ b for a in range(256)) for b in range(256)] - - -def _websocket_mask_python(mask: bytes, data: bytearray) -> None: - """Websocket masking function. - - `mask` is a `bytes` object of length 4; `data` is a `bytearray` - object of any length. The contents of `data` are masked with `mask`, - as specified in section 5.3 of RFC 6455. - - Note that this function mutates the `data` argument. - - This pure-python implementation may be replaced by an optimized - version when available. 
- - """ - assert isinstance(data, bytearray), data - assert len(mask) == 4, mask - - if data: - _XOR_TABLE = _xor_table() - a, b, c, d = (_XOR_TABLE[n] for n in mask) - data[::4] = data[::4].translate(a) - data[1::4] = data[1::4].translate(b) - data[2::4] = data[2::4].translate(c) - data[3::4] = data[3::4].translate(d) - - -if NO_EXTENSIONS: # pragma: no cover - _websocket_mask = _websocket_mask_python -else: - try: - from ._websocket import _websocket_mask_cython # type: ignore[import-not-found] - - _websocket_mask = _websocket_mask_cython - except ImportError: # pragma: no cover - _websocket_mask = _websocket_mask_python - -_WS_DEFLATE_TRAILING: Final[bytes] = bytes([0x00, 0x00, 0xFF, 0xFF]) - - -_WS_EXT_RE: Final[Pattern[str]] = re.compile( - r"^(?:;\s*(?:" - r"(server_no_context_takeover)|" - r"(client_no_context_takeover)|" - r"(server_max_window_bits(?:=(\d+))?)|" - r"(client_max_window_bits(?:=(\d+))?)))*$" -) - -_WS_EXT_RE_SPLIT: Final[Pattern[str]] = re.compile(r"permessage-deflate([^,]+)?") - - -def ws_ext_parse(extstr: Optional[str], isserver: bool = False) -> Tuple[int, bool]: - if not extstr: - return 0, False - - compress = 0 - notakeover = False - for ext in _WS_EXT_RE_SPLIT.finditer(extstr): - defext = ext.group(1) - # Return compress = 15 when get `permessage-deflate` - if not defext: - compress = 15 - break - match = _WS_EXT_RE.match(defext) - if match: - compress = 15 - if isserver: - # Server never fail to detect compress handshake. 
- # Server does not need to send max wbit to client - if match.group(4): - compress = int(match.group(4)) - # Group3 must match if group4 matches - # Compress wbit 8 does not support in zlib - # If compress level not support, - # CONTINUE to next extension - if compress > 15 or compress < 9: - compress = 0 - continue - if match.group(1): - notakeover = True - # Ignore regex group 5 & 6 for client_max_window_bits - break - else: - if match.group(6): - compress = int(match.group(6)) - # Group5 must match if group6 matches - # Compress wbit 8 does not support in zlib - # If compress level not support, - # FAIL the parse progress - if compress > 15 or compress < 9: - raise WSHandshakeError("Invalid window size") - if match.group(2): - notakeover = True - # Ignore regex group 5 & 6 for client_max_window_bits - break - # Return Fail if client side and not match - elif not isserver: - raise WSHandshakeError("Extension for deflate not supported" + ext.group(1)) - - return compress, notakeover - - -def ws_ext_gen( - compress: int = 15, isserver: bool = False, server_notakeover: bool = False -) -> str: - # client_notakeover=False not used for server - # compress wbit 8 does not support in zlib - if compress < 9 or compress > 15: - raise ValueError( - "Compress wbits must between 9 and 15, zlib does not support wbits=8" - ) - enabledext = ["permessage-deflate"] - if not isserver: - enabledext.append("client_max_window_bits") - - if compress < 15: - enabledext.append("server_max_window_bits=" + str(compress)) - if server_notakeover: - enabledext.append("server_no_context_takeover") - # if client_notakeover: - # enabledext.append('client_no_context_takeover') - return "; ".join(enabledext) - - -class WSParserState(IntEnum): - READ_HEADER = 1 - READ_PAYLOAD_LENGTH = 2 - READ_PAYLOAD_MASK = 3 - READ_PAYLOAD = 4 - - -class WebSocketReader: - def __init__( - self, queue: DataQueue[WSMessage], max_msg_size: int, compress: bool = True - ) -> None: - self.queue = queue - 
self._max_msg_size = max_msg_size - - self._exc: Optional[BaseException] = None - self._partial = bytearray() - self._state = WSParserState.READ_HEADER - - self._opcode: Optional[int] = None - self._frame_fin = False - self._frame_opcode: Optional[int] = None - self._frame_payload = bytearray() - - self._tail: bytes = b"" - self._has_mask = False - self._frame_mask: Optional[bytes] = None - self._payload_length = 0 - self._payload_length_flag = 0 - self._compressed: Optional[bool] = None - self._decompressobj: Optional[ZLibDecompressor] = None - self._compress = compress - - def feed_eof(self) -> None: - self.queue.feed_eof() - - def feed_data(self, data: bytes) -> Tuple[bool, bytes]: - if self._exc: - return True, data - - try: - self._feed_data(data) - except Exception as exc: - self._exc = exc - set_exception(self.queue, exc) - return True, b"" - - return False, b"" - - def _feed_data(self, data: bytes) -> None: - for fin, opcode, payload, compressed in self.parse_frame(data): - if opcode in MESSAGE_TYPES_WITH_CONTENT: - # load text/binary - is_continuation = opcode == WSMsgType.CONTINUATION - if not fin: - # got partial frame payload - if not is_continuation: - self._opcode = opcode - self._partial += payload - if self._max_msg_size and len(self._partial) >= self._max_msg_size: - raise WebSocketError( - WSCloseCode.MESSAGE_TOO_BIG, - "Message size {} exceeds limit {}".format( - len(self._partial), self._max_msg_size - ), - ) - continue - - has_partial = bool(self._partial) - if is_continuation: - if self._opcode is None: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Continuation frame for non started message", - ) - opcode = self._opcode - self._opcode = None - # previous frame was non finished - # we should get continuation opcode - elif has_partial: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "The opcode in non-fin frame is expected " - "to be zero, got {!r}".format(opcode), - ) - - if has_partial: - assembled_payload = self._partial + 
payload - self._partial.clear() - else: - assembled_payload = payload - - if self._max_msg_size and len(assembled_payload) >= self._max_msg_size: - raise WebSocketError( - WSCloseCode.MESSAGE_TOO_BIG, - "Message size {} exceeds limit {}".format( - len(assembled_payload), self._max_msg_size - ), - ) - - # Decompress process must to be done after all packets - # received. - if compressed: - if not self._decompressobj: - self._decompressobj = ZLibDecompressor( - suppress_deflate_header=True - ) - payload_merged = self._decompressobj.decompress_sync( - assembled_payload + _WS_DEFLATE_TRAILING, self._max_msg_size - ) - if self._decompressobj.unconsumed_tail: - left = len(self._decompressobj.unconsumed_tail) - raise WebSocketError( - WSCloseCode.MESSAGE_TOO_BIG, - "Decompressed message size {} exceeds limit {}".format( - self._max_msg_size + left, self._max_msg_size - ), - ) - else: - payload_merged = bytes(assembled_payload) - - if opcode == WSMsgType.TEXT: - try: - text = payload_merged.decode("utf-8") - except UnicodeDecodeError as exc: - raise WebSocketError( - WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" - ) from exc - - # tuple.__new__ is used to avoid the overhead of the lambda - msg = tuple.__new__(WSMessage, (WSMsgType.TEXT, text, "")) - self.queue.feed_data(msg, len(text)) - continue - - # tuple.__new__ is used to avoid the overhead of the lambda - msg = tuple.__new__(WSMessage, (WSMsgType.BINARY, payload_merged, "")) - self.queue.feed_data(msg, len(payload_merged)) - elif opcode == WSMsgType.CLOSE: - if len(payload) >= 2: - close_code = UNPACK_CLOSE_CODE(payload[:2])[0] - if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - f"Invalid close code: {close_code}", - ) - try: - close_message = payload[2:].decode("utf-8") - except UnicodeDecodeError as exc: - raise WebSocketError( - WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" - ) from exc - msg = tuple.__new__( - WSMessage, 
(WSMsgType.CLOSE, close_code, close_message) - ) - elif payload: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - f"Invalid close frame: {fin} {opcode} {payload!r}", - ) - else: - msg = tuple.__new__(WSMessage, (WSMsgType.CLOSE, 0, "")) - - self.queue.feed_data(msg, 0) - - elif opcode == WSMsgType.PING: - msg = tuple.__new__(WSMessage, (WSMsgType.PING, payload, "")) - self.queue.feed_data(msg, len(payload)) - - elif opcode == WSMsgType.PONG: - msg = tuple.__new__(WSMessage, (WSMsgType.PONG, payload, "")) - self.queue.feed_data(msg, len(payload)) - - else: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}" - ) - - def parse_frame( - self, buf: bytes - ) -> List[Tuple[bool, Optional[int], bytearray, Optional[bool]]]: - """Return the next frame from the socket.""" - frames: List[Tuple[bool, Optional[int], bytearray, Optional[bool]]] = [] - if self._tail: - buf, self._tail = self._tail + buf, b"" - - start_pos: int = 0 - buf_length = len(buf) - - while True: - # read header - if self._state is WSParserState.READ_HEADER: - if buf_length - start_pos < 2: - break - data = buf[start_pos : start_pos + 2] - start_pos += 2 - first_byte, second_byte = data - - fin = (first_byte >> 7) & 1 - rsv1 = (first_byte >> 6) & 1 - rsv2 = (first_byte >> 5) & 1 - rsv3 = (first_byte >> 4) & 1 - opcode = first_byte & 0xF - - # frame-fin = %x0 ; more frames of this message follow - # / %x1 ; final frame of this message - # frame-rsv1 = %x0 ; - # 1 bit, MUST be 0 unless negotiated otherwise - # frame-rsv2 = %x0 ; - # 1 bit, MUST be 0 unless negotiated otherwise - # frame-rsv3 = %x0 ; - # 1 bit, MUST be 0 unless negotiated otherwise - # - # Remove rsv1 from this test for deflate development - if rsv2 or rsv3 or (rsv1 and not self._compress): - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Received frame with non-zero reserved bits", - ) - - if opcode > 0x7 and fin == 0: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Received 
fragmented control frame", - ) - - has_mask = (second_byte >> 7) & 1 - length = second_byte & 0x7F - - # Control frames MUST have a payload - # length of 125 bytes or less - if opcode > 0x7 and length > 125: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Control frame payload cannot be larger than 125 bytes", - ) - - # Set compress status if last package is FIN - # OR set compress status if this is first fragment - # Raise error if not first fragment with rsv1 = 0x1 - if self._frame_fin or self._compressed is None: - self._compressed = True if rsv1 else False - elif rsv1: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Received frame with non-zero reserved bits", - ) - - self._frame_fin = bool(fin) - self._frame_opcode = opcode - self._has_mask = bool(has_mask) - self._payload_length_flag = length - self._state = WSParserState.READ_PAYLOAD_LENGTH - - # read payload length - if self._state is WSParserState.READ_PAYLOAD_LENGTH: - length_flag = self._payload_length_flag - if length_flag == 126: - if buf_length - start_pos < 2: - break - data = buf[start_pos : start_pos + 2] - start_pos += 2 - self._payload_length = UNPACK_LEN2(data)[0] - elif length_flag > 126: - if buf_length - start_pos < 8: - break - data = buf[start_pos : start_pos + 8] - start_pos += 8 - self._payload_length = UNPACK_LEN3(data)[0] - else: - self._payload_length = length_flag - - self._state = ( - WSParserState.READ_PAYLOAD_MASK - if self._has_mask - else WSParserState.READ_PAYLOAD - ) - - # read payload mask - if self._state is WSParserState.READ_PAYLOAD_MASK: - if buf_length - start_pos < 4: - break - self._frame_mask = buf[start_pos : start_pos + 4] - start_pos += 4 - self._state = WSParserState.READ_PAYLOAD - - if self._state is WSParserState.READ_PAYLOAD: - length = self._payload_length - payload = self._frame_payload - - chunk_len = buf_length - start_pos - if length >= chunk_len: - self._payload_length = length - chunk_len - payload += buf[start_pos:] - start_pos = 
buf_length - else: - self._payload_length = 0 - payload += buf[start_pos : start_pos + length] - start_pos = start_pos + length - - if self._payload_length != 0: - break - - if self._has_mask: - assert self._frame_mask is not None - _websocket_mask(self._frame_mask, payload) - - frames.append( - (self._frame_fin, self._frame_opcode, payload, self._compressed) - ) - self._frame_payload = bytearray() - self._state = WSParserState.READ_HEADER - - self._tail = buf[start_pos:] - - return frames - - -class WebSocketWriter: - def __init__( - self, - protocol: BaseProtocol, - transport: asyncio.Transport, - *, - use_mask: bool = False, - limit: int = DEFAULT_LIMIT, - random: random.Random = random.Random(), - compress: int = 0, - notakeover: bool = False, - ) -> None: - self.protocol = protocol - self.transport = transport - self.use_mask = use_mask - self.get_random_bits = partial(random.getrandbits, 32) - self.compress = compress - self.notakeover = notakeover - self._closing = False - self._limit = limit - self._output_size = 0 - self._compressobj: Any = None # actually compressobj - - async def send_frame( - self, message: bytes, opcode: int, compress: Optional[int] = None - ) -> None: - """Send a frame over the websocket with message as its payload.""" - if self._closing and not (opcode & WSMsgType.CLOSE): - raise ClientConnectionResetError("Cannot write to closing transport") - - # RSV are the reserved bits in the frame header. They are used to - # indicate that the frame is using an extension. - # https://datatracker.ietf.org/doc/html/rfc6455#section-5.2 - rsv = 0 - # Only compress larger packets (disabled) - # Does small packet needs to be compressed? 
- # if self.compress and opcode < 8 and len(message) > 124: - if (compress or self.compress) and opcode < 8: - # RSV1 (rsv = 0x40) is set for compressed frames - # https://datatracker.ietf.org/doc/html/rfc7692#section-7.2.3.1 - rsv = 0x40 - - if compress: - # Do not set self._compress if compressing is for this frame - compressobj = self._make_compress_obj(compress) - else: # self.compress - if not self._compressobj: - self._compressobj = self._make_compress_obj(self.compress) - compressobj = self._compressobj - - message = await compressobj.compress(message) - # Its critical that we do not return control to the event - # loop until we have finished sending all the compressed - # data. Otherwise we could end up mixing compressed frames - # if there are multiple coroutines compressing data. - message += compressobj.flush( - zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH - ) - if message.endswith(_WS_DEFLATE_TRAILING): - message = message[:-4] - - msg_length = len(message) - - use_mask = self.use_mask - mask_bit = 0x80 if use_mask else 0 - - # Depending on the message length, the header is assembled differently. - # The first byte is reserved for the opcode and the RSV bits. - first_byte = 0x80 | rsv | opcode - if msg_length < 126: - header = PACK_LEN1(first_byte, msg_length | mask_bit) - header_len = 2 - elif msg_length < (1 << 16): - header = PACK_LEN2(first_byte, 126 | mask_bit, msg_length) - header_len = 4 - else: - header = PACK_LEN3(first_byte, 127 | mask_bit, msg_length) - header_len = 10 - - # https://datatracker.ietf.org/doc/html/rfc6455#section-5.3 - # If we are using a mask, we need to generate it randomly - # and apply it to the message before sending it. A mask is - # a 32-bit value that is applied to the message using a - # bitwise XOR operation. It is used to prevent certain types - # of attacks on the websocket protocol. The mask is only used - # when aiohttp is acting as a client. Servers do not use a mask. 
- if use_mask: - mask = PACK_RANDBITS(self.get_random_bits()) - message = bytearray(message) - _websocket_mask(mask, message) - self._write(header + mask + message) - self._output_size += header_len + MASK_LEN + msg_length - - else: - if msg_length > MSG_SIZE: - self._write(header) - self._write(message) - else: - self._write(header + message) - - self._output_size += header_len + msg_length - - # It is safe to return control to the event loop when using compression - # after this point as we have already sent or buffered all the data. - - # Once we have written output_size up to the limit, we call the - # drain helper which waits for the transport to be ready to accept - # more data. This is a flow control mechanism to prevent the buffer - # from growing too large. The drain helper will return right away - # if the writer is not paused. - if self._output_size > self._limit: - self._output_size = 0 - await self.protocol._drain_helper() - - def _make_compress_obj(self, compress: int) -> ZLibCompressor: - return ZLibCompressor( - level=zlib.Z_BEST_SPEED, - wbits=-compress, - max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE, - ) - - def _write(self, data: bytes) -> None: - if self.transport is None or self.transport.is_closing(): - raise ClientConnectionResetError("Cannot write to closing transport") - self.transport.write(data) - - async def pong(self, message: Union[bytes, str] = b"") -> None: - """Send pong message.""" - if isinstance(message, str): - message = message.encode("utf-8") - await self.send_frame(message, WSMsgType.PONG) - - async def ping(self, message: Union[bytes, str] = b"") -> None: - """Send ping message.""" - if isinstance(message, str): - message = message.encode("utf-8") - await self.send_frame(message, WSMsgType.PING) - - async def close(self, code: int = 1000, message: Union[bytes, str] = b"") -> None: - """Close the websocket, sending the specified code and message.""" - if isinstance(message, str): - message = message.encode("utf-8") - 
try: - await self.send_frame( - PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE - ) - finally: - self._closing = True diff --git a/setup.py b/setup.py index 3a90ae2e20a..590632cee7b 100644 --- a/setup.py +++ b/setup.py @@ -27,7 +27,7 @@ # NOTE: makefile cythonizes all Cython modules extensions = [ - Extension("aiohttp._websocket", ["aiohttp/_websocket.c"]), + Extension("aiohttp._websocket.mask", ["aiohttp/_websocket/mask.c"]), Extension( "aiohttp._http_parser", [ diff --git a/tests/test_websocket_parser.py b/tests/test_websocket_parser.py index 13b46803a76..8931789ff47 100644 --- a/tests/test_websocket_parser.py +++ b/tests/test_websocket_parser.py @@ -7,17 +7,17 @@ import pytest import aiohttp -from aiohttp import http_websocket -from aiohttp.http import WebSocketError, WSCloseCode, WSMessage, WSMsgType -from aiohttp.http_websocket import ( - _WS_DEFLATE_TRAILING, +from aiohttp._websocket import helpers as _websocket_helpers +from aiohttp._websocket.helpers import ( PACK_CLOSE_CODE, PACK_LEN1, PACK_LEN2, PACK_LEN3, - WebSocketReader, - _websocket_mask, + websocket_mask, ) +from aiohttp._websocket.models import WS_DEFLATE_TRAILING +from aiohttp.http import WebSocketError, WSCloseCode, WSMessage, WSMsgType +from aiohttp.http_websocket import WebSocketReader def build_frame( @@ -28,7 +28,7 @@ def build_frame( compressobj = zlib.compressobj(wbits=-9) message = compressobj.compress(message) message = message + compressobj.flush(zlib.Z_SYNC_FLUSH) - if message.endswith(_WS_DEFLATE_TRAILING): + if message.endswith(WS_DEFLATE_TRAILING): message = message[:-4] msg_length = len(message) if use_mask: # pragma: no cover @@ -55,7 +55,7 @@ def build_frame( mask = random.randrange(0, 0xFFFFFFFF) mask = mask.to_bytes(4, "big") message = bytearray(message) - _websocket_mask(mask, message) + websocket_mask(mask, message) if noheader: return message else: @@ -388,31 +388,31 @@ def test_continuation_with_close_empty(out, parser) -> None: def test_websocket_mask_python() -> 
None: message = bytearray(websocket_mask_data) - http_websocket._websocket_mask_python(websocket_mask_mask, message) + _websocket_helpers._websocket_mask_python(websocket_mask_mask, message) assert message == websocket_mask_masked @pytest.mark.skipif( - not hasattr(http_websocket, "_websocket_mask_cython"), reason="Requires Cython" + not hasattr(_websocket_helpers, "_websocket_mask_cython"), reason="Requires Cython" ) def test_websocket_mask_cython() -> None: message = bytearray(websocket_mask_data) - http_websocket._websocket_mask_cython(websocket_mask_mask, message) + _websocket_helpers._websocket_mask_cython(websocket_mask_mask, message) # type: ignore[attr-defined] assert message == websocket_mask_masked def test_websocket_mask_python_empty() -> None: message = bytearray() - http_websocket._websocket_mask_python(websocket_mask_mask, message) + _websocket_helpers._websocket_mask_python(websocket_mask_mask, message) assert message == bytearray() @pytest.mark.skipif( - not hasattr(http_websocket, "_websocket_mask_cython"), reason="Requires Cython" + not hasattr(_websocket_helpers, "_websocket_mask_cython"), reason="Requires Cython" ) def test_websocket_mask_cython_empty() -> None: message = bytearray() - http_websocket._websocket_mask_cython(websocket_mask_mask, message) + _websocket_helpers._websocket_mask_cython(websocket_mask_mask, message) # type: ignore[attr-defined] assert message == bytearray() diff --git a/tests/test_websocket_writer.py b/tests/test_websocket_writer.py index e1766b72b12..0dc5f073497 100644 --- a/tests/test_websocket_writer.py +++ b/tests/test_websocket_writer.py @@ -141,7 +141,7 @@ async def test_concurrent_messages( and in the executor """ with mock.patch( - "aiohttp.http_websocket.WEBSOCKET_MAX_SYNC_CHUNK_SIZE", max_sync_chunk_size + "aiohttp._websocket.writer.WEBSOCKET_MAX_SYNC_CHUNK_SIZE", max_sync_chunk_size ): writer = WebSocketWriter(protocol, transport, compress=15) queue: DataQueue[WSMessage] = 
DataQueue(asyncio.get_running_loop()) From 765a5c23e66e785fb06cbd144083a1034cdf73d5 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 27 Oct 2024 13:59:05 -1000 Subject: [PATCH 0764/1511] [PR #9543/6ae2f05 backport][3.11] Add a PXD only Cython extension for the WebSocket reader to improve performance (#9548) --- CHANGES/9543.feature.rst | 1 + Makefile | 8 +- aiohttp/_websocket/mask.pxd | 3 + aiohttp/_websocket/mask.pyx | 10 +- aiohttp/_websocket/reader.py | 349 ++---------------------------- aiohttp/_websocket/reader_c.pxd | 85 ++++++++ aiohttp/_websocket/reader_c.py | 1 + aiohttp/_websocket/reader_py.py | 364 ++++++++++++++++++++++++++++++++ setup.py | 1 + tests/test_websocket_parser.py | 48 ++++- 10 files changed, 515 insertions(+), 355 deletions(-) create mode 100644 CHANGES/9543.feature.rst create mode 100644 aiohttp/_websocket/mask.pxd create mode 100644 aiohttp/_websocket/reader_c.pxd create mode 120000 aiohttp/_websocket/reader_c.py create mode 100644 aiohttp/_websocket/reader_py.py diff --git a/CHANGES/9543.feature.rst b/CHANGES/9543.feature.rst new file mode 100644 index 00000000000..ee624ddc48d --- /dev/null +++ b/CHANGES/9543.feature.rst @@ -0,0 +1 @@ +Improved performance of reading WebSocket messages with a Cython implementation -- by :user:`bdraco`. 
diff --git a/Makefile b/Makefile index be217d1d554..2fae48f2635 100644 --- a/Makefile +++ b/Makefile @@ -56,6 +56,11 @@ endif aiohttp/_find_header.c: $(call to-hash,aiohttp/hdrs.py ./tools/gen.py) ./tools/gen.py +# Special case for reader since we want to be able to disable +# the extension with AIOHTTP_NO_EXTENSIONS +aiohttp/_websocket/reader_c.c: aiohttp/_websocket/reader_c.py + cython -3 -o $@ $< -I aiohttp -Werror + # _find_headers generator creates _headers.pyi as well aiohttp/%.c: aiohttp/%.pyx $(call to-hash,$(CYS)) aiohttp/_find_header.c cython -3 -o $@ $< -I aiohttp -Werror @@ -74,7 +79,7 @@ vendor/llhttp/node_modules: vendor/llhttp/package.json generate-llhttp: .llhttp-gen .PHONY: cythonize -cythonize: .install-cython $(PYXS:.pyx=.c) +cythonize: .install-cython $(PYXS:.pyx=.c) aiohttp/_websocket/reader_c.c .install-deps: .install-cython $(PYXS:.pyx=.c) $(call to-hash,$(CYS) $(REQS)) @python -m pip install -r requirements/dev.in -c requirements/dev.txt @@ -152,6 +157,7 @@ clean: @rm -f aiohttp/_http_parser.c @rm -f aiohttp/_http_writer.c @rm -f aiohttp/_websocket.c + @rm -f aiohttp/_websocket/reader_c.c @rm -rf .tox @rm -f .develop @rm -f .flake diff --git a/aiohttp/_websocket/mask.pxd b/aiohttp/_websocket/mask.pxd new file mode 100644 index 00000000000..90983de9ac7 --- /dev/null +++ b/aiohttp/_websocket/mask.pxd @@ -0,0 +1,3 @@ +"""Cython declarations for websocket masking.""" + +cpdef void _websocket_mask_cython(bytes mask, bytearray data) diff --git a/aiohttp/_websocket/mask.pyx b/aiohttp/_websocket/mask.pyx index 94318d2b1be..2d956c88996 100644 --- a/aiohttp/_websocket/mask.pyx +++ b/aiohttp/_websocket/mask.pyx @@ -8,7 +8,7 @@ cdef extern from "Python.h": from libc.stdint cimport uint32_t, uint64_t, uintmax_t -def _websocket_mask_cython(object mask, object data): +cpdef void _websocket_mask_cython(bytes mask, bytearray data): """Note, this function mutates its `data` argument """ cdef: @@ -21,14 +21,6 @@ def _websocket_mask_cython(object mask, object 
data): assert len(mask) == 4 - if not isinstance(mask, bytes): - mask = bytes(mask) - - if isinstance(data, bytearray): - data = <bytearray>data - else: - data = bytearray(data) - data_len = len(data) in_buf = <unsigned char*>PyByteArray_AsString(data) mask_buf = <const unsigned char*>PyBytes_AsString(mask) diff --git a/aiohttp/_websocket/reader.py b/aiohttp/_websocket/reader.py index 22ae7c1cc0d..254288ac7e7 100644 --- a/aiohttp/_websocket/reader.py +++ b/aiohttp/_websocket/reader.py @@ -1,342 +1,21 @@ """Reader for WebSocket protocol versions 13 and 8.""" -from enum import IntEnum -from typing import Final, List, Optional, Set, Tuple +from typing import TYPE_CHECKING -from ..compression_utils import ZLibDecompressor -from ..helpers import set_exception -from ..streams import DataQueue -from .helpers import UNPACK_CLOSE_CODE, UNPACK_LEN2, UNPACK_LEN3, websocket_mask -from .models import ( - WS_DEFLATE_TRAILING, - WebSocketError, - WSCloseCode, - WSMessage, - WSMsgType, -) +from ..helpers import NO_EXTENSIONS -MESSAGE_TYPES_WITH_CONTENT: Final = frozenset( - { - WSMsgType.BINARY, - WSMsgType.TEXT, - WSMsgType.CONTINUATION, - } -) +if TYPE_CHECKING or NO_EXTENSIONS: # pragma: no cover + from .reader_py import WebSocketReader as WebSocketReaderPython -ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode} + WebSocketReader = WebSocketReaderPython +else: + try: + from .reader_c import ( # type: ignore[import-not-found] + WebSocketReader as WebSocketReaderCython, + ) + WebSocketReader = WebSocketReaderCython + except ImportError: # pragma: no cover + from .reader_py import WebSocketReader as WebSocketReaderPython -class WSParserState(IntEnum): - READ_HEADER = 1 - READ_PAYLOAD_LENGTH = 2 - READ_PAYLOAD_MASK = 3 - READ_PAYLOAD = 4 - - -class WebSocketReader: - def __init__( - self, queue: DataQueue[WSMessage], max_msg_size: int, compress: bool = True - ) -> None: - self.queue = queue - self._max_msg_size = max_msg_size - - self._exc: Optional[BaseException] 
= None - self._partial = bytearray() - self._state = WSParserState.READ_HEADER - - self._opcode: Optional[int] = None - self._frame_fin = False - self._frame_opcode: Optional[int] = None - self._frame_payload = bytearray() - - self._tail: bytes = b"" - self._has_mask = False - self._frame_mask: Optional[bytes] = None - self._payload_length = 0 - self._payload_length_flag = 0 - self._compressed: Optional[bool] = None - self._decompressobj: Optional[ZLibDecompressor] = None - self._compress = compress - - def feed_eof(self) -> None: - self.queue.feed_eof() - - def feed_data(self, data: bytes) -> Tuple[bool, bytes]: - if self._exc: - return True, data - - try: - self._feed_data(data) - except Exception as exc: - self._exc = exc - set_exception(self.queue, exc) - return True, b"" - - return False, b"" - - def _feed_data(self, data: bytes) -> None: - for fin, opcode, payload, compressed in self.parse_frame(data): - if opcode in MESSAGE_TYPES_WITH_CONTENT: - # load text/binary - is_continuation = opcode == WSMsgType.CONTINUATION - if not fin: - # got partial frame payload - if not is_continuation: - self._opcode = opcode - self._partial += payload - if self._max_msg_size and len(self._partial) >= self._max_msg_size: - raise WebSocketError( - WSCloseCode.MESSAGE_TOO_BIG, - "Message size {} exceeds limit {}".format( - len(self._partial), self._max_msg_size - ), - ) - continue - - has_partial = bool(self._partial) - if is_continuation: - if self._opcode is None: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Continuation frame for non started message", - ) - opcode = self._opcode - self._opcode = None - # previous frame was non finished - # we should get continuation opcode - elif has_partial: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "The opcode in non-fin frame is expected " - "to be zero, got {!r}".format(opcode), - ) - - if has_partial: - assembled_payload = self._partial + payload - self._partial.clear() - else: - assembled_payload = payload - - 
if self._max_msg_size and len(assembled_payload) >= self._max_msg_size: - raise WebSocketError( - WSCloseCode.MESSAGE_TOO_BIG, - "Message size {} exceeds limit {}".format( - len(assembled_payload), self._max_msg_size - ), - ) - - # Decompress process must to be done after all packets - # received. - if compressed: - if not self._decompressobj: - self._decompressobj = ZLibDecompressor( - suppress_deflate_header=True - ) - payload_merged = self._decompressobj.decompress_sync( - assembled_payload + WS_DEFLATE_TRAILING, self._max_msg_size - ) - if self._decompressobj.unconsumed_tail: - left = len(self._decompressobj.unconsumed_tail) - raise WebSocketError( - WSCloseCode.MESSAGE_TOO_BIG, - "Decompressed message size {} exceeds limit {}".format( - self._max_msg_size + left, self._max_msg_size - ), - ) - else: - payload_merged = bytes(assembled_payload) - - if opcode == WSMsgType.TEXT: - try: - text = payload_merged.decode("utf-8") - except UnicodeDecodeError as exc: - raise WebSocketError( - WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" - ) from exc - - # tuple.__new__ is used to avoid the overhead of the lambda - msg = tuple.__new__(WSMessage, (WSMsgType.TEXT, text, "")) - self.queue.feed_data(msg, len(text)) - continue - - # tuple.__new__ is used to avoid the overhead of the lambda - msg = tuple.__new__(WSMessage, (WSMsgType.BINARY, payload_merged, "")) - self.queue.feed_data(msg, len(payload_merged)) - elif opcode == WSMsgType.CLOSE: - if len(payload) >= 2: - close_code = UNPACK_CLOSE_CODE(payload[:2])[0] - if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - f"Invalid close code: {close_code}", - ) - try: - close_message = payload[2:].decode("utf-8") - except UnicodeDecodeError as exc: - raise WebSocketError( - WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" - ) from exc - msg = tuple.__new__( - WSMessage, (WSMsgType.CLOSE, close_code, close_message) - ) - elif payload: - raise 
WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - f"Invalid close frame: {fin} {opcode} {payload!r}", - ) - else: - msg = tuple.__new__(WSMessage, (WSMsgType.CLOSE, 0, "")) - - self.queue.feed_data(msg, 0) - - elif opcode == WSMsgType.PING: - msg = tuple.__new__(WSMessage, (WSMsgType.PING, payload, "")) - self.queue.feed_data(msg, len(payload)) - - elif opcode == WSMsgType.PONG: - msg = tuple.__new__(WSMessage, (WSMsgType.PONG, payload, "")) - self.queue.feed_data(msg, len(payload)) - - else: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}" - ) - - def parse_frame( - self, buf: bytes - ) -> List[Tuple[bool, Optional[int], bytearray, Optional[bool]]]: - """Return the next frame from the socket.""" - frames: List[Tuple[bool, Optional[int], bytearray, Optional[bool]]] = [] - if self._tail: - buf, self._tail = self._tail + buf, b"" - - start_pos: int = 0 - buf_length = len(buf) - - while True: - # read header - if self._state is WSParserState.READ_HEADER: - if buf_length - start_pos < 2: - break - data = buf[start_pos : start_pos + 2] - start_pos += 2 - first_byte, second_byte = data - - fin = (first_byte >> 7) & 1 - rsv1 = (first_byte >> 6) & 1 - rsv2 = (first_byte >> 5) & 1 - rsv3 = (first_byte >> 4) & 1 - opcode = first_byte & 0xF - - # frame-fin = %x0 ; more frames of this message follow - # / %x1 ; final frame of this message - # frame-rsv1 = %x0 ; - # 1 bit, MUST be 0 unless negotiated otherwise - # frame-rsv2 = %x0 ; - # 1 bit, MUST be 0 unless negotiated otherwise - # frame-rsv3 = %x0 ; - # 1 bit, MUST be 0 unless negotiated otherwise - # - # Remove rsv1 from this test for deflate development - if rsv2 or rsv3 or (rsv1 and not self._compress): - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Received frame with non-zero reserved bits", - ) - - if opcode > 0x7 and fin == 0: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Received fragmented control frame", - ) - - has_mask = (second_byte >> 7) & 1 - length = 
second_byte & 0x7F - - # Control frames MUST have a payload - # length of 125 bytes or less - if opcode > 0x7 and length > 125: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Control frame payload cannot be larger than 125 bytes", - ) - - # Set compress status if last package is FIN - # OR set compress status if this is first fragment - # Raise error if not first fragment with rsv1 = 0x1 - if self._frame_fin or self._compressed is None: - self._compressed = True if rsv1 else False - elif rsv1: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Received frame with non-zero reserved bits", - ) - - self._frame_fin = bool(fin) - self._frame_opcode = opcode - self._has_mask = bool(has_mask) - self._payload_length_flag = length - self._state = WSParserState.READ_PAYLOAD_LENGTH - - # read payload length - if self._state is WSParserState.READ_PAYLOAD_LENGTH: - length_flag = self._payload_length_flag - if length_flag == 126: - if buf_length - start_pos < 2: - break - data = buf[start_pos : start_pos + 2] - start_pos += 2 - self._payload_length = UNPACK_LEN2(data)[0] - elif length_flag > 126: - if buf_length - start_pos < 8: - break - data = buf[start_pos : start_pos + 8] - start_pos += 8 - self._payload_length = UNPACK_LEN3(data)[0] - else: - self._payload_length = length_flag - - self._state = ( - WSParserState.READ_PAYLOAD_MASK - if self._has_mask - else WSParserState.READ_PAYLOAD - ) - - # read payload mask - if self._state is WSParserState.READ_PAYLOAD_MASK: - if buf_length - start_pos < 4: - break - self._frame_mask = buf[start_pos : start_pos + 4] - start_pos += 4 - self._state = WSParserState.READ_PAYLOAD - - if self._state is WSParserState.READ_PAYLOAD: - length = self._payload_length - payload = self._frame_payload - - chunk_len = buf_length - start_pos - if length >= chunk_len: - self._payload_length = length - chunk_len - payload += buf[start_pos:] - start_pos = buf_length - else: - self._payload_length = 0 - payload += buf[start_pos : start_pos + 
length] - start_pos = start_pos + length - - if self._payload_length != 0: - break - - if self._has_mask: - assert self._frame_mask is not None - websocket_mask(self._frame_mask, payload) - - frames.append( - (self._frame_fin, self._frame_opcode, payload, self._compressed) - ) - self._frame_payload = bytearray() - self._state = WSParserState.READ_HEADER - - self._tail = buf[start_pos:] - - return frames + WebSocketReader = WebSocketReaderPython diff --git a/aiohttp/_websocket/reader_c.pxd b/aiohttp/_websocket/reader_c.pxd new file mode 100644 index 00000000000..61ad7384fc1 --- /dev/null +++ b/aiohttp/_websocket/reader_c.pxd @@ -0,0 +1,85 @@ +import cython + +from .mask cimport _websocket_mask_cython as websocket_mask + + +cdef unsigned int READ_HEADER +cdef unsigned int READ_PAYLOAD_LENGTH +cdef unsigned int READ_PAYLOAD_MASK +cdef unsigned int READ_PAYLOAD + +cdef unsigned int OP_CODE_CONTINUATION +cdef unsigned int OP_CODE_TEXT +cdef unsigned int OP_CODE_BINARY +cdef unsigned int OP_CODE_CLOSE +cdef unsigned int OP_CODE_PING +cdef unsigned int OP_CODE_PONG + +cdef object UNPACK_LEN2 +cdef object UNPACK_LEN3 +cdef object UNPACK_CLOSE_CODE +cdef object TUPLE_NEW + +cdef object WSMsgType +cdef object WSMessage + +cdef object WS_MSG_TYPE_TEXT +cdef object WS_MSG_TYPE_BINARY + +cdef set ALLOWED_CLOSE_CODES +cdef set MESSAGE_TYPES_WITH_CONTENT + +cdef tuple EMPTY_FRAME +cdef tuple EMPTY_FRAME_ERROR + + +cdef class WebSocketReader: + + cdef object queue + cdef object _queue_feed_data + cdef unsigned int _max_msg_size + + cdef Exception _exc + cdef bytearray _partial + cdef unsigned int _state + + cdef object _opcode + cdef object _frame_fin + cdef object _frame_opcode + cdef bytearray _frame_payload + + cdef bytes _tail + cdef bint _has_mask + cdef bytes _frame_mask + cdef unsigned int _payload_length + cdef unsigned int _payload_length_flag + cdef object _compressed + cdef object _decompressobj + cdef bint _compress + + cpdef tuple feed_data(self, object data) + + 
@cython.locals( + is_continuation=bint, + fin=bint, + has_partial=bint, + payload_merged=bytes, + opcode="unsigned int", + ) + cpdef void _feed_data(self, bytes data) + + @cython.locals( + start_pos="unsigned int", + buf_len="unsigned int", + length="unsigned int", + chunk_size="unsigned int", + chunk_len="unsigned int", + buf_length="unsigned int", + data=bytes, + payload=bytearray, + first_byte=char, + second_byte=char, + has_mask=bint, + fin=bint, + ) + cpdef list parse_frame(self, bytes buf) diff --git a/aiohttp/_websocket/reader_c.py b/aiohttp/_websocket/reader_c.py new file mode 120000 index 00000000000..083cbb4331f --- /dev/null +++ b/aiohttp/_websocket/reader_c.py @@ -0,0 +1 @@ +reader_py.py \ No newline at end of file diff --git a/aiohttp/_websocket/reader_py.py b/aiohttp/_websocket/reader_py.py new file mode 100644 index 00000000000..3fd9e868301 --- /dev/null +++ b/aiohttp/_websocket/reader_py.py @@ -0,0 +1,364 @@ +"""Reader for WebSocket protocol versions 13 and 8.""" + +from typing import Final, List, Optional, Set, Tuple, Union + +from ..compression_utils import ZLibDecompressor +from ..helpers import set_exception +from ..streams import DataQueue +from .helpers import UNPACK_CLOSE_CODE, UNPACK_LEN2, UNPACK_LEN3, websocket_mask +from .models import ( + WS_DEFLATE_TRAILING, + WebSocketError, + WSCloseCode, + WSMessage, + WSMsgType, +) + +ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode} + +# States for the reader, used to parse the WebSocket frame +# integer values are used so they can be cythonized +READ_HEADER = 1 +READ_PAYLOAD_LENGTH = 2 +READ_PAYLOAD_MASK = 3 +READ_PAYLOAD = 4 + +WS_MSG_TYPE_BINARY = WSMsgType.BINARY +WS_MSG_TYPE_TEXT = WSMsgType.TEXT + +# WSMsgType values unpacked so they can by cythonized to ints +OP_CODE_CONTINUATION = WSMsgType.CONTINUATION.value +OP_CODE_TEXT = WSMsgType.TEXT.value +OP_CODE_BINARY = WSMsgType.BINARY.value +OP_CODE_CLOSE = WSMsgType.CLOSE.value +OP_CODE_PING = WSMsgType.PING.value 
+OP_CODE_PONG = WSMsgType.PONG.value + +EMPTY_FRAME_ERROR = (True, b"") +EMPTY_FRAME = (False, b"") + +TUPLE_NEW = tuple.__new__ + + +class WebSocketReader: + def __init__( + self, queue: DataQueue[WSMessage], max_msg_size: int, compress: bool = True + ) -> None: + self.queue = queue + self._queue_feed_data = queue.feed_data + self._max_msg_size = max_msg_size + + self._exc: Optional[Exception] = None + self._partial = bytearray() + self._state = READ_HEADER + + self._opcode: Optional[int] = None + self._frame_fin = False + self._frame_opcode: Optional[int] = None + self._frame_payload = bytearray() + + self._tail: bytes = b"" + self._has_mask = False + self._frame_mask: Optional[bytes] = None + self._payload_length = 0 + self._payload_length_flag = 0 + self._compressed: Optional[bool] = None + self._decompressobj: Optional[ZLibDecompressor] = None + self._compress = compress + + def feed_eof(self) -> None: + self.queue.feed_eof() + + # data can be bytearray on Windows because proactor event loop uses bytearray + # and asyncio types this to Union[bytes, bytearray, memoryview] so we need + # coerce data to bytes if it is not + def feed_data( + self, data: Union[bytes, bytearray, memoryview] + ) -> Tuple[bool, bytes]: + if type(data) is not bytes: + data = bytes(data) + + if self._exc is not None: + return True, data + + try: + self._feed_data(data) + except Exception as exc: + self._exc = exc + set_exception(self.queue, exc) + return EMPTY_FRAME_ERROR + + return EMPTY_FRAME + + def _feed_data(self, data: bytes) -> None: + msg: WSMessage + for frame in self.parse_frame(data): + fin = frame[0] + opcode = frame[1] + payload = frame[2] + compressed = frame[3] + + is_continuation = opcode == OP_CODE_CONTINUATION + if opcode == OP_CODE_TEXT or opcode == OP_CODE_BINARY or is_continuation: + # load text/binary + if not fin: + # got partial frame payload + if not is_continuation: + self._opcode = opcode + self._partial += payload + if self._max_msg_size and 
len(self._partial) >= self._max_msg_size: + raise WebSocketError( + WSCloseCode.MESSAGE_TOO_BIG, + "Message size {} exceeds limit {}".format( + len(self._partial), self._max_msg_size + ), + ) + continue + + has_partial = bool(self._partial) + if is_continuation: + if self._opcode is None: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Continuation frame for non started message", + ) + opcode = self._opcode + self._opcode = None + # previous frame was non finished + # we should get continuation opcode + elif has_partial: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "The opcode in non-fin frame is expected " + "to be zero, got {!r}".format(opcode), + ) + + if has_partial: + assembled_payload = self._partial + payload + self._partial.clear() + else: + assembled_payload = payload + + if self._max_msg_size and len(assembled_payload) >= self._max_msg_size: + raise WebSocketError( + WSCloseCode.MESSAGE_TOO_BIG, + "Message size {} exceeds limit {}".format( + len(assembled_payload), self._max_msg_size + ), + ) + + # Decompress process must to be done after all packets + # received. 
+ if compressed: + if not self._decompressobj: + self._decompressobj = ZLibDecompressor( + suppress_deflate_header=True + ) + payload_merged = self._decompressobj.decompress_sync( + assembled_payload + WS_DEFLATE_TRAILING, self._max_msg_size + ) + if self._decompressobj.unconsumed_tail: + left = len(self._decompressobj.unconsumed_tail) + raise WebSocketError( + WSCloseCode.MESSAGE_TOO_BIG, + "Decompressed message size {} exceeds limit {}".format( + self._max_msg_size + left, self._max_msg_size + ), + ) + else: + payload_merged = bytes(assembled_payload) + + if opcode == OP_CODE_TEXT: + try: + text = payload_merged.decode("utf-8") + except UnicodeDecodeError as exc: + raise WebSocketError( + WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" + ) from exc + + # XXX: The Text and Binary messages here can be a performance + # bottleneck, so we use tuple.__new__ to improve performance. + # This is not type safe, but many tests should fail in + # test_client_ws_functional.py if this is wrong. 
+ self._queue_feed_data( + TUPLE_NEW(WSMessage, (WS_MSG_TYPE_TEXT, text, "")), len(text) + ) + else: + self._queue_feed_data( + TUPLE_NEW(WSMessage, (WS_MSG_TYPE_BINARY, payload_merged, "")), + len(payload_merged), + ) + elif opcode == OP_CODE_CLOSE: + if len(payload) >= 2: + close_code = UNPACK_CLOSE_CODE(payload[:2])[0] + if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + f"Invalid close code: {close_code}", + ) + try: + close_message = payload[2:].decode("utf-8") + except UnicodeDecodeError as exc: + raise WebSocketError( + WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" + ) from exc + msg = TUPLE_NEW( + WSMessage, (WSMsgType.CLOSE, close_code, close_message) + ) + elif payload: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + f"Invalid close frame: {fin} {opcode} {payload!r}", + ) + else: + msg = TUPLE_NEW(WSMessage, (WSMsgType.CLOSE, 0, "")) + + self._queue_feed_data(msg, 0) + elif opcode == OP_CODE_PING: + msg = TUPLE_NEW(WSMessage, (WSMsgType.PING, payload, "")) + self._queue_feed_data(msg, len(payload)) + + elif opcode == OP_CODE_PONG: + msg = TUPLE_NEW(WSMessage, (WSMsgType.PONG, payload, "")) + self._queue_feed_data(msg, len(payload)) + + else: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}" + ) + + def parse_frame( + self, buf: bytes + ) -> List[Tuple[bool, Optional[int], bytearray, Optional[bool]]]: + """Return the next frame from the socket.""" + frames: List[Tuple[bool, Optional[int], bytearray, Optional[bool]]] = [] + if self._tail: + buf, self._tail = self._tail + buf, b"" + + start_pos: int = 0 + buf_length = len(buf) + + while True: + # read header + if self._state == READ_HEADER: + if buf_length - start_pos < 2: + break + data = buf[start_pos : start_pos + 2] + start_pos += 2 + first_byte = data[0] + second_byte = data[1] + + fin = (first_byte >> 7) & 1 + rsv1 = (first_byte >> 6) & 1 + rsv2 = (first_byte >> 5) & 1 + rsv3 = 
(first_byte >> 4) & 1 + opcode = first_byte & 0xF + + # frame-fin = %x0 ; more frames of this message follow + # / %x1 ; final frame of this message + # frame-rsv1 = %x0 ; + # 1 bit, MUST be 0 unless negotiated otherwise + # frame-rsv2 = %x0 ; + # 1 bit, MUST be 0 unless negotiated otherwise + # frame-rsv3 = %x0 ; + # 1 bit, MUST be 0 unless negotiated otherwise + # + # Remove rsv1 from this test for deflate development + if rsv2 or rsv3 or (rsv1 and not self._compress): + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Received frame with non-zero reserved bits", + ) + + if opcode > 0x7 and fin == 0: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Received fragmented control frame", + ) + + has_mask = (second_byte >> 7) & 1 + length = second_byte & 0x7F + + # Control frames MUST have a payload + # length of 125 bytes or less + if opcode > 0x7 and length > 125: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Control frame payload cannot be larger than 125 bytes", + ) + + # Set compress status if last package is FIN + # OR set compress status if this is first fragment + # Raise error if not first fragment with rsv1 = 0x1 + if self._frame_fin or self._compressed is None: + self._compressed = True if rsv1 else False + elif rsv1: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Received frame with non-zero reserved bits", + ) + + self._frame_fin = bool(fin) + self._frame_opcode = opcode + self._has_mask = bool(has_mask) + self._payload_length_flag = length + self._state = READ_PAYLOAD_LENGTH + + # read payload length + if self._state == READ_PAYLOAD_LENGTH: + length_flag = self._payload_length_flag + if length_flag == 126: + if buf_length - start_pos < 2: + break + data = buf[start_pos : start_pos + 2] + start_pos += 2 + self._payload_length = UNPACK_LEN2(data)[0] + elif length_flag > 126: + if buf_length - start_pos < 8: + break + data = buf[start_pos : start_pos + 8] + start_pos += 8 + self._payload_length = UNPACK_LEN3(data)[0] + 
else: + self._payload_length = length_flag + + self._state = READ_PAYLOAD_MASK if self._has_mask else READ_PAYLOAD + + # read payload mask + if self._state == READ_PAYLOAD_MASK: + if buf_length - start_pos < 4: + break + self._frame_mask = buf[start_pos : start_pos + 4] + start_pos += 4 + self._state = READ_PAYLOAD + + if self._state == READ_PAYLOAD: + length = self._payload_length + payload = self._frame_payload + + chunk_len = buf_length - start_pos + if length >= chunk_len: + self._payload_length = length - chunk_len + payload += buf[start_pos:] + start_pos = buf_length + else: + self._payload_length = 0 + payload += buf[start_pos : start_pos + length] + start_pos = start_pos + length + + if self._payload_length != 0: + break + + if self._has_mask: + assert self._frame_mask is not None + websocket_mask(self._frame_mask, payload) + + frames.append( + (self._frame_fin, self._frame_opcode, payload, self._compressed) + ) + self._frame_payload = bytearray() + self._state = READ_HEADER + + self._tail = buf[start_pos:] + + return frames diff --git a/setup.py b/setup.py index 590632cee7b..2f024e87ef2 100644 --- a/setup.py +++ b/setup.py @@ -41,6 +41,7 @@ include_dirs=["vendor/llhttp/build"], ), Extension("aiohttp._http_writer", ["aiohttp/_http_writer.c"]), + Extension("aiohttp._websocket.reader_c", ["aiohttp/_websocket/reader_c.c"]), ] diff --git a/tests/test_websocket_parser.py b/tests/test_websocket_parser.py index 8931789ff47..5b52b6568fc 100644 --- a/tests/test_websocket_parser.py +++ b/tests/test_websocket_parser.py @@ -1,7 +1,9 @@ +import asyncio import pickle import random import struct import zlib +from typing import Union from unittest import mock import pytest @@ -20,6 +22,10 @@ from aiohttp.http_websocket import WebSocketReader +class PatchableWebSocketReader(WebSocketReader): + """WebSocketReader subclass that allows for patching parse_frame.""" + + def build_frame( message, opcode, use_mask=False, noheader=False, is_fin=True, compress=False ): @@ -82,8 
+88,19 @@ def out(loop): @pytest.fixture() -def parser(out): - return WebSocketReader(out, 4 * 1024 * 1024) +def parser(out: aiohttp.DataQueue[WSMessage]) -> PatchableWebSocketReader: + return PatchableWebSocketReader(out, 4 * 1024 * 1024) + + +def test_feed_data_remembers_exception(parser: WebSocketReader) -> None: + """Verify that feed_data remembers an exception was already raised internally.""" + error, data = parser.feed_data(struct.pack("!BB", 0b01100000, 0b00000000)) + assert error is True + assert data == b"" + + error, data = parser.feed_data(b"") + assert error is True + assert data == b"" def test_parse_frame(parser) -> None: @@ -151,13 +168,24 @@ def test_parse_frame_header_payload_size(out, parser) -> None: raise out.exception() -def test_ping_frame(out, parser) -> None: - parser.parse_frame = mock.Mock() - parser.parse_frame.return_value = [(1, WSMsgType.PING, b"data", False)] +# Protractor event loop will call feed_data with bytearray. Since +# asyncio technically supports memoryview as well, we should test that. 
+@pytest.mark.parametrize( + argnames="data", + argvalues=[b"", bytearray(b""), memoryview(b"")], + ids=["bytes", "bytearray", "memoryview"], +) +def test_ping_frame( + out: aiohttp.DataQueue[WSMessage], + parser: WebSocketReader, + data: Union[bytes, bytearray, memoryview], +) -> None: + with mock.patch.object(parser, "parse_frame", autospec=True) as m: + m.return_value = [(1, WSMsgType.PING, b"data", False)] - parser.feed_data(b"") - res = out._buffer[0] - assert res == ((WSMsgType.PING, b"data", ""), 4) + parser.feed_data(data) + res = out._buffer[0] + assert res == ((WSMsgType.PING, b"data", ""), 4) def test_pong_frame(out, parser) -> None: @@ -465,8 +493,8 @@ def test_parse_compress_error_frame(parser) -> None: assert ctx.value.code == WSCloseCode.PROTOCOL_ERROR -def test_parse_no_compress_frame_single() -> None: - parser_no_compress = WebSocketReader(out, 0, compress=False) +async def test_parse_no_compress_frame_single(loop: asyncio.AbstractEventLoop) -> None: + parser_no_compress = WebSocketReader(aiohttp.DataQueue(loop), 0, compress=False) with pytest.raises(WebSocketError) as ctx: parser_no_compress.parse_frame(struct.pack("!BB", 0b11000001, 0b00000001)) parser_no_compress.parse_frame(b"1") From c1f8ca41906da56fccf14647276f7a8b8b3e0b40 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sun, 27 Oct 2024 16:39:00 -1000 Subject: [PATCH 0765/1511] [PR #9549/be5fed2 backport][3.10] Simplify WebSocket writer (#9550) --- aiohttp/http_websocket.py | 26 +++++++++++--------------- 1 file changed, 11 insertions(+), 15 deletions(-) diff --git a/aiohttp/http_websocket.py b/aiohttp/http_websocket.py index 3c399351d78..c9a6d49176e 100644 --- a/aiohttp/http_websocket.py +++ b/aiohttp/http_websocket.py @@ -675,13 +675,16 @@ async def _send_frame( if msg_length < 126: header = PACK_LEN1(first_byte, msg_length | mask_bit) header_len = 2 - elif msg_length < (1 << 16): + elif msg_length < 65536: header = PACK_LEN2(first_byte, 126 | mask_bit, msg_length) header_len = 4 else: header = PACK_LEN3(first_byte, 127 | mask_bit, msg_length) header_len = 10 + if self.transport.is_closing(): + raise ClientConnectionResetError("Cannot write to closing transport") + # https://datatracker.ietf.org/doc/html/rfc6455#section-5.3 # If we are using a mask, we need to generate it randomly # and apply it to the message before sending it. A mask is @@ -693,17 +696,15 @@ async def _send_frame( mask = PACK_RANDBITS(self.get_random_bits()) message = bytearray(message) _websocket_mask(mask, message) - self._write(header + mask + message) - self._output_size += header_len + MASK_LEN + msg_length - + self.transport.write(header + mask + message) + self._output_size += MASK_LEN + elif msg_length > MSG_SIZE: + self.transport.write(header) + self.transport.write(message) else: - if msg_length > MSG_SIZE: - self._write(header) - self._write(message) - else: - self._write(header + message) + self.transport.write(header + message) - self._output_size += header_len + msg_length + self._output_size += header_len + msg_length # It is safe to return control to the event loop when using compression # after this point as we have already sent or buffered all the data. 
@@ -724,11 +725,6 @@ def _make_compress_obj(self, compress: int) -> ZLibCompressor: max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE, ) - def _write(self, data: bytes) -> None: - if self.transport is None or self.transport.is_closing(): - raise ClientConnectionResetError("Cannot write to closing transport") - self.transport.write(data) - async def pong(self, message: Union[bytes, str] = b"") -> None: """Send pong message.""" if isinstance(message, str): From 5ac23e2437955188f5eeb11306fb099fb65a051e Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 27 Oct 2024 16:46:42 -1000 Subject: [PATCH 0766/1511] [PR #9549/be5fed2 backport][3.11] Simplify WebSocket writer (#9551) --- aiohttp/_websocket/writer.py | 26 +++++++++++--------------- 1 file changed, 11 insertions(+), 15 deletions(-) diff --git a/aiohttp/_websocket/writer.py b/aiohttp/_websocket/writer.py index 04fb22ea3d4..0b11af97dfb 100644 --- a/aiohttp/_websocket/writer.py +++ b/aiohttp/_websocket/writer.py @@ -105,13 +105,16 @@ async def send_frame( if msg_length < 126: header = PACK_LEN1(first_byte, msg_length | mask_bit) header_len = 2 - elif msg_length < (1 << 16): + elif msg_length < 65536: header = PACK_LEN2(first_byte, 126 | mask_bit, msg_length) header_len = 4 else: header = PACK_LEN3(first_byte, 127 | mask_bit, msg_length) header_len = 10 + if self.transport.is_closing(): + raise ClientConnectionResetError("Cannot write to closing transport") + # https://datatracker.ietf.org/doc/html/rfc6455#section-5.3 # If we are using a mask, we need to generate it randomly # and apply it to the message before sending it. 
A mask is @@ -123,17 +126,15 @@ async def send_frame( mask = PACK_RANDBITS(self.get_random_bits()) message = bytearray(message) websocket_mask(mask, message) - self._write(header + mask + message) - self._output_size += header_len + MASK_LEN + msg_length - + self.transport.write(header + mask + message) + self._output_size += MASK_LEN + elif msg_length > MSG_SIZE: + self.transport.write(header) + self.transport.write(message) else: - if msg_length > MSG_SIZE: - self._write(header) - self._write(message) - else: - self._write(header + message) + self.transport.write(header + message) - self._output_size += header_len + msg_length + self._output_size += header_len + msg_length # It is safe to return control to the event loop when using compression # after this point as we have already sent or buffered all the data. @@ -154,11 +155,6 @@ def _make_compress_obj(self, compress: int) -> ZLibCompressor: max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE, ) - def _write(self, data: bytes) -> None: - if self.transport is None or self.transport.is_closing(): - raise ClientConnectionResetError("Cannot write to closing transport") - self.transport.write(data) - async def pong(self, message: Union[bytes, str] = b"") -> None: """Send pong message.""" if isinstance(message, str): From ecb6490b5cc0dc577b9ff0e16507d70729cd8e1a Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 28 Oct 2024 04:33:06 +0000 Subject: [PATCH 0767/1511] [PR #9552/10f07ea7 backport][3.11] Add `aiohttp._websocket` to `packages` in `setup.cfg` (#9553) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/9552.packaging.rst | 1 + setup.cfg | 4 +++- 2 files changed, 4 insertions(+), 1 deletion(-) create mode 120000 CHANGES/9552.packaging.rst diff --git a/CHANGES/9552.packaging.rst b/CHANGES/9552.packaging.rst new file mode 120000 index 00000000000..c9dc8a14683 --- /dev/null +++ b/CHANGES/9552.packaging.rst @@ -0,0 +1 @@ +9542.packaging.rst \ No newline at end of file diff --git a/setup.cfg b/setup.cfg index a26d472b22a..6408dde39bd 100644 --- a/setup.cfg +++ b/setup.cfg @@ -43,7 +43,9 @@ classifiers = [options] python_requires = >=3.9 -packages = aiohttp +packages = + aiohttp + aiohttp._websocket # https://setuptools.readthedocs.io/en/latest/setuptools.html#setting-the-zip-safe-flag zip_safe = False include_package_data = True From b535981754a4d416012cbafd52f32230c9577ac1 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 28 Oct 2024 08:00:15 +0000 Subject: [PATCH 0768/1511] [PR #9554/ec19bfa7 backport][3.11] Optimize WebSocketReader for 2 byte length case (#9555) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/9554.feature.rst | 1 + aiohttp/_websocket/helpers.py | 1 - aiohttp/_websocket/reader_c.pxd | 1 - aiohttp/_websocket/reader_py.py | 7 ++++--- 4 files changed, 5 insertions(+), 5 deletions(-) create mode 120000 CHANGES/9554.feature.rst diff --git a/CHANGES/9554.feature.rst b/CHANGES/9554.feature.rst new file mode 120000 index 00000000000..a93584bccd8 --- /dev/null +++ b/CHANGES/9554.feature.rst @@ -0,0 +1 @@ +9543.feature.rst \ No newline at end of file diff --git a/aiohttp/_websocket/helpers.py b/aiohttp/_websocket/helpers.py index 41273dd3230..2f20dd04e01 100644 --- a/aiohttp/_websocket/helpers.py +++ b/aiohttp/_websocket/helpers.py @@ -8,7 +8,6 @@ from ..helpers import NO_EXTENSIONS from .models import WSHandshakeError -UNPACK_LEN2 = Struct("!H").unpack_from UNPACK_LEN3 = Struct("!Q").unpack_from UNPACK_CLOSE_CODE = Struct("!H").unpack PACK_LEN1 = Struct("!BB").pack diff --git a/aiohttp/_websocket/reader_c.pxd b/aiohttp/_websocket/reader_c.pxd index 61ad7384fc1..a909eba84c5 100644 --- a/aiohttp/_websocket/reader_c.pxd +++ b/aiohttp/_websocket/reader_c.pxd @@ -15,7 +15,6 @@ cdef unsigned int OP_CODE_CLOSE cdef unsigned int OP_CODE_PING cdef unsigned int OP_CODE_PONG -cdef object UNPACK_LEN2 cdef object UNPACK_LEN3 cdef object UNPACK_CLOSE_CODE cdef object TUPLE_NEW diff --git a/aiohttp/_websocket/reader_py.py b/aiohttp/_websocket/reader_py.py index 3fd9e868301..b4b57195a96 100644 --- a/aiohttp/_websocket/reader_py.py +++ b/aiohttp/_websocket/reader_py.py @@ -5,7 +5,7 @@ from ..compression_utils import ZLibDecompressor from ..helpers import set_exception from ..streams import DataQueue -from .helpers import UNPACK_CLOSE_CODE, UNPACK_LEN2, UNPACK_LEN3, websocket_mask +from .helpers import UNPACK_CLOSE_CODE, UNPACK_LEN3, websocket_mask from .models import ( WS_DEFLATE_TRAILING, WebSocketError, @@ -310,9 +310,10 @@ def parse_frame( if length_flag == 126: if buf_length - start_pos < 2: break - data = buf[start_pos : start_pos + 
2] + first_byte = buf[start_pos] + second_byte = buf[start_pos + 1] start_pos += 2 - self._payload_length = UNPACK_LEN2(data)[0] + self._payload_length = first_byte << 8 | second_byte elif length_flag > 126: if buf_length - start_pos < 8: break From 6a1b08696ff8195a75e9ce23c3462b0862342447 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 27 Oct 2024 23:20:28 -1000 Subject: [PATCH 0769/1511] [PR #9556/df0c461 backport][3.11] Fix Cython `WebSocketReader` byte signedness (#9557) --- CHANGES/9556.feature.rst | 1 + aiohttp/_websocket/reader_c.pxd | 4 ++-- tests/test_websocket_parser.py | 13 ++++++++++++- 3 files changed, 15 insertions(+), 3 deletions(-) create mode 120000 CHANGES/9556.feature.rst diff --git a/CHANGES/9556.feature.rst b/CHANGES/9556.feature.rst new file mode 120000 index 00000000000..a93584bccd8 --- /dev/null +++ b/CHANGES/9556.feature.rst @@ -0,0 +1 @@ +9543.feature.rst \ No newline at end of file diff --git a/aiohttp/_websocket/reader_c.pxd b/aiohttp/_websocket/reader_c.pxd index a909eba84c5..74eb07073ec 100644 --- a/aiohttp/_websocket/reader_c.pxd +++ b/aiohttp/_websocket/reader_c.pxd @@ -76,8 +76,8 @@ cdef class WebSocketReader: buf_length="unsigned int", data=bytes, payload=bytearray, - first_byte=char, - second_byte=char, + first_byte="unsigned char", + second_byte="unsigned char", has_mask=bint, fin=bint, ) diff --git a/tests/test_websocket_parser.py b/tests/test_websocket_parser.py index 5b52b6568fc..4de86fe1aa1 100644 --- a/tests/test_websocket_parser.py +++ b/tests/test_websocket_parser.py @@ -128,7 +128,18 @@ def test_parse_frame_length2(parser) -> None: assert (0, 1, b"1234", False) == (fin, opcode, payload, not not compress) -def test_parse_frame_length4(parser) -> None: +def test_parse_frame_length2_multi_byte(parser: WebSocketReader) -> None: + """Ensure a multi-byte length is parsed correctly.""" + expected_payload = b"1" * 32768 + parser.parse_frame(struct.pack("!BB", 0b00000001, 126)) + 
parser.parse_frame(struct.pack("!H", 32768)) + res = parser.parse_frame(b"1" * 32768) + fin, opcode, payload, compress = res[0] + + assert (0, 1, expected_payload, False) == (fin, opcode, payload, not not compress) + + +def test_parse_frame_length4(parser: WebSocketReader) -> None: parser.parse_frame(struct.pack("!BB", 0b00000001, 127)) parser.parse_frame(struct.pack("!Q", 4)) fin, opcode, payload, compress = parser.parse_frame(b"1234")[0] From 1417252e9d86b26832be941d898111f42dfd86a7 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 28 Oct 2024 10:53:36 +0000 Subject: [PATCH 0770/1511] [PR #9559/50656ca0 backport][3.11] Add benchmarks for sending masked WebSocket messages (#9561) Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_benchmarks_http_websocket.py | 45 ++++++++++++++++++------- 1 file changed, 32 insertions(+), 13 deletions(-) diff --git a/tests/test_benchmarks_http_websocket.py b/tests/test_benchmarks_http_websocket.py index a0a80dfd985..8fbeb5f4835 100644 --- a/tests/test_benchmarks_http_websocket.py +++ b/tests/test_benchmarks_http_websocket.py @@ -17,6 +17,7 @@ def test_read_one_hundred_websocket_text_messages( loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture ) -> None: + """Benchmark reading 100 WebSocket text messages.""" queue: DataQueue[WSMessage] = DataQueue(loop=loop) reader = WebSocketReader(queue, max_msg_size=2**16) raw_message = ( @@ -33,25 +34,27 @@ def _run() -> None: feed_data(raw_message) -def test_send_one_hundred_websocket_text_messages( - loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture -) -> None: +class MockTransport(asyncio.Transport): + """Mock transport for testing that do no real I/O.""" - class MockTransport(asyncio.Transport): - """Mock transport for testing that do no real I/O.""" + def is_closing(self) -> bool: + """Swallow is_closing.""" + return False - def is_closing(self) -> bool: - """Swallow is_closing.""" - return False 
+ def write(self, data: bytes) -> None: + """Swallow writes.""" - def write(self, data: bytes) -> None: - """Swallow writes.""" - class MockProtocol(BaseProtocol): +class MockProtocol(BaseProtocol): - async def _drain_helper(self) -> None: - """Swallow drain.""" + async def _drain_helper(self) -> None: + """Swallow drain.""" + +def test_send_one_hundred_websocket_text_messages( + loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture +) -> None: + """Benchmark sending 100 WebSocket text messages.""" writer = WebSocketWriter(MockProtocol(loop=loop), MockTransport()) raw_message = b"Hello, World!" * 100 @@ -62,3 +65,19 @@ async def _send_one_hundred_websocket_text_messages() -> None: @benchmark def _run() -> None: loop.run_until_complete(_send_one_hundred_websocket_text_messages()) + + +def test_send_one_hundred_websocket_text_messages_with_mask( + loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture +) -> None: + """Benchmark sending 100 masked WebSocket text messages.""" + writer = WebSocketWriter(MockProtocol(loop=loop), MockTransport(), use_mask=True) + raw_message = b"Hello, World!" * 100 + + async def _send_one_hundred_websocket_text_messages() -> None: + for _ in range(100): + await writer.send_frame(raw_message, WSMsgType.TEXT) + + @benchmark + def _run() -> None: + loop.run_until_complete(_send_one_hundred_websocket_text_messages()) From 673deec1369ba249452a054d77aedd68befe63eb Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 28 Oct 2024 11:10:06 +0000 Subject: [PATCH 0771/1511] [PR #9559/50656ca0 backport][3.10] Add benchmarks for sending masked WebSocket messages (#9560) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- tests/test_benchmarks_http_websocket.py | 45 ++++++++++++++++++------- 1 file changed, 32 insertions(+), 13 deletions(-) diff --git a/tests/test_benchmarks_http_websocket.py b/tests/test_benchmarks_http_websocket.py index 3502ab4ea73..c48239a8c7a 100644 --- a/tests/test_benchmarks_http_websocket.py +++ b/tests/test_benchmarks_http_websocket.py @@ -17,6 +17,7 @@ def test_read_one_hundred_websocket_text_messages( loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture ) -> None: + """Benchmark reading 100 WebSocket text messages.""" queue: DataQueue[WSMessage] = DataQueue(loop=loop) reader = WebSocketReader(queue, max_msg_size=2**16) raw_message = ( @@ -33,25 +34,27 @@ def _run() -> None: feed_data(raw_message) -def test_send_one_hundred_websocket_text_messages( - loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture -) -> None: +class MockTransport(asyncio.Transport): + """Mock transport for testing that do no real I/O.""" - class MockTransport(asyncio.Transport): - """Mock transport for testing that do no real I/O.""" + def is_closing(self) -> bool: + """Swallow is_closing.""" + return False - def is_closing(self) -> bool: - """Swallow is_closing.""" - return False + def write(self, data: bytes) -> None: + """Swallow writes.""" - def write(self, data: bytes) -> None: - """Swallow writes.""" - class MockProtocol(BaseProtocol): +class MockProtocol(BaseProtocol): - async def _drain_helper(self) -> None: - """Swallow drain.""" + async def _drain_helper(self) -> None: + """Swallow drain.""" + +def test_send_one_hundred_websocket_text_messages( + loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture +) -> None: + """Benchmark sending 100 WebSocket text messages.""" writer = WebSocketWriter(MockProtocol(loop=loop), MockTransport()) raw_message = b"Hello, World!" 
* 100 @@ -62,3 +65,19 @@ async def _send_one_hundred_websocket_text_messages() -> None: @benchmark def _run() -> None: loop.run_until_complete(_send_one_hundred_websocket_text_messages()) + + +def test_send_one_hundred_websocket_text_messages_with_mask( + loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture +) -> None: + """Benchmark sending 100 masked WebSocket text messages.""" + writer = WebSocketWriter(MockProtocol(loop=loop), MockTransport(), use_mask=True) + raw_message = b"Hello, World!" * 100 + + async def _send_one_hundred_websocket_text_messages() -> None: + for _ in range(100): + await writer._send_frame(raw_message, WSMsgType.TEXT) + + @benchmark + def _run() -> None: + loop.run_until_complete(_send_one_hundred_websocket_text_messages()) From 1127bcd19ea28054563291b4aeffd3fadadd310c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 28 Oct 2024 11:10:15 +0000 Subject: [PATCH 0772/1511] [PR #9558/0a706625 backport][3.11] Fix refactoring error from moving WebSocket mask (#9563) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/9558.feature.rst | 1 + aiohttp/_websocket/helpers.py | 2 +- tests/test_websocket_parser.py | 3 +++ 3 files changed, 5 insertions(+), 1 deletion(-) create mode 120000 CHANGES/9558.feature.rst diff --git a/CHANGES/9558.feature.rst b/CHANGES/9558.feature.rst new file mode 120000 index 00000000000..e3e7a75e700 --- /dev/null +++ b/CHANGES/9558.feature.rst @@ -0,0 +1 @@ +9554.feature.rst \ No newline at end of file diff --git a/aiohttp/_websocket/helpers.py b/aiohttp/_websocket/helpers.py index 2f20dd04e01..0bb58df9228 100644 --- a/aiohttp/_websocket/helpers.py +++ b/aiohttp/_websocket/helpers.py @@ -56,7 +56,7 @@ def _websocket_mask_python(mask: bytes, data: bytearray) -> None: websocket_mask = _websocket_mask_python else: try: - from ._websocket import _websocket_mask_cython # type: ignore[import-not-found] + from .mask import _websocket_mask_cython # type: ignore[import-not-found] websocket_mask = _websocket_mask_cython except ImportError: # pragma: no cover diff --git a/tests/test_websocket_parser.py b/tests/test_websocket_parser.py index 4de86fe1aa1..abddeadf5a1 100644 --- a/tests/test_websocket_parser.py +++ b/tests/test_websocket_parser.py @@ -438,6 +438,9 @@ def test_websocket_mask_cython() -> None: message = bytearray(websocket_mask_data) _websocket_helpers._websocket_mask_cython(websocket_mask_mask, message) # type: ignore[attr-defined] assert message == websocket_mask_masked + assert ( + _websocket_helpers.websocket_mask is _websocket_helpers._websocket_mask_cython # type: ignore[attr-defined] + ) def test_websocket_mask_python_empty() -> None: From 153350d09202da3e6076a2ad02395cbbd356a5a9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 28 Oct 2024 11:19:28 +0000 Subject: [PATCH 0773/1511] Bump pip from 24.2 to 24.3.1 (#9562) Bumps [pip](https://github.com/pypa/pip) from 24.2 to 24.3.1. 
<details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/pip/blob/main/NEWS.rst">pip's changelog</a>.</em></p> <blockquote> <h1>24.3.1 (2024-10-27)</h1> <h2>Bug Fixes</h2> <ul> <li>Allow multiple nested inclusions of the same requirements file again. (<code>[#13046](https://github.com/pypa/pip/issues/13046) <https://github.com/pypa/pip/issues/13046></code>_)</li> </ul> <h1>24.3 (2024-10-27)</h1> <h2>Deprecations and Removals</h2> <ul> <li>Deprecate wheel filenames that are not compliant with :pep:<code>440</code>. (<code>[#12918](https://github.com/pypa/pip/issues/12918) <https://github.com/pypa/pip/issues/12918></code>_)</li> </ul> <h2>Features</h2> <ul> <li>Detect recursively referencing requirements files and help users identify the source. (<code>[#12653](https://github.com/pypa/pip/issues/12653) <https://github.com/pypa/pip/issues/12653></code>_)</li> <li>Support for :pep:<code>730</code> iOS wheels. (<code>[#12961](https://github.com/pypa/pip/issues/12961) <https://github.com/pypa/pip/issues/12961></code>_)</li> </ul> <h2>Bug Fixes</h2> <ul> <li>Display a better error message when an already installed package has an invalid requirement. (<code>[#12953](https://github.com/pypa/pip/issues/12953) <https://github.com/pypa/pip/issues/12953></code>_)</li> <li>Ignore <code>PIP_TARGET</code> and <code>pip.conf</code> <code>global.target</code> when preparing a build environment. (<code>[#8438](https://github.com/pypa/pip/issues/8438) <https://github.com/pypa/pip/issues/8438></code>_)</li> <li>Restore support for macOS 10.12 and older (via truststore). (<code>[#12901](https://github.com/pypa/pip/issues/12901) <https://github.com/pypa/pip/issues/12901></code>_)</li> <li>Allow installing pip in editable mode in a virtual environment on Windows. 
(<code>[#12666](https://github.com/pypa/pip/issues/12666) <https://github.com/pypa/pip/issues/12666></code>_)</li> </ul> <h2>Vendored Libraries</h2> <ul> <li>Upgrade certifi to 2024.8.30</li> <li>Upgrade distlib to 0.3.9</li> <li>Upgrade truststore to 0.10.0</li> <li>Upgrade urllib3 to 1.26.20</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/pip/commit/05293b6b55eca86490b7c2944bcc558a56064f0d"><code>05293b6</code></a> Bump for release</li> <li><a href="https://github.com/pypa/pip/commit/6a5db8b107bb0063c69dc5ccd39dbfef14ca7a32"><code>6a5db8b</code></a> Merge pull request <a href="https://redirect.github.com/pypa/pip/issues/13047">#13047</a> from sbidoul/fix-13046</li> <li><a href="https://github.com/pypa/pip/commit/7be54ced1cca2c850e79e8fbe9ec2b76947b2b6f"><code>7be54ce</code></a> Don't fail when the same req file is included more than once</li> <li><a href="https://github.com/pypa/pip/commit/4f6aeb17ed540e181b9ad1dea8d7b5389effd21b"><code>4f6aeb1</code></a> Merge pull request <a href="https://redirect.github.com/pypa/pip/issues/13044">#13044</a> from sbidoul/release/24.3</li> <li><a href="https://github.com/pypa/pip/commit/e1b1d51fe8d0f4b84b77206173ceb656caa2edeb"><code>e1b1d51</code></a> Bump for development</li> <li><a href="https://github.com/pypa/pip/commit/cdba22f49b425fe4a57a8daf992fd6335c8010a1"><code>cdba22f</code></a> Bump for release</li> <li><a href="https://github.com/pypa/pip/commit/27f8374e8dd49141bd2397c0e8e8093cf3676ff7"><code>27f8374</code></a> Update AUTHORS.txt</li> <li><a href="https://github.com/pypa/pip/commit/c79d01953357913f421f192f51ffa9bab0a75ba0"><code>c79d019</code></a> Merge pull request <a href="https://redirect.github.com/pypa/pip/issues/13033">#13033</a> from sbidoul/vendoring-24.3-sbi</li> <li><a href="https://github.com/pypa/pip/commit/3ca89215a96f9b05619fc52bb778c19f26b84a9f"><code>3ca8921</code></a> Merge pull request <a 
href="https://redirect.github.com/pypa/pip/issues/13041">#13041</a> from sethmlarson/truststore-0.10.0</li> <li><a href="https://github.com/pypa/pip/commit/0cc7375ff0a42ddfa19f23f42cb96d6d7c06d29b"><code>0cc7375</code></a> Upgrade vendored truststore to 0.10.0</li> <li>Additional commits viewable in <a href="https://github.com/pypa/pip/compare/24.2...24.3.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pip&package-manager=pip&previous-version=24.2&new-version=24.3.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 12 ++++++++++-- requirements/dev.txt | 12 ++++++++++-- 2 files changed, 20 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index f5bce7e64a0..d815484c984 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -36,6 +36,7 @@ cffi==1.17.1 # via # cryptography # pycares + # pytest-codspeed cfgv==3.4.0 # via pre-commit charset-normalizer==3.4.0 @@ -67,7 +68,9 @@ docutils==0.20.1 exceptiongroup==1.2.2 # via pytest filelock==3.16.1 - # via virtualenv + # via + # pytest-codspeed + # virtualenv freezegun==1.5.1 # via # -r requirements/lint.in @@ -168,8 +171,13 @@ pytest==8.3.3 # via # -r requirements/lint.in # -r requirements/test.in + # pytest-codspeed # pytest-cov # pytest-mock +pytest-codspeed==2.2.1 + # via + # -r requirements/lint.in + # -r requirements/test.in pytest-cov==5.0.0 # via -r requirements/test.in pytest-mock==3.14.0 @@ -285,7 +293,7 @@ zipp==3.20.2 # importlib-resources # The following packages are considered to be unsafe in a requirements file: -pip==24.2 +pip==24.3.1 # via pip-tools setuptools==75.2.0 # via diff 
--git a/requirements/dev.txt b/requirements/dev.txt index c9bdcec8db5..4c1e643513a 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -36,6 +36,7 @@ cffi==1.17.1 # via # cryptography # pycares + # pytest-codspeed cfgv==3.4.0 # via pre-commit charset-normalizer==3.4.0 @@ -65,7 +66,9 @@ docutils==0.20.1 exceptiongroup==1.2.2 # via pytest filelock==3.16.1 - # via virtualenv + # via + # pytest-codspeed + # virtualenv freezegun==1.5.1 # via # -r requirements/lint.in @@ -163,8 +166,13 @@ pytest==8.3.3 # via # -r requirements/lint.in # -r requirements/test.in + # pytest-codspeed # pytest-cov # pytest-mock +pytest-codspeed==2.2.1 + # via + # -r requirements/lint.in + # -r requirements/test.in pytest-cov==5.0.0 # via -r requirements/test.in pytest-mock==3.14.0 @@ -277,7 +285,7 @@ zipp==3.20.2 # importlib-resources # The following packages are considered to be unsafe in a requirements file: -pip==24.2 +pip==24.3.1 # via pip-tools setuptools==75.2.0 # via From 0d547b0016a792eacb2cd60f957f2e7796a1f876 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 28 Oct 2024 09:53:36 -1000 Subject: [PATCH 0774/1511] [PR #9443/06b2398 backport][3.10] Fix handling of redirects with authentication (#9570) Co-authored-by: Pierre-Louis Peeters <peetersp@dnb.com> Co-authored-by: Pierre-Louis Peeters <PLPeeters@users.noreply.github.com> --- CHANGES/9436.bugfix.rst | 1 + aiohttp/client.py | 11 ++++-- docs/client_advanced.rst | 38 ++++++++++++++++++ tests/test_client_functional.py | 69 +++++++++++++++++++++++++++++++-- 4 files changed, 113 insertions(+), 6 deletions(-) create mode 100644 CHANGES/9436.bugfix.rst diff --git a/CHANGES/9436.bugfix.rst b/CHANGES/9436.bugfix.rst new file mode 100644 index 00000000000..7bd7fbcfe28 --- /dev/null +++ b/CHANGES/9436.bugfix.rst @@ -0,0 +1 @@ +Authentication provided by a redirect now takes precedence over provided ``auth`` when making requests with the client -- by :user:`PLPeeters`. 
diff --git a/aiohttp/client.py b/aiohttp/client.py index a46c26537dd..a6d279718ae 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -506,7 +506,7 @@ async def _request( warnings.warn("Chunk size is deprecated #1615", DeprecationWarning) redirects = 0 - history = [] + history: List[ClientResponse] = [] version = self._version params = params or {} @@ -590,13 +590,18 @@ async def _request( else InvalidUrlClientError ) raise err_exc_cls(url) - if auth and auth_from_url: + # If `auth` was passed for an already authenticated URL, + # disallow only if this is the initial URL; this is to avoid issues + # with sketchy redirects that are not the caller's responsibility + if not history and (auth and auth_from_url): raise ValueError( "Cannot combine AUTH argument with " "credentials encoded in URL" ) - if auth is None: + # Override the auth with the one from the URL only if we + # have no auth, or if we got an auth from a redirect URL + if auth is None or (history and auth_from_url is not None): auth = auth_from_url if auth is None: auth = self._default_auth diff --git a/docs/client_advanced.rst b/docs/client_advanced.rst index 524b0877450..02087ab64a9 100644 --- a/docs/client_advanced.rst +++ b/docs/client_advanced.rst @@ -56,6 +56,44 @@ For *text/plain* :: await session.post(url, data='Привет, Мир!') +Authentication +-------------- + +Instead of setting the ``Authorization`` header directly, +:class:`ClientSession` and individual request methods provide an ``auth`` +argument. An instance of :class:`BasicAuth` can be passed in like this:: + + auth = BasicAuth(login="...", password="...") + async with ClientSession(auth=auth) as session: + ... + +Note that if the request is redirected and the redirect URL contains +credentials, those credentials will supersede any previously set credentials. +In other words, if ``http://user@example.com`` redirects to +``http://other_user@example.com``, the second request will be authenticated +as ``other_user``. 
Providing both the ``auth`` parameter and authentication in +the *initial* URL will result in a :exc:`ValueError`. + +For other authentication flows, the ``Authorization`` header can be set +directly:: + + headers = {"Authorization": "Bearer eyJh...0M30"} + async with ClientSession(headers=headers) as session: + ... + +The authentication header for a session may be updated as and when required. +For example:: + + session.headers["Authorization"] = "Bearer eyJh...1OH0" + +Note that a *copy* of the headers dictionary is set as an attribute when +creating a :class:`ClientSession` instance (as a :class:`multidict.CIMultiDict` +object). Updating the original dictionary does not have any effect. + +In cases where the authentication header value expires periodically, an +:mod:`asyncio` task may be used to update the session's default headers in the +background. + .. note:: ``Authorization`` header will be removed if you get redirected to a different host or protocol. diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 30ceebddc97..83fe4348e94 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -12,7 +12,7 @@ import tarfile import time import zipfile -from typing import Any, AsyncIterator, Optional, Type +from typing import Any, AsyncIterator, Awaitable, Callable, List, Optional, Type from unittest import mock import pytest @@ -21,7 +21,7 @@ import aiohttp from aiohttp import Fingerprint, ServerFingerprintMismatch, hdrs, web -from aiohttp.abc import AbstractResolver +from aiohttp.abc import AbstractResolver, ResolveResult from aiohttp.client_exceptions import ( ClientResponseError, InvalidURL, @@ -36,7 +36,8 @@ from aiohttp.connector import Connection from aiohttp.http_writer import StreamWriter from aiohttp.pytest_plugin import AiohttpClient, AiohttpServer, TestClient -from aiohttp.test_utils import unused_port +from aiohttp.test_utils import TestServer, unused_port +from aiohttp.typedefs import Handler 
@pytest.fixture @@ -2888,6 +2889,68 @@ async def test_creds_in_auth_and_url() -> None: await session.close() +async def test_creds_in_auth_and_redirect_url( + create_server_for_url_and_handler: Callable[[URL, Handler], Awaitable[TestServer]], +) -> None: + """Verify that credentials in redirect URLs can and do override any previous credentials.""" + url_from = URL("http://example.com") + url_to = URL("http://user@example.com") + redirected = False + + async def srv(request: web.Request) -> web.Response: + nonlocal redirected + + assert request.host == url_from.host + + if not redirected: + redirected = True + raise web.HTTPMovedPermanently(url_to) + + return web.Response() + + server = await create_server_for_url_and_handler(url_from, srv) + + etc_hosts = { + (url_from.host, 80): server, + } + + class FakeResolver(AbstractResolver): + async def resolve( + self, + host: str, + port: int = 0, + family: socket.AddressFamily = socket.AF_INET, + ) -> List[ResolveResult]: + server = etc_hosts[(host, port)] + assert server.port is not None + + return [ + { + "hostname": host, + "host": server.host, + "port": server.port, + "family": socket.AF_INET, + "proto": 0, + "flags": socket.AI_NUMERICHOST, + } + ] + + async def close(self) -> None: + """Dummy""" + + connector = aiohttp.TCPConnector(resolver=FakeResolver(), ssl=False) + + async with aiohttp.ClientSession(connector=connector) as client, client.get( + url_from, auth=aiohttp.BasicAuth("user", "pass") + ) as resp: + assert len(resp.history) == 1 + assert str(resp.url) == "http://example.com" + assert resp.status == 200 + assert ( + resp.request_info.headers.get("authorization") == "Basic dXNlcjo=" + ), "Expected redirect credentials to take precedence over provided auth" + + @pytest.fixture def create_server_for_url_and_handler(aiohttp_server, tls_certificate_authority): def create(url, srv): From a3b8129bb4f711186b0a97386e9a27122a549dc1 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Mon, 28 Oct 2024 09:53:41 -1000 Subject: [PATCH 0775/1511] [PR #9443/06b2398 backport][3.11] Fix handling of redirects with authentication (#9571) Co-authored-by: Pierre-Louis Peeters <peetersp@dnb.com> Co-authored-by: Pierre-Louis Peeters <PLPeeters@users.noreply.github.com> --- CHANGES/9436.bugfix.rst | 1 + aiohttp/client.py | 11 +++-- docs/client_advanced.rst | 38 ++++++++++++++++++ tests/test_client_functional.py | 71 +++++++++++++++++++++++++++++++-- 4 files changed, 114 insertions(+), 7 deletions(-) create mode 100644 CHANGES/9436.bugfix.rst diff --git a/CHANGES/9436.bugfix.rst b/CHANGES/9436.bugfix.rst new file mode 100644 index 00000000000..7bd7fbcfe28 --- /dev/null +++ b/CHANGES/9436.bugfix.rst @@ -0,0 +1 @@ +Authentication provided by a redirect now takes precedence over provided ``auth`` when making requests with the client -- by :user:`PLPeeters`. diff --git a/aiohttp/client.py b/aiohttp/client.py index c3025cd5ca6..92eb87a764d 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -523,7 +523,7 @@ async def _request( warnings.warn("Chunk size is deprecated #1615", DeprecationWarning) redirects = 0 - history = [] + history: List[ClientResponse] = [] version = self._version params = params or {} @@ -614,13 +614,18 @@ async def _request( else InvalidUrlClientError ) raise err_exc_cls(url) - if auth and auth_from_url: + # If `auth` was passed for an already authenticated URL, + # disallow only if this is the initial URL; this is to avoid issues + # with sketchy redirects that are not the caller's responsibility + if not history and (auth and auth_from_url): raise ValueError( "Cannot combine AUTH argument with " "credentials encoded in URL" ) - if auth is None: + # Override the auth with the one from the URL only if we + # have no auth, or if we got an auth from a redirect URL + if auth is None or (history and auth_from_url is not None): auth = auth_from_url if ( diff --git a/docs/client_advanced.rst 
b/docs/client_advanced.rst index 8b27351c882..2d00418ffac 100644 --- a/docs/client_advanced.rst +++ b/docs/client_advanced.rst @@ -56,6 +56,44 @@ For *text/plain* :: await session.post(url, data='Привет, Мир!') +Authentication +-------------- + +Instead of setting the ``Authorization`` header directly, +:class:`ClientSession` and individual request methods provide an ``auth`` +argument. An instance of :class:`BasicAuth` can be passed in like this:: + + auth = BasicAuth(login="...", password="...") + async with ClientSession(auth=auth) as session: + ... + +Note that if the request is redirected and the redirect URL contains +credentials, those credentials will supersede any previously set credentials. +In other words, if ``http://user@example.com`` redirects to +``http://other_user@example.com``, the second request will be authenticated +as ``other_user``. Providing both the ``auth`` parameter and authentication in +the *initial* URL will result in a :exc:`ValueError`. + +For other authentication flows, the ``Authorization`` header can be set +directly:: + + headers = {"Authorization": "Bearer eyJh...0M30"} + async with ClientSession(headers=headers) as session: + ... + +The authentication header for a session may be updated as and when required. +For example:: + + session.headers["Authorization"] = "Bearer eyJh...1OH0" + +Note that a *copy* of the headers dictionary is set as an attribute when +creating a :class:`ClientSession` instance (as a :class:`multidict.CIMultiDict` +object). Updating the original dictionary does not have any effect. + +In cases where the authentication header value expires periodically, an +:mod:`asyncio` task may be used to update the session's default headers in the +background. + .. note:: ``Authorization`` header will be removed if you get redirected to a different host or protocol. 
diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 80cf56f8118..082c4db4d89 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -12,7 +12,7 @@ import tarfile import time import zipfile -from typing import Any, AsyncIterator, Optional, Type +from typing import Any, AsyncIterator, Awaitable, Callable, List, Optional, Type from unittest import mock import pytest @@ -21,7 +21,7 @@ import aiohttp from aiohttp import Fingerprint, ServerFingerprintMismatch, hdrs, web -from aiohttp.abc import AbstractResolver +from aiohttp.abc import AbstractResolver, ResolveResult from aiohttp.client_exceptions import ( ClientResponseError, InvalidURL, @@ -35,8 +35,9 @@ from aiohttp.client_reqrep import ClientRequest from aiohttp.connector import Connection from aiohttp.http_writer import StreamWriter -from aiohttp.pytest_plugin import AiohttpClient, AiohttpServer, TestClient -from aiohttp.test_utils import unused_port +from aiohttp.pytest_plugin import AiohttpClient, AiohttpServer +from aiohttp.test_utils import TestClient, TestServer, unused_port +from aiohttp.typedefs import Handler @pytest.fixture @@ -2888,6 +2889,68 @@ async def test_creds_in_auth_and_url() -> None: await session.close() +async def test_creds_in_auth_and_redirect_url( + create_server_for_url_and_handler: Callable[[URL, Handler], Awaitable[TestServer]], +) -> None: + """Verify that credentials in redirect URLs can and do override any previous credentials.""" + url_from = URL("http://example.com") + url_to = URL("http://user@example.com") + redirected = False + + async def srv(request: web.Request) -> web.Response: + nonlocal redirected + + assert request.host == url_from.host + + if not redirected: + redirected = True + raise web.HTTPMovedPermanently(url_to) + + return web.Response() + + server = await create_server_for_url_and_handler(url_from, srv) + + etc_hosts = { + (url_from.host, 80): server, + } + + class FakeResolver(AbstractResolver): + async 
def resolve( + self, + host: str, + port: int = 0, + family: socket.AddressFamily = socket.AF_INET, + ) -> List[ResolveResult]: + server = etc_hosts[(host, port)] + assert server.port is not None + + return [ + { + "hostname": host, + "host": server.host, + "port": server.port, + "family": socket.AF_INET, + "proto": 0, + "flags": socket.AI_NUMERICHOST, + } + ] + + async def close(self) -> None: + """Dummy""" + + connector = aiohttp.TCPConnector(resolver=FakeResolver(), ssl=False) + + async with aiohttp.ClientSession(connector=connector) as client, client.get( + url_from, auth=aiohttp.BasicAuth("user", "pass") + ) as resp: + assert len(resp.history) == 1 + assert str(resp.url) == "http://example.com" + assert resp.status == 200 + assert ( + resp.request_info.headers.get("authorization") == "Basic dXNlcjo=" + ), "Expected redirect credentials to take precedence over provided auth" + + @pytest.fixture def create_server_for_url_and_handler(aiohttp_server, tls_certificate_authority): def create(url, srv): From 78418f7272fd012c992f09ca8137cd6a36d26505 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 28 Oct 2024 10:43:55 -1000 Subject: [PATCH 0776/1511] [PR #9566/22f0831 backport][3.11] Refactor WebSocketWriter to remove high level protocol functions (#9569) --- aiohttp/_websocket/writer.py | 21 ++++++++---------- aiohttp/client_ws.py | 9 ++++---- aiohttp/web_ws.py | 9 ++++---- tests/test_client_ws_functional.py | 30 +++++++++++++++----------- tests/test_web_websocket_functional.py | 24 +++++++++++---------- tests/test_websocket_writer.py | 12 +++++------ 6 files changed, 56 insertions(+), 49 deletions(-) diff --git a/aiohttp/_websocket/writer.py b/aiohttp/_websocket/writer.py index 0b11af97dfb..e49b6224aec 100644 --- a/aiohttp/_websocket/writer.py +++ b/aiohttp/_websocket/writer.py @@ -34,6 +34,14 @@ class WebSocketWriter: + """WebSocket writer. + + The writer is responsible for sending messages to the client. 
It is + created by the protocol when a connection is established. The writer + should avoid implementing any application logic and should only be + concerned with the low-level details of the WebSocket protocol. + """ + def __init__( self, protocol: BaseProtocol, @@ -45,6 +53,7 @@ def __init__( compress: int = 0, notakeover: bool = False, ) -> None: + """Initialize a WebSocket writer.""" self.protocol = protocol self.transport = transport self.use_mask = use_mask @@ -155,18 +164,6 @@ def _make_compress_obj(self, compress: int) -> ZLibCompressor: max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE, ) - async def pong(self, message: Union[bytes, str] = b"") -> None: - """Send pong message.""" - if isinstance(message, str): - message = message.encode("utf-8") - await self.send_frame(message, WSMsgType.PONG) - - async def ping(self, message: Union[bytes, str] = b"") -> None: - """Send ping message.""" - if isinstance(message, str): - message = message.encode("utf-8") - await self.send_frame(message, WSMsgType.PING) - async def close(self, code: int = 1000, message: Union[bytes, str] = b"") -> None: """Close the websocket, sending the specified code and message.""" if isinstance(message, str): diff --git a/aiohttp/client_ws.py b/aiohttp/client_ws.py index fb9fbee4a26..5eb9e7415ad 100644 --- a/aiohttp/client_ws.py +++ b/aiohttp/client_ws.py @@ -140,13 +140,14 @@ def _send_heartbeat(self) -> None: self._cancel_pong_response_cb() self._pong_response_cb = loop.call_at(when, self._pong_not_received) + coro = self._writer.send_frame(b"", WSMsgType.PING) if sys.version_info >= (3, 12): # Optimization for Python 3.12, try to send the ping # immediately to avoid having to schedule # the task on the event loop. 
- ping_task = asyncio.Task(self._writer.ping(), loop=loop, eager_start=True) + ping_task = asyncio.Task(coro, loop=loop, eager_start=True) else: - ping_task = loop.create_task(self._writer.ping()) + ping_task = loop.create_task(coro) if not ping_task.done(): self._ping_task = ping_task @@ -224,10 +225,10 @@ def exception(self) -> Optional[BaseException]: return self._exception async def ping(self, message: bytes = b"") -> None: - await self._writer.ping(message) + await self._writer.send_frame(message, WSMsgType.PING) async def pong(self, message: bytes = b"") -> None: - await self._writer.pong(message) + await self._writer.send_frame(message, WSMsgType.PONG) async def send_frame( self, message: bytes, opcode: WSMsgType, compress: Optional[int] = None diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index 787c5cb1d39..528252a4433 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -153,13 +153,14 @@ def _send_heartbeat(self) -> None: self._cancel_pong_response_cb() self._pong_response_cb = loop.call_at(when, self._pong_not_received) + coro = self._writer.send_frame(b"", WSMsgType.PING) if sys.version_info >= (3, 12): # Optimization for Python 3.12, try to send the ping # immediately to avoid having to schedule # the task on the event loop. 
- ping_task = asyncio.Task(self._writer.ping(), loop=loop, eager_start=True) + ping_task = asyncio.Task(coro, loop=loop, eager_start=True) else: - ping_task = loop.create_task(self._writer.ping()) + ping_task = loop.create_task(coro) if not ping_task.done(): self._ping_task = ping_task @@ -371,13 +372,13 @@ def exception(self) -> Optional[BaseException]: async def ping(self, message: bytes = b"") -> None: if self._writer is None: raise RuntimeError("Call .prepare() first") - await self._writer.ping(message) + await self._writer.send_frame(message, WSMsgType.PING) async def pong(self, message: bytes = b"") -> None: # unsolicited pong if self._writer is None: raise RuntimeError("Call .prepare() first") - await self._writer.pong(message) + await self._writer.send_frame(message, WSMsgType.PONG) async def send_frame( self, message: bytes, opcode: WSMsgType, compress: Optional[int] = None diff --git a/tests/test_client_ws_functional.py b/tests/test_client_ws_functional.py index 0c4a081eafa..f28db879037 100644 --- a/tests/test_client_ws_functional.py +++ b/tests/test_client_ws_functional.py @@ -1,6 +1,6 @@ import asyncio import sys -from typing import Any, NoReturn +from typing import Any, NoReturn, Optional from unittest import mock import pytest @@ -704,9 +704,11 @@ async def handler(request: web.Request) -> NoReturn: assert resp._conn is not None with mock.patch.object( resp._conn.transport, "write", side_effect=ClientConnectionResetError - ), mock.patch.object(resp._writer, "ping", wraps=resp._writer.ping) as ping: + ), mock.patch.object( + resp._writer, "send_frame", wraps=resp._writer.send_frame + ) as send_frame: await resp.receive() - ping_count = ping.call_count + ping_count = send_frame.call_args_list.count(mock.call(b"", WSMsgType.PING)) # Connection should be closed roughly after 1.5x heartbeat. 
await asyncio.sleep(0.2) assert ping_count == 1 @@ -842,7 +844,7 @@ async def handler(request): async def test_close_websocket_while_ping_inflight( - aiohttp_client: AiohttpClient, + aiohttp_client: AiohttpClient, loop: asyncio.AbstractEventLoop ) -> None: """Test closing the websocket while a ping is in-flight.""" ping_received = False @@ -866,23 +868,27 @@ async def handler(request: web.Request) -> NoReturn: await resp.send_bytes(b"ask") cancelled = False - ping_stated = False - - async def delayed_ping() -> None: - nonlocal cancelled, ping_stated - ping_stated = True + ping_started = loop.create_future() + + async def delayed_send_frame( + message: bytes, opcode: int, compress: Optional[int] = None + ) -> None: + assert opcode == WSMsgType.PING + nonlocal cancelled, ping_started + ping_started.set_result(None) try: await asyncio.sleep(1) except asyncio.CancelledError: cancelled = True raise - with mock.patch.object(resp._writer, "ping", delayed_ping): - await asyncio.sleep(0.1) + with mock.patch.object(resp._writer, "send_frame", delayed_send_frame): + async with async_timeout.timeout(1): + await ping_started await resp.close() await asyncio.sleep(0) - assert ping_stated is True + assert ping_started.result() is None assert cancelled is True diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index 607ab6d7de3..10f9279803e 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -423,7 +423,7 @@ async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) - await ws.ping("data") + await ws.ping(b"data") await ws.receive() closed.set_result(None) return ws @@ -460,7 +460,7 @@ async def handler(request): ws = await client.ws_connect("/", autoping=False) - await ws.ping("data") + await ws.ping(b"data") msg = await ws.receive() assert msg.type == WSMsgType.PONG assert msg.data == b"data" @@ -478,7 +478,7 @@ async def handler(request): msg = await ws.receive() 
assert msg.type == WSMsgType.PING - await ws.pong("data") + await ws.pong(b"data") msg = await ws.receive() assert msg.type == WSMsgType.CLOSE @@ -493,7 +493,7 @@ async def handler(request): ws = await client.ws_connect("/", autoping=False) - await ws.ping("data") + await ws.ping(b"data") msg = await ws.receive() assert msg.type == WSMsgType.PONG assert msg.data == b"data" @@ -741,12 +741,14 @@ async def handler(request: web.Request) -> NoReturn: with mock.patch.object( ws_server._req.transport, "write", side_effect=ConnectionResetError ), mock.patch.object( - ws_server._writer, "ping", wraps=ws_server._writer.ping - ) as ping: + ws_server._writer, "send_frame", wraps=ws_server._writer.send_frame + ) as send_frame: try: await ws_server.receive() finally: - ping_count = ping.call_count + ping_count = send_frame.call_args_list.count( + mock.call(b"", WSMsgType.PING) + ) assert False app = web.Application() @@ -990,7 +992,7 @@ async def handler(request): msg = await ws.receive() assert msg.type == WSMsgType.PING await asyncio.sleep(0) - await ws.pong("data") + await ws.pong(b"data") msg = await ws.receive() assert msg.type == WSMsgType.CLOSE @@ -1006,7 +1008,7 @@ async def handler(request): ws = await client.ws_connect("/", autoping=False) await asyncio.sleep(0) - await ws.ping("data") + await ws.ping(b"data") msg = await ws.receive() assert msg.type == WSMsgType.PONG @@ -1036,7 +1038,7 @@ async def handler(request): msg = await ws.receive() assert msg.type == WSMsgType.PING await asyncio.sleep(0) - await ws.pong("data") + await ws.pong(b"data") msg = await ws.receive() assert msg.type == WSMsgType.CLOSE @@ -1052,7 +1054,7 @@ async def handler(request): ws = await client.ws_connect("/", autoping=False) await timed_out - await ws.ping("data") + await ws.ping(b"data") msg = await ws.receive() assert msg.type == WSMsgType.PONG diff --git a/tests/test_websocket_writer.py b/tests/test_websocket_writer.py index 0dc5f073497..93bd7064cb4 100644 --- 
a/tests/test_websocket_writer.py +++ b/tests/test_websocket_writer.py @@ -29,14 +29,14 @@ def writer(protocol, transport): return WebSocketWriter(protocol, transport, use_mask=False) -async def test_pong(writer) -> None: - await writer.pong() - writer.transport.write.assert_called_with(b"\x8a\x00") +async def test_pong(writer: WebSocketWriter) -> None: + await writer.send_frame(b"", WSMsgType.PONG) + writer.transport.write.assert_called_with(b"\x8a\x00") # type: ignore[attr-defined] -async def test_ping(writer) -> None: - await writer.ping() - writer.transport.write.assert_called_with(b"\x89\x00") +async def test_ping(writer: WebSocketWriter) -> None: + await writer.send_frame(b"", WSMsgType.PING) + writer.transport.write.assert_called_with(b"\x89\x00") # type: ignore[attr-defined] async def test_send_text(writer: WebSocketWriter) -> None: From 4592d03194ebd42638834519cdc65feb484a0f30 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 28 Oct 2024 11:26:32 -1000 Subject: [PATCH 0777/1511] Add support for adjusting the server WebSocket writer limit (#9572) (#9573) --- CHANGES/9572.feature.rst | 1 + aiohttp/web_ws.py | 9 ++++++++- docs/web_reference.rst | 8 +++++++- tests/test_web_websocket.py | 12 ++++++++++++ 4 files changed, 28 insertions(+), 2 deletions(-) create mode 100644 CHANGES/9572.feature.rst diff --git a/CHANGES/9572.feature.rst b/CHANGES/9572.feature.rst new file mode 100644 index 00000000000..9e6778819da --- /dev/null +++ b/CHANGES/9572.feature.rst @@ -0,0 +1 @@ +Added ``writer_limit`` to the :py:class:`~aiohttp.web.WebSocketResponse` to be able to adjust the limit before the writer forces the buffer to be drained -- by :user:`bdraco`. diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index 528252a4433..fa6d30276b5 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -10,6 +10,7 @@ from multidict import CIMultiDict from . 
import hdrs +from ._websocket.writer import DEFAULT_LIMIT from .abc import AbstractStreamWriter from .helpers import calculate_timeout_when, set_exception, set_result from .http import ( @@ -70,6 +71,7 @@ def __init__( protocols: Iterable[str] = (), compress: bool = True, max_msg_size: int = 4 * 1024 * 1024, + writer_limit: int = DEFAULT_LIMIT, ) -> None: super().__init__(status=101) self._protocols = protocols @@ -97,6 +99,7 @@ def __init__( self._compress = compress self._max_msg_size = max_msg_size self._ping_task: Optional[asyncio.Task[None]] = None + self._writer_limit = writer_limit def _cancel_heartbeat(self) -> None: self._cancel_pong_response_cb() @@ -305,7 +308,11 @@ def _pre_start(self, request: BaseRequest) -> Tuple[str, WebSocketWriter]: transport = request._protocol.transport assert transport is not None writer = WebSocketWriter( - request._protocol, transport, compress=compress, notakeover=notakeover + request._protocol, + transport, + compress=compress, + notakeover=notakeover, + limit=self._writer_limit, ) return protocol, writer diff --git a/docs/web_reference.rst b/docs/web_reference.rst index 784cfa6e717..0bb72bb3678 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -954,7 +954,8 @@ and :ref:`aiohttp-web-signals` handlers:: .. class:: WebSocketResponse(*, timeout=10.0, receive_timeout=None, \ autoclose=True, autoping=True, heartbeat=None, \ - protocols=(), compress=True, max_msg_size=4194304) + protocols=(), compress=True, max_msg_size=4194304, \ + writer_limit=65536) Class for handling server-side websockets, inherited from :class:`StreamResponse`. @@ -1005,6 +1006,11 @@ and :ref:`aiohttp-web-signals` handlers:: ``request.transport.close()`` to avoid leaking resources. + :param int writer_limit: maximum size of write buffer, 64 KB by default. + Once the buffer is full, the websocket will pause + to drain the buffer. + + .. 
versionadded:: 3.11 The class supports ``async for`` statement for iterating over incoming messages:: diff --git a/tests/test_web_websocket.py b/tests/test_web_websocket.py index e45c96bf70f..9bc28838530 100644 --- a/tests/test_web_websocket.py +++ b/tests/test_web_websocket.py @@ -249,6 +249,18 @@ def test_closed_after_ctor() -> None: assert ws.close_code is None +async def test_raise_writer_limit(make_request) -> None: + """Test the writer limit can be adjusted.""" + req = make_request("GET", "/") + ws = WebSocketResponse(writer_limit=1234567) + await ws.prepare(req) + assert ws._reader is not None + assert ws._writer is not None + assert ws._writer._limit == 1234567 + ws._reader.feed_data(WS_CLOSED_MESSAGE) + await ws.close() + + async def test_send_str_closed(make_request) -> None: req = make_request("GET", "/") ws = WebSocketResponse() From 9cd73e34037ade4b1f24b1061642c226b06027aa Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 28 Oct 2024 12:08:32 -1000 Subject: [PATCH 0778/1511] [PR #9574/998204d backport][3.11] Increase minimum yarl version to 1.17.0 (#9576) --- CHANGES/8909.breaking.rst | 1 + CHANGES/9079.breaking.rst | 1 + CHANGES/9079.misc.rst | 1 - CHANGES/9305.breaking.rst | 2 +- CHANGES/9574.breaking.rst | 1 + requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.in | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- setup.cfg | 2 +- 12 files changed, 11 insertions(+), 9 deletions(-) create mode 120000 CHANGES/8909.breaking.rst create mode 120000 CHANGES/9079.breaking.rst delete mode 100644 CHANGES/9079.misc.rst mode change 100644 => 120000 CHANGES/9305.breaking.rst create mode 100644 CHANGES/9574.breaking.rst diff --git a/CHANGES/8909.breaking.rst b/CHANGES/8909.breaking.rst new file mode 120000 index 00000000000..09e6008b8cd --- /dev/null +++ b/CHANGES/8909.breaking.rst @@ -0,0 +1 @@ +9574.breaking.rst \ No newline at end of file diff 
--git a/CHANGES/9079.breaking.rst b/CHANGES/9079.breaking.rst new file mode 120000 index 00000000000..09e6008b8cd --- /dev/null +++ b/CHANGES/9079.breaking.rst @@ -0,0 +1 @@ +9574.breaking.rst \ No newline at end of file diff --git a/CHANGES/9079.misc.rst b/CHANGES/9079.misc.rst deleted file mode 100644 index fb6b84e7f22..00000000000 --- a/CHANGES/9079.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Increased minimum yarl version to 1.11.0 -- by :user:`bdraco`. diff --git a/CHANGES/9305.breaking.rst b/CHANGES/9305.breaking.rst deleted file mode 100644 index 82fec1d21b4..00000000000 --- a/CHANGES/9305.breaking.rst +++ /dev/null @@ -1 +0,0 @@ -Increased minimum yarl version to 1.12.0 -- by :user:`bdraco`. diff --git a/CHANGES/9305.breaking.rst b/CHANGES/9305.breaking.rst new file mode 120000 index 00000000000..09e6008b8cd --- /dev/null +++ b/CHANGES/9305.breaking.rst @@ -0,0 +1 @@ +9574.breaking.rst \ No newline at end of file diff --git a/CHANGES/9574.breaking.rst b/CHANGES/9574.breaking.rst new file mode 100644 index 00000000000..4175991dfcf --- /dev/null +++ b/CHANGES/9574.breaking.rst @@ -0,0 +1 @@ +Increased minimum yarl version to 1.17.0 -- by :user:`bdraco`. 
diff --git a/requirements/base.txt b/requirements/base.txt index 56438d8ab23..87f8eb686c9 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -44,5 +44,5 @@ typing-extensions==4.12.2 # via multidict uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in -yarl==1.15.2 +yarl==1.17.0 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index d815484c984..087f7acba6f 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -285,7 +285,7 @@ wait-for-it==2.2.2 # via -r requirements/test.in wheel==0.44.0 # via pip-tools -yarl==1.15.2 +yarl==1.17.0 # via -r requirements/runtime-deps.in zipp==3.20.2 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 4c1e643513a..d1c84f46cfb 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -277,7 +277,7 @@ wait-for-it==2.2.2 # via -r requirements/test.in wheel==0.44.0 # via pip-tools -yarl==1.15.2 +yarl==1.17.0 # via -r requirements/runtime-deps.in zipp==3.20.2 # via diff --git a/requirements/runtime-deps.in b/requirements/runtime-deps.in index 7af9fb50246..7d0f5ca3a62 100644 --- a/requirements/runtime-deps.in +++ b/requirements/runtime-deps.in @@ -10,4 +10,4 @@ brotlicffi; platform_python_implementation != 'CPython' frozenlist >= 1.1.1 multidict >=4.5, < 7.0 propcache >= 0.2.0 -yarl >= 1.13.0, < 2.0 +yarl >= 1.17.0, < 2.0 diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 7aaddcd942d..246c5934f50 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -38,5 +38,5 @@ pycparser==2.22 # via cffi typing-extensions==4.12.2 # via multidict -yarl==1.15.2 +yarl==1.17.0 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 7364abfd29f..e78e3f01ca9 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -147,5 +147,5 @@ uvloop==0.21.0 ; platform_system != 
"Windows" and implementation_name == "cpytho # via -r requirements/base.in wait-for-it==2.2.2 # via -r requirements/test.in -yarl==1.15.2 +yarl==1.17.0 # via -r requirements/runtime-deps.in diff --git a/setup.cfg b/setup.cfg index 6408dde39bd..5f78750679f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -58,7 +58,7 @@ install_requires = frozenlist >= 1.1.1 multidict >=4.5, < 7.0 propcache >= 0.2.0 - yarl >= 1.13.0, < 2.0 + yarl >= 1.17.0, < 2.0 [options.exclude_package_data] * = From 429ec17a44c1aa38585f672c8090610e514978b2 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 28 Oct 2024 22:52:50 +0000 Subject: [PATCH 0779/1511] [PR #9575/951def15 backport][3.11] Switch to using `URL.host_port_subcomponent` in `yarl` for the client host header (#9578) Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/client_reqrep.py | 23 ++--------------------- 1 file changed, 2 insertions(+), 21 deletions(-) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 57df4b016e6..fbccfa48eb0 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -433,30 +433,11 @@ def update_headers(self, headers: Optional[LooseHeaders]) -> None: self.headers: CIMultiDict[str] = CIMultiDict() # Build the host header - host = self.url.host_subcomponent + host = self.url.host_port_subcomponent - # host_subcomponent is None when the URL is a relative URL. + # host_port_subcomponent is None when the URL is a relative URL. # but we know we do not have a relative URL here. assert host is not None - - if host[-1] == ".": - # Remove all trailing dots from the netloc as while - # they are valid FQDNs in DNS, TLS validation fails. - # See https://github.com/aio-libs/aiohttp/issues/3636. - # To avoid string manipulation we only call rstrip if - # the last character is a dot. - host = host.rstrip(".") - - # If explicit port is not None, it means that the port was - # explicitly specified in the URL. 
In this case we check - # if its not the default port for the scheme and add it to - # the host header. We check explicit_port first because - # yarl caches explicit_port and its likely to already be - # in the cache and non-default port URLs are far less common. - explicit_port = self.url.explicit_port - if explicit_port is not None and not self.url.is_default_port(): - host = f"{host}:{explicit_port}" - self.headers[hdrs.HOST] = host if not headers: From 125c7ed01ea99d57532da07cdfd9e5c7cb774099 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 28 Oct 2024 23:04:32 +0000 Subject: [PATCH 0780/1511] [PR #9577/a54dd98c backport][3.11] Cleanup changelog messages for 3.11 (#9579) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/3945.deprecation.rst | 2 +- CHANGES/6652.bugfix | 1 - CHANGES/6652.bugfix.rst | 1 + CHANGES/8612.feature.rst | 2 +- CHANGES/8966.feature.rst | 2 +- CHANGES/9207.feature.rst | 2 +- 6 files changed, 5 insertions(+), 5 deletions(-) delete mode 100644 CHANGES/6652.bugfix create mode 100644 CHANGES/6652.bugfix.rst diff --git a/CHANGES/3945.deprecation.rst b/CHANGES/3945.deprecation.rst index 07f8566881a..91c510c6d32 100644 --- a/CHANGES/3945.deprecation.rst +++ b/CHANGES/3945.deprecation.rst @@ -1 +1 @@ -Deprecate obsolete `timeout: float` and `receive_timeout: Optional[float]` in `ClientSession.ws_connect()`. Change default websocket receive timeout from `None` to `10.0`. +Deprecate obsolete `timeout: float` and `receive_timeout: Optional[float]` in :py:meth:`~aiohttp.ClientSession.ws_connect`. Change default websocket receive timeout from `None` to `10.0`. 
diff --git a/CHANGES/6652.bugfix b/CHANGES/6652.bugfix deleted file mode 100644 index 4ce1f678792..00000000000 --- a/CHANGES/6652.bugfix +++ /dev/null @@ -1 +0,0 @@ -Raise `aiohttp.ServerFingerprintMismatch` exception on client-side if request through http proxy with mismatching server fingerprint digest: `aiohttp.ClientSession(headers=headers, connector=TCPConnector(ssl=aiohttp.Fingerprint(mismatch_digest), trust_env=True).request(...)`. diff --git a/CHANGES/6652.bugfix.rst b/CHANGES/6652.bugfix.rst new file mode 100644 index 00000000000..972557b0d96 --- /dev/null +++ b/CHANGES/6652.bugfix.rst @@ -0,0 +1 @@ +Raise :exc:`aiohttp.ServerFingerprintMismatch` exception on client-side if request through http proxy with mismatching server fingerprint digest: `aiohttp.ClientSession(headers=headers, connector=TCPConnector(ssl=aiohttp.Fingerprint(mismatch_digest), trust_env=True).request(...)` -- by :user:`gangj`. diff --git a/CHANGES/8612.feature.rst b/CHANGES/8612.feature.rst index 96adcf6dc4c..51ede16ebe5 100644 --- a/CHANGES/8612.feature.rst +++ b/CHANGES/8612.feature.rst @@ -1 +1 @@ -Exported ``ClientWSTimeout`` to top-level namespace -- by :user:`Dreamsorcerer`. +Exported :py:class:`~aiohttp.ClientWSTimeout` to top-level namespace -- by :user:`Dreamsorcerer`. 
diff --git a/CHANGES/8966.feature.rst b/CHANGES/8966.feature.rst index ab1dc45b60e..68ec1323568 100644 --- a/CHANGES/8966.feature.rst +++ b/CHANGES/8966.feature.rst @@ -1 +1 @@ -Updated ClientSession's auth logic to include default auth only if the request URL's origin matches _base_url; otherwise, the auth will not be included -- by :user:`MaximZemskov` +Updated :py:class:`~aiohttp.ClientSession`'s auth logic to include default auth only if the request URL's origin matches _base_url; otherwise, the auth will not be included -- by :user:`MaximZemskov` diff --git a/CHANGES/9207.feature.rst b/CHANGES/9207.feature.rst index d9ac55c8520..cb54a1dd1f9 100644 --- a/CHANGES/9207.feature.rst +++ b/CHANGES/9207.feature.rst @@ -1 +1 @@ -Added ``proxy`` and ``proxy_auth`` parameters to ``ClientSession`` -- by :user:`meshya`. +Added ``proxy`` and ``proxy_auth`` parameters to :py:class:`~aiohttp.ClientSession` -- by :user:`meshya`. From f07c021de75fc55e541609bec476318d2b6416d3 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 28 Oct 2024 13:34:24 -1000 Subject: [PATCH 0781/1511] Release 3.11.0b0 (#9580) --- CHANGES.rst | 268 ++++++++++++++++++++++++++++++++++++++++++++ aiohttp/__init__.py | 2 +- 2 files changed, 269 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index 0245204fe5e..02b06136a1e 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,274 @@ .. towncrier release notes start +3.11.0b0 (2024-10-28) +===================== + +Bug fixes +--------- + +- Raise :exc:`aiohttp.ServerFingerprintMismatch` exception on client-side if request through http proxy with mismatching server fingerprint digest: `aiohttp.ClientSession(headers=headers, connector=TCPConnector(ssl=aiohttp.Fingerprint(mismatch_digest), trust_env=True).request(...)` -- by :user:`gangj`. + + + *Related issues and pull requests on GitHub:* + :issue:`6652`. 
+ + + +- Made ``TestClient.app`` a ``Generic`` so type checkers will know the correct type (avoiding unneeded ``client.app is not None`` checks) -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8977`. + + + +- Authentication provided by a redirect now takes precedence over provided ``auth`` when making requests with the client -- by :user:`PLPeeters`. + + + *Related issues and pull requests on GitHub:* + :issue:`9436`. + + + + +Features +-------- + +- Added ``strategy`` parameter to :meth:`aiohttp.web.StreamResponse.enable_compression` + The value of this parameter is passed to the :func:`zlib.compressobj` function, allowing people + to use a more sufficient compression algorithm for their data served by :mod:`aiohttp.web` + -- by :user:`shootkin` + + + *Related issues and pull requests on GitHub:* + :issue:`6257`. + + + +- Exported :py:class:`~aiohttp.ClientWSTimeout` to top-level namespace -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8612`. + + + +- Added ``secure``/``httponly``/``samesite`` parameters to ``.del_cookie()`` -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8956`. + + + +- Updated :py:class:`~aiohttp.ClientSession`'s auth logic to include default auth only if the request URL's origin matches _base_url; otherwise, the auth will not be included -- by :user:`MaximZemskov` + + + *Related issues and pull requests on GitHub:* + :issue:`8966`, :issue:`9466`. + + + +- Added ``proxy`` and ``proxy_auth`` parameters to :py:class:`~aiohttp.ClientSession` -- by :user:`meshya`. + + + *Related issues and pull requests on GitHub:* + :issue:`9207`. + + + +- Added ``default_to_multipart`` parameter to ``FormData``. + + + *Related issues and pull requests on GitHub:* + :issue:`9335`. 
+ + + +- Added :py:meth:`~aiohttp.ClientWebSocketResponse.send_frame` and :py:meth:`~aiohttp.web.WebSocketResponse.send_frame` for WebSockets -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9348`. + + + +- Improved performance of reading WebSocket messages with a Cython implementation -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9543`, :issue:`9554`, :issue:`9556`, :issue:`9558`. + + + +- Added ``writer_limit`` to the :py:class:`~aiohttp.web.WebSocketResponse` to be able to adjust the limit before the writer forces the buffer to be drained -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9572`. + + + + +Deprecations (removal in next major release) +-------------------------------------------- + +- Deprecate obsolete `timeout: float` and `receive_timeout: Optional[float]` in :py:meth:`~aiohttp.ClientSession.ws_connect`. Change default websocket receive timeout from `None` to `10.0`. + + + *Related issues and pull requests on GitHub:* + :issue:`3945`. + + + + +Removals and backward incompatible breaking changes +--------------------------------------------------- + +- Dropped support for Python 3.8 -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8797`. + + + +- Increased minimum yarl version to 1.17.0 -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8909`, :issue:`9079`, :issue:`9305`, :issue:`9574`. + + + +- Removed the ``is_ipv6_address`` and ``is_ip4_address`` helpers are they are no longer used -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9344`. + + + +- Changed ``ClientRequest.connection_key`` to be a `NamedTuple` to improve client performance -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9365`. 
+ + + + +Packaging updates and notes for downstreams +------------------------------------------- + +- Switched to using the :mod:`propcache <propcache.api>` package for property caching + -- by :user:`bdraco`. + + The :mod:`propcache <propcache.api>` package is derived from the property caching + code in :mod:`yarl` and has been broken out to avoid maintaining it for multiple + projects. + + + *Related issues and pull requests on GitHub:* + :issue:`9394`. + + + +- Separated ``aiohttp.http_websocket`` into multiple files to make it easier to maintain -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9542`, :issue:`9552`. + + + + +Contributor-facing changes +-------------------------- + +- Changed diagram images generator from ``blockdiag`` to ``GraphViz``. + Generating documentation now requires the GraphViz executable to be included in $PATH or sphinx build configuration. + + + *Related issues and pull requests on GitHub:* + :issue:`9359`. + + + + +Miscellaneous internal changes +------------------------------ + +- Added flake8 settings to avoid some forms of implicit concatenation. -- by :user:`booniepepper`. + + + *Related issues and pull requests on GitHub:* + :issue:`7731`. + + + +- Enabled keep-alive support on proxies (which was originally disabled several years ago) -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8920`. + + + +- Changed web entry point to not listen on TCP when only a Unix path is passed -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`9033`. + + + +- Disabled automatic retries of failed requests in :class:`aiohttp.test_utils.TestClient`'s client session + (which could potentially hide errors in tests) -- by :user:`ShubhAgarwal-dev`. + + + *Related issues and pull requests on GitHub:* + :issue:`9141`. 
+ + + +- Changed web ``keepalive_timeout`` default to around an hour in order to reduce race conditions on reverse proxies -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`9285`. + + + +- Reduced memory required for stream objects created during the client request lifecycle -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9407`. + + + +- Improved performance of the client request lifecycle when there are no cookies -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9470`. + + + +- Improved performance of sending client requests when the writer can finish synchronously -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9485`. + + + + +---- + + 3.10.10 (2024-10-10) ==================== diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index a08e5406900..83eabcc3acc 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.0.dev0" +__version__ = "3.11.0b0" from typing import TYPE_CHECKING, Tuple From 5f61c7bff5bd0560a169b611f5c4b7e4b1ee3360 Mon Sep 17 00:00:00 2001 From: Cycloctane <Cycloctane@outlook.com> Date: Tue, 29 Oct 2024 14:55:33 +0800 Subject: [PATCH 0782/1511] [PR #9530/f3a3f60 backport][3.11] Allow URLs with paths that end with / as base_url in ClientSession (#9583) --- CHANGES/9530.feature.rst | 2 ++ aiohttp/client.py | 8 ++++---- docs/client_reference.rst | 17 ++++++++++++++--- tests/test_client_session.py | 23 +++++++++++++++++++++++ 4 files changed, 43 insertions(+), 7 deletions(-) create mode 100644 CHANGES/9530.feature.rst diff --git a/CHANGES/9530.feature.rst b/CHANGES/9530.feature.rst new file mode 100644 index 00000000000..cc4e75a13ca --- /dev/null +++ b/CHANGES/9530.feature.rst @@ -0,0 +1,2 @@ +Updated :py:class:`~aiohttp.ClientSession` to support paths in ``base_url`` parameter. +``base_url`` paths must end with a ``/`` -- by :user:`Cycloctane`. 
If set, allows joining a base part to relative URLs in request calls.
versionadded:: 3.8 diff --git a/tests/test_client_session.py b/tests/test_client_session.py index aa5824283b2..3b442020898 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -1068,6 +1068,24 @@ async def test_requote_redirect_setter() -> None: URL("http://example.com/test"), id="base_url=URL('http://example.com') url='/test'", ), + pytest.param( + URL("http://example.com/test1/"), + "test2", + URL("http://example.com/test1/test2"), + id="base_url=URL('http://example.com/test1/') url='test2'", + ), + pytest.param( + URL("http://example.com/test1/"), + "/test2", + URL("http://example.com/test2"), + id="base_url=URL('http://example.com/test1/') url='/test2'", + ), + pytest.param( + URL("http://example.com/test1/"), + "test2?q=foo#bar", + URL("http://example.com/test1/test2?q=foo#bar"), + id="base_url=URL('http://example.com/test1/') url='test2?q=foo#bar'", + ), ], ) async def test_build_url_returns_expected_url( @@ -1077,6 +1095,11 @@ async def test_build_url_returns_expected_url( assert session._build_url(url) == expected_url +async def test_base_url_without_trailing_slash() -> None: + with pytest.raises(ValueError, match="base_url must have a trailing '/'"): + ClientSession(base_url="http://example.com/test") + + async def test_instantiation_with_invalid_timeout_value(loop): loop.set_debug(False) logs = [] From 6b327e116e0e24e9bbdcd85919053fd6dc57fd91 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 29 Oct 2024 20:34:26 +0000 Subject: [PATCH 0783/1511] [PR #9497/e23e036d backport][3.11] Add missing docs for timeout parameter in WebSocketResponse.__init__() (#9587) Co-authored-by: lenard-mosys <lenard@mo-sys.com> --- docs/web_reference.rst | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/docs/web_reference.rst b/docs/web_reference.rst index 0bb72bb3678..cffff89e01d 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -986,12 +986,18 @@ 
and :ref:`aiohttp-web-signals` handlers:: connection if `pong` response is not received. The timer is reset on any data reception. + :param float timeout: Timeout value for the ``close`` + operation. After sending the close websocket message, + ``close`` waits for ``timeout`` seconds for a response. + Default value is ``10.0`` (10 seconds for ``close`` + operation) + :param float receive_timeout: Timeout value for `receive` - operations. Default value is None + operations. Default value is :data:`None` (no timeout for receive operation) :param bool compress: Enable per-message deflate extension support. - False for disabled, default value is True. + :data:`False` for disabled, default value is :data:`True`. :param int max_msg_size: maximum size of read websocket message, 4 MB by default. To disable the size limit use ``0``. From 1820718c396ce8fcffa94ccbed5cd80b057cb430 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Tue, 29 Oct 2024 20:42:19 +0000 Subject: [PATCH 0784/1511] Merge HISTORY into CHANGES (#9581) (#9588) (cherry picked from commit 90060bc7f424e5d9f7c07b34e6b6b7e965c33725) --- CHANGES.rst | 2302 +++++++++++++++++++++++++++++++++++++++ HISTORY.rst | 2686 ---------------------------------------------- docs/changes.rst | 2 - 3 files changed, 2302 insertions(+), 2688 deletions(-) delete mode 100644 HISTORY.rst diff --git a/CHANGES.rst b/CHANGES.rst index 02b06136a1e..dc8b711eb42 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -3877,3 +3877,2305 @@ Improved Documentation - Add diagrams for tracing request life style. (`#2748 <https://github.com/aio-libs/aiohttp/pull/2748>`_) - Drop removed functionality for passing ``StreamReader`` as data at client side. 
17.0.3. The change allows avoiding version conflicts with currently
+  existing test tools.
Paths like + ``/static/D:\path`` and ``/static/\\hostname\drive\path`` are + forbidden. + + + +---- + +3.0.1 +===== + +- Technical release for fixing distribution problems. + + + +---- + +3.0.0 (2018-02-12) +================== + +Features +-------- + +- Speed up the `PayloadWriter.write` method for large request bodies. (`#2126 <https://github.com/aio-libs/aiohttp/pull/2126>`_) +- StreamResponse and Response are now MutableMappings. (`#2246 <https://github.com/aio-libs/aiohttp/pull/2246>`_) +- ClientSession publishes a set of signals to track the HTTP request execution. + (`#2313 <https://github.com/aio-libs/aiohttp/pull/2313>`_) +- Content-Disposition fast access in ClientResponse (`#2455 <https://github.com/aio-libs/aiohttp/pull/2455>`_) +- Added support to Flask-style decorators with class-based Views. (`#2472 <https://github.com/aio-libs/aiohttp/pull/2472>`_) +- Signal handlers (registered callbacks) should be coroutines. (`#2480 <https://github.com/aio-libs/aiohttp/pull/2480>`_) +- Support ``async with test_client.ws_connect(...)`` (`#2525 <https://github.com/aio-libs/aiohttp/pull/2525>`_) +- Introduce *site* and *application runner* as underlying API for `web.run_app` + implementation. (`#2530 <https://github.com/aio-libs/aiohttp/pull/2530>`_) +- Only quote multipart boundary when necessary and sanitize input (`#2544 <https://github.com/aio-libs/aiohttp/pull/2544>`_) +- Make the `aiohttp.ClientResponse.get_encoding` method public with the + processing of invalid charset while detecting content encoding. (`#2549 <https://github.com/aio-libs/aiohttp/pull/2549>`_) +- Add optional configurable per message compression for + `ClientWebSocketResponse` and `WebSocketResponse`. (`#2551 <https://github.com/aio-libs/aiohttp/pull/2551>`_) +- Add hysteresis to `StreamReader` to prevent flipping between paused and + resumed states too often. 
(`#2555 <https://github.com/aio-libs/aiohttp/pull/2555>`_) +- Support `.netrc` by `trust_env` (`#2581 <https://github.com/aio-libs/aiohttp/pull/2581>`_) +- Avoid to create a new resource when adding a route with the same name and + path of the last added resource (`#2586 <https://github.com/aio-libs/aiohttp/pull/2586>`_) +- `MultipartWriter.boundary` is `str` now. (`#2589 <https://github.com/aio-libs/aiohttp/pull/2589>`_) +- Allow a custom port to be used by `TestServer` (and associated pytest + fixtures) (`#2613 <https://github.com/aio-libs/aiohttp/pull/2613>`_) +- Add param access_log_class to web.run_app function (`#2615 <https://github.com/aio-libs/aiohttp/pull/2615>`_) +- Add ``ssl`` parameter to client API (`#2626 <https://github.com/aio-libs/aiohttp/pull/2626>`_) +- Fixes performance issue introduced by #2577. When there are no middlewares + installed by the user, no additional and useless code is executed. (`#2629 <https://github.com/aio-libs/aiohttp/pull/2629>`_) +- Rename PayloadWriter to StreamWriter (`#2654 <https://github.com/aio-libs/aiohttp/pull/2654>`_) +- New options *reuse_port*, *reuse_address* are added to `run_app` and + `TCPSite`. (`#2679 <https://github.com/aio-libs/aiohttp/pull/2679>`_) +- Use custom classes to pass client signals parameters (`#2686 <https://github.com/aio-libs/aiohttp/pull/2686>`_) +- Use ``attrs`` library for data classes, replace `namedtuple`. (`#2690 <https://github.com/aio-libs/aiohttp/pull/2690>`_) +- Pytest fixtures renaming, add ``aiohttp_`` prefix (`#2578 <https://github.com/aio-libs/aiohttp/pull/2578>`_) +- Add ``aiohttp-`` prefix for ``pytest-aiohttp`` command line + parameters (`#2578 <https://github.com/aio-libs/aiohttp/pull/2578>`_) + +Bugfixes +-------- + +- Correctly process upgrade request from server to HTTP2. ``aiohttp`` does not + support HTTP2 yet, the protocol is not upgraded but response is handled + correctly. 
(`#2277 <https://github.com/aio-libs/aiohttp/pull/2277>`_) +- Fix ClientConnectorSSLError and ClientProxyConnectionError for proxy + connector (`#2408 <https://github.com/aio-libs/aiohttp/pull/2408>`_) +- Fix connector convert OSError to ClientConnectorError (`#2423 <https://github.com/aio-libs/aiohttp/pull/2423>`_) +- Fix connection attempts for multiple dns hosts (`#2424 <https://github.com/aio-libs/aiohttp/pull/2424>`_) +- Fix writing to closed transport by raising `asyncio.CancelledError` (`#2499 <https://github.com/aio-libs/aiohttp/pull/2499>`_) +- Fix warning in `ClientSession.__del__` by stopping to try to close it. + (`#2523 <https://github.com/aio-libs/aiohttp/pull/2523>`_) +- Fixed race-condition for iterating addresses from the DNSCache. (`#2620 <https://github.com/aio-libs/aiohttp/pull/2620>`_) +- Fix default value of `access_log_format` argument in `web.run_app` (`#2649 <https://github.com/aio-libs/aiohttp/pull/2649>`_) +- Freeze sub-application on adding to parent app (`#2656 <https://github.com/aio-libs/aiohttp/pull/2656>`_) +- Do percent encoding for `.url_for()` parameters (`#2668 <https://github.com/aio-libs/aiohttp/pull/2668>`_) +- Correctly process request start time and multiple request/response + headers in access log extra (`#2641 <https://github.com/aio-libs/aiohttp/pull/2641>`_) + +Improved Documentation +---------------------- + +- Improve tutorial docs, using `literalinclude` to link to the actual files. + (`#2396 <https://github.com/aio-libs/aiohttp/pull/2396>`_) +- Small improvement docs: better example for file uploads. (`#2401 <https://github.com/aio-libs/aiohttp/pull/2401>`_) +- Rename `from_env` to `trust_env` in client reference. (`#2451 <https://github.com/aio-libs/aiohttp/pull/2451>`_) +- Fixed mistype in `Proxy Support` section where `trust_env` parameter was + used in `session.get("http://python.org", trust_env=True)` method instead of + aiohttp.ClientSession constructor as follows: + `aiohttp.ClientSession(trust_env=True)`. 
(`#2688 <https://github.com/aio-libs/aiohttp/pull/2688>`_) +- Fix issue with unittest example not compiling in testing docs. (`#2717 <https://github.com/aio-libs/aiohttp/pull/2717>`_) + +Deprecations and Removals +------------------------- + +- Simplify HTTP pipelining implementation (`#2109 <https://github.com/aio-libs/aiohttp/pull/2109>`_) +- Drop `StreamReaderPayload` and `DataQueuePayload`. (`#2257 <https://github.com/aio-libs/aiohttp/pull/2257>`_) +- Drop `md5` and `sha1` finger-prints (`#2267 <https://github.com/aio-libs/aiohttp/pull/2267>`_) +- Drop WSMessage.tp (`#2321 <https://github.com/aio-libs/aiohttp/pull/2321>`_) +- Drop Python 3.4 and Python 3.5.0, 3.5.1, 3.5.2. Minimal supported Python + versions are 3.5.3 and 3.6.0. `yield from` is gone, use `async/await` syntax. + (`#2343 <https://github.com/aio-libs/aiohttp/pull/2343>`_) +- Drop `aiohttp.Timeout` and use `async_timeout.timeout` instead. (`#2348 <https://github.com/aio-libs/aiohttp/pull/2348>`_) +- Drop `resolve` param from TCPConnector. (`#2377 <https://github.com/aio-libs/aiohttp/pull/2377>`_) +- Add DeprecationWarning for returning HTTPException (`#2415 <https://github.com/aio-libs/aiohttp/pull/2415>`_) +- `send_str()`, `send_bytes()`, `send_json()`, `ping()` and `pong()` are + genuine async functions now. (`#2475 <https://github.com/aio-libs/aiohttp/pull/2475>`_) +- Drop undocumented `app.on_pre_signal` and `app.on_post_signal`. Signal + handlers should be coroutines, support for regular functions is dropped. + (`#2480 <https://github.com/aio-libs/aiohttp/pull/2480>`_) +- `StreamResponse.drain()` is not a part of public API anymore, just use `await + StreamResponse.write()`. `StreamResponse.write` is converted to async + function. (`#2483 <https://github.com/aio-libs/aiohttp/pull/2483>`_) +- Drop deprecated `slow_request_timeout` param and `**kwargs`` from + `RequestHandler`. (`#2500 <https://github.com/aio-libs/aiohttp/pull/2500>`_) +- Drop deprecated `resource.url()`. 
(`#2501 <https://github.com/aio-libs/aiohttp/pull/2501>`_) +- Remove `%u` and `%l` format specifiers from access log format. (`#2506 <https://github.com/aio-libs/aiohttp/pull/2506>`_) +- Drop deprecated `request.GET` property. (`#2547 <https://github.com/aio-libs/aiohttp/pull/2547>`_) +- Simplify stream classes: drop `ChunksQueue` and `FlowControlChunksQueue`, + merge `FlowControlStreamReader` functionality into `StreamReader`, drop + `FlowControlStreamReader` name. (`#2555 <https://github.com/aio-libs/aiohttp/pull/2555>`_) +- Do not create a new resource on `router.add_get(..., allow_head=True)` + (`#2585 <https://github.com/aio-libs/aiohttp/pull/2585>`_) +- Drop access to TCP tuning options from PayloadWriter and Response classes + (`#2604 <https://github.com/aio-libs/aiohttp/pull/2604>`_) +- Drop deprecated `encoding` parameter from client API (`#2606 <https://github.com/aio-libs/aiohttp/pull/2606>`_) +- Deprecate ``verify_ssl``, ``ssl_context`` and ``fingerprint`` parameters in + client API (`#2626 <https://github.com/aio-libs/aiohttp/pull/2626>`_) +- Get rid of the legacy class StreamWriter. (`#2651 <https://github.com/aio-libs/aiohttp/pull/2651>`_) +- Forbid non-strings in `resource.url_for()` parameters. (`#2668 <https://github.com/aio-libs/aiohttp/pull/2668>`_) +- Deprecate inheritance from ``ClientSession`` and ``web.Application`` and + custom user attributes for ``ClientSession``, ``web.Request`` and + ``web.Application`` (`#2691 <https://github.com/aio-libs/aiohttp/pull/2691>`_) +- Drop `resp = await aiohttp.request(...)` syntax for sake of `async with + aiohttp.request(...) as resp:`. (`#2540 <https://github.com/aio-libs/aiohttp/pull/2540>`_) +- Forbid synchronous context managers for `ClientSession` and test + server/client. 
(`#2362 <https://github.com/aio-libs/aiohttp/pull/2362>`_) + + +Misc +---- + +- #2552 + + + +---- + + +2.3.10 (2018-02-02) +=================== + +- Fix 100% CPU usage on HTTP GET and websocket connection just after it (`#1955 <https://github.com/aio-libs/aiohttp/pull/1955>`_) +- Patch broken `ssl.match_hostname()` on Python<3.7 (`#2674 <https://github.com/aio-libs/aiohttp/pull/2674>`_) + + + +---- + +2.3.9 (2018-01-16) +================== + +- Fix colon handing in path for dynamic resources (`#2670 <https://github.com/aio-libs/aiohttp/pull/2670>`_) + + + +---- + +2.3.8 (2018-01-15) +================== + +- Do not use `yarl.unquote` internal function in aiohttp. Fix + incorrectly unquoted path part in URL dispatcher (`#2662 <https://github.com/aio-libs/aiohttp/pull/2662>`_) +- Fix compatibility with `yarl==1.0.0` (`#2662 <https://github.com/aio-libs/aiohttp/pull/2662>`_) + + + +---- + +2.3.7 (2017-12-27) +================== + +- Fixed race-condition for iterating addresses from the DNSCache. (`#2620 <https://github.com/aio-libs/aiohttp/pull/2620>`_) +- Fix docstring for request.host (`#2591 <https://github.com/aio-libs/aiohttp/pull/2591>`_) +- Fix docstring for request.remote (`#2592 <https://github.com/aio-libs/aiohttp/pull/2592>`_) + + + +---- + + +2.3.6 (2017-12-04) +================== + +- Correct `request.app` context (for handlers not just middlewares). (`#2577 <https://github.com/aio-libs/aiohttp/pull/2577>`_) + + + +---- + + +2.3.5 (2017-11-30) +================== + +- Fix compatibility with `pytest` 3.3+ (`#2565 <https://github.com/aio-libs/aiohttp/pull/2565>`_) + + + +---- + + +2.3.4 (2017-11-29) +================== + +- Make `request.app` point to proper application instance when using nested + applications (with middlewares). (`#2550 <https://github.com/aio-libs/aiohttp/pull/2550>`_) +- Change base class of ClientConnectorSSLError to ClientSSLError from + ClientConnectorError. 
(`#2563 <https://github.com/aio-libs/aiohttp/pull/2563>`_) +- Return client connection back to free pool on error in `connector.connect()`. + (`#2567 <https://github.com/aio-libs/aiohttp/pull/2567>`_) + + + +---- + + +2.3.3 (2017-11-17) +================== + +- Having a `;` in Response content type does not assume it contains a charset + anymore. (`#2197 <https://github.com/aio-libs/aiohttp/pull/2197>`_) +- Use `getattr(asyncio, 'async')` for keeping compatibility with Python 3.7. + (`#2476 <https://github.com/aio-libs/aiohttp/pull/2476>`_) +- Ignore `NotImplementedError` raised by `set_child_watcher` from `uvloop`. + (`#2491 <https://github.com/aio-libs/aiohttp/pull/2491>`_) +- Fix warning in `ClientSession.__del__` by stopping to try to close it. + (`#2523 <https://github.com/aio-libs/aiohttp/pull/2523>`_) +- Fixed typo's in Third-party libraries page. And added async-v20 to the list + (`#2510 <https://github.com/aio-libs/aiohttp/pull/2510>`_) + + + +---- + + +2.3.2 (2017-11-01) +================== + +- Fix passing client max size on cloning request obj. (`#2385 <https://github.com/aio-libs/aiohttp/pull/2385>`_) +- Fix ClientConnectorSSLError and ClientProxyConnectionError for proxy + connector. (`#2408 <https://github.com/aio-libs/aiohttp/pull/2408>`_) +- Drop generated `_http_parser` shared object from tarball distribution. (`#2414 <https://github.com/aio-libs/aiohttp/pull/2414>`_) +- Fix connector convert OSError to ClientConnectorError. (`#2423 <https://github.com/aio-libs/aiohttp/pull/2423>`_) +- Fix connection attempts for multiple dns hosts. (`#2424 <https://github.com/aio-libs/aiohttp/pull/2424>`_) +- Fix ValueError for AF_INET6 sockets if a preexisting INET6 socket to the + `aiohttp.web.run_app` function. (`#2431 <https://github.com/aio-libs/aiohttp/pull/2431>`_) +- `_SessionRequestContextManager` closes the session properly now. (`#2441 <https://github.com/aio-libs/aiohttp/pull/2441>`_) +- Rename `from_env` to `trust_env` in client reference. 
(`#2451 <https://github.com/aio-libs/aiohttp/pull/2451>`_) + + + +---- + + +2.3.1 (2017-10-18) +================== + +- Relax attribute lookup in warning about old-styled middleware (`#2340 <https://github.com/aio-libs/aiohttp/pull/2340>`_) + + + +---- + + +2.3.0 (2017-10-18) +================== + +Features +-------- + +- Add SSL related params to `ClientSession.request` (`#1128 <https://github.com/aio-libs/aiohttp/pull/1128>`_) +- Make enable_compression work on HTTP/1.0 (`#1828 <https://github.com/aio-libs/aiohttp/pull/1828>`_) +- Deprecate registering synchronous web handlers (`#1993 <https://github.com/aio-libs/aiohttp/pull/1993>`_) +- Switch to `multidict 3.0`. All HTTP headers preserve casing now but compared + in case-insensitive way. (`#1994 <https://github.com/aio-libs/aiohttp/pull/1994>`_) +- Improvement for `normalize_path_middleware`. Added possibility to handle URLs + with query string. (`#1995 <https://github.com/aio-libs/aiohttp/pull/1995>`_) +- Use towncrier for CHANGES.txt build (`#1997 <https://github.com/aio-libs/aiohttp/pull/1997>`_) +- Implement `trust_env=True` param in `ClientSession`. (`#1998 <https://github.com/aio-libs/aiohttp/pull/1998>`_) +- Added variable to customize proxy headers (`#2001 <https://github.com/aio-libs/aiohttp/pull/2001>`_) +- Implement `router.add_routes` and router decorators. 
(`#2004 <https://github.com/aio-libs/aiohttp/pull/2004>`_) +- Deprecated `BaseRequest.has_body` in favor of + `BaseRequest.can_read_body` Added `BaseRequest.body_exists` + attribute that stays static for the lifetime of the request (`#2005 <https://github.com/aio-libs/aiohttp/pull/2005>`_) +- Provide `BaseRequest.loop` attribute (`#2024 <https://github.com/aio-libs/aiohttp/pull/2024>`_) +- Make `_CoroGuard` awaitable and fix `ClientSession.close` warning message + (`#2026 <https://github.com/aio-libs/aiohttp/pull/2026>`_) +- Responses to redirects without Location header are returned instead of + raising a RuntimeError (`#2030 <https://github.com/aio-libs/aiohttp/pull/2030>`_) +- Added `get_client`, `get_server`, `setUpAsync` and `tearDownAsync` methods to + AioHTTPTestCase (`#2032 <https://github.com/aio-libs/aiohttp/pull/2032>`_) +- Add automatically a SafeChildWatcher to the test loop (`#2058 <https://github.com/aio-libs/aiohttp/pull/2058>`_) +- add ability to disable automatic response decompression (`#2110 <https://github.com/aio-libs/aiohttp/pull/2110>`_) +- Add support for throttling DNS request, avoiding the requests saturation when + there is a miss in the DNS cache and many requests getting into the connector + at the same time. (`#2111 <https://github.com/aio-libs/aiohttp/pull/2111>`_) +- Use request for getting access log information instead of message/transport + pair. Add `RequestBase.remote` property for accessing to IP of client + initiated HTTP request. (`#2123 <https://github.com/aio-libs/aiohttp/pull/2123>`_) +- json() raises a ContentTypeError exception if the content-type does not meet + the requirements instead of raising a generic ClientResponseError. (`#2136 <https://github.com/aio-libs/aiohttp/pull/2136>`_) +- Make the HTTP client able to return HTTP chunks when chunked transfer + encoding is used. 
(`#2150 <https://github.com/aio-libs/aiohttp/pull/2150>`_) +- add `append_version` arg into `StaticResource.url` and + `StaticResource.url_for` methods for getting an url with hash (version) of + the file. (`#2157 <https://github.com/aio-libs/aiohttp/pull/2157>`_) +- Fix parsing the Forwarded header. * commas and semicolons are allowed inside + quoted-strings; * empty forwarded-pairs (as in for=_1;;by=_2) are allowed; * + non-standard parameters are allowed (although this alone could be easily done + in the previous parser). (`#2173 <https://github.com/aio-libs/aiohttp/pull/2173>`_) +- Don't require ssl module to run. aiohttp does not require SSL to function. + The code paths involved with SSL will only be hit upon SSL usage. Raise + `RuntimeError` if HTTPS protocol is required but ssl module is not present. + (`#2221 <https://github.com/aio-libs/aiohttp/pull/2221>`_) +- Accept coroutine fixtures in pytest plugin (`#2223 <https://github.com/aio-libs/aiohttp/pull/2223>`_) +- Call `shutdown_asyncgens` before event loop closing on Python 3.6. (`#2227 <https://github.com/aio-libs/aiohttp/pull/2227>`_) +- Speed up Signals when there are no receivers (`#2229 <https://github.com/aio-libs/aiohttp/pull/2229>`_) +- Raise `InvalidURL` instead of `ValueError` on fetches with invalid URL. + (`#2241 <https://github.com/aio-libs/aiohttp/pull/2241>`_) +- Move `DummyCookieJar` into `cookiejar.py` (`#2242 <https://github.com/aio-libs/aiohttp/pull/2242>`_) +- `run_app`: Make `print=None` disable printing (`#2260 <https://github.com/aio-libs/aiohttp/pull/2260>`_) +- Support `brotli` encoding (generic-purpose lossless compression algorithm) + (`#2270 <https://github.com/aio-libs/aiohttp/pull/2270>`_) +- Add server support for WebSockets Per-Message Deflate. Add client option to + add deflate compress header in WebSockets request header. If calling + ClientSession.ws_connect() with `compress=15` the client will support deflate + compress negotiation. 
(`#2273 <https://github.com/aio-libs/aiohttp/pull/2273>`_) +- Support `verify_ssl`, `fingerprint`, `ssl_context` and `proxy_headers` by + `client.ws_connect`. (`#2292 <https://github.com/aio-libs/aiohttp/pull/2292>`_) +- Added `aiohttp.ClientConnectorSSLError` when connection fails due + `ssl.SSLError` (`#2294 <https://github.com/aio-libs/aiohttp/pull/2294>`_) +- `aiohttp.web.Application.make_handler` support `access_log_class` (`#2315 <https://github.com/aio-libs/aiohttp/pull/2315>`_) +- Build HTTP parser extension in non-strict mode by default. (`#2332 <https://github.com/aio-libs/aiohttp/pull/2332>`_) + + +Bugfixes +-------- + +- Clear auth information on redirecting to other domain (`#1699 <https://github.com/aio-libs/aiohttp/pull/1699>`_) +- Fix missing app.loop on startup hooks during tests (`#2060 <https://github.com/aio-libs/aiohttp/pull/2060>`_) +- Fix issue with synchronous session closing when using `ClientSession` as an + asynchronous context manager. (`#2063 <https://github.com/aio-libs/aiohttp/pull/2063>`_) +- Fix issue with `CookieJar` incorrectly expiring cookies in some edge cases. + (`#2084 <https://github.com/aio-libs/aiohttp/pull/2084>`_) +- Force use of IPv4 during test, this will make tests run in a Docker container + (`#2104 <https://github.com/aio-libs/aiohttp/pull/2104>`_) +- Warnings about unawaited coroutines now correctly point to the user's code. + (`#2106 <https://github.com/aio-libs/aiohttp/pull/2106>`_) +- Fix issue with `IndexError` being raised by the `StreamReader.iter_chunks()` + generator. (`#2112 <https://github.com/aio-libs/aiohttp/pull/2112>`_) +- Support HTTP 308 Permanent redirect in client class. (`#2114 <https://github.com/aio-libs/aiohttp/pull/2114>`_) +- Fix `FileResponse` sending empty chunked body on 304. (`#2143 <https://github.com/aio-libs/aiohttp/pull/2143>`_) +- Do not add `Content-Length: 0` to GET/HEAD/TRACE/OPTIONS requests by default. 
+ (`#2167 <https://github.com/aio-libs/aiohttp/pull/2167>`_) +- Fix parsing the Forwarded header according to RFC 7239. (`#2170 <https://github.com/aio-libs/aiohttp/pull/2170>`_) +- Securely determining remote/scheme/host #2171 (`#2171 <https://github.com/aio-libs/aiohttp/pull/2171>`_) +- Fix header name parsing, if name is split into multiple lines (`#2183 <https://github.com/aio-libs/aiohttp/pull/2183>`_) +- Handle session close during connection, `KeyError: + <aiohttp.connector._TransportPlaceholder>` (`#2193 <https://github.com/aio-libs/aiohttp/pull/2193>`_) +- Fixes uncaught `TypeError` in `helpers.guess_filename` if `name` is not a + string (`#2201 <https://github.com/aio-libs/aiohttp/pull/2201>`_) +- Raise OSError on async DNS lookup if resolved domain is an alias for another + one, which does not have an A or CNAME record. (`#2231 <https://github.com/aio-libs/aiohttp/pull/2231>`_) +- Fix incorrect warning in `StreamReader`. (`#2251 <https://github.com/aio-libs/aiohttp/pull/2251>`_) +- Properly clone state of web request (`#2284 <https://github.com/aio-libs/aiohttp/pull/2284>`_) +- Fix C HTTP parser for cases when status line is split into different TCP + packets. (`#2311 <https://github.com/aio-libs/aiohttp/pull/2311>`_) +- Fix `web.FileResponse` overriding user supplied Content-Type (`#2317 <https://github.com/aio-libs/aiohttp/pull/2317>`_) + + +Improved Documentation +---------------------- + +- Add a note about possible performance degradation in `await resp.text()` if + charset was not provided by `Content-Type` HTTP header. Pass explicit + encoding to solve it. (`#1811 <https://github.com/aio-libs/aiohttp/pull/1811>`_) +- Drop `disqus` widget from documentation pages. (`#2018 <https://github.com/aio-libs/aiohttp/pull/2018>`_) +- Add a graceful shutdown section to the client usage documentation. (`#2039 <https://github.com/aio-libs/aiohttp/pull/2039>`_) +- Document `connector_owner` parameter. 
(`#2072 <https://github.com/aio-libs/aiohttp/pull/2072>`_) +- Update the doc of web.Application (`#2081 <https://github.com/aio-libs/aiohttp/pull/2081>`_) +- Fix mistake about access log disabling. (`#2085 <https://github.com/aio-libs/aiohttp/pull/2085>`_) +- Add example usage of on_startup and on_shutdown signals by creating and + disposing an aiopg connection engine. (`#2131 <https://github.com/aio-libs/aiohttp/pull/2131>`_) +- Document `encoded=True` for `yarl.URL`, it disables all yarl transformations. + (`#2198 <https://github.com/aio-libs/aiohttp/pull/2198>`_) +- Document that all app's middleware factories are run for every request. + (`#2225 <https://github.com/aio-libs/aiohttp/pull/2225>`_) +- Reflect the fact that default resolver is threaded one starting from aiohttp + 1.1 (`#2228 <https://github.com/aio-libs/aiohttp/pull/2228>`_) + + +Deprecations and Removals +------------------------- + +- Drop deprecated `Server.finish_connections` (`#2006 <https://github.com/aio-libs/aiohttp/pull/2006>`_) +- Drop %O format from logging, use %b instead. Drop %e format from logging, + environment variables are not supported anymore. (`#2123 <https://github.com/aio-libs/aiohttp/pull/2123>`_) +- Drop deprecated secure_proxy_ssl_header support (`#2171 <https://github.com/aio-libs/aiohttp/pull/2171>`_) +- Removed TimeService in favor of simple caching. TimeService also had a bug + where it lost about 0.5 seconds per second. 
(`#2176 <https://github.com/aio-libs/aiohttp/pull/2176>`_) +- Drop unused response_factory from static files API (`#2290 <https://github.com/aio-libs/aiohttp/pull/2290>`_) + + +Misc +---- + +- #2013, #2014, #2048, #2094, #2149, #2187, #2214, #2225, #2243, #2248 + + + +---- + + +2.2.5 (2017-08-03) +================== + +- Don't raise deprecation warning on + `loop.run_until_complete(client.close())` (`#2065 <https://github.com/aio-libs/aiohttp/pull/2065>`_) + + + +---- + +2.2.4 (2017-08-02) +================== + +- Fix issue with synchronous session closing when using ClientSession + as an asynchronous context manager. (`#2063 <https://github.com/aio-libs/aiohttp/pull/2063>`_) + + + +---- + +2.2.3 (2017-07-04) +================== + +- Fix `_CoroGuard` for python 3.4 + + + +---- + +2.2.2 (2017-07-03) +================== + +- Allow `await session.close()` along with `yield from session.close()` + + + +---- + + +2.2.1 (2017-07-02) +================== + +- Relax `yarl` requirement to 0.11+ +- Backport #2026: `session.close` *is* a coroutine (`#2029 <https://github.com/aio-libs/aiohttp/pull/2029>`_) + + + +---- + + +2.2.0 (2017-06-20) +================== + +- Add doc for add_head, update doc for add_get. (`#1944 <https://github.com/aio-libs/aiohttp/pull/1944>`_) +- Fixed consecutive calls for `Response.write_eof`. +- Retain method attributes (e.g. :code:`__doc__`) when registering synchronous + handlers for resources. (`#1953 <https://github.com/aio-libs/aiohttp/pull/1953>`_) +- Added signal TERM handling in `run_app` to gracefully exit (`#1932 <https://github.com/aio-libs/aiohttp/pull/1932>`_) +- Fix websocket issues caused by frame fragmentation. 
(`#1962 <https://github.com/aio-libs/aiohttp/pull/1962>`_) +- Raise RuntimeError if you try to set the Content Length and enable + chunked encoding at the same time (`#1941 <https://github.com/aio-libs/aiohttp/pull/1941>`_) +- Small update for `unittest_run_loop` +- Use CIMultiDict for ClientRequest.skip_auto_headers (`#1970 <https://github.com/aio-libs/aiohttp/pull/1970>`_) +- Fix wrong startup sequence: test server and `run_app()` do not raise + `DeprecationWarning` now (`#1947 <https://github.com/aio-libs/aiohttp/pull/1947>`_) +- Make sure cleanup signal is sent if startup signal has been sent (`#1959 <https://github.com/aio-libs/aiohttp/pull/1959>`_) +- Fixed server keep-alive handler, could cause 100% cpu utilization (`#1955 <https://github.com/aio-libs/aiohttp/pull/1955>`_) +- Connection can be destroyed before response gets processed if + `await aiohttp.request(..)` is used (`#1981 <https://github.com/aio-libs/aiohttp/pull/1981>`_) +- MultipartReader does not work with -OO (`#1969 <https://github.com/aio-libs/aiohttp/pull/1969>`_) +- Fixed `ClientPayloadError` with blank `Content-Encoding` header (`#1931 <https://github.com/aio-libs/aiohttp/pull/1931>`_) +- Support `deflate` encoding implemented in `httpbin.org/deflate` (`#1918 <https://github.com/aio-libs/aiohttp/pull/1918>`_) +- Fix BadStatusLine caused by extra `CRLF` after `POST` data (`#1792 <https://github.com/aio-libs/aiohttp/pull/1792>`_) +- Keep a reference to `ClientSession` in response object (`#1985 <https://github.com/aio-libs/aiohttp/pull/1985>`_) +- Deprecate undocumented `app.on_loop_available` signal (`#1978 <https://github.com/aio-libs/aiohttp/pull/1978>`_)
(`#1883 <https://github.com/aio-libs/aiohttp/pull/1883>`_) +- Only call `loop.close` in `run_app` if the user did *not* supply a loop. + Useful for allowing clients to specify their own cleanup before closing the + asyncio loop if they wish to tightly control loop behavior +- Content disposition with semicolon in filename (`#917 <https://github.com/aio-libs/aiohttp/pull/917>`_) +- Added `request_info` to response object and `ClientResponseError`. (`#1733 <https://github.com/aio-libs/aiohttp/pull/1733>`_) +- Added `history` to `ClientResponseError`. (`#1741 <https://github.com/aio-libs/aiohttp/pull/1741>`_) +- Allow to disable redirect url re-quoting (`#1474 <https://github.com/aio-libs/aiohttp/pull/1474>`_) +- Handle RuntimeError from transport (`#1790 <https://github.com/aio-libs/aiohttp/pull/1790>`_) +- Dropped "%O" in access logger (`#1673 <https://github.com/aio-libs/aiohttp/pull/1673>`_) +- Added `args` and `kwargs` to `unittest_run_loop`. Useful with other + decorators, for example `@patch`. (`#1803 <https://github.com/aio-libs/aiohttp/pull/1803>`_) +- Added `iter_chunks` to response.content object. (`#1805 <https://github.com/aio-libs/aiohttp/pull/1805>`_) +- Avoid creating TimerContext when there is no timeout to allow + compatibility with Tornado. (`#1817 <https://github.com/aio-libs/aiohttp/pull/1817>`_) (`#1180 <https://github.com/aio-libs/aiohttp/pull/1180>`_) +- Add `proxy_from_env` to `ClientRequest` to read from environment + variables. (`#1791 <https://github.com/aio-libs/aiohttp/pull/1791>`_) +- Add DummyCookieJar helper. (`#1830 <https://github.com/aio-libs/aiohttp/pull/1830>`_) +- Fix assertion errors in Python 3.4 from noop helper. (`#1847 <https://github.com/aio-libs/aiohttp/pull/1847>`_) +- Do not unquote `+` in match_info values (`#1816 <https://github.com/aio-libs/aiohttp/pull/1816>`_) +- Use Forwarded, X-Forwarded-Scheme and X-Forwarded-Host for better scheme and + host resolution. 
(`#1134 <https://github.com/aio-libs/aiohttp/pull/1134>`_) +- Fix sub-application middlewares resolution order (`#1853 <https://github.com/aio-libs/aiohttp/pull/1853>`_) +- Fix applications comparison (`#1866 <https://github.com/aio-libs/aiohttp/pull/1866>`_) +- Fix static location in index when prefix is used (`#1662 <https://github.com/aio-libs/aiohttp/pull/1662>`_) +- Make test server more reliable (`#1896 <https://github.com/aio-libs/aiohttp/pull/1896>`_) +- Extend list of web exceptions, add HTTPUnprocessableEntity, + HTTPFailedDependency, HTTPInsufficientStorage status codes (`#1920 <https://github.com/aio-libs/aiohttp/pull/1920>`_) + + + +---- + + +2.0.7 (2017-04-12) +================== + +- Fix *pypi* distribution +- Fix exception description (`#1807 <https://github.com/aio-libs/aiohttp/pull/1807>`_) +- Handle socket error in FileResponse (`#1773 <https://github.com/aio-libs/aiohttp/pull/1773>`_) +- Cancel websocket heartbeat on close (`#1793 <https://github.com/aio-libs/aiohttp/pull/1793>`_) + + + +---- + + +2.0.6 (2017-04-04) +================== + +- Keeping blank values for `request.post()` and `multipart.form()` (`#1765 <https://github.com/aio-libs/aiohttp/pull/1765>`_) +- TypeError in data_received of ResponseHandler (`#1770 <https://github.com/aio-libs/aiohttp/pull/1770>`_) +- Fix ``web.run_app`` not to bind to default host-port pair if only socket is + passed (`#1786 <https://github.com/aio-libs/aiohttp/pull/1786>`_) + + + +---- + + +2.0.5 (2017-03-29) +================== + +- Memory leak with aiohttp.request (`#1756 <https://github.com/aio-libs/aiohttp/pull/1756>`_) +- Disable cleanup closed ssl transports by default. 
+- Exception in request handling if the server responds before the body + is sent (`#1761 <https://github.com/aio-libs/aiohttp/pull/1761>`_) + + + +---- + + +2.0.4 (2017-03-27) +================== + +- Memory leak with aiohttp.request (`#1756 <https://github.com/aio-libs/aiohttp/pull/1756>`_) +- Encoding is always UTF-8 in POST data (`#1750 <https://github.com/aio-libs/aiohttp/pull/1750>`_) +- Do not add "Content-Disposition" header by default (`#1755 <https://github.com/aio-libs/aiohttp/pull/1755>`_) + + + +---- + + +2.0.3 (2017-03-24) +================== + +- Call https website through proxy will cause error (`#1745 <https://github.com/aio-libs/aiohttp/pull/1745>`_) +- Fix exception on multipart/form-data post if content-type is not set (`#1743 <https://github.com/aio-libs/aiohttp/pull/1743>`_) + + + +---- + + +2.0.2 (2017-03-21) +================== + +- Fixed Application.on_loop_available signal (`#1739 <https://github.com/aio-libs/aiohttp/pull/1739>`_) +- Remove debug code + + + +---- + + +2.0.1 (2017-03-21) +================== + +- Fix allow-head to include name on route (`#1737 <https://github.com/aio-libs/aiohttp/pull/1737>`_) +- Fixed AttributeError in WebSocketResponse.can_prepare (`#1736 <https://github.com/aio-libs/aiohttp/pull/1736>`_) + + + +---- + + +2.0.0 (2017-03-20) +================== + +- Added `json` to `ClientSession.request()` method (`#1726 <https://github.com/aio-libs/aiohttp/pull/1726>`_) +- Added session's `raise_for_status` parameter, automatically calls + raise_for_status() on any request. (`#1724 <https://github.com/aio-libs/aiohttp/pull/1724>`_) +- `response.json()` raises `ClientResponseError` exception if response's + content type does not match (`#1723 <https://github.com/aio-libs/aiohttp/pull/1723>`_) + - Cleanup timer and loop handle on any client exception. 
+- Deprecate `loop` parameter for Application's constructor +- Properly handle payload errors (`#1710 <https://github.com/aio-libs/aiohttp/pull/1710>`_) +- Added `ClientWebSocketResponse.get_extra_info()` (`#1717 <https://github.com/aio-libs/aiohttp/pull/1717>`_) +- It is not possible to combine Transfer-Encoding and chunked parameter, + same for compress and Content-Encoding (`#1655 <https://github.com/aio-libs/aiohttp/pull/1655>`_) +- Connector's `limit` parameter indicates total concurrent connections. + New `limit_per_host` added, indicates total connections per endpoint. (`#1601 <https://github.com/aio-libs/aiohttp/pull/1601>`_) +- Use url's `raw_host` for name resolution (`#1685 <https://github.com/aio-libs/aiohttp/pull/1685>`_) +- Change `ClientResponse.url` to `yarl.URL` instance (`#1654 <https://github.com/aio-libs/aiohttp/pull/1654>`_) +- Add max_size parameter to web.Request reading methods (`#1133 <https://github.com/aio-libs/aiohttp/pull/1133>`_) +- Web Request.post() stores data in temp files (`#1469 <https://github.com/aio-libs/aiohttp/pull/1469>`_) +- Add the `allow_head=True` keyword argument for `add_get` (`#1618 <https://github.com/aio-libs/aiohttp/pull/1618>`_) +- `run_app` and the Command Line Interface now support serving over + Unix domain sockets for faster inter-process communication. +- `run_app` now supports passing a preexisting socket object. This can be useful + e.g. for socket-based activated applications, when binding of a socket is + done by the parent process. 
+- Implementation for Trailer headers parser is broken (`#1619 <https://github.com/aio-libs/aiohttp/pull/1619>`_) +- Fix FileResponse to not fail on bad request (range out of file size) +- Fix FileResponse to correctly stream video to Chrome +- Deprecate public low-level api (`#1657 <https://github.com/aio-libs/aiohttp/pull/1657>`_) +- Deprecate `encoding` parameter for ClientSession.request() method +- Dropped aiohttp.wsgi (`#1108 <https://github.com/aio-libs/aiohttp/pull/1108>`_) +- Dropped `version` from ClientSession.request() method +- Dropped websocket version 76 support (`#1160 <https://github.com/aio-libs/aiohttp/pull/1160>`_) +- Dropped: `aiohttp.protocol.HttpPrefixParser` (`#1590 <https://github.com/aio-libs/aiohttp/pull/1590>`_) +- Dropped: Servers response's `.started`, `.start()` and + `.can_start()` method (`#1591 <https://github.com/aio-libs/aiohttp/pull/1591>`_) +- Dropped: Adding `sub app` via `app.router.add_subapp()` is deprecated + use `app.add_subapp()` instead (`#1592 <https://github.com/aio-libs/aiohttp/pull/1592>`_) +- Dropped: `Application.finish()` and `Application.register_on_finish()` (`#1602 <https://github.com/aio-libs/aiohttp/pull/1602>`_) +- Dropped: `web.Request.GET` and `web.Request.POST` +- Dropped: aiohttp.get(), aiohttp.options(), aiohttp.head(), + aiohttp.post(), aiohttp.put(), aiohttp.patch(), aiohttp.delete(), and + aiohttp.ws_connect() (`#1593 <https://github.com/aio-libs/aiohttp/pull/1593>`_) +- Dropped: `aiohttp.web.WebSocketResponse.receive_msg()` (`#1605 <https://github.com/aio-libs/aiohttp/pull/1605>`_) +- Dropped: `ServerHttpProtocol.keep_alive_timeout` attribute and + `keep-alive`, `keep_alive_on`, `timeout`, `log` constructor parameters (`#1606 <https://github.com/aio-libs/aiohttp/pull/1606>`_) +- Dropped: `TCPConnector`'s `.resolve`, `.resolved_hosts`, + `.clear_resolved_hosts()` attributes and `resolve` constructor + parameter (`#1607 <https://github.com/aio-libs/aiohttp/pull/1607>`_) +- Dropped `ProxyConnector` 
(`#1609 <https://github.com/aio-libs/aiohttp/pull/1609>`_) + + + +---- + + +1.3.5 (2017-03-16) +================== + +- Fixed None timeout support (`#1720 <https://github.com/aio-libs/aiohttp/pull/1720>`_) + + + +---- + + +1.3.4 (2017-03-14) +================== + +- Revert timeout handling in client request +- Fix StreamResponse representation after eof +- Fix file_sender to not fail on bad request (range out of file size) +- Fix file_sender to correctly stream video to Chrome +- Fix NotImplementedError server exception (`#1703 <https://github.com/aio-libs/aiohttp/pull/1703>`_) +- Clearer error message for URL without a host name. (`#1691 <https://github.com/aio-libs/aiohttp/pull/1691>`_) +- Silence deprecation warning in __repr__ (`#1690 <https://github.com/aio-libs/aiohttp/pull/1690>`_) +- IDN + HTTPS = `ssl.CertificateError` (`#1685 <https://github.com/aio-libs/aiohttp/pull/1685>`_) + + + +---- + + +1.3.3 (2017-02-19) +================== + +- Fixed memory leak in time service (`#1656 <https://github.com/aio-libs/aiohttp/pull/1656>`_) + + + +---- + + +1.3.2 (2017-02-16) +================== + +- Awaiting on WebSocketResponse.send_* does not work (`#1645 <https://github.com/aio-libs/aiohttp/pull/1645>`_) +- Fix multiple calls to client ws_connect when using a shared header + dict (`#1643 <https://github.com/aio-libs/aiohttp/pull/1643>`_) +- Make CookieJar.filter_cookies() accept plain string parameter. 
(`#1636 <https://github.com/aio-libs/aiohttp/pull/1636>`_) + + + +---- + + +1.3.1 (2017-02-09) +================== + +- Handle CLOSING in WebSocketResponse.__anext__ +- Fixed AttributeError 'drain' for server websocket handler (`#1613 <https://github.com/aio-libs/aiohttp/pull/1613>`_) + + + +---- + + +1.3.0 (2017-02-08) +================== + +- Multipart writer validates the data on append instead of on a + request send (`#920 <https://github.com/aio-libs/aiohttp/pull/920>`_) +- Multipart reader accepts multipart messages with or without their epilogue + to consistently handle valid and legacy behaviors (`#1526 <https://github.com/aio-libs/aiohttp/pull/1526>`_) (`#1581 <https://github.com/aio-libs/aiohttp/pull/1581>`_) +- Separate read + connect + request timeouts # 1523 +- Do not swallow Upgrade header (`#1587 <https://github.com/aio-libs/aiohttp/pull/1587>`_) +- Fix polls demo run application (`#1487 <https://github.com/aio-libs/aiohttp/pull/1487>`_) +- Ignore unknown 1XX status codes in client (`#1353 <https://github.com/aio-libs/aiohttp/pull/1353>`_) +- Fix sub-Multipart messages missing their headers on serialization (`#1525 <https://github.com/aio-libs/aiohttp/pull/1525>`_) +- Do not use readline when reading the content of a part + in the multipart reader (`#1535 <https://github.com/aio-libs/aiohttp/pull/1535>`_) +- Add optional flag for quoting `FormData` fields (`#916 <https://github.com/aio-libs/aiohttp/pull/916>`_) +- 416 Range Not Satisfiable if requested range end > file size (`#1588 <https://github.com/aio-libs/aiohttp/pull/1588>`_) +- Having a `:` or `@` in a route does not work (`#1552 <https://github.com/aio-libs/aiohttp/pull/1552>`_) +- Added `receive_timeout` timeout for websocket to receive complete + message. (`#1325 <https://github.com/aio-libs/aiohttp/pull/1325>`_) +- Added `heartbeat` parameter for websocket to automatically send + `ping` message. 
(`#1024 <https://github.com/aio-libs/aiohttp/pull/1024>`_) (`#777 <https://github.com/aio-libs/aiohttp/pull/777>`_) +- Remove `web.Application` dependency from `web.UrlDispatcher` (`#1510 <https://github.com/aio-libs/aiohttp/pull/1510>`_) +- Accepting back-pressure from slow websocket clients (`#1367 <https://github.com/aio-libs/aiohttp/pull/1367>`_) +- Do not pause transport during set_parser stage (`#1211 <https://github.com/aio-libs/aiohttp/pull/1211>`_) +- Lingering close does not terminate before timeout (`#1559 <https://github.com/aio-libs/aiohttp/pull/1559>`_) +- `setsockopt` may raise `OSError` exception if socket is closed already (`#1595 <https://github.com/aio-libs/aiohttp/pull/1595>`_) +- Lots of CancelledError when requests are interrupted (`#1565 <https://github.com/aio-libs/aiohttp/pull/1565>`_) +- Allow users to specify what should happen to decoding errors + when calling a response's `text()` method (`#1542 <https://github.com/aio-libs/aiohttp/pull/1542>`_) +- Back port std module `http.cookies` for python3.4.2 (`#1566 <https://github.com/aio-libs/aiohttp/pull/1566>`_) +- Maintain url's fragment in client response (`#1314 <https://github.com/aio-libs/aiohttp/pull/1314>`_) +- Allow concurrently closing WebSocket connection (`#754 <https://github.com/aio-libs/aiohttp/pull/754>`_) +- Gzipped responses with empty body raise ContentEncodingError (`#609 <https://github.com/aio-libs/aiohttp/pull/609>`_) +- Return 504 if request handler raises TimeoutError. +- Refactor how we use keep-alive and close lingering timeouts. +- Close response connection if we can not consume whole http + message during client response release +- Abort closed ssl client transports, broken servers can keep socket + open for unlimited time (`#1568 <https://github.com/aio-libs/aiohttp/pull/1568>`_) +- Log warning instead of `RuntimeError` if websocket connection is closed. 
+- Deprecated: `aiohttp.protocol.HttpPrefixParser` + will be removed in 1.4 (`#1590 <https://github.com/aio-libs/aiohttp/pull/1590>`_) +- Deprecated: Servers response's `.started`, `.start()` and + `.can_start()` method will be removed in 1.4 (`#1591 <https://github.com/aio-libs/aiohttp/pull/1591>`_) +- Deprecated: Adding `sub app` via `app.router.add_subapp()` is deprecated + use `app.add_subapp()` instead, will be removed in 1.4 (`#1592 <https://github.com/aio-libs/aiohttp/pull/1592>`_) +- Deprecated: aiohttp.get(), aiohttp.options(), aiohttp.head(), aiohttp.post(), + aiohttp.put(), aiohttp.patch(), aiohttp.delete(), and aiohttp.ws_connect() + will be removed in 1.4 (`#1593 <https://github.com/aio-libs/aiohttp/pull/1593>`_) +- Deprecated: `Application.finish()` and `Application.register_on_finish()` + will be removed in 1.4 (`#1602 <https://github.com/aio-libs/aiohttp/pull/1602>`_) + + + +---- + + +1.2.0 (2016-12-17) +================== + +- Extract `BaseRequest` from `web.Request`, introduce `web.Server` + (former `RequestHandlerFactory`), introduce new low-level web server + which is not coupled with `web.Application` and routing (`#1362 <https://github.com/aio-libs/aiohttp/pull/1362>`_) +- Make `TestServer.make_url` compatible with `yarl.URL` (`#1389 <https://github.com/aio-libs/aiohttp/pull/1389>`_) +- Implement range requests for static files (`#1382 <https://github.com/aio-libs/aiohttp/pull/1382>`_) +- Support task attribute for StreamResponse (`#1410 <https://github.com/aio-libs/aiohttp/pull/1410>`_) +- Drop `TestClient.app` property, use `TestClient.server.app` instead + (BACKWARD INCOMPATIBLE) +- Drop `TestClient.handler` property, use `TestClient.server.handler` instead + (BACKWARD INCOMPATIBLE) +- `TestClient.server` property returns a test server instance, was + `asyncio.AbstractServer` (BACKWARD INCOMPATIBLE) +- Follow gunicorn's signal semantics in `Gunicorn[UVLoop]WebWorker` (`#1201 <https://github.com/aio-libs/aiohttp/pull/1201>`_) +- Call 
worker_int and worker_abort callbacks in + `Gunicorn[UVLoop]WebWorker` (`#1202 <https://github.com/aio-libs/aiohttp/pull/1202>`_) +- Has functional tests for client proxy (`#1218 <https://github.com/aio-libs/aiohttp/pull/1218>`_) +- Fix bugs with client proxy target path and proxy host with port (`#1413 <https://github.com/aio-libs/aiohttp/pull/1413>`_) +- Fix bugs related to the use of unicode hostnames (`#1444 <https://github.com/aio-libs/aiohttp/pull/1444>`_) +- Preserve cookie quoting/escaping (`#1453 <https://github.com/aio-libs/aiohttp/pull/1453>`_) +- FileSender will send gzipped response if gzip version available (`#1426 <https://github.com/aio-libs/aiohttp/pull/1426>`_) +- Don't override `Content-Length` header in `web.Response` if no body + was set (`#1400 <https://github.com/aio-libs/aiohttp/pull/1400>`_) +- Introduce `router.post_init()` for solving (`#1373 <https://github.com/aio-libs/aiohttp/pull/1373>`_) +- Fix raise error in case of multiple calls of `TimeServive.stop()` +- Allow to raise web exceptions on router resolving stage (`#1460 <https://github.com/aio-libs/aiohttp/pull/1460>`_) +- Add a warning for session creation outside of coroutine (`#1468 <https://github.com/aio-libs/aiohttp/pull/1468>`_) +- Avoid a race when application might start accepting incoming requests + but startup signals are not processed yet e98e8c6 +- Raise a `RuntimeError` when trying to change the status of the HTTP response + after the headers have been sent (`#1480 <https://github.com/aio-libs/aiohttp/pull/1480>`_) +- Fix bug with https proxy acquired cleanup (`#1340 <https://github.com/aio-libs/aiohttp/pull/1340>`_) +- Use UTF-8 as the default encoding for multipart text parts (`#1484 <https://github.com/aio-libs/aiohttp/pull/1484>`_) + + + +---- + + +1.1.6 (2016-11-28) +================== + +- Fix `BodyPartReader.read_chunk` bug about returns zero bytes before + `EOF` (`#1428 <https://github.com/aio-libs/aiohttp/pull/1428>`_) + + + +---- + +1.1.5 (2016-11-16) 
+================== + +- Fix static file serving in fallback mode (`#1401 <https://github.com/aio-libs/aiohttp/pull/1401>`_) + + + +---- + +1.1.4 (2016-11-14) +================== + +- Make `TestServer.make_url` compatible with `yarl.URL` (`#1389 <https://github.com/aio-libs/aiohttp/pull/1389>`_) +- Generate informative exception on redirects from server which + does not provide redirection headers (`#1396 <https://github.com/aio-libs/aiohttp/pull/1396>`_) + + + +---- + + +1.1.3 (2016-11-10) +================== + +- Support *root* resources for sub-applications (`#1379 <https://github.com/aio-libs/aiohttp/pull/1379>`_) + + + +---- + + +1.1.2 (2016-11-08) +================== + +- Allow starting variables with an underscore (`#1379 <https://github.com/aio-libs/aiohttp/pull/1379>`_) +- Properly process UNIX sockets by gunicorn worker (`#1375 <https://github.com/aio-libs/aiohttp/pull/1375>`_) +- Fix ordering for `FrozenList` +- Don't propagate pre and post signals to sub-application (`#1377 <https://github.com/aio-libs/aiohttp/pull/1377>`_) + + + +---- + +1.1.1 (2016-11-04) +================== + +- Fix documentation generation (`#1120 <https://github.com/aio-libs/aiohttp/pull/1120>`_) + + + +---- + +1.1.0 (2016-11-03) +================== + +- Drop deprecated `WSClientDisconnectedError` (BACKWARD INCOMPATIBLE) +- Use `yarl.URL` in client API. The change is 99% backward compatible + but `ClientResponse.url` is an `yarl.URL` instance now. (`#1217 <https://github.com/aio-libs/aiohttp/pull/1217>`_) +- Close idle keep-alive connections on shutdown (`#1222 <https://github.com/aio-libs/aiohttp/pull/1222>`_) +- Modify regex in AccessLogger to accept underscore and numbers (`#1225 <https://github.com/aio-libs/aiohttp/pull/1225>`_) +- Use `yarl.URL` in web server API. `web.Request.rel_url` and `web.Request.url` are added. URLs and templates are + percent-encoded now. 
(`#1224 <https://github.com/aio-libs/aiohttp/pull/1224>`_) +- Accept `yarl.URL` by server redirections (`#1278 <https://github.com/aio-libs/aiohttp/pull/1278>`_) +- Return `yarl.URL` by `.make_url()` testing utility (`#1279 <https://github.com/aio-libs/aiohttp/pull/1279>`_) +- Properly format IPv6 addresses by `aiohttp.web.run_app` (`#1139 <https://github.com/aio-libs/aiohttp/pull/1139>`_) +- Use `yarl.URL` by server API (`#1288 <https://github.com/aio-libs/aiohttp/pull/1288>`_) + + * Introduce `resource.url_for()`, deprecate `resource.url()`. + * Implement `StaticResource`. + * Inherit `SystemRoute` from `AbstractRoute` + * Drop old-style routes: `Route`, `PlainRoute`, `DynamicRoute`, + `StaticRoute`, `ResourceAdapter`. +- Revert `resp.url` back to `str`, introduce `resp.url_obj` (`#1292 <https://github.com/aio-libs/aiohttp/pull/1292>`_) +- Raise ValueError if BasicAuth login has a ":" character (`#1307 <https://github.com/aio-libs/aiohttp/pull/1307>`_) +- Fix bug when ClientRequest send payload file with opened as + open('filename', 'r+b') (`#1306 <https://github.com/aio-libs/aiohttp/pull/1306>`_) +- Enhancement to AccessLogger (pass *extra* dict) (`#1303 <https://github.com/aio-libs/aiohttp/pull/1303>`_) +- Show more verbose message on import errors (`#1319 <https://github.com/aio-libs/aiohttp/pull/1319>`_) +- Added save and load functionality for `CookieJar` (`#1219 <https://github.com/aio-libs/aiohttp/pull/1219>`_) +- Added option on `StaticRoute` to follow symlinks (`#1299 <https://github.com/aio-libs/aiohttp/pull/1299>`_) +- Force encoding of `application/json` content type to utf-8 (`#1339 <https://github.com/aio-libs/aiohttp/pull/1339>`_) +- Fix invalid invocations of `errors.LineTooLong` (`#1335 <https://github.com/aio-libs/aiohttp/pull/1335>`_) +- Websockets: Stop `async for` iteration when connection is closed (`#1144 <https://github.com/aio-libs/aiohttp/pull/1144>`_) +- Ensure TestClient HTTP methods return a context manager (`#1318 
<https://github.com/aio-libs/aiohttp/pull/1318>`_) +- Raise `ClientDisconnectedError` to `FlowControlStreamReader` read function + if `ClientSession` object is closed by client when reading data. (`#1323 <https://github.com/aio-libs/aiohttp/pull/1323>`_) +- Document deployment without `Gunicorn` (`#1120 <https://github.com/aio-libs/aiohttp/pull/1120>`_) +- Add deprecation warning for MD5 and SHA1 digests when used for fingerprint + of site certs in TCPConnector. (`#1186 <https://github.com/aio-libs/aiohttp/pull/1186>`_) +- Implement sub-applications (`#1301 <https://github.com/aio-libs/aiohttp/pull/1301>`_) +- Don't inherit `web.Request` from `dict` but implement + `MutableMapping` protocol. +- Implement frozen signals +- Don't inherit `web.Application` from `dict` but implement + `MutableMapping` protocol. +- Support freezing for web applications +- Accept access_log parameter in `web.run_app`, use `None` to disable logging +- Don't flap `tcp_cork` and `tcp_nodelay` in regular request handling. + `tcp_nodelay` is still enabled by default. +- Improve performance of web server by removing premature computing of + Content-Type if the value was set by `web.Response` constructor. + + While the patch boosts speed of trivial `web.Response(text='OK', + content_type='text/plain)` very well please don't expect significant + boost of your application -- a couple DB requests and business logic + is still the main bottleneck. +- Boost performance by adding a custom time service (`#1350 <https://github.com/aio-libs/aiohttp/pull/1350>`_) +- Extend `ClientResponse` with `content_type` and `charset` + properties like in `web.Request`. (`#1349 <https://github.com/aio-libs/aiohttp/pull/1349>`_) +- Disable aiodns by default (`#559 <https://github.com/aio-libs/aiohttp/pull/559>`_) +- Don't flap `tcp_cork` in client code, use TCP_NODELAY mode by default. 
+- Implement `web.Request.clone()` (`#1361 <https://github.com/aio-libs/aiohttp/pull/1361>`_) + + + +---- + +1.0.5 (2016-10-11) +================== + +- Fix StreamReader._read_nowait to return all available + data up to the requested amount (`#1297 <https://github.com/aio-libs/aiohttp/pull/1297>`_) + + + +---- + + +1.0.4 (2016-09-22) +================== + +- Fix FlowControlStreamReader.read_nowait so that it checks + whether the transport is paused (`#1206 <https://github.com/aio-libs/aiohttp/pull/1206>`_) + + + +---- + + +1.0.2 (2016-09-22) +================== + +- Make CookieJar compatible with 32-bit systems (`#1188 <https://github.com/aio-libs/aiohttp/pull/1188>`_) +- Add missing `WSMsgType` to `web_ws.__all__`, see (`#1200 <https://github.com/aio-libs/aiohttp/pull/1200>`_) +- Fix `CookieJar` ctor when called with `loop=None` (`#1203 <https://github.com/aio-libs/aiohttp/pull/1203>`_) +- Fix broken upper-casing in wsgi support (`#1197 <https://github.com/aio-libs/aiohttp/pull/1197>`_) + + + +---- + + +1.0.1 (2016-09-16) +================== + +- Restore `aiohttp.web.MsgType` alias for `aiohttp.WSMsgType` for sake + of backward compatibility (`#1178 <https://github.com/aio-libs/aiohttp/pull/1178>`_) +- Tune alabaster schema. +- Use `text/html` content type for displaying index pages by static + file handler. +- Fix `AssertionError` in static file handling (`#1177 <https://github.com/aio-libs/aiohttp/pull/1177>`_) +- Fix access log formats `%O` and `%b` for static file handling +- Remove `debug` setting of GunicornWorker, use `app.debug` + to control its debug-mode instead + + + +---- + + +1.0.0 (2016-09-16) +================== + +- Change default size for client session's connection pool from + unlimited to 20 (`#977 <https://github.com/aio-libs/aiohttp/pull/977>`_) +- Add IE support for cookie deletion. 
(`#994 <https://github.com/aio-libs/aiohttp/pull/994>`_) +- Remove deprecated `WebSocketResponse.wait_closed` method (BACKWARD + INCOMPATIBLE) +- Remove deprecated `force` parameter for `ClientResponse.close` + method (BACKWARD INCOMPATIBLE) +- Avoid using of mutable CIMultiDict kw param in make_mocked_request + (`#997 <https://github.com/aio-libs/aiohttp/pull/997>`_) +- Make WebSocketResponse.close a little bit faster by avoiding new + task creating just for timeout measurement +- Add `proxy` and `proxy_auth` params to `client.get()` and family, + deprecate `ProxyConnector` (`#998 <https://github.com/aio-libs/aiohttp/pull/998>`_) +- Add support for websocket send_json and receive_json, synchronize + server and client API for websockets (`#984 <https://github.com/aio-libs/aiohttp/pull/984>`_) +- Implement router shourtcuts for most useful HTTP methods, use + `app.router.add_get()`, `app.router.add_post()` etc. instead of + `app.router.add_route()` (`#986 <https://github.com/aio-libs/aiohttp/pull/986>`_) +- Support SSL connections for gunicorn worker (`#1003 <https://github.com/aio-libs/aiohttp/pull/1003>`_) +- Move obsolete examples to legacy folder +- Switch to multidict 2.0 and title-cased strings (`#1015 <https://github.com/aio-libs/aiohttp/pull/1015>`_) +- `{FOO}e` logger format is case-sensitive now +- Fix logger report for unix socket 8e8469b +- Rename aiohttp.websocket to aiohttp._ws_impl +- Rename ``aiohttp.MsgType`` to ``aiohttp.WSMsgType`` +- Introduce ``aiohttp.WSMessage`` officially +- Rename Message -> WSMessage +- Remove deprecated decode param from resp.read(decode=True) +- Use 5min default client timeout (`#1028 <https://github.com/aio-libs/aiohttp/pull/1028>`_) +- Relax HTTP method validation in UrlDispatcher (`#1037 <https://github.com/aio-libs/aiohttp/pull/1037>`_) +- Pin minimal supported asyncio version to 3.4.2+ (`loop.is_close()` + should be present) +- Remove aiohttp.websocket module (BACKWARD INCOMPATIBLE) + Please use high-level client and 
server approaches +- Link header for 451 status code is mandatory +- Fix test_client fixture to allow multiple clients per test (`#1072 <https://github.com/aio-libs/aiohttp/pull/1072>`_) +- make_mocked_request now accepts dict as headers (`#1073 <https://github.com/aio-libs/aiohttp/pull/1073>`_) +- Add Python 3.5.2/3.6+ compatibility patch for async generator + protocol change (`#1082 <https://github.com/aio-libs/aiohttp/pull/1082>`_) +- Improvement test_client can accept instance object (`#1083 <https://github.com/aio-libs/aiohttp/pull/1083>`_) +- Simplify ServerHttpProtocol implementation (`#1060 <https://github.com/aio-libs/aiohttp/pull/1060>`_) +- Add a flag for optional showing directory index for static file + handling (`#921 <https://github.com/aio-libs/aiohttp/pull/921>`_) +- Define `web.Application.on_startup()` signal handler (`#1103 <https://github.com/aio-libs/aiohttp/pull/1103>`_) +- Drop ChunkedParser and LinesParser (`#1111 <https://github.com/aio-libs/aiohttp/pull/1111>`_) +- Call `Application.startup` in GunicornWebWorker (`#1105 <https://github.com/aio-libs/aiohttp/pull/1105>`_) +- Fix client handling hostnames with 63 bytes when a port is given in + the url (`#1044 <https://github.com/aio-libs/aiohttp/pull/1044>`_) +- Implement proxy support for ClientSession.ws_connect (`#1025 <https://github.com/aio-libs/aiohttp/pull/1025>`_) +- Return named tuple from WebSocketResponse.can_prepare (`#1016 <https://github.com/aio-libs/aiohttp/pull/1016>`_) +- Fix access_log_format in `GunicornWebWorker` (`#1117 <https://github.com/aio-libs/aiohttp/pull/1117>`_) +- Setup Content-Type to application/octet-stream by default (`#1124 <https://github.com/aio-libs/aiohttp/pull/1124>`_) +- Deprecate debug parameter from app.make_handler(), use + `Application(debug=True)` instead (`#1121 <https://github.com/aio-libs/aiohttp/pull/1121>`_) +- Remove fragment string in request path (`#846 <https://github.com/aio-libs/aiohttp/pull/846>`_) +- Use 
aiodns.DNSResolver.gethostbyname() if available (`#1136 <https://github.com/aio-libs/aiohttp/pull/1136>`_) +- Fix static file sending on uvloop when sendfile is available (`#1093 <https://github.com/aio-libs/aiohttp/pull/1093>`_) +- Make prettier urls if query is empty dict (`#1143 <https://github.com/aio-libs/aiohttp/pull/1143>`_) +- Fix redirects for HEAD requests (`#1147 <https://github.com/aio-libs/aiohttp/pull/1147>`_) +- Default value for `StreamReader.read_nowait` is -1 from now (`#1150 <https://github.com/aio-libs/aiohttp/pull/1150>`_) +- `aiohttp.StreamReader` is not inherited from `asyncio.StreamReader` from now + (BACKWARD INCOMPATIBLE) (`#1150 <https://github.com/aio-libs/aiohttp/pull/1150>`_) +- Streams documentation added (`#1150 <https://github.com/aio-libs/aiohttp/pull/1150>`_) +- Add `multipart` coroutine method for web Request object (`#1067 <https://github.com/aio-libs/aiohttp/pull/1067>`_) +- Publish ClientSession.loop property (`#1149 <https://github.com/aio-libs/aiohttp/pull/1149>`_) +- Fix static file with spaces (`#1140 <https://github.com/aio-libs/aiohttp/pull/1140>`_) +- Fix piling up asyncio loop by cookie expiration callbacks (`#1061 <https://github.com/aio-libs/aiohttp/pull/1061>`_) +- Drop `Timeout` class for sake of `async_timeout` external library. 
+ `aiohttp.Timeout` is an alias for `async_timeout.timeout` +- `use_dns_cache` parameter of `aiohttp.TCPConnector` is `True` by + default (BACKWARD INCOMPATIBLE) (`#1152 <https://github.com/aio-libs/aiohttp/pull/1152>`_) +- `aiohttp.TCPConnector` uses asynchronous DNS resolver if available by + default (BACKWARD INCOMPATIBLE) (`#1152 <https://github.com/aio-libs/aiohttp/pull/1152>`_) +- Conform to RFC3986 - do not include url fragments in client requests (`#1174 <https://github.com/aio-libs/aiohttp/pull/1174>`_) +- Drop `ClientSession.cookies` (BACKWARD INCOMPATIBLE) (`#1173 <https://github.com/aio-libs/aiohttp/pull/1173>`_) +- Refactor `AbstractCookieJar` public API (BACKWARD INCOMPATIBLE) (`#1173 <https://github.com/aio-libs/aiohttp/pull/1173>`_) +- Fix clashing cookies which have the same name but belong to different + domains (BACKWARD INCOMPATIBLE) (`#1125 <https://github.com/aio-libs/aiohttp/pull/1125>`_) +- Support binary Content-Transfer-Encoding (`#1169 <https://github.com/aio-libs/aiohttp/pull/1169>`_) + + + +---- + + +0.22.5 (08-02-2016) +=================== + +- Pin multidict version to >=1.2.2 + + + +---- + +0.22.3 (07-26-2016) +=================== + +- Do not filter cookies if unsafe flag provided (`#1005 <https://github.com/aio-libs/aiohttp/pull/1005>`_) + + + +---- + + +0.22.2 (07-23-2016) +=================== + +- Suppress CancelledError when Timeout raises TimeoutError (`#970 <https://github.com/aio-libs/aiohttp/pull/970>`_) +- Don't expose `aiohttp.__version__` +- Add unsafe parameter to CookieJar (`#968 <https://github.com/aio-libs/aiohttp/pull/968>`_) +- Use unsafe cookie jar in test client tools +- Expose aiohttp.CookieJar name + + + +---- + + +0.22.1 (07-16-2016) +=================== + +- Large cookie expiration/max-age does not break an event loop from now + (fixes (`#967 <https://github.com/aio-libs/aiohttp/pull/967>`_)) + + + +---- + + +0.22.0 (07-15-2016) +=================== + +- Fix bug in serving static directory (`#803 
<https://github.com/aio-libs/aiohttp/pull/803>`_) +- Fix command line arg parsing (`#797 <https://github.com/aio-libs/aiohttp/pull/797>`_) +- Fix a documentation chapter about cookie usage (`#790 <https://github.com/aio-libs/aiohttp/pull/790>`_) +- Handle empty body with gzipped encoding (`#758 <https://github.com/aio-libs/aiohttp/pull/758>`_) +- Support 451 Unavailable For Legal Reasons http status (`#697 <https://github.com/aio-libs/aiohttp/pull/697>`_) +- Fix Cookie share example and few small typos in docs (`#817 <https://github.com/aio-libs/aiohttp/pull/817>`_) +- UrlDispatcher.add_route with partial coroutine handler (`#814 <https://github.com/aio-libs/aiohttp/pull/814>`_) +- Optional support for aiodns (`#728 <https://github.com/aio-libs/aiohttp/pull/728>`_) +- Add ServiceRestart and TryAgainLater websocket close codes (`#828 <https://github.com/aio-libs/aiohttp/pull/828>`_) +- Fix prompt message for `web.run_app` (`#832 <https://github.com/aio-libs/aiohttp/pull/832>`_) +- Allow to pass None as a timeout value to disable timeout logic (`#834 <https://github.com/aio-libs/aiohttp/pull/834>`_) +- Fix leak of connection slot during connection error (`#835 <https://github.com/aio-libs/aiohttp/pull/835>`_) +- Gunicorn worker with uvloop support + `aiohttp.worker.GunicornUVLoopWebWorker` (`#878 <https://github.com/aio-libs/aiohttp/pull/878>`_) +- Don't send body in response to HEAD request (`#838 <https://github.com/aio-libs/aiohttp/pull/838>`_) +- Skip the preamble in MultipartReader (`#881 <https://github.com/aio-libs/aiohttp/pull/881>`_) +- Implement BasicAuth decode classmethod. 
(`#744 <https://github.com/aio-libs/aiohttp/pull/744>`_) +- Don't crash logger when transport is None (`#889 <https://github.com/aio-libs/aiohttp/pull/889>`_) +- Use a create_future compatibility wrapper instead of creating + Futures directly (`#896 <https://github.com/aio-libs/aiohttp/pull/896>`_) +- Add test utilities to aiohttp (`#902 <https://github.com/aio-libs/aiohttp/pull/902>`_) +- Improve Request.__repr__ (`#875 <https://github.com/aio-libs/aiohttp/pull/875>`_) +- Skip DNS resolving if provided host is already an ip address (`#874 <https://github.com/aio-libs/aiohttp/pull/874>`_) +- Add headers to ClientSession.ws_connect (`#785 <https://github.com/aio-libs/aiohttp/pull/785>`_) +- Document that server can send pre-compressed data (`#906 <https://github.com/aio-libs/aiohttp/pull/906>`_) +- Don't add Content-Encoding and Transfer-Encoding if no body (`#891 <https://github.com/aio-libs/aiohttp/pull/891>`_) +- Add json() convenience methods to websocket message objects (`#897 <https://github.com/aio-libs/aiohttp/pull/897>`_) +- Add client_resp.raise_for_status() (`#908 <https://github.com/aio-libs/aiohttp/pull/908>`_) +- Implement cookie filter (`#799 <https://github.com/aio-libs/aiohttp/pull/799>`_) +- Include an example of middleware to handle error pages (`#909 <https://github.com/aio-libs/aiohttp/pull/909>`_) +- Fix error handling in StaticFileMixin (`#856 <https://github.com/aio-libs/aiohttp/pull/856>`_) +- Add mocked request helper (`#900 <https://github.com/aio-libs/aiohttp/pull/900>`_) +- Fix empty ALLOW Response header for cls based View (`#929 <https://github.com/aio-libs/aiohttp/pull/929>`_) +- Respect CONNECT method to implement a proxy server (`#847 <https://github.com/aio-libs/aiohttp/pull/847>`_) +- Add pytest_plugin (`#914 <https://github.com/aio-libs/aiohttp/pull/914>`_) +- Add tutorial +- Add backlog option to support more than 128 (default value in + "create_server" function) concurrent connections (`#892 
<https://github.com/aio-libs/aiohttp/pull/892>`_) +- Allow configuration of header size limits (`#912 <https://github.com/aio-libs/aiohttp/pull/912>`_) +- Separate sending file logic from StaticRoute dispatcher (`#901 <https://github.com/aio-libs/aiohttp/pull/901>`_) +- Drop deprecated share_cookies connector option (BACKWARD INCOMPATIBLE) +- Drop deprecated support for tuple as auth parameter. + Use aiohttp.BasicAuth instead (BACKWARD INCOMPATIBLE) +- Remove deprecated `request.payload` property, use `content` instead. + (BACKWARD INCOMPATIBLE) +- Drop all mentions about api changes in documentation for versions + older than 0.16 +- Allow to override default cookie jar (`#963 <https://github.com/aio-libs/aiohttp/pull/963>`_) +- Add manylinux wheel builds +- Dup a socket for sendfile usage (`#964 <https://github.com/aio-libs/aiohttp/pull/964>`_) + + + +---- + +0.21.6 (05-05-2016) +=================== + +- Drop initial query parameters on redirects (`#853 <https://github.com/aio-libs/aiohttp/pull/853>`_) + + + +---- + + +0.21.5 (03-22-2016) +=================== + +- Fix command line arg parsing (`#797 <https://github.com/aio-libs/aiohttp/pull/797>`_) + + + +---- + +0.21.4 (03-12-2016) +=================== + +- Fix ResourceAdapter: don't add method to allowed if resource is not + match (`#826 <https://github.com/aio-libs/aiohttp/pull/826>`_) +- Fix Resource: append found method to returned allowed methods + + + +---- + +0.21.2 (02-16-2016) +=================== + +- Fix a regression: support for handling ~/path in static file routes was + broken (`#782 <https://github.com/aio-libs/aiohttp/pull/782>`_) + + + +---- + +0.21.1 (02-10-2016) +=================== + +- Make new resources classes public (`#767 <https://github.com/aio-libs/aiohttp/pull/767>`_) +- Add `router.resources()` view +- Fix cmd-line parameter names in doc + + + +---- + +0.21.0 (02-04-2016) +=================== + +- Introduce on_shutdown signal (`#722 <https://github.com/aio-libs/aiohttp/pull/722>`_) +- 
Implement raw input headers (`#726 <https://github.com/aio-libs/aiohttp/pull/726>`_) +- Implement web.run_app utility function (`#734 <https://github.com/aio-libs/aiohttp/pull/734>`_) +- Introduce on_cleanup signal +- Deprecate Application.finish() / Application.register_on_finish() in favor of on_cleanup. +- Get rid of bare aiohttp.request(), aiohttp.get() and family in docs (`#729 <https://github.com/aio-libs/aiohttp/pull/729>`_) +- Deprecate bare aiohttp.request(), aiohttp.get() and family (`#729 <https://github.com/aio-libs/aiohttp/pull/729>`_) +- Refactor keep-alive support (`#737 <https://github.com/aio-libs/aiohttp/pull/737>`_) + + - Enable keepalive for HTTP 1.0 by default + - Disable it for HTTP 0.9 (who cares about 0.9, BTW?) + - For keepalived connections + + - Send `Connection: keep-alive` for HTTP 1.0 only + - don't send `Connection` header for HTTP 1.1 + - For non-keepalived connections + + - Send `Connection: close` for HTTP 1.1 only + - don't send `Connection` header for HTTP 1.0 +- Add version parameter to ClientSession constructor, + deprecate it for session.request() and family (`#736 <https://github.com/aio-libs/aiohttp/pull/736>`_) +- Enable access log by default (`#735 <https://github.com/aio-libs/aiohttp/pull/735>`_) +- Deprecate app.router.register_route() (the method was not documented intentionally BTW). +- Deprecate app.router.named_routes() in favor of app.router.named_resources() +- route.add_static accepts pathlib.Path now (`#743 <https://github.com/aio-libs/aiohttp/pull/743>`_) +- Add command line support: `$ python -m aiohttp.web package.main` (`#740 <https://github.com/aio-libs/aiohttp/pull/740>`_) +- FAQ section was added to docs. 
Enjoy and feel free to contribute new topics +- Add async context manager support to ClientSession +- Document ClientResponse's host, method, url properties +- Use CORK/NODELAY in client API (`#748 <https://github.com/aio-libs/aiohttp/pull/748>`_) +- ClientSession.close and Connector.close are coroutines now +- Close client connection on exception in ClientResponse.release() +- Allow to read multipart parts without content-length specified (`#750 <https://github.com/aio-libs/aiohttp/pull/750>`_) +- Add support for unix domain sockets to gunicorn worker (`#470 <https://github.com/aio-libs/aiohttp/pull/470>`_) +- Add test for default Expect handler (`#601 <https://github.com/aio-libs/aiohttp/pull/601>`_) +- Add the first demo project +- Rename `loader` keyword argument in `web.Request.json` method. (`#646 <https://github.com/aio-libs/aiohttp/pull/646>`_) +- Add local socket binding for TCPConnector (`#678 <https://github.com/aio-libs/aiohttp/pull/678>`_) + + + +---- + +0.20.2 (01-07-2016) +=================== + +- Enable use of `await` for a class based view (`#717 <https://github.com/aio-libs/aiohttp/pull/717>`_) +- Check address family to fill wsgi env properly (`#718 <https://github.com/aio-libs/aiohttp/pull/718>`_) +- Fix memory leak in headers processing (thanks to Marco Paolini) (`#723 <https://github.com/aio-libs/aiohttp/pull/723>`_) + + + +---- + +0.20.1 (12-30-2015) +=================== + +- Raise RuntimeError if Timeout context manager was used outside of + task context. 
+- Add number of bytes to stream.read_nowait (`#700 <https://github.com/aio-libs/aiohttp/pull/700>`_) +- Use X-FORWARDED-PROTO for wsgi.url_scheme when available + + + +---- + + +0.20.0 (12-28-2015) +=================== + +- Extend list of web exceptions, add HTTPMisdirectedRequest, + HTTPUpgradeRequired, HTTPPreconditionRequired, HTTPTooManyRequests, + HTTPRequestHeaderFieldsTooLarge, HTTPVariantAlsoNegotiates, + HTTPNotExtended, HTTPNetworkAuthenticationRequired status codes (`#644 <https://github.com/aio-libs/aiohttp/pull/644>`_) +- Do not remove AUTHORIZATION header by WSGI handler (`#649 <https://github.com/aio-libs/aiohttp/pull/649>`_) +- Fix broken support for https proxies with authentication (`#617 <https://github.com/aio-libs/aiohttp/pull/617>`_) +- Get REMOTE_* and SERVER_* http vars from headers when listening on + unix socket (`#654 <https://github.com/aio-libs/aiohttp/pull/654>`_) +- Add HTTP 308 support (`#663 <https://github.com/aio-libs/aiohttp/pull/663>`_) +- Add Tf format (time to serve request in seconds, %06f format) to + access log (`#669 <https://github.com/aio-libs/aiohttp/pull/669>`_) +- Remove one and a half years long deprecated + ClientResponse.read_and_close() method +- Optimize chunked encoding: use a single syscall instead of 3 calls + on sending chunked encoded data +- Use TCP_CORK and TCP_NODELAY to optimize network latency and + throughput (`#680 <https://github.com/aio-libs/aiohttp/pull/680>`_) +- Websocket XOR performance improved (`#687 <https://github.com/aio-libs/aiohttp/pull/687>`_) +- Avoid sending cookie attributes in Cookie header (`#613 <https://github.com/aio-libs/aiohttp/pull/613>`_) +- Round server timeouts to seconds for grouping pending calls. That + leads to less amount of poller syscalls e.g. epoll.poll(). 
(`#702 <https://github.com/aio-libs/aiohttp/pull/702>`_) +- Close connection on websocket handshake error (`#703 <https://github.com/aio-libs/aiohttp/pull/703>`_) +- Implement class based views (`#684 <https://github.com/aio-libs/aiohttp/pull/684>`_) +- Add *headers* parameter to ws_connect() (`#709 <https://github.com/aio-libs/aiohttp/pull/709>`_) +- Drop unused function `parse_remote_addr()` (`#708 <https://github.com/aio-libs/aiohttp/pull/708>`_) +- Close session on exception (`#707 <https://github.com/aio-libs/aiohttp/pull/707>`_) +- Store http code and headers in WSServerHandshakeError (`#706 <https://github.com/aio-libs/aiohttp/pull/706>`_) +- Make some low-level message properties readonly (`#710 <https://github.com/aio-libs/aiohttp/pull/710>`_) + + + +---- + + +0.19.0 (11-25-2015) +=================== + +- Memory leak in ParserBuffer (`#579 <https://github.com/aio-libs/aiohttp/pull/579>`_) +- Support gunicorn's `max_requests` settings in gunicorn worker +- Fix wsgi environment building (`#573 <https://github.com/aio-libs/aiohttp/pull/573>`_) +- Improve access logging (`#572 <https://github.com/aio-libs/aiohttp/pull/572>`_) +- Drop unused host and port from low-level server (`#586 <https://github.com/aio-libs/aiohttp/pull/586>`_) +- Add Python 3.5 `async for` implementation to server websocket (`#543 <https://github.com/aio-libs/aiohttp/pull/543>`_) +- Add Python 3.5 `async for` implementation to client websocket +- Add Python 3.5 `async with` implementation to client websocket +- Add charset parameter to web.Response constructor (`#593 <https://github.com/aio-libs/aiohttp/pull/593>`_) +- Forbid passing both Content-Type header and content_type or charset + params into web.Response constructor +- Forbid duplicating of web.Application and web.Request (`#602 <https://github.com/aio-libs/aiohttp/pull/602>`_) +- Add an option to pass Origin header in ws_connect (`#607 <https://github.com/aio-libs/aiohttp/pull/607>`_) +- Add json_response function (`#592 
<https://github.com/aio-libs/aiohttp/pull/592>`_) +- Make concurrent connections respect limits (`#581 <https://github.com/aio-libs/aiohttp/pull/581>`_) +- Collect history of responses if redirects occur (`#614 <https://github.com/aio-libs/aiohttp/pull/614>`_) +- Enable passing pre-compressed data in requests (`#621 <https://github.com/aio-libs/aiohttp/pull/621>`_) +- Expose named routes via UrlDispatcher.named_routes() (`#622 <https://github.com/aio-libs/aiohttp/pull/622>`_) +- Allow disabling sendfile by environment variable AIOHTTP_NOSENDFILE (`#629 <https://github.com/aio-libs/aiohttp/pull/629>`_) +- Use ensure_future if available +- Always quote params for Content-Disposition (`#641 <https://github.com/aio-libs/aiohttp/pull/641>`_) +- Support async for in multipart reader (`#640 <https://github.com/aio-libs/aiohttp/pull/640>`_) +- Add Timeout context manager (`#611 <https://github.com/aio-libs/aiohttp/pull/611>`_) + + + +---- + +0.18.4 (13-11-2015) +=================== + +- Relax rule for router names again by adding dash to allowed + characters: they may contain identifiers, dashes, dots and columns + + + +---- + +0.18.3 (25-10-2015) +=================== + +- Fix formatting for _RequestContextManager helper (`#590 <https://github.com/aio-libs/aiohttp/pull/590>`_) + + + +---- + +0.18.2 (22-10-2015) +=================== + +- Fix regression for OpenSSL < 1.0.0 (`#583 <https://github.com/aio-libs/aiohttp/pull/583>`_) + + + +---- + +0.18.1 (20-10-2015) +=================== + +- Relax rule for router names: they may contain dots and columns + starting from now + + + +---- + +0.18.0 (19-10-2015) +=================== + +- Use errors.HttpProcessingError.message as HTTP error reason and + message (`#459 <https://github.com/aio-libs/aiohttp/pull/459>`_) +- Optimize cythonized multidict a bit +- Change repr's of multidicts and multidict views +- default headers in ClientSession are now case-insensitive +- Make '=' char and 'wss://' schema safe in urls (`#477 
<https://github.com/aio-libs/aiohttp/pull/477>`_) +- `ClientResponse.close()` forces connection closing by default from now (`#479 <https://github.com/aio-libs/aiohttp/pull/479>`_) + + N.B. Backward incompatible change: was `.close(force=False)`. Using + `force` parameter for the method is deprecated: use `.release()` + instead. +- Properly requote URL's path (`#480 <https://github.com/aio-libs/aiohttp/pull/480>`_) +- add `skip_auto_headers` parameter for client API (`#486 <https://github.com/aio-libs/aiohttp/pull/486>`_) +- Properly parse URL path in aiohttp.web.Request (`#489 <https://github.com/aio-libs/aiohttp/pull/489>`_) +- Raise RuntimeError when chunked enabled and HTTP is 1.0 (`#488 <https://github.com/aio-libs/aiohttp/pull/488>`_) +- Fix a bug with processing io.BytesIO as data parameter for client API (`#500 <https://github.com/aio-libs/aiohttp/pull/500>`_) +- Skip auto-generation of Content-Type header (`#507 <https://github.com/aio-libs/aiohttp/pull/507>`_) +- Use sendfile facility for static file handling (`#503 <https://github.com/aio-libs/aiohttp/pull/503>`_) +- Default `response_factory` in `app.router.add_static` now is + `StreamResponse`, not `None`. The functionality is not changed if + default is not specified. +- Drop `ClientResponse.message` attribute, it was always implementation detail. +- Streams are optimized for speed and mostly memory in case of a big + HTTP message sizes (`#496 <https://github.com/aio-libs/aiohttp/pull/496>`_) +- Fix a bug for server-side cookies for dropping cookie and setting it + again without Max-Age parameter. 
+- Don't trim redirect URL in client API (`#499 <https://github.com/aio-libs/aiohttp/pull/499>`_) +- Extend precision of access log "D" to milliseconds (`#527 <https://github.com/aio-libs/aiohttp/pull/527>`_) +- Deprecate `StreamResponse.start()` method in favor of + `StreamResponse.prepare()` coroutine (`#525 <https://github.com/aio-libs/aiohttp/pull/525>`_) + + `.start()` is still supported but responses begun with `.start()` + does not call signal for response preparing to be sent. +- Add `StreamReader.__repr__` +- Drop Python 3.3 support, from now minimal required version is Python + 3.4.1 (`#541 <https://github.com/aio-libs/aiohttp/pull/541>`_) +- Add `async with` support for `ClientSession.request()` and family (`#536 <https://github.com/aio-libs/aiohttp/pull/536>`_) +- Ignore message body on 204 and 304 responses (`#505 <https://github.com/aio-libs/aiohttp/pull/505>`_) +- `TCPConnector` processed both IPv4 and IPv6 by default (`#559 <https://github.com/aio-libs/aiohttp/pull/559>`_) +- Add `.routes()` view for urldispatcher (`#519 <https://github.com/aio-libs/aiohttp/pull/519>`_) +- Route name should be a valid identifier name from now (`#567 <https://github.com/aio-libs/aiohttp/pull/567>`_) +- Implement server signals (`#562 <https://github.com/aio-libs/aiohttp/pull/562>`_) +- Drop a year-old deprecated *files* parameter from client API. 
+- Added `async for` support for aiohttp stream (`#542 <https://github.com/aio-libs/aiohttp/pull/542>`_) + + + +---- + +0.17.4 (09-29-2015) +=================== + +- Properly parse URL path in aiohttp.web.Request (`#489 <https://github.com/aio-libs/aiohttp/pull/489>`_) +- Add missing coroutine decorator, the client api is await-compatible now + + + +---- + +0.17.3 (08-28-2015) +=================== + +- Remove Content-Length header on compressed responses (`#450 <https://github.com/aio-libs/aiohttp/pull/450>`_) +- Support Python 3.5 +- Improve performance of transport in-use list (`#472 <https://github.com/aio-libs/aiohttp/pull/472>`_) +- Fix connection pooling (`#473 <https://github.com/aio-libs/aiohttp/pull/473>`_) + + + +---- + +0.17.2 (08-11-2015) +=================== + +- Don't forget to pass `data` argument forward (`#462 <https://github.com/aio-libs/aiohttp/pull/462>`_) +- Fix multipart read bytes count (`#463 <https://github.com/aio-libs/aiohttp/pull/463>`_) + + + +---- + +0.17.1 (08-10-2015) +=================== + +- Fix multidict comparison to arbitrary abc.Mapping + + + +---- + +0.17.0 (08-04-2015) +=================== + +- Make StaticRoute support Last-Modified and If-Modified-Since headers (`#386 <https://github.com/aio-libs/aiohttp/pull/386>`_) +- Add Request.if_modified_since and Stream.Response.last_modified properties +- Fix deflate compression when writing a chunked response (`#395 <https://github.com/aio-libs/aiohttp/pull/395>`_) +- Request`s content-length header is cleared now after redirect from + POST method (`#391 <https://github.com/aio-libs/aiohttp/pull/391>`_) +- Return a 400 if server received a non HTTP content (`#405 <https://github.com/aio-libs/aiohttp/pull/405>`_) +- Fix keep-alive support for aiohttp clients (`#406 <https://github.com/aio-libs/aiohttp/pull/406>`_) +- Allow gzip compression in high-level server response interface (`#403 <https://github.com/aio-libs/aiohttp/pull/403>`_) +- Rename TCPConnector.resolve and family to 
dns_cache (`#415 <https://github.com/aio-libs/aiohttp/pull/415>`_) +- Make UrlDispatcher ignore quoted characters during url matching (`#414 <https://github.com/aio-libs/aiohttp/pull/414>`_) + Backward-compatibility warning: this may change the url matched by + your queries if they send quoted character (like %2F for /) (`#414 <https://github.com/aio-libs/aiohttp/pull/414>`_) +- Use optional cchardet accelerator if present (`#418 <https://github.com/aio-libs/aiohttp/pull/418>`_) +- Borrow loop from Connector in ClientSession if loop is not set +- Add context manager support to ClientSession for session closing. +- Add toplevel get(), post(), put(), head(), delete(), options(), + patch() coroutines. +- Fix IPv6 support for client API (`#425 <https://github.com/aio-libs/aiohttp/pull/425>`_) +- Pass SSL context through proxy connector (`#421 <https://github.com/aio-libs/aiohttp/pull/421>`_) +- Make the rule: path for add_route should start with slash +- Don't process request finishing by low-level server on closed event loop +- Don't override data if multiple files are uploaded with same key (`#433 <https://github.com/aio-libs/aiohttp/pull/433>`_) +- Ensure multipart.BodyPartReader.read_chunk read all the necessary data + to avoid false assertions about malformed multipart payload +- Don't send body for 204, 205 and 304 http exceptions (`#442 <https://github.com/aio-libs/aiohttp/pull/442>`_) +- Correctly skip Cython compilation in MSVC not found (`#453 <https://github.com/aio-libs/aiohttp/pull/453>`_) +- Add response factory to StaticRoute (`#456 <https://github.com/aio-libs/aiohttp/pull/456>`_) +- Don't append trailing CRLF for multipart.BodyPartReader (`#454 <https://github.com/aio-libs/aiohttp/pull/454>`_) + + + +---- + + +0.16.6 (07-15-2015) +=================== + +- Skip compilation on Windows if vcvarsall.bat cannot be found (`#438 <https://github.com/aio-libs/aiohttp/pull/438>`_) + + + +---- + +0.16.5 (06-13-2015) +=================== + +- Get rid of all 
comprehensions and yielding in _multidict (`#410 <https://github.com/aio-libs/aiohttp/pull/410>`_) + + + +---- + + +0.16.4 (06-13-2015) +=================== + +- Don't clear current exception in multidict's `__repr__` (cythonized + versions) (`#410 <https://github.com/aio-libs/aiohttp/pull/410>`_) + + + +---- + + +0.16.3 (05-30-2015) +=================== + +- Fix StaticRoute vulnerability to directory traversal attacks (`#380 <https://github.com/aio-libs/aiohttp/pull/380>`_) + + + +---- + + +0.16.2 (05-27-2015) +=================== + +- Update python version required for `__del__` usage: it's actually + 3.4.1 instead of 3.4.0 +- Add check for presence of loop.is_closed() method before call the + former (`#378 <https://github.com/aio-libs/aiohttp/pull/378>`_) + + + +---- + + +0.16.1 (05-27-2015) +=================== + +- Fix regression in static file handling (`#377 <https://github.com/aio-libs/aiohttp/pull/377>`_) + + + +---- + +0.16.0 (05-26-2015) +=================== + +- Unset waiter future after cancellation (`#363 <https://github.com/aio-libs/aiohttp/pull/363>`_) +- Update request url with query parameters (`#372 <https://github.com/aio-libs/aiohttp/pull/372>`_) +- Support new `fingerprint` param of TCPConnector to enable verifying + SSL certificates via MD5, SHA1, or SHA256 digest (`#366 <https://github.com/aio-libs/aiohttp/pull/366>`_) +- Setup uploaded filename if field value is binary and transfer + encoding is not specified (`#349 <https://github.com/aio-libs/aiohttp/pull/349>`_) +- Implement `ClientSession.close()` method +- Implement `connector.closed` readonly property +- Implement `ClientSession.closed` readonly property +- Implement `ClientSession.connector` readonly property +- Implement `ClientSession.detach` method +- Add `__del__` to client-side objects: sessions, connectors, + connections, requests, responses. 
+- Refactor connections cleanup by connector (`#357 <https://github.com/aio-libs/aiohttp/pull/357>`_) +- Add `limit` parameter to connector constructor (`#358 <https://github.com/aio-libs/aiohttp/pull/358>`_) +- Add `request.has_body` property (`#364 <https://github.com/aio-libs/aiohttp/pull/364>`_) +- Add `response_class` parameter to `ws_connect()` (`#367 <https://github.com/aio-libs/aiohttp/pull/367>`_) +- `ProxyConnector` does not support keep-alive requests by default + starting from now (`#368 <https://github.com/aio-libs/aiohttp/pull/368>`_) +- Add `connector.force_close` property +- Add ws_connect to ClientSession (`#374 <https://github.com/aio-libs/aiohttp/pull/374>`_) +- Support optional `chunk_size` parameter in `router.add_static()` + + + +---- + + +0.15.3 (04-22-2015) +=================== + +- Fix graceful shutdown handling +- Fix `Expect` header handling for not found and not allowed routes (`#340 <https://github.com/aio-libs/aiohttp/pull/340>`_) + + + +---- + + +0.15.2 (04-19-2015) +=================== + +- Flow control subsystem refactoring +- HTTP server performance optimizations +- Allow to match any request method with `*` +- Explicitly call drain on transport (`#316 <https://github.com/aio-libs/aiohttp/pull/316>`_) +- Make chardet module dependency mandatory (`#318 <https://github.com/aio-libs/aiohttp/pull/318>`_) +- Support keep-alive for HTTP 1.0 (`#325 <https://github.com/aio-libs/aiohttp/pull/325>`_) +- Do not chunk single file during upload (`#327 <https://github.com/aio-libs/aiohttp/pull/327>`_) +- Add ClientSession object for cookie storage and default headers (`#328 <https://github.com/aio-libs/aiohttp/pull/328>`_) +- Add `keep_alive_on` argument for HTTP server handler. 
+ + + +---- + + +0.15.1 (03-31-2015) +=================== + +- Pass Autobahn Testsuite tests +- Fixed websocket fragmentation +- Fixed websocket close procedure +- Fixed parser buffer limits +- Added `timeout` parameter to WebSocketResponse ctor +- Added `WebSocketResponse.close_code` attribute + + + +---- + + +0.15.0 (03-27-2015) +=================== + +- Client WebSockets support +- New Multipart system (`#273 <https://github.com/aio-libs/aiohttp/pull/273>`_) +- Support for "Except" header (`#287 <https://github.com/aio-libs/aiohttp/pull/287>`_) (`#267 <https://github.com/aio-libs/aiohttp/pull/267>`_) +- Set default Content-Type for post requests (`#184 <https://github.com/aio-libs/aiohttp/pull/184>`_) +- Fix issue with construction dynamic route with regexps and trailing slash (`#266 <https://github.com/aio-libs/aiohttp/pull/266>`_) +- Add repr to web.Request +- Add repr to web.Response +- Add repr for NotFound and NotAllowed match infos +- Add repr for web.Application +- Add repr to UrlMappingMatchInfo (`#217 <https://github.com/aio-libs/aiohttp/pull/217>`_) +- Gunicorn 19.2.x compatibility + + + +---- + + +0.14.4 (01-29-2015) +=================== + +- Fix issue with error during constructing of url with regex parts (`#264 <https://github.com/aio-libs/aiohttp/pull/264>`_) + + + +---- + + +0.14.3 (01-28-2015) +=================== + +- Use path='/' by default for cookies (`#261 <https://github.com/aio-libs/aiohttp/pull/261>`_) + + + +---- + + +0.14.2 (01-23-2015) +=================== + +- Connections leak in BaseConnector (`#253 <https://github.com/aio-libs/aiohttp/pull/253>`_) +- Do not swallow websocket reader exceptions (`#255 <https://github.com/aio-libs/aiohttp/pull/255>`_) +- web.Request's read, text, json are memorized (`#250 <https://github.com/aio-libs/aiohttp/pull/250>`_) + + + +---- + + +0.14.1 (01-15-2015) +=================== + +- HttpMessage._add_default_headers does not overwrite existing headers (`#216 
<https://github.com/aio-libs/aiohttp/pull/216>`_) +- Expose multidict classes at package level +- add `aiohttp.web.WebSocketResponse` +- According to RFC 6455 websocket subprotocol preference order is + provided by client, not by server +- websocket's ping and pong accept optional message parameter +- multidict views do not accept `getall` parameter anymore, it + returns the full body anyway. +- multidicts have optional Cython optimization, cythonized version of + multidicts is about 5 times faster than pure Python. +- multidict.getall() returns `list`, not `tuple`. +- Backward incompatible change: now there are two mutable multidicts + (`MultiDict`, `CIMultiDict`) and two immutable multidict proxies + (`MultiDictProxy` and `CIMultiDictProxy`). Previous edition of + multidicts was not a part of public API BTW. +- Router refactoring to push Not Allowed and Not Found in middleware processing +- Convert `ConnectionError` to `aiohttp.DisconnectedError` and don't + eat `ConnectionError` exceptions from web handlers. +- Remove hop headers from Response class, wsgi response still uses hop headers. +- Allow to send raw chunked encoded response. +- Allow to encode output bytes stream into chunked encoding. +- Allow to compress output bytes stream with `deflate` encoding. +- Server has 75 seconds keepalive timeout now, was non-keepalive by default. +- Application does not accept `**kwargs` anymore ((`#243 <https://github.com/aio-libs/aiohttp/pull/243>`_)). +- Request is inherited from dict now for making per-request storage to + middlewares ((`#242 <https://github.com/aio-libs/aiohttp/pull/242>`_)). 
+
+
+
+----
+
+
+0.13.1 (12-31-2014)
+===================
+
+- Add `aiohttp.web.StreamResponse.started` property (`#213 <https://github.com/aio-libs/aiohttp/pull/213>`_)
+- HTML escape traceback text in `ServerHttpProtocol.handle_error`
+- Mention handler and middlewares in `aiohttp.web.RequestHandler.handle_request`
+  on error ((`#218 <https://github.com/aio-libs/aiohttp/pull/218>`_))
+
+
+
+----
+
+
+0.13.0 (12-29-2014)
+===================
+
+- `StreamResponse.charset` converts value to lower-case on assigning.
+- Chain exceptions when raise `ClientRequestError`.
+- Support custom regexps in route variables (`#204 <https://github.com/aio-libs/aiohttp/pull/204>`_)
+- Fixed graceful shutdown, disable keep-alive on connection closing.
+- Decode HTTP message with `utf-8` encoding, some servers send headers
+  in utf-8 encoding (`#207 <https://github.com/aio-libs/aiohttp/pull/207>`_)
+- Support `aiohttp.web` middlewares (`#209 <https://github.com/aio-libs/aiohttp/pull/209>`_)
+- Add ssl_context to TCPConnector (`#206 <https://github.com/aio-libs/aiohttp/pull/206>`_)
+
+
+
+----
+
+
+0.12.0 (12-12-2014)
+===================
+
+- Deep refactoring of `aiohttp.web` in backward-incompatible manner.
+  Sorry, we have to do this.
+- Automatically force aiohttp.web handlers to coroutines in
+  `UrlDispatcher.add_route()` (`#186 <https://github.com/aio-libs/aiohttp/pull/186>`_)
+- Rename `Request.POST()` function to `Request.post()`
+- Added POST attribute
+- Response processing refactoring: constructor does not accept Request
+  instance anymore.
+- Pass application instance to finish callback
+- Exceptions refactoring
+- Do not unquote query string in `aiohttp.web.Request`
+- Fix concurrent access to payload in `RequestHandler.handle_request()`
+- Add access logging to `aiohttp.web`
+- Gunicorn worker for `aiohttp.web`
+- Removed deprecated `AsyncGunicornWorker`
+- Removed deprecated HttpClient
+
+
+
+----
+
+
+0.11.0 (11-29-2014)
+===================
+
+- Support named routes in `aiohttp.web.UrlDispatcher` (`#179 <https://github.com/aio-libs/aiohttp/pull/179>`_)
+- Make websocket subprotocols conform to spec (`#181 <https://github.com/aio-libs/aiohttp/pull/181>`_)
+
+
+
+----
+
+
+0.10.2 (11-19-2014)
+===================
+
+- Don't unquote `environ['PATH_INFO']` in wsgi.py (`#177 <https://github.com/aio-libs/aiohttp/pull/177>`_)
+
+
+
+----
+
+
+0.10.1 (11-17-2014)
+===================
+
+- aiohttp.web.HTTPException and descendants now fills response body
+  with string like `404: NotFound`
+- Fix multidict `__iter__`, the method should iterate over keys, not
+  (key, value) pairs.
+
+
+
+----
+
+
+0.10.0 (11-13-2014)
+===================
+
+- Add aiohttp.web subpackage for highlevel HTTP server support.
+- Add *reason* optional parameter to aiohttp.protocol.Response ctor.
+- Fix aiohttp.client bug for sending file without content-type.
+- Change error text for connection closed between server responses + from 'Can not read status line' to explicit 'Connection closed by + server' +- Drop closed connections from connector (`#173 <https://github.com/aio-libs/aiohttp/pull/173>`_) +- Set server.transport to None on .closing() (`#172 <https://github.com/aio-libs/aiohttp/pull/172>`_) + + + +---- + + +0.9.3 (10-30-2014) +================== + +- Fix compatibility with asyncio 3.4.1+ (`#170 <https://github.com/aio-libs/aiohttp/pull/170>`_) + + + +---- + + +0.9.2 (10-16-2014) +================== + +- Improve redirect handling (`#157 <https://github.com/aio-libs/aiohttp/pull/157>`_) +- Send raw files as is (`#153 <https://github.com/aio-libs/aiohttp/pull/153>`_) +- Better websocket support (`#150 <https://github.com/aio-libs/aiohttp/pull/150>`_) + + + +---- + + +0.9.1 (08-30-2014) +================== + +- Added MultiDict support for client request params and data (`#114 <https://github.com/aio-libs/aiohttp/pull/114>`_). +- Fixed parameter type for IncompleteRead exception (`#118 <https://github.com/aio-libs/aiohttp/pull/118>`_). +- Strictly require ASCII headers names and values (`#137 <https://github.com/aio-libs/aiohttp/pull/137>`_) +- Keep port in ProxyConnector (`#128 <https://github.com/aio-libs/aiohttp/pull/128>`_). +- Python 3.4.1 compatibility (`#131 <https://github.com/aio-libs/aiohttp/pull/131>`_). + + + +---- + + +0.9.0 (07-08-2014) +================== + +- Better client basic authentication support (`#112 <https://github.com/aio-libs/aiohttp/pull/112>`_). +- Fixed incorrect line splitting in HttpRequestParser (`#97 <https://github.com/aio-libs/aiohttp/pull/97>`_). +- Support StreamReader and DataQueue as request data. +- Client files handling refactoring (`#20 <https://github.com/aio-libs/aiohttp/pull/20>`_). +- Backward incompatible: Replace DataQueue with StreamReader for + request payload (`#87 <https://github.com/aio-libs/aiohttp/pull/87>`_). 
+ + + +---- + + +0.8.4 (07-04-2014) +================== + +- Change ProxyConnector authorization parameters. + + + +---- + + +0.8.3 (07-03-2014) +================== + +- Publish TCPConnector properties: verify_ssl, family, resolve, resolved_hosts. +- Don't parse message body for HEAD responses. +- Refactor client response decoding. + + + +---- + + +0.8.2 (06-22-2014) +================== + +- Make ProxyConnector.proxy immutable property. +- Make UnixConnector.path immutable property. +- Fix resource leak for aiohttp.request() with implicit connector. +- Rename Connector's reuse_timeout to keepalive_timeout. + + + +---- + + +0.8.1 (06-18-2014) +================== + +- Use case insensitive multidict for server request/response headers. +- MultiDict.getall() accepts default value. +- Catch server ConnectionError. +- Accept MultiDict (and derived) instances in aiohttp.request header argument. +- Proxy 'CONNECT' support. + + + +---- + + +0.8.0 (06-06-2014) +================== + +- Add support for utf-8 values in HTTP headers +- Allow to use custom response class instead of HttpResponse +- Use MultiDict for client request headers +- Use MultiDict for server request/response headers +- Store response headers in ClientResponse.headers attribute +- Get rid of timeout parameter in aiohttp.client API +- Exceptions refactoring + + + +---- + + +0.7.3 (05-20-2014) +================== + +- Simple HTTP proxy support. + + + +---- + + +0.7.2 (05-14-2014) +================== + +- Get rid of `__del__` methods +- Use ResourceWarning instead of logging warning record. + + + +---- + + +0.7.1 (04-28-2014) +================== + +- Do not unquote client request urls. +- Allow multiple waiters on transport drain. +- Do not return client connection to pool in case of exceptions. +- Rename SocketConnector to TCPConnector and UnixSocketConnector to + UnixConnector. + + + +---- + + +0.7.0 (04-16-2014) +================== + +- Connection flow control. 
+- HTTP client session/connection pool refactoring. +- Better handling for bad server requests. + + + +---- + + +0.6.5 (03-29-2014) +================== + +- Added client session reuse timeout. +- Better client request cancellation support. +- Better handling responses without content length. +- Added HttpClient verify_ssl parameter support. + + + +---- + + +0.6.4 (02-27-2014) +================== + +- Log content-length missing warning only for put and post requests. + + + +---- + + +0.6.3 (02-27-2014) +================== + +- Better support for server exit. +- Read response body until EOF if content-length is not defined (`#14 <https://github.com/aio-libs/aiohttp/pull/14>`_) + + + +---- + + +0.6.2 (02-18-2014) +================== + +- Fix trailing char in allowed_methods. +- Start slow request timer for first request. + + + +---- + + +0.6.1 (02-17-2014) +================== + +- Added utility method HttpResponse.read_and_close() +- Added slow request timeout. +- Enable socket SO_KEEPALIVE if available. + + + +---- + + +0.6.0 (02-12-2014) +================== + +- Better handling for process exit. + + + +---- + + +0.5.0 (01-29-2014) +================== +- Allow to use custom HttpRequest client class. +- Use gunicorn keepalive setting for asynchronous worker. +- Log leaking responses. +- python 3.4 compatibility + + + +---- + + +0.4.4 (11-15-2013) +================== + +- Resolve only AF_INET family, because it is not clear how to pass + extra info to asyncio. + + + +---- + + +0.4.3 (11-15-2013) +================== + +- Allow to wait completion of request with `HttpResponse.wait_for_close()` + + + +---- + + +0.4.2 (11-14-2013) +================== + +- Handle exception in client request stream. +- Prevent host resolving for each client request. + + + +---- + + +0.4.1 (11-12-2013) +================== + +- Added client support for `expect: 100-continue` header. 
+ + + +---- + + +0.4 (11-06-2013) +================ + +- Added custom wsgi application close procedure +- Fixed concurrent host failure in HttpClient + + + +---- + + +0.3 (11-04-2013) +================ + +- Added PortMapperWorker +- Added HttpClient +- Added TCP connection timeout to HTTP client +- Better client connection errors handling +- Gracefully handle process exit + + + +---- + + +0.2 +=== + +- Fix packaging diff --git a/HISTORY.rst b/HISTORY.rst deleted file mode 100644 index b3c3b97f886..00000000000 --- a/HISTORY.rst +++ /dev/null @@ -1,2686 +0,0 @@ -3.4.4 (2018-09-05) -================== - -- Fix installation from sources when compiling toolkit is not available (`#3241 <https://github.com/aio-libs/aiohttp/pull/3241>`_) - -3.4.3 (2018-09-04) -================== - -- Add ``app.pre_frozen`` state to properly handle startup signals in sub-applications. (`#3237 <https://github.com/aio-libs/aiohttp/pull/3237>`_) - - -3.4.2 (2018-09-01) -================== - -- Fix ``iter_chunks`` type annotation (`#3230 <https://github.com/aio-libs/aiohttp/pull/3230>`_) - -3.4.1 (2018-08-28) -================== - -- Fix empty header parsing regression. (`#3218 <https://github.com/aio-libs/aiohttp/pull/3218>`_) -- Fix BaseRequest.raw_headers doc. (`#3215 <https://github.com/aio-libs/aiohttp/pull/3215>`_) -- Fix documentation building on ReadTheDocs (`#3221 <https://github.com/aio-libs/aiohttp/pull/3221>`_) - - -3.4.0 (2018-08-25) -================== - -Features --------- - -- Add type hints (`#3049 <https://github.com/aio-libs/aiohttp/pull/3049>`_) -- Add ``raise_for_status`` request parameter (`#3073 <https://github.com/aio-libs/aiohttp/pull/3073>`_) -- Add type hints to HTTP client (`#3092 <https://github.com/aio-libs/aiohttp/pull/3092>`_) -- Minor server optimizations (`#3095 <https://github.com/aio-libs/aiohttp/pull/3095>`_) -- Preserve the cause when `HTTPException` is raised from another exception. 
(`#3096 <https://github.com/aio-libs/aiohttp/pull/3096>`_) -- Add `close_boundary` option in `MultipartWriter.write` method. Support streaming (`#3104 <https://github.com/aio-libs/aiohttp/pull/3104>`_) -- Added a ``remove_slash`` option to the ``normalize_path_middleware`` factory. (`#3173 <https://github.com/aio-libs/aiohttp/pull/3173>`_) -- The class `AbstractRouteDef` is importable from `aiohttp.web`. (`#3183 <https://github.com/aio-libs/aiohttp/pull/3183>`_) - - -Bugfixes --------- - -- Prevent double closing when client connection is released before the - last ``data_received()`` callback. (`#3031 <https://github.com/aio-libs/aiohttp/pull/3031>`_) -- Make redirect with `normalize_path_middleware` work when using url encoded paths. (`#3051 <https://github.com/aio-libs/aiohttp/pull/3051>`_) -- Postpone web task creation to connection establishment. (`#3052 <https://github.com/aio-libs/aiohttp/pull/3052>`_) -- Fix ``sock_read`` timeout. (`#3053 <https://github.com/aio-libs/aiohttp/pull/3053>`_) -- When using a server-request body as the `data=` argument of a client request, iterate over the content with `readany` instead of `readline` to avoid `Line too long` errors. (`#3054 <https://github.com/aio-libs/aiohttp/pull/3054>`_) -- fix `UrlDispatcher` has no attribute `add_options`, add `web.options` (`#3062 <https://github.com/aio-libs/aiohttp/pull/3062>`_) -- correct filename in content-disposition with multipart body (`#3064 <https://github.com/aio-libs/aiohttp/pull/3064>`_) -- Many HTTP proxies has buggy keepalive support. - Let's not reuse connection but close it after processing every response. (`#3070 <https://github.com/aio-libs/aiohttp/pull/3070>`_) -- raise 413 "Payload Too Large" rather than raising ValueError in request.post() - Add helpful debug message to 413 responses (`#3087 <https://github.com/aio-libs/aiohttp/pull/3087>`_) -- Fix `StreamResponse` equality, now that they are `MutableMapping` objects. 
(`#3100 <https://github.com/aio-libs/aiohttp/pull/3100>`_) -- Fix server request objects comparison (`#3116 <https://github.com/aio-libs/aiohttp/pull/3116>`_) -- Do not hang on `206 Partial Content` response with `Content-Encoding: gzip` (`#3123 <https://github.com/aio-libs/aiohttp/pull/3123>`_) -- Fix timeout precondition checkers (`#3145 <https://github.com/aio-libs/aiohttp/pull/3145>`_) - - -Improved Documentation ----------------------- - -- Add a new FAQ entry that clarifies that you should not reuse response - objects in middleware functions. (`#3020 <https://github.com/aio-libs/aiohttp/pull/3020>`_) -- Add FAQ section "Why is creating a ClientSession outside of an event loop dangerous?" (`#3072 <https://github.com/aio-libs/aiohttp/pull/3072>`_) -- Fix link to Rambler (`#3115 <https://github.com/aio-libs/aiohttp/pull/3115>`_) -- Fix TCPSite documentation on the Server Reference page. (`#3146 <https://github.com/aio-libs/aiohttp/pull/3146>`_) -- Fix documentation build configuration file for Windows. (`#3147 <https://github.com/aio-libs/aiohttp/pull/3147>`_) -- Remove no longer existing lingering_timeout parameter of Application.make_handler from documentation. (`#3151 <https://github.com/aio-libs/aiohttp/pull/3151>`_) -- Mention that ``app.make_handler`` is deprecated, recommend to use runners - API instead. (`#3157 <https://github.com/aio-libs/aiohttp/pull/3157>`_) - - -Deprecations and Removals -------------------------- - -- Drop ``loop.current_task()`` from ``helpers.current_task()`` (`#2826 <https://github.com/aio-libs/aiohttp/pull/2826>`_) -- Drop ``reader`` parameter from ``request.multipart()``. (`#3090 <https://github.com/aio-libs/aiohttp/pull/3090>`_) - - -3.3.2 (2018-06-12) -================== - -- Many HTTP proxies has buggy keepalive support. Let's not reuse connection but - close it after processing every response. 
(`#3070 <https://github.com/aio-libs/aiohttp/pull/3070>`_) - -- Provide vendor source files in tarball (`#3076 <https://github.com/aio-libs/aiohttp/pull/3076>`_) - - -3.3.1 (2018-06-05) -================== - -- Fix ``sock_read`` timeout. (`#3053 <https://github.com/aio-libs/aiohttp/pull/3053>`_) -- When using a server-request body as the ``data=`` argument of a client request, - iterate over the content with ``readany`` instead of ``readline`` to avoid ``Line - too long`` errors. (`#3054 <https://github.com/aio-libs/aiohttp/pull/3054>`_) - - -3.3.0 (2018-06-01) -================== - -Features --------- - -- Raise ``ConnectionResetError`` instead of ``CancelledError`` on trying to - write to a closed stream. (`#2499 <https://github.com/aio-libs/aiohttp/pull/2499>`_) -- Implement ``ClientTimeout`` class and support socket read timeout. (`#2768 <https://github.com/aio-libs/aiohttp/pull/2768>`_) -- Enable logging when ``aiohttp.web`` is used as a program (`#2956 <https://github.com/aio-libs/aiohttp/pull/2956>`_) -- Add canonical property to resources (`#2968 <https://github.com/aio-libs/aiohttp/pull/2968>`_) -- Forbid reading response BODY after release (`#2983 <https://github.com/aio-libs/aiohttp/pull/2983>`_) -- Implement base protocol class to avoid a dependency from internal - ``asyncio.streams.FlowControlMixin`` (`#2986 <https://github.com/aio-libs/aiohttp/pull/2986>`_) -- Cythonize ``@helpers.reify``, 5% boost on macro benchmark (`#2995 <https://github.com/aio-libs/aiohttp/pull/2995>`_) -- Optimize HTTP parser (`#3015 <https://github.com/aio-libs/aiohttp/pull/3015>`_) -- Implement ``runner.addresses`` property. (`#3036 <https://github.com/aio-libs/aiohttp/pull/3036>`_) -- Use ``bytearray`` instead of a list of ``bytes`` in websocket reader. It - improves websocket message reading a little. (`#3039 <https://github.com/aio-libs/aiohttp/pull/3039>`_) -- Remove heartbeat on closing connection on keepalive timeout. The used hack - violates HTTP protocol. 
(`#3041 <https://github.com/aio-libs/aiohttp/pull/3041>`_) -- Limit websocket message size on reading to 4 MB by default. (`#3045 <https://github.com/aio-libs/aiohttp/pull/3045>`_) - - -Bugfixes --------- - -- Don't reuse a connection with the same URL but different proxy/TLS settings - (`#2981 <https://github.com/aio-libs/aiohttp/pull/2981>`_) -- When parsing the Forwarded header, the optional port number is now preserved. - (`#3009 <https://github.com/aio-libs/aiohttp/pull/3009>`_) - - -Improved Documentation ----------------------- - -- Make Change Log more visible in docs (`#3029 <https://github.com/aio-libs/aiohttp/pull/3029>`_) -- Make style and grammar improvements on the FAQ page. (`#3030 <https://github.com/aio-libs/aiohttp/pull/3030>`_) -- Document that signal handlers should be async functions since aiohttp 3.0 - (`#3032 <https://github.com/aio-libs/aiohttp/pull/3032>`_) - - -Deprecations and Removals -------------------------- - -- Deprecate custom application's router. (`#3021 <https://github.com/aio-libs/aiohttp/pull/3021>`_) - - -Misc ----- - -- #3008, #3011 - - -3.2.1 (2018-05-10) -================== - -- Don't reuse a connection with the same URL but different proxy/TLS settings - (`#2981 <https://github.com/aio-libs/aiohttp/pull/2981>`_) - - -3.2.0 (2018-05-06) -================== - -Features --------- - -- Raise ``TooManyRedirects`` exception when client gets redirected too many - times instead of returning last response. (`#2631 <https://github.com/aio-libs/aiohttp/pull/2631>`_) -- Extract route definitions into separate ``web_routedef.py`` file (`#2876 <https://github.com/aio-libs/aiohttp/pull/2876>`_) -- Raise an exception on request body reading after sending response. 
(`#2895 <https://github.com/aio-libs/aiohttp/pull/2895>`_)
-- ClientResponse and RequestInfo now have real_url property, which is request
-  url without fragment part being stripped (`#2925 <https://github.com/aio-libs/aiohttp/pull/2925>`_)
-- Speed up connector limiting (`#2937 <https://github.com/aio-libs/aiohttp/pull/2937>`_)
-- Added ``links`` property for ClientResponse object (`#2948 <https://github.com/aio-libs/aiohttp/pull/2948>`_)
-- Add ``request.config_dict`` for exposing nested applications data. (`#2949 <https://github.com/aio-libs/aiohttp/pull/2949>`_)
-- Speed up HTTP headers serialization, server micro-benchmark runs 5% faster
-  now. (`#2957 <https://github.com/aio-libs/aiohttp/pull/2957>`_)
-- Apply assertions in debug mode only (`#2966 <https://github.com/aio-libs/aiohttp/pull/2966>`_)
-
-
-Bugfixes
---------
-
-- expose property `app` for TestClient (`#2891 <https://github.com/aio-libs/aiohttp/pull/2891>`_)
-- Call on_chunk_sent when write_eof takes as a param the last chunk (`#2909 <https://github.com/aio-libs/aiohttp/pull/2909>`_)
-- A closing bracket was added to `__repr__` of resources (`#2935 <https://github.com/aio-libs/aiohttp/pull/2935>`_)
-- Fix compression of FileResponse (`#2942 <https://github.com/aio-libs/aiohttp/pull/2942>`_)
-- Fixes some bugs in the limit connection feature (`#2964 <https://github.com/aio-libs/aiohttp/pull/2964>`_)
-
-
-Improved Documentation
-----------------------
-
-- Drop ``async_timeout`` usage from documentation for client API in favor of
-  ``timeout`` parameter. (`#2865 <https://github.com/aio-libs/aiohttp/pull/2865>`_)
-- Improve Gunicorn logging documentation (`#2921 <https://github.com/aio-libs/aiohttp/pull/2921>`_)
-- Replace multipart writer `.serialize()` method with `.write()` in
-  documentation.
(`#2965 <https://github.com/aio-libs/aiohttp/pull/2965>`_) - - -Deprecations and Removals -------------------------- - -- Deprecate Application.make_handler() (`#2938 <https://github.com/aio-libs/aiohttp/pull/2938>`_) - - -Misc ----- - -- #2958 - - -3.1.3 (2018-04-12) -================== - -- Fix cancellation broadcast during DNS resolve (`#2910 <https://github.com/aio-libs/aiohttp/pull/2910>`_) - - -3.1.2 (2018-04-05) -================== - -- Make ``LineTooLong`` exception more detailed about actual data size (`#2863 <https://github.com/aio-libs/aiohttp/pull/2863>`_) -- Call ``on_chunk_sent`` when write_eof takes as a param the last chunk (`#2909 <https://github.com/aio-libs/aiohttp/pull/2909>`_) - - -3.1.1 (2018-03-27) -================== - -- Support *asynchronous iterators* (and *asynchronous generators* as - well) in both client and server API as request / response BODY - payloads. (`#2802 <https://github.com/aio-libs/aiohttp/pull/2802>`_) - - -3.1.0 (2018-03-21) -================== - -Welcome to aiohttp 3.1 release. - -This is an *incremental* release, fully backward compatible with *aiohttp 3.0*. - -But we have added several new features. - -The most visible one is ``app.add_routes()`` (an alias for existing -``app.router.add_routes()``. The addition is very important because -all *aiohttp* docs now uses ``app.add_routes()`` call in code -snippets. All your existing code still do register routes / resource -without any warning but you've got the idea for a favorite way: noisy -``app.router.add_get()`` is replaced by ``app.add_routes()``. - -The library does not make a preference between decorators:: - - routes = web.RouteTableDef() - - @routes.get('/') - async def hello(request): - return web.Response(text="Hello, world") - - app.add_routes(routes) - -and route tables as a list:: - - async def hello(request): - return web.Response(text="Hello, world") - - app.add_routes([web.get('/', hello)]) - -Both ways are equal, user may decide basing on own code taste. 
- -Also we have a lot of minor features, bug fixes and documentation -updates, see below. - -Features --------- - -- Relax JSON content-type checking in the ``ClientResponse.json()`` to allow - "application/xxx+json" instead of strict "application/json". (`#2206 <https://github.com/aio-libs/aiohttp/pull/2206>`_) -- Bump C HTTP parser to version 2.8 (`#2730 <https://github.com/aio-libs/aiohttp/pull/2730>`_) -- Accept a coroutine as an application factory in ``web.run_app`` and gunicorn - worker. (`#2739 <https://github.com/aio-libs/aiohttp/pull/2739>`_) -- Implement application cleanup context (``app.cleanup_ctx`` property). (`#2747 <https://github.com/aio-libs/aiohttp/pull/2747>`_) -- Make ``writer.write_headers`` a coroutine. (`#2762 <https://github.com/aio-libs/aiohttp/pull/2762>`_) -- Add tracking signals for getting request/response bodies. (`#2767 <https://github.com/aio-libs/aiohttp/pull/2767>`_) -- Deprecate ClientResponseError.code in favor of .status to keep similarity - with response classes. (`#2781 <https://github.com/aio-libs/aiohttp/pull/2781>`_) -- Implement ``app.add_routes()`` method. (`#2787 <https://github.com/aio-libs/aiohttp/pull/2787>`_) -- Implement ``web.static()`` and ``RouteTableDef.static()`` API. (`#2795 <https://github.com/aio-libs/aiohttp/pull/2795>`_) -- Install a test event loop as default by ``asyncio.set_event_loop()``. The - change affects aiohttp test utils but backward compatibility is not broken - for 99.99% of use cases. (`#2804 <https://github.com/aio-libs/aiohttp/pull/2804>`_) -- Refactor ``ClientResponse`` constructor: make logically required constructor - arguments mandatory, drop ``_post_init()`` method. (`#2820 <https://github.com/aio-libs/aiohttp/pull/2820>`_) -- Use ``app.add_routes()`` in server docs everywhere (`#2830 <https://github.com/aio-libs/aiohttp/pull/2830>`_) -- Websockets refactoring, all websocket writer methods are converted into - coroutines. 
(`#2836 <https://github.com/aio-libs/aiohttp/pull/2836>`_)
-- Provide ``Content-Range`` header for ``Range`` requests (`#2844 <https://github.com/aio-libs/aiohttp/pull/2844>`_)
-
-
-Bugfixes
---------
-
-- Fix websocket client return EofStream. (`#2784 <https://github.com/aio-libs/aiohttp/pull/2784>`_)
-- Fix websocket demo. (`#2789 <https://github.com/aio-libs/aiohttp/pull/2789>`_)
-- Property ``BaseRequest.http_range`` now returns a python-like slice when
-  requesting the tail of the range. It's now indicated by a negative value in
-  ``range.start`` rather than in ``range.stop`` (`#2805 <https://github.com/aio-libs/aiohttp/pull/2805>`_)
-- Close a connection if an unexpected exception occurs while sending a request
-  (`#2827 <https://github.com/aio-libs/aiohttp/pull/2827>`_)
-- Fix firing DNS tracing events. (`#2841 <https://github.com/aio-libs/aiohttp/pull/2841>`_)
-
-
-Improved Documentation
-----------------------
-
-- Document behavior when cchardet detects encodings that are unknown to Python.
-  (`#2732 <https://github.com/aio-libs/aiohttp/pull/2732>`_)
-- Add diagrams for tracing request life style. (`#2748 <https://github.com/aio-libs/aiohttp/pull/2748>`_)
-- Drop removed functionality for passing ``StreamReader`` as data at client
-  side. (`#2793 <https://github.com/aio-libs/aiohttp/pull/2793>`_)
-
-3.0.9 (2018-03-14)
-==================
-
-- Close a connection if an unexpected exception occurs while sending a request
-  (`#2827 <https://github.com/aio-libs/aiohttp/pull/2827>`_)
-
-
-3.0.8 (2018-03-12)
-==================
-
-- Use ``asyncio.current_task()`` on Python 3.7 (`#2825 <https://github.com/aio-libs/aiohttp/pull/2825>`_)
-
-3.0.7 (2018-03-08)
-==================
-
-- Fix SSL proxy support by client. (`#2810 <https://github.com/aio-libs/aiohttp/pull/2810>`_)
-- Restore an imperative check in ``setup.py`` for python version. The check
-  works in parallel to environment marker.
As effect an error about unsupported - Python versions is raised even on outdated systems with very old - ``setuptools`` version installed. (`#2813 <https://github.com/aio-libs/aiohttp/pull/2813>`_) - - -3.0.6 (2018-03-05) -================== - -- Add ``_reuse_address`` and ``_reuse_port`` to - ``web_runner.TCPSite.__slots__``. (`#2792 <https://github.com/aio-libs/aiohttp/pull/2792>`_) - -3.0.5 (2018-02-27) -================== - -- Fix ``InvalidStateError`` on processing a sequence of two - ``RequestHandler.data_received`` calls on web server. (`#2773 <https://github.com/aio-libs/aiohttp/pull/2773>`_) - -3.0.4 (2018-02-26) -================== - -- Fix ``IndexError`` in HTTP request handling by server. (`#2752 <https://github.com/aio-libs/aiohttp/pull/2752>`_) -- Fix MultipartWriter.append* no longer returning part/payload. (`#2759 <https://github.com/aio-libs/aiohttp/pull/2759>`_) - - -3.0.3 (2018-02-25) -================== - -- Relax ``attrs`` dependency to minimal actually supported version - 17.0.3 The change allows to avoid version conflicts with currently - existing test tools. - -3.0.2 (2018-02-23) -================== - -Security Fix ------------- - -- Prevent Windows absolute URLs in static files. Paths like - ``/static/D:\path`` and ``/static/\\hostname\drive\path`` are - forbidden. - -3.0.1 -===== - -- Technical release for fixing distribution problems. - -3.0.0 (2018-02-12) -================== - -Features --------- - -- Speed up the `PayloadWriter.write` method for large request bodies. (`#2126 <https://github.com/aio-libs/aiohttp/pull/2126>`_) -- StreamResponse and Response are now MutableMappings. (`#2246 <https://github.com/aio-libs/aiohttp/pull/2246>`_) -- ClientSession publishes a set of signals to track the HTTP request execution. 
- (`#2313 <https://github.com/aio-libs/aiohttp/pull/2313>`_) -- Content-Disposition fast access in ClientResponse (`#2455 <https://github.com/aio-libs/aiohttp/pull/2455>`_) -- Added support to Flask-style decorators with class-based Views. (`#2472 <https://github.com/aio-libs/aiohttp/pull/2472>`_) -- Signal handlers (registered callbacks) should be coroutines. (`#2480 <https://github.com/aio-libs/aiohttp/pull/2480>`_) -- Support ``async with test_client.ws_connect(...)`` (`#2525 <https://github.com/aio-libs/aiohttp/pull/2525>`_) -- Introduce *site* and *application runner* as underlying API for `web.run_app` - implementation. (`#2530 <https://github.com/aio-libs/aiohttp/pull/2530>`_) -- Only quote multipart boundary when necessary and sanitize input (`#2544 <https://github.com/aio-libs/aiohttp/pull/2544>`_) -- Make the `aiohttp.ClientResponse.get_encoding` method public with the - processing of invalid charset while detecting content encoding. (`#2549 <https://github.com/aio-libs/aiohttp/pull/2549>`_) -- Add optional configurable per message compression for - `ClientWebSocketResponse` and `WebSocketResponse`. (`#2551 <https://github.com/aio-libs/aiohttp/pull/2551>`_) -- Add hysteresis to `StreamReader` to prevent flipping between paused and - resumed states too often. (`#2555 <https://github.com/aio-libs/aiohttp/pull/2555>`_) -- Support `.netrc` by `trust_env` (`#2581 <https://github.com/aio-libs/aiohttp/pull/2581>`_) -- Avoid to create a new resource when adding a route with the same name and - path of the last added resource (`#2586 <https://github.com/aio-libs/aiohttp/pull/2586>`_) -- `MultipartWriter.boundary` is `str` now. 
(`#2589 <https://github.com/aio-libs/aiohttp/pull/2589>`_) -- Allow a custom port to be used by `TestServer` (and associated pytest - fixtures) (`#2613 <https://github.com/aio-libs/aiohttp/pull/2613>`_) -- Add param access_log_class to web.run_app function (`#2615 <https://github.com/aio-libs/aiohttp/pull/2615>`_) -- Add ``ssl`` parameter to client API (`#2626 <https://github.com/aio-libs/aiohttp/pull/2626>`_) -- Fixes performance issue introduced by #2577. When there are no middlewares - installed by the user, no additional and useless code is executed. (`#2629 <https://github.com/aio-libs/aiohttp/pull/2629>`_) -- Rename PayloadWriter to StreamWriter (`#2654 <https://github.com/aio-libs/aiohttp/pull/2654>`_) -- New options *reuse_port*, *reuse_address* are added to `run_app` and - `TCPSite`. (`#2679 <https://github.com/aio-libs/aiohttp/pull/2679>`_) -- Use custom classes to pass client signals parameters (`#2686 <https://github.com/aio-libs/aiohttp/pull/2686>`_) -- Use ``attrs`` library for data classes, replace `namedtuple`. (`#2690 <https://github.com/aio-libs/aiohttp/pull/2690>`_) -- Pytest fixtures renaming, add ``aiohttp_`` prefix (`#2578 <https://github.com/aio-libs/aiohttp/pull/2578>`_) -- Add ``aiohttp-`` prefix for ``pytest-aiohttp`` command line - parameters (`#2578 <https://github.com/aio-libs/aiohttp/pull/2578>`_) - -Bugfixes --------- - -- Correctly process upgrade request from server to HTTP2. ``aiohttp`` does not - support HTTP2 yet, the protocol is not upgraded but response is handled - correctly. 
(`#2277 <https://github.com/aio-libs/aiohttp/pull/2277>`_) -- Fix ClientConnectorSSLError and ClientProxyConnectionError for proxy - connector (`#2408 <https://github.com/aio-libs/aiohttp/pull/2408>`_) -- Fix connector convert OSError to ClientConnectorError (`#2423 <https://github.com/aio-libs/aiohttp/pull/2423>`_) -- Fix connection attempts for multiple dns hosts (`#2424 <https://github.com/aio-libs/aiohttp/pull/2424>`_) -- Fix writing to closed transport by raising `asyncio.CancelledError` (`#2499 <https://github.com/aio-libs/aiohttp/pull/2499>`_) -- Fix warning in `ClientSession.__del__` by stopping to try to close it. - (`#2523 <https://github.com/aio-libs/aiohttp/pull/2523>`_) -- Fixed race-condition for iterating addresses from the DNSCache. (`#2620 <https://github.com/aio-libs/aiohttp/pull/2620>`_) -- Fix default value of `access_log_format` argument in `web.run_app` (`#2649 <https://github.com/aio-libs/aiohttp/pull/2649>`_) -- Freeze sub-application on adding to parent app (`#2656 <https://github.com/aio-libs/aiohttp/pull/2656>`_) -- Do percent encoding for `.url_for()` parameters (`#2668 <https://github.com/aio-libs/aiohttp/pull/2668>`_) -- Correctly process request start time and multiple request/response - headers in access log extra (`#2641 <https://github.com/aio-libs/aiohttp/pull/2641>`_) - -Improved Documentation ----------------------- - -- Improve tutorial docs, using `literalinclude` to link to the actual files. - (`#2396 <https://github.com/aio-libs/aiohttp/pull/2396>`_) -- Small improvement docs: better example for file uploads. (`#2401 <https://github.com/aio-libs/aiohttp/pull/2401>`_) -- Rename `from_env` to `trust_env` in client reference. (`#2451 <https://github.com/aio-libs/aiohttp/pull/2451>`_) -- Fixed mistype in `Proxy Support` section where `trust_env` parameter was - used in `session.get("http://python.org", trust_env=True)` method instead of - aiohttp.ClientSession constructor as follows: - `aiohttp.ClientSession(trust_env=True)`. 
(`#2688 <https://github.com/aio-libs/aiohttp/pull/2688>`_) -- Fix issue with unittest example not compiling in testing docs. (`#2717 <https://github.com/aio-libs/aiohttp/pull/2717>`_) - -Deprecations and Removals -------------------------- - -- Simplify HTTP pipelining implementation (`#2109 <https://github.com/aio-libs/aiohttp/pull/2109>`_) -- Drop `StreamReaderPayload` and `DataQueuePayload`. (`#2257 <https://github.com/aio-libs/aiohttp/pull/2257>`_) -- Drop `md5` and `sha1` finger-prints (`#2267 <https://github.com/aio-libs/aiohttp/pull/2267>`_) -- Drop WSMessage.tp (`#2321 <https://github.com/aio-libs/aiohttp/pull/2321>`_) -- Drop Python 3.4 and Python 3.5.0, 3.5.1, 3.5.2. Minimal supported Python - versions are 3.5.3 and 3.6.0. `yield from` is gone, use `async/await` syntax. - (`#2343 <https://github.com/aio-libs/aiohttp/pull/2343>`_) -- Drop `aiohttp.Timeout` and use `async_timeout.timeout` instead. (`#2348 <https://github.com/aio-libs/aiohttp/pull/2348>`_) -- Drop `resolve` param from TCPConnector. (`#2377 <https://github.com/aio-libs/aiohttp/pull/2377>`_) -- Add DeprecationWarning for returning HTTPException (`#2415 <https://github.com/aio-libs/aiohttp/pull/2415>`_) -- `send_str()`, `send_bytes()`, `send_json()`, `ping()` and `pong()` are - genuine async functions now. (`#2475 <https://github.com/aio-libs/aiohttp/pull/2475>`_) -- Drop undocumented `app.on_pre_signal` and `app.on_post_signal`. Signal - handlers should be coroutines, support for regular functions is dropped. - (`#2480 <https://github.com/aio-libs/aiohttp/pull/2480>`_) -- `StreamResponse.drain()` is not a part of public API anymore, just use `await - StreamResponse.write()`. `StreamResponse.write` is converted to async - function. (`#2483 <https://github.com/aio-libs/aiohttp/pull/2483>`_) -- Drop deprecated `slow_request_timeout` param and `**kwargs`` from - `RequestHandler`. (`#2500 <https://github.com/aio-libs/aiohttp/pull/2500>`_) -- Drop deprecated `resource.url()`. 
(`#2501 <https://github.com/aio-libs/aiohttp/pull/2501>`_) -- Remove `%u` and `%l` format specifiers from access log format. (`#2506 <https://github.com/aio-libs/aiohttp/pull/2506>`_) -- Drop deprecated `request.GET` property. (`#2547 <https://github.com/aio-libs/aiohttp/pull/2547>`_) -- Simplify stream classes: drop `ChunksQueue` and `FlowControlChunksQueue`, - merge `FlowControlStreamReader` functionality into `StreamReader`, drop - `FlowControlStreamReader` name. (`#2555 <https://github.com/aio-libs/aiohttp/pull/2555>`_) -- Do not create a new resource on `router.add_get(..., allow_head=True)` - (`#2585 <https://github.com/aio-libs/aiohttp/pull/2585>`_) -- Drop access to TCP tuning options from PayloadWriter and Response classes - (`#2604 <https://github.com/aio-libs/aiohttp/pull/2604>`_) -- Drop deprecated `encoding` parameter from client API (`#2606 <https://github.com/aio-libs/aiohttp/pull/2606>`_) -- Deprecate ``verify_ssl``, ``ssl_context`` and ``fingerprint`` parameters in - client API (`#2626 <https://github.com/aio-libs/aiohttp/pull/2626>`_) -- Get rid of the legacy class StreamWriter. (`#2651 <https://github.com/aio-libs/aiohttp/pull/2651>`_) -- Forbid non-strings in `resource.url_for()` parameters. (`#2668 <https://github.com/aio-libs/aiohttp/pull/2668>`_) -- Deprecate inheritance from ``ClientSession`` and ``web.Application`` and - custom user attributes for ``ClientSession``, ``web.Request`` and - ``web.Application`` (`#2691 <https://github.com/aio-libs/aiohttp/pull/2691>`_) -- Drop `resp = await aiohttp.request(...)` syntax for sake of `async with - aiohttp.request(...) as resp:`. (`#2540 <https://github.com/aio-libs/aiohttp/pull/2540>`_) -- Forbid synchronous context managers for `ClientSession` and test - server/client. 
(`#2362 <https://github.com/aio-libs/aiohttp/pull/2362>`_) - - -Misc ----- - -- #2552 - - -2.3.10 (2018-02-02) -=================== - -- Fix 100% CPU usage on HTTP GET and websocket connection just after it (`#1955 <https://github.com/aio-libs/aiohttp/pull/1955>`_) - -- Patch broken `ssl.match_hostname()` on Python<3.7 (`#2674 <https://github.com/aio-libs/aiohttp/pull/2674>`_) - -2.3.9 (2018-01-16) -================== - -- Fix colon handing in path for dynamic resources (`#2670 <https://github.com/aio-libs/aiohttp/pull/2670>`_) - -2.3.8 (2018-01-15) -================== - -- Do not use `yarl.unquote` internal function in aiohttp. Fix - incorrectly unquoted path part in URL dispatcher (`#2662 <https://github.com/aio-libs/aiohttp/pull/2662>`_) - -- Fix compatibility with `yarl==1.0.0` (`#2662 <https://github.com/aio-libs/aiohttp/pull/2662>`_) - -2.3.7 (2017-12-27) -================== - -- Fixed race-condition for iterating addresses from the DNSCache. (`#2620 <https://github.com/aio-libs/aiohttp/pull/2620>`_) -- Fix docstring for request.host (`#2591 <https://github.com/aio-libs/aiohttp/pull/2591>`_) -- Fix docstring for request.remote (`#2592 <https://github.com/aio-libs/aiohttp/pull/2592>`_) - - -2.3.6 (2017-12-04) -================== - -- Correct `request.app` context (for handlers not just middlewares). (`#2577 <https://github.com/aio-libs/aiohttp/pull/2577>`_) - - -2.3.5 (2017-11-30) -================== - -- Fix compatibility with `pytest` 3.3+ (`#2565 <https://github.com/aio-libs/aiohttp/pull/2565>`_) - - -2.3.4 (2017-11-29) -================== - -- Make `request.app` point to proper application instance when using nested - applications (with middlewares). (`#2550 <https://github.com/aio-libs/aiohttp/pull/2550>`_) -- Change base class of ClientConnectorSSLError to ClientSSLError from - ClientConnectorError. (`#2563 <https://github.com/aio-libs/aiohttp/pull/2563>`_) -- Return client connection back to free pool on error in `connector.connect()`. 
- (`#2567 <https://github.com/aio-libs/aiohttp/pull/2567>`_) - - -2.3.3 (2017-11-17) -================== - -- Having a `;` in Response content type does not assume it contains a charset - anymore. (`#2197 <https://github.com/aio-libs/aiohttp/pull/2197>`_) -- Use `getattr(asyncio, 'async')` for keeping compatibility with Python 3.7. - (`#2476 <https://github.com/aio-libs/aiohttp/pull/2476>`_) -- Ignore `NotImplementedError` raised by `set_child_watcher` from `uvloop`. - (`#2491 <https://github.com/aio-libs/aiohttp/pull/2491>`_) -- Fix warning in `ClientSession.__del__` by stopping to try to close it. - (`#2523 <https://github.com/aio-libs/aiohttp/pull/2523>`_) -- Fixed typos in Third-party libraries page and added async-v20 to the list - (`#2510 <https://github.com/aio-libs/aiohttp/pull/2510>`_) - - -2.3.2 (2017-11-01) -================== - -- Fix passing client max size on cloning request obj. (`#2385 <https://github.com/aio-libs/aiohttp/pull/2385>`_) -- Fix ClientConnectorSSLError and ClientProxyConnectionError for proxy - connector. (`#2408 <https://github.com/aio-libs/aiohttp/pull/2408>`_) -- Drop generated `_http_parser` shared object from tarball distribution. (`#2414 <https://github.com/aio-libs/aiohttp/pull/2414>`_) -- Fix connector convert OSError to ClientConnectorError. (`#2423 <https://github.com/aio-libs/aiohttp/pull/2423>`_) -- Fix connection attempts for multiple dns hosts. (`#2424 <https://github.com/aio-libs/aiohttp/pull/2424>`_) -- Fix ValueError for AF_INET6 sockets if a preexisting INET6 socket is passed to the - `aiohttp.web.run_app` function. (`#2431 <https://github.com/aio-libs/aiohttp/pull/2431>`_) -- `_SessionRequestContextManager` closes the session properly now. (`#2441 <https://github.com/aio-libs/aiohttp/pull/2441>`_) -- Rename `from_env` to `trust_env` in client reference. 
(`#2451 <https://github.com/aio-libs/aiohttp/pull/2451>`_) - - -2.3.1 (2017-10-18) -================== - -- Relax attribute lookup in warning about old-styled middleware (`#2340 <https://github.com/aio-libs/aiohttp/pull/2340>`_) - - -2.3.0 (2017-10-18) -================== - -Features --------- - -- Add SSL related params to `ClientSession.request` (`#1128 <https://github.com/aio-libs/aiohttp/pull/1128>`_) -- Make enable_compression work on HTTP/1.0 (`#1828 <https://github.com/aio-libs/aiohttp/pull/1828>`_) -- Deprecate registering synchronous web handlers (`#1993 <https://github.com/aio-libs/aiohttp/pull/1993>`_) -- Switch to `multidict 3.0`. All HTTP headers preserve casing now but compared - in case-insensitive way. (`#1994 <https://github.com/aio-libs/aiohttp/pull/1994>`_) -- Improvement for `normalize_path_middleware`. Added possibility to handle URLs - with query string. (`#1995 <https://github.com/aio-libs/aiohttp/pull/1995>`_) -- Use towncrier for CHANGES.txt build (`#1997 <https://github.com/aio-libs/aiohttp/pull/1997>`_) -- Implement `trust_env=True` param in `ClientSession`. (`#1998 <https://github.com/aio-libs/aiohttp/pull/1998>`_) -- Added variable to customize proxy headers (`#2001 <https://github.com/aio-libs/aiohttp/pull/2001>`_) -- Implement `router.add_routes` and router decorators. 
(`#2004 <https://github.com/aio-libs/aiohttp/pull/2004>`_) -- Deprecated `BaseRequest.has_body` in favor of - `BaseRequest.can_read_body` Added `BaseRequest.body_exists` - attribute that stays static for the lifetime of the request (`#2005 <https://github.com/aio-libs/aiohttp/pull/2005>`_) -- Provide `BaseRequest.loop` attribute (`#2024 <https://github.com/aio-libs/aiohttp/pull/2024>`_) -- Make `_CoroGuard` awaitable and fix `ClientSession.close` warning message - (`#2026 <https://github.com/aio-libs/aiohttp/pull/2026>`_) -- Responses to redirects without Location header are returned instead of - raising a RuntimeError (`#2030 <https://github.com/aio-libs/aiohttp/pull/2030>`_) -- Added `get_client`, `get_server`, `setUpAsync` and `tearDownAsync` methods to - AioHTTPTestCase (`#2032 <https://github.com/aio-libs/aiohttp/pull/2032>`_) -- Add automatically a SafeChildWatcher to the test loop (`#2058 <https://github.com/aio-libs/aiohttp/pull/2058>`_) -- add ability to disable automatic response decompression (`#2110 <https://github.com/aio-libs/aiohttp/pull/2110>`_) -- Add support for throttling DNS request, avoiding the requests saturation when - there is a miss in the DNS cache and many requests getting into the connector - at the same time. (`#2111 <https://github.com/aio-libs/aiohttp/pull/2111>`_) -- Use request for getting access log information instead of message/transport - pair. Add `RequestBase.remote` property for accessing to IP of client - initiated HTTP request. (`#2123 <https://github.com/aio-libs/aiohttp/pull/2123>`_) -- json() raises a ContentTypeError exception if the content-type does not meet - the requirements instead of raising a generic ClientResponseError. (`#2136 <https://github.com/aio-libs/aiohttp/pull/2136>`_) -- Make the HTTP client able to return HTTP chunks when chunked transfer - encoding is used. 
(`#2150 <https://github.com/aio-libs/aiohttp/pull/2150>`_) -- add `append_version` arg into `StaticResource.url` and - `StaticResource.url_for` methods for getting an url with hash (version) of - the file. (`#2157 <https://github.com/aio-libs/aiohttp/pull/2157>`_) -- Fix parsing the Forwarded header. * commas and semicolons are allowed inside - quoted-strings; * empty forwarded-pairs (as in for=_1;;by=_2) are allowed; * - non-standard parameters are allowed (although this alone could be easily done - in the previous parser). (`#2173 <https://github.com/aio-libs/aiohttp/pull/2173>`_) -- Don't require ssl module to run. aiohttp does not require SSL to function. - The code paths involved with SSL will only be hit upon SSL usage. Raise - `RuntimeError` if HTTPS protocol is required but ssl module is not present. - (`#2221 <https://github.com/aio-libs/aiohttp/pull/2221>`_) -- Accept coroutine fixtures in pytest plugin (`#2223 <https://github.com/aio-libs/aiohttp/pull/2223>`_) -- Call `shutdown_asyncgens` before event loop closing on Python 3.6. (`#2227 <https://github.com/aio-libs/aiohttp/pull/2227>`_) -- Speed up Signals when there are no receivers (`#2229 <https://github.com/aio-libs/aiohttp/pull/2229>`_) -- Raise `InvalidURL` instead of `ValueError` on fetches with invalid URL. - (`#2241 <https://github.com/aio-libs/aiohttp/pull/2241>`_) -- Move `DummyCookieJar` into `cookiejar.py` (`#2242 <https://github.com/aio-libs/aiohttp/pull/2242>`_) -- `run_app`: Make `print=None` disable printing (`#2260 <https://github.com/aio-libs/aiohttp/pull/2260>`_) -- Support `brotli` encoding (generic-purpose lossless compression algorithm) - (`#2270 <https://github.com/aio-libs/aiohttp/pull/2270>`_) -- Add server support for WebSockets Per-Message Deflate. Add client option to - add deflate compress header in WebSockets request header. If calling - ClientSession.ws_connect() with `compress=15` the client will support deflate - compress negotiation. 
(`#2273 <https://github.com/aio-libs/aiohttp/pull/2273>`_) -- Support `verify_ssl`, `fingerprint`, `ssl_context` and `proxy_headers` by - `client.ws_connect`. (`#2292 <https://github.com/aio-libs/aiohttp/pull/2292>`_) -- Added `aiohttp.ClientConnectorSSLError` when connection fails due - `ssl.SSLError` (`#2294 <https://github.com/aio-libs/aiohttp/pull/2294>`_) -- `aiohttp.web.Application.make_handler` support `access_log_class` (`#2315 <https://github.com/aio-libs/aiohttp/pull/2315>`_) -- Build HTTP parser extension in non-strict mode by default. (`#2332 <https://github.com/aio-libs/aiohttp/pull/2332>`_) - - -Bugfixes --------- - -- Clear auth information on redirecting to other domain (`#1699 <https://github.com/aio-libs/aiohttp/pull/1699>`_) -- Fix missing app.loop on startup hooks during tests (`#2060 <https://github.com/aio-libs/aiohttp/pull/2060>`_) -- Fix issue with synchronous session closing when using `ClientSession` as an - asynchronous context manager. (`#2063 <https://github.com/aio-libs/aiohttp/pull/2063>`_) -- Fix issue with `CookieJar` incorrectly expiring cookies in some edge cases. - (`#2084 <https://github.com/aio-libs/aiohttp/pull/2084>`_) -- Force use of IPv4 during test, this will make tests run in a Docker container - (`#2104 <https://github.com/aio-libs/aiohttp/pull/2104>`_) -- Warnings about unawaited coroutines now correctly point to the user's code. - (`#2106 <https://github.com/aio-libs/aiohttp/pull/2106>`_) -- Fix issue with `IndexError` being raised by the `StreamReader.iter_chunks()` - generator. (`#2112 <https://github.com/aio-libs/aiohttp/pull/2112>`_) -- Support HTTP 308 Permanent redirect in client class. (`#2114 <https://github.com/aio-libs/aiohttp/pull/2114>`_) -- Fix `FileResponse` sending empty chunked body on 304. (`#2143 <https://github.com/aio-libs/aiohttp/pull/2143>`_) -- Do not add `Content-Length: 0` to GET/HEAD/TRACE/OPTIONS requests by default. 
- (`#2167 <https://github.com/aio-libs/aiohttp/pull/2167>`_) -- Fix parsing the Forwarded header according to RFC 7239. (`#2170 <https://github.com/aio-libs/aiohttp/pull/2170>`_) -- Securely determining remote/scheme/host #2171 (`#2171 <https://github.com/aio-libs/aiohttp/pull/2171>`_) -- Fix header name parsing, if name is split into multiple lines (`#2183 <https://github.com/aio-libs/aiohttp/pull/2183>`_) -- Handle session close during connection, `KeyError: - <aiohttp.connector._TransportPlaceholder>` (`#2193 <https://github.com/aio-libs/aiohttp/pull/2193>`_) -- Fixes uncaught `TypeError` in `helpers.guess_filename` if `name` is not a - string (`#2201 <https://github.com/aio-libs/aiohttp/pull/2201>`_) -- Raise OSError on async DNS lookup if resolved domain is an alias for another - one, which does not have an A or CNAME record. (`#2231 <https://github.com/aio-libs/aiohttp/pull/2231>`_) -- Fix incorrect warning in `StreamReader`. (`#2251 <https://github.com/aio-libs/aiohttp/pull/2251>`_) -- Properly clone state of web request (`#2284 <https://github.com/aio-libs/aiohttp/pull/2284>`_) -- Fix C HTTP parser for cases when status line is split into different TCP - packets. (`#2311 <https://github.com/aio-libs/aiohttp/pull/2311>`_) -- Fix `web.FileResponse` overriding user supplied Content-Type (`#2317 <https://github.com/aio-libs/aiohttp/pull/2317>`_) - - -Improved Documentation ----------------------- - -- Add a note about possible performance degradation in `await resp.text()` if - charset was not provided by `Content-Type` HTTP header. Pass explicit - encoding to solve it. (`#1811 <https://github.com/aio-libs/aiohttp/pull/1811>`_) -- Drop `disqus` widget from documentation pages. (`#2018 <https://github.com/aio-libs/aiohttp/pull/2018>`_) -- Add a graceful shutdown section to the client usage documentation. (`#2039 <https://github.com/aio-libs/aiohttp/pull/2039>`_) -- Document `connector_owner` parameter. 
(`#2072 <https://github.com/aio-libs/aiohttp/pull/2072>`_) -- Update the doc of web.Application (`#2081 <https://github.com/aio-libs/aiohttp/pull/2081>`_) -- Fix mistake about access log disabling. (`#2085 <https://github.com/aio-libs/aiohttp/pull/2085>`_) -- Add example usage of on_startup and on_shutdown signals by creating and - disposing an aiopg connection engine. (`#2131 <https://github.com/aio-libs/aiohttp/pull/2131>`_) -- Document `encoded=True` for `yarl.URL`, it disables all yarl transformations. - (`#2198 <https://github.com/aio-libs/aiohttp/pull/2198>`_) -- Document that all app's middleware factories are run for every request. - (`#2225 <https://github.com/aio-libs/aiohttp/pull/2225>`_) -- Reflect the fact that default resolver is threaded one starting from aiohttp - 1.1 (`#2228 <https://github.com/aio-libs/aiohttp/pull/2228>`_) - - -Deprecations and Removals -------------------------- - -- Drop deprecated `Server.finish_connections` (`#2006 <https://github.com/aio-libs/aiohttp/pull/2006>`_) -- Drop %O format from logging, use %b instead. Drop %e format from logging, - environment variables are not supported anymore. (`#2123 <https://github.com/aio-libs/aiohttp/pull/2123>`_) -- Drop deprecated secure_proxy_ssl_header support (`#2171 <https://github.com/aio-libs/aiohttp/pull/2171>`_) -- Removed TimeService in favor of simple caching. TimeService also had a bug - where it lost about 0.5 seconds per second. 
(`#2176 <https://github.com/aio-libs/aiohttp/pull/2176>`_) -- Drop unused response_factory from static files API (`#2290 <https://github.com/aio-libs/aiohttp/pull/2290>`_) - - -Misc ----- - -- #2013, #2014, #2048, #2094, #2149, #2187, #2214, #2225, #2243, #2248 - - -2.2.5 (2017-08-03) -================== - -- Don't raise deprecation warning on - `loop.run_until_complete(client.close())` (`#2065 <https://github.com/aio-libs/aiohttp/pull/2065>`_) - -2.2.4 (2017-08-02) -================== - -- Fix issue with synchronous session closing when using ClientSession - as an asynchronous context manager. (`#2063 <https://github.com/aio-libs/aiohttp/pull/2063>`_) - -2.2.3 (2017-07-04) -================== - -- Fix `_CoroGuard` for python 3.4 - -2.2.2 (2017-07-03) -================== - -- Allow `await session.close()` along with `yield from session.close()` - - -2.2.1 (2017-07-02) -================== - -- Relax `yarl` requirement to 0.11+ - -- Backport #2026: `session.close` *is* a coroutine (`#2029 <https://github.com/aio-libs/aiohttp/pull/2029>`_) - - -2.2.0 (2017-06-20) -================== - -- Add doc for add_head, update doc for add_get. (`#1944 <https://github.com/aio-libs/aiohttp/pull/1944>`_) - -- Fixed consecutive calls for `Response.write_eof`. - -- Retain method attributes (e.g. :code:`__doc__`) when registering synchronous - handlers for resources. (`#1953 <https://github.com/aio-libs/aiohttp/pull/1953>`_) - -- Added signal TERM handling in `run_app` to gracefully exit (`#1932 <https://github.com/aio-libs/aiohttp/pull/1932>`_) - -- Fix websocket issues caused by frame fragmentation. 
(`#1962 <https://github.com/aio-libs/aiohttp/pull/1962>`_) - -- Raise RuntimeError if you try to set the Content Length and enable - chunked encoding at the same time (`#1941 <https://github.com/aio-libs/aiohttp/pull/1941>`_) - -- Small update for `unittest_run_loop` - -- Use CIMultiDict for ClientRequest.skip_auto_headers (`#1970 <https://github.com/aio-libs/aiohttp/pull/1970>`_) - -- Fix wrong startup sequence: test server and `run_app()` do not raise - `DeprecationWarning` now (`#1947 <https://github.com/aio-libs/aiohttp/pull/1947>`_) - -- Make sure cleanup signal is sent if startup signal has been sent (`#1959 <https://github.com/aio-libs/aiohttp/pull/1959>`_) - -- Fixed server keep-alive handler, could cause 100% cpu utilization (`#1955 <https://github.com/aio-libs/aiohttp/pull/1955>`_) - -- Connection can be destroyed before response gets processed if - `await aiohttp.request(..)` is used (`#1981 <https://github.com/aio-libs/aiohttp/pull/1981>`_) - -- MultipartReader does not work with -OO (`#1969 <https://github.com/aio-libs/aiohttp/pull/1969>`_) - -- Fixed `ClientPayloadError` with blank `Content-Encoding` header (`#1931 <https://github.com/aio-libs/aiohttp/pull/1931>`_) - -- Support `deflate` encoding implemented in `httpbin.org/deflate` (`#1918 <https://github.com/aio-libs/aiohttp/pull/1918>`_) - -- Fix BadStatusLine caused by extra `CRLF` after `POST` data (`#1792 <https://github.com/aio-libs/aiohttp/pull/1792>`_) - -- Keep a reference to `ClientSession` in response object (`#1985 <https://github.com/aio-libs/aiohttp/pull/1985>`_) - -- Deprecate undocumented `app.on_loop_available` signal (`#1978 <https://github.com/aio-libs/aiohttp/pull/1978>`_) - - - -2.1.0 (2017-05-26) -================== - -- Added support for experimental `async-tokio` event loop written in Rust - https://github.com/PyO3/tokio - -- Write to transport ``\r\n`` before closing after keepalive timeout, - otherwise client can not detect socket disconnection. 
(`#1883 <https://github.com/aio-libs/aiohttp/pull/1883>`_) - -- Only call `loop.close` in `run_app` if the user did *not* supply a loop. - Useful for allowing clients to specify their own cleanup before closing the - asyncio loop if they wish to tightly control loop behavior - -- Content disposition with semicolon in filename (`#917 <https://github.com/aio-libs/aiohttp/pull/917>`_) - -- Added `request_info` to response object and `ClientResponseError`. (`#1733 <https://github.com/aio-libs/aiohttp/pull/1733>`_) - -- Added `history` to `ClientResponseError`. (`#1741 <https://github.com/aio-libs/aiohttp/pull/1741>`_) - -- Allow to disable redirect url re-quoting (`#1474 <https://github.com/aio-libs/aiohttp/pull/1474>`_) - -- Handle RuntimeError from transport (`#1790 <https://github.com/aio-libs/aiohttp/pull/1790>`_) - -- Dropped "%O" in access logger (`#1673 <https://github.com/aio-libs/aiohttp/pull/1673>`_) - -- Added `args` and `kwargs` to `unittest_run_loop`. Useful with other - decorators, for example `@patch`. (`#1803 <https://github.com/aio-libs/aiohttp/pull/1803>`_) - -- Added `iter_chunks` to response.content object. (`#1805 <https://github.com/aio-libs/aiohttp/pull/1805>`_) - -- Avoid creating TimerContext when there is no timeout to allow - compatibility with Tornado. (`#1817 <https://github.com/aio-libs/aiohttp/pull/1817>`_) (`#1180 <https://github.com/aio-libs/aiohttp/pull/1180>`_) - -- Add `proxy_from_env` to `ClientRequest` to read from environment - variables. (`#1791 <https://github.com/aio-libs/aiohttp/pull/1791>`_) - -- Add DummyCookieJar helper. (`#1830 <https://github.com/aio-libs/aiohttp/pull/1830>`_) - -- Fix assertion errors in Python 3.4 from noop helper. (`#1847 <https://github.com/aio-libs/aiohttp/pull/1847>`_) - -- Do not unquote `+` in match_info values (`#1816 <https://github.com/aio-libs/aiohttp/pull/1816>`_) - -- Use Forwarded, X-Forwarded-Scheme and X-Forwarded-Host for better scheme and - host resolution. 
(`#1134 <https://github.com/aio-libs/aiohttp/pull/1134>`_) - -- Fix sub-application middlewares resolution order (`#1853 <https://github.com/aio-libs/aiohttp/pull/1853>`_) - -- Fix applications comparison (`#1866 <https://github.com/aio-libs/aiohttp/pull/1866>`_) - -- Fix static location in index when prefix is used (`#1662 <https://github.com/aio-libs/aiohttp/pull/1662>`_) - -- Make test server more reliable (`#1896 <https://github.com/aio-libs/aiohttp/pull/1896>`_) - -- Extend list of web exceptions, add HTTPUnprocessableEntity, - HTTPFailedDependency, HTTPInsufficientStorage status codes (`#1920 <https://github.com/aio-libs/aiohttp/pull/1920>`_) - - -2.0.7 (2017-04-12) -================== - -- Fix *pypi* distribution - -- Fix exception description (`#1807 <https://github.com/aio-libs/aiohttp/pull/1807>`_) - -- Handle socket error in FileResponse (`#1773 <https://github.com/aio-libs/aiohttp/pull/1773>`_) - -- Cancel websocket heartbeat on close (`#1793 <https://github.com/aio-libs/aiohttp/pull/1793>`_) - - -2.0.6 (2017-04-04) -================== - -- Keeping blank values for `request.post()` and `multipart.form()` (`#1765 <https://github.com/aio-libs/aiohttp/pull/1765>`_) - -- TypeError in data_received of ResponseHandler (`#1770 <https://github.com/aio-libs/aiohttp/pull/1770>`_) - -- Fix ``web.run_app`` not to bind to default host-port pair if only socket is - passed (`#1786 <https://github.com/aio-libs/aiohttp/pull/1786>`_) - - -2.0.5 (2017-03-29) -================== - -- Memory leak with aiohttp.request (`#1756 <https://github.com/aio-libs/aiohttp/pull/1756>`_) - -- Disable cleanup closed ssl transports by default. 
- -- Exception in request handling if the server responds before the body - is sent (`#1761 <https://github.com/aio-libs/aiohttp/pull/1761>`_) - - -2.0.4 (2017-03-27) -================== - -- Memory leak with aiohttp.request (`#1756 <https://github.com/aio-libs/aiohttp/pull/1756>`_) - -- Encoding is always UTF-8 in POST data (`#1750 <https://github.com/aio-libs/aiohttp/pull/1750>`_) - -- Do not add "Content-Disposition" header by default (`#1755 <https://github.com/aio-libs/aiohttp/pull/1755>`_) - - -2.0.3 (2017-03-24) -================== - -- Call https website through proxy will cause error (`#1745 <https://github.com/aio-libs/aiohttp/pull/1745>`_) - -- Fix exception on multipart/form-data post if content-type is not set (`#1743 <https://github.com/aio-libs/aiohttp/pull/1743>`_) - - -2.0.2 (2017-03-21) -================== - -- Fixed Application.on_loop_available signal (`#1739 <https://github.com/aio-libs/aiohttp/pull/1739>`_) - -- Remove debug code - - -2.0.1 (2017-03-21) -================== - -- Fix allow-head to include name on route (`#1737 <https://github.com/aio-libs/aiohttp/pull/1737>`_) - -- Fixed AttributeError in WebSocketResponse.can_prepare (`#1736 <https://github.com/aio-libs/aiohttp/pull/1736>`_) - - -2.0.0 (2017-03-20) -================== - -- Added `json` to `ClientSession.request()` method (`#1726 <https://github.com/aio-libs/aiohttp/pull/1726>`_) - -- Added session's `raise_for_status` parameter, automatically calls - raise_for_status() on any request. (`#1724 <https://github.com/aio-libs/aiohttp/pull/1724>`_) - -- `response.json()` raises `ClientResponseError` exception if response's - content type does not match (`#1723 <https://github.com/aio-libs/aiohttp/pull/1723>`_) - - - Cleanup timer and loop handle on any client exception. 
- -- Deprecate `loop` parameter for Application's constructor - - -`2.0.0rc1` (2017-03-15) -======================= - -- Properly handle payload errors (`#1710 <https://github.com/aio-libs/aiohttp/pull/1710>`_) - -- Added `ClientWebSocketResponse.get_extra_info()` (`#1717 <https://github.com/aio-libs/aiohttp/pull/1717>`_) - -- It is not possible to combine Transfer-Encoding and chunked parameter, - same for compress and Content-Encoding (`#1655 <https://github.com/aio-libs/aiohttp/pull/1655>`_) - -- Connector's `limit` parameter indicates total concurrent connections. - New `limit_per_host` added, indicates total connections per endpoint. (`#1601 <https://github.com/aio-libs/aiohttp/pull/1601>`_) - -- Use url's `raw_host` for name resolution (`#1685 <https://github.com/aio-libs/aiohttp/pull/1685>`_) - -- Change `ClientResponse.url` to `yarl.URL` instance (`#1654 <https://github.com/aio-libs/aiohttp/pull/1654>`_) - -- Add max_size parameter to web.Request reading methods (`#1133 <https://github.com/aio-libs/aiohttp/pull/1133>`_) - -- Web Request.post() stores data in temp files (`#1469 <https://github.com/aio-libs/aiohttp/pull/1469>`_) - -- Add the `allow_head=True` keyword argument for `add_get` (`#1618 <https://github.com/aio-libs/aiohttp/pull/1618>`_) - -- `run_app` and the Command Line Interface now support serving over - Unix domain sockets for faster inter-process communication. - -- `run_app` now supports passing a preexisting socket object. This can be useful - e.g. for socket-based activated applications, when binding of a socket is - done by the parent process. 
- -- Implementation for Trailer headers parser is broken (`#1619 <https://github.com/aio-libs/aiohttp/pull/1619>`_) - -- Fix FileResponse to not fail on bad request (range out of file size) - -- Fix FileResponse to correctly stream video to Chrome - -- Deprecate public low-level api (`#1657 <https://github.com/aio-libs/aiohttp/pull/1657>`_) - -- Deprecate `encoding` parameter for ClientSession.request() method - -- Dropped aiohttp.wsgi (`#1108 <https://github.com/aio-libs/aiohttp/pull/1108>`_) - -- Dropped `version` from ClientSession.request() method - -- Dropped websocket version 76 support (`#1160 <https://github.com/aio-libs/aiohttp/pull/1160>`_) - -- Dropped: `aiohttp.protocol.HttpPrefixParser` (`#1590 <https://github.com/aio-libs/aiohttp/pull/1590>`_) - -- Dropped: Servers response's `.started`, `.start()` and - `.can_start()` method (`#1591 <https://github.com/aio-libs/aiohttp/pull/1591>`_) - -- Dropped: Adding `sub app` via `app.router.add_subapp()` is deprecated - use `app.add_subapp()` instead (`#1592 <https://github.com/aio-libs/aiohttp/pull/1592>`_) - -- Dropped: `Application.finish()` and `Application.register_on_finish()` (`#1602 <https://github.com/aio-libs/aiohttp/pull/1602>`_) - -- Dropped: `web.Request.GET` and `web.Request.POST` - -- Dropped: aiohttp.get(), aiohttp.options(), aiohttp.head(), - aiohttp.post(), aiohttp.put(), aiohttp.patch(), aiohttp.delete(), and - aiohttp.ws_connect() (`#1593 <https://github.com/aio-libs/aiohttp/pull/1593>`_) - -- Dropped: `aiohttp.web.WebSocketResponse.receive_msg()` (`#1605 <https://github.com/aio-libs/aiohttp/pull/1605>`_) - -- Dropped: `ServerHttpProtocol.keep_alive_timeout` attribute and - `keep-alive`, `keep_alive_on`, `timeout`, `log` constructor parameters (`#1606 <https://github.com/aio-libs/aiohttp/pull/1606>`_) - -- Dropped: ``TCPConnector``'s `.resolve`, `.resolved_hosts`, - `.clear_resolved_hosts()` attributes and `resolve` constructor - parameter (`#1607 
<https://github.com/aio-libs/aiohttp/pull/1607>`_) - -- Dropped `ProxyConnector` (`#1609 <https://github.com/aio-libs/aiohttp/pull/1609>`_) - - -1.3.5 (2017-03-16) -================== - -- Fixed None timeout support (`#1720 <https://github.com/aio-libs/aiohttp/pull/1720>`_) - - -1.3.4 (2017-03-14) -================== - -- Revert timeout handling in client request - -- Fix StreamResponse representation after eof - -- Fix file_sender to not fall on bad request (range out of file size) - -- Fix file_sender to correct stream video to Chromes - -- Fix NotImplementedError server exception (`#1703 <https://github.com/aio-libs/aiohttp/pull/1703>`_) - -- Clearer error message for URL without a host name. (`#1691 <https://github.com/aio-libs/aiohttp/pull/1691>`_) - -- Silence deprecation warning in __repr__ (`#1690 <https://github.com/aio-libs/aiohttp/pull/1690>`_) - -- IDN + HTTPS = `ssl.CertificateError` (`#1685 <https://github.com/aio-libs/aiohttp/pull/1685>`_) - - -1.3.3 (2017-02-19) -================== - -- Fixed memory leak in time service (`#1656 <https://github.com/aio-libs/aiohttp/pull/1656>`_) - - -1.3.2 (2017-02-16) -================== - -- Awaiting on WebSocketResponse.send_* does not work (`#1645 <https://github.com/aio-libs/aiohttp/pull/1645>`_) - -- Fix multiple calls to client ws_connect when using a shared header - dict (`#1643 <https://github.com/aio-libs/aiohttp/pull/1643>`_) - -- Make CookieJar.filter_cookies() accept plain string parameter. 
(`#1636 <https://github.com/aio-libs/aiohttp/pull/1636>`_) - - -1.3.1 (2017-02-09) -================== - -- Handle CLOSING in WebSocketResponse.__anext__ - -- Fixed AttributeError 'drain' for server websocket handler (`#1613 <https://github.com/aio-libs/aiohttp/pull/1613>`_) - - -1.3.0 (2017-02-08) -================== - -- Multipart writer validates the data on append instead of on a - request send (`#920 <https://github.com/aio-libs/aiohttp/pull/920>`_) - -- Multipart reader accepts multipart messages with or without their epilogue - to consistently handle valid and legacy behaviors (`#1526 <https://github.com/aio-libs/aiohttp/pull/1526>`_) (`#1581 <https://github.com/aio-libs/aiohttp/pull/1581>`_) - -- Separate read + connect + request timeouts # 1523 - -- Do not swallow Upgrade header (`#1587 <https://github.com/aio-libs/aiohttp/pull/1587>`_) - -- Fix polls demo run application (`#1487 <https://github.com/aio-libs/aiohttp/pull/1487>`_) - -- Ignore unknown 1XX status codes in client (`#1353 <https://github.com/aio-libs/aiohttp/pull/1353>`_) - -- Fix sub-Multipart messages missing their headers on serialization (`#1525 <https://github.com/aio-libs/aiohttp/pull/1525>`_) - -- Do not use readline when reading the content of a part - in the multipart reader (`#1535 <https://github.com/aio-libs/aiohttp/pull/1535>`_) - -- Add optional flag for quoting `FormData` fields (`#916 <https://github.com/aio-libs/aiohttp/pull/916>`_) - -- 416 Range Not Satisfiable if requested range end > file size (`#1588 <https://github.com/aio-libs/aiohttp/pull/1588>`_) - -- Having a `:` or `@` in a route does not work (`#1552 <https://github.com/aio-libs/aiohttp/pull/1552>`_) - -- Added `receive_timeout` timeout for websocket to receive complete - message. (`#1325 <https://github.com/aio-libs/aiohttp/pull/1325>`_) - -- Added `heartbeat` parameter for websocket to automatically send - `ping` message. 
(`#1024 <https://github.com/aio-libs/aiohttp/pull/1024>`_) (`#777 <https://github.com/aio-libs/aiohttp/pull/777>`_) - -- Remove `web.Application` dependency from `web.UrlDispatcher` (`#1510 <https://github.com/aio-libs/aiohttp/pull/1510>`_) - -- Accepting back-pressure from slow websocket clients (`#1367 <https://github.com/aio-libs/aiohttp/pull/1367>`_) - -- Do not pause transport during set_parser stage (`#1211 <https://github.com/aio-libs/aiohttp/pull/1211>`_) - -- Lingering close does not terminate before timeout (`#1559 <https://github.com/aio-libs/aiohttp/pull/1559>`_) - -- `setsockopt` may raise `OSError` exception if socket is closed already (`#1595 <https://github.com/aio-libs/aiohttp/pull/1595>`_) - -- Lots of CancelledError when requests are interrupted (`#1565 <https://github.com/aio-libs/aiohttp/pull/1565>`_) - -- Allow users to specify what should happen to decoding errors - when calling a response's `text()` method (`#1542 <https://github.com/aio-libs/aiohttp/pull/1542>`_) - -- Back port std module `http.cookies` for python3.4.2 (`#1566 <https://github.com/aio-libs/aiohttp/pull/1566>`_) - -- Maintain url's fragment in client response (`#1314 <https://github.com/aio-libs/aiohttp/pull/1314>`_) - -- Allow concurrently close WebSocket connection (`#754 <https://github.com/aio-libs/aiohttp/pull/754>`_) - -- Gzipped responses with empty body raises ContentEncodingError (`#609 <https://github.com/aio-libs/aiohttp/pull/609>`_) - -- Return 504 if request handle raises TimeoutError. - -- Refactor how we use keep-alive and close lingering timeouts. - -- Close response connection if we can not consume whole http - message during client response release - -- Abort closed ssl client transports, broken servers can keep socket - open unlimited time (`#1568 <https://github.com/aio-libs/aiohttp/pull/1568>`_) - -- Log warning instead of `RuntimeError` if websocket connection is closed. 
- -- Deprecated: `aiohttp.protocol.HttpPrefixParser` - will be removed in 1.4 (`#1590 <https://github.com/aio-libs/aiohttp/pull/1590>`_) - -- Deprecated: Servers response's `.started`, `.start()` and - `.can_start()` method will be removed in 1.4 (`#1591 <https://github.com/aio-libs/aiohttp/pull/1591>`_) - -- Deprecated: Adding `sub app` via `app.router.add_subapp()` is deprecated - use `app.add_subapp()` instead, will be removed in 1.4 (`#1592 <https://github.com/aio-libs/aiohttp/pull/1592>`_) - -- Deprecated: aiohttp.get(), aiohttp.options(), aiohttp.head(), aiohttp.post(), - aiohttp.put(), aiohttp.patch(), aiohttp.delete(), and aiohttp.ws_connect() - will be removed in 1.4 (`#1593 <https://github.com/aio-libs/aiohttp/pull/1593>`_) - -- Deprecated: `Application.finish()` and `Application.register_on_finish()` - will be removed in 1.4 (`#1602 <https://github.com/aio-libs/aiohttp/pull/1602>`_) - - -1.2.0 (2016-12-17) -================== - -- Extract `BaseRequest` from `web.Request`, introduce `web.Server` - (former `RequestHandlerFactory`), introduce new low-level web server - which is not coupled with `web.Application` and routing (`#1362 <https://github.com/aio-libs/aiohttp/pull/1362>`_) - -- Make `TestServer.make_url` compatible with `yarl.URL` (`#1389 <https://github.com/aio-libs/aiohttp/pull/1389>`_) - -- Implement range requests for static files (`#1382 <https://github.com/aio-libs/aiohttp/pull/1382>`_) - -- Support task attribute for StreamResponse (`#1410 <https://github.com/aio-libs/aiohttp/pull/1410>`_) - -- Drop `TestClient.app` property, use `TestClient.server.app` instead - (BACKWARD INCOMPATIBLE) - -- Drop `TestClient.handler` property, use `TestClient.server.handler` instead - (BACKWARD INCOMPATIBLE) - -- `TestClient.server` property returns a test server instance, was - `asyncio.AbstractServer` (BACKWARD INCOMPATIBLE) - -- Follow gunicorn's signal semantics in `Gunicorn[UVLoop]WebWorker` (`#1201 <https://github.com/aio-libs/aiohttp/pull/1201>`_) - 
-- Call worker_int and worker_abort callbacks in - `Gunicorn[UVLoop]WebWorker` (`#1202 <https://github.com/aio-libs/aiohttp/pull/1202>`_) - -- Has functional tests for client proxy (`#1218 <https://github.com/aio-libs/aiohttp/pull/1218>`_) - -- Fix bugs with client proxy target path and proxy host with port (`#1413 <https://github.com/aio-libs/aiohttp/pull/1413>`_) - -- Fix bugs related to the use of unicode hostnames (`#1444 <https://github.com/aio-libs/aiohttp/pull/1444>`_) - -- Preserve cookie quoting/escaping (`#1453 <https://github.com/aio-libs/aiohttp/pull/1453>`_) - -- FileSender will send gzipped response if gzip version available (`#1426 <https://github.com/aio-libs/aiohttp/pull/1426>`_) - -- Don't override `Content-Length` header in `web.Response` if no body - was set (`#1400 <https://github.com/aio-libs/aiohttp/pull/1400>`_) - -- Introduce `router.post_init()` for solving (`#1373 <https://github.com/aio-libs/aiohttp/pull/1373>`_) - -- Fix raise error in case of multiple calls of `TimeService.stop()` - -- Allow to raise web exceptions on router resolving stage (`#1460 <https://github.com/aio-libs/aiohttp/pull/1460>`_) - -- Add a warning for session creation outside of coroutine (`#1468 <https://github.com/aio-libs/aiohttp/pull/1468>`_) - -- Avoid a race when application might start accepting incoming requests - but startup signals are not processed yet e98e8c6 - -- Raise a `RuntimeError` when trying to change the status of the HTTP response - after the headers have been sent (`#1480 <https://github.com/aio-libs/aiohttp/pull/1480>`_) - -- Fix bug with https proxy acquired cleanup (`#1340 <https://github.com/aio-libs/aiohttp/pull/1340>`_) - -- Use UTF-8 as the default encoding for multipart text parts (`#1484 <https://github.com/aio-libs/aiohttp/pull/1484>`_) - - -1.1.6 (2016-11-28) -================== - -- Fix `BodyPartReader.read_chunk` bug about returns zero bytes before - `EOF` (`#1428 <https://github.com/aio-libs/aiohttp/pull/1428>`_) - -1.1.5 
(2016-11-16) -================== - -- Fix static file serving in fallback mode (`#1401 <https://github.com/aio-libs/aiohttp/pull/1401>`_) - -1.1.4 (2016-11-14) -================== - -- Make `TestServer.make_url` compatible with `yarl.URL` (`#1389 <https://github.com/aio-libs/aiohttp/pull/1389>`_) - -- Generate informative exception on redirects from server which - does not provide redirection headers (`#1396 <https://github.com/aio-libs/aiohttp/pull/1396>`_) - - -1.1.3 (2016-11-10) -================== - -- Support *root* resources for sub-applications (`#1379 <https://github.com/aio-libs/aiohttp/pull/1379>`_) - - -1.1.2 (2016-11-08) -================== - -- Allow starting variables with an underscore (`#1379 <https://github.com/aio-libs/aiohttp/pull/1379>`_) - -- Properly process UNIX sockets by gunicorn worker (`#1375 <https://github.com/aio-libs/aiohttp/pull/1375>`_) - -- Fix ordering for `FrozenList` - -- Don't propagate pre and post signals to sub-application (`#1377 <https://github.com/aio-libs/aiohttp/pull/1377>`_) - -1.1.1 (2016-11-04) -================== - -- Fix documentation generation (`#1120 <https://github.com/aio-libs/aiohttp/pull/1120>`_) - -1.1.0 (2016-11-03) -================== - -- Drop deprecated `WSClientDisconnectedError` (BACKWARD INCOMPATIBLE) - -- Use `yarl.URL` in client API. The change is 99% backward compatible - but `ClientResponse.url` is an `yarl.URL` instance now. (`#1217 <https://github.com/aio-libs/aiohttp/pull/1217>`_) - -- Close idle keep-alive connections on shutdown (`#1222 <https://github.com/aio-libs/aiohttp/pull/1222>`_) - -- Modify regex in AccessLogger to accept underscore and numbers (`#1225 <https://github.com/aio-libs/aiohttp/pull/1225>`_) - -- Use `yarl.URL` in web server API. `web.Request.rel_url` and - `web.Request.url` are added. URLs and templates are percent-encoded - now. 
(`#1224 <https://github.com/aio-libs/aiohttp/pull/1224>`_) - -- Accept `yarl.URL` by server redirections (`#1278 <https://github.com/aio-libs/aiohttp/pull/1278>`_) - -- Return `yarl.URL` by `.make_url()` testing utility (`#1279 <https://github.com/aio-libs/aiohttp/pull/1279>`_) - -- Properly format IPv6 addresses by `aiohttp.web.run_app` (`#1139 <https://github.com/aio-libs/aiohttp/pull/1139>`_) - -- Use `yarl.URL` by server API (`#1288 <https://github.com/aio-libs/aiohttp/pull/1288>`_) - - * Introduce `resource.url_for()`, deprecate `resource.url()`. - - * Implement `StaticResource`. - - * Inherit `SystemRoute` from `AbstractRoute` - - * Drop old-style routes: `Route`, `PlainRoute`, `DynamicRoute`, - `StaticRoute`, `ResourceAdapter`. - -- Revert `resp.url` back to `str`, introduce `resp.url_obj` (`#1292 <https://github.com/aio-libs/aiohttp/pull/1292>`_) - -- Raise ValueError if BasicAuth login has a ":" character (`#1307 <https://github.com/aio-libs/aiohttp/pull/1307>`_) - -- Fix bug when ClientRequest send payload file with opened as - open('filename', 'r+b') (`#1306 <https://github.com/aio-libs/aiohttp/pull/1306>`_) - -- Enhancement to AccessLogger (pass *extra* dict) (`#1303 <https://github.com/aio-libs/aiohttp/pull/1303>`_) - -- Show more verbose message on import errors (`#1319 <https://github.com/aio-libs/aiohttp/pull/1319>`_) - -- Added save and load functionality for `CookieJar` (`#1219 <https://github.com/aio-libs/aiohttp/pull/1219>`_) - -- Added option on `StaticRoute` to follow symlinks (`#1299 <https://github.com/aio-libs/aiohttp/pull/1299>`_) - -- Force encoding of `application/json` content type to utf-8 (`#1339 <https://github.com/aio-libs/aiohttp/pull/1339>`_) - -- Fix invalid invocations of `errors.LineTooLong` (`#1335 <https://github.com/aio-libs/aiohttp/pull/1335>`_) - -- Websockets: Stop `async for` iteration when connection is closed (`#1144 <https://github.com/aio-libs/aiohttp/pull/1144>`_) - -- Ensure TestClient HTTP methods return a context 
manager (`#1318 <https://github.com/aio-libs/aiohttp/pull/1318>`_) - -- Raise `ClientDisconnectedError` to `FlowControlStreamReader` read function - if `ClientSession` object is closed by client when reading data. (`#1323 <https://github.com/aio-libs/aiohttp/pull/1323>`_) - -- Document deployment without `Gunicorn` (`#1120 <https://github.com/aio-libs/aiohttp/pull/1120>`_) - -- Add deprecation warning for MD5 and SHA1 digests when used for fingerprint - of site certs in TCPConnector. (`#1186 <https://github.com/aio-libs/aiohttp/pull/1186>`_) - -- Implement sub-applications (`#1301 <https://github.com/aio-libs/aiohttp/pull/1301>`_) - -- Don't inherit `web.Request` from `dict` but implement - `MutableMapping` protocol. - -- Implement frozen signals - -- Don't inherit `web.Application` from `dict` but implement - `MutableMapping` protocol. - -- Support freezing for web applications - -- Accept access_log parameter in `web.run_app`, use `None` to disable logging - -- Don't flap `tcp_cork` and `tcp_nodelay` in regular request handling. - `tcp_nodelay` is still enabled by default. - -- Improve performance of web server by removing premature computing of - Content-Type if the value was set by `web.Response` constructor. - - While the patch boosts speed of trivial `web.Response(text='OK', - content_type='text/plain)` very well please don't expect significant - boost of your application -- a couple DB requests and business logic - is still the main bottleneck. - -- Boost performance by adding a custom time service (`#1350 <https://github.com/aio-libs/aiohttp/pull/1350>`_) - -- Extend `ClientResponse` with `content_type` and `charset` - properties like in `web.Request`. (`#1349 <https://github.com/aio-libs/aiohttp/pull/1349>`_) - -- Disable aiodns by default (`#559 <https://github.com/aio-libs/aiohttp/pull/559>`_) - -- Don't flap `tcp_cork` in client code, use TCP_NODELAY mode by default. 
- -- Implement `web.Request.clone()` (`#1361 <https://github.com/aio-libs/aiohttp/pull/1361>`_) - -1.0.5 (2016-10-11) -================== - -- Fix StreamReader._read_nowait to return all available - data up to the requested amount (`#1297 <https://github.com/aio-libs/aiohttp/pull/1297>`_) - - -1.0.4 (2016-09-22) -================== - -- Fix FlowControlStreamReader.read_nowait so that it checks - whether the transport is paused (`#1206 <https://github.com/aio-libs/aiohttp/pull/1206>`_) - - -1.0.2 (2016-09-22) -================== - -- Make CookieJar compatible with 32-bit systems (`#1188 <https://github.com/aio-libs/aiohttp/pull/1188>`_) - -- Add missing `WSMsgType` to `web_ws.__all__`, see (`#1200 <https://github.com/aio-libs/aiohttp/pull/1200>`_) - -- Fix `CookieJar` ctor when called with `loop=None` (`#1203 <https://github.com/aio-libs/aiohttp/pull/1203>`_) - -- Fix broken upper-casing in wsgi support (`#1197 <https://github.com/aio-libs/aiohttp/pull/1197>`_) - - -1.0.1 (2016-09-16) -================== - -- Restore `aiohttp.web.MsgType` alias for `aiohttp.WSMsgType` for sake - of backward compatibility (`#1178 <https://github.com/aio-libs/aiohttp/pull/1178>`_) - -- Tune alabaster schema. - -- Use `text/html` content type for displaying index pages by static - file handler. - -- Fix `AssertionError` in static file handling (`#1177 <https://github.com/aio-libs/aiohttp/pull/1177>`_) - -- Fix access log formats `%O` and `%b` for static file handling - -- Remove `debug` setting of GunicornWorker, use `app.debug` - to control its debug-mode instead - - -1.0.0 (2016-09-16) -================== - -- Change default size for client session's connection pool from - unlimited to 20 (`#977 <https://github.com/aio-libs/aiohttp/pull/977>`_) - -- Add IE support for cookie deletion. 
(`#994 <https://github.com/aio-libs/aiohttp/pull/994>`_) - -- Remove deprecated `WebSocketResponse.wait_closed` method (BACKWARD - INCOMPATIBLE) - -- Remove deprecated `force` parameter for `ClientResponse.close` - method (BACKWARD INCOMPATIBLE) - -- Avoid using of mutable CIMultiDict kw param in make_mocked_request - (`#997 <https://github.com/aio-libs/aiohttp/pull/997>`_) - -- Make WebSocketResponse.close a little bit faster by avoiding new - task creating just for timeout measurement - -- Add `proxy` and `proxy_auth` params to `client.get()` and family, - deprecate `ProxyConnector` (`#998 <https://github.com/aio-libs/aiohttp/pull/998>`_) - -- Add support for websocket send_json and receive_json, synchronize - server and client API for websockets (`#984 <https://github.com/aio-libs/aiohttp/pull/984>`_) - -- Implement router shortcuts for most useful HTTP methods, use - `app.router.add_get()`, `app.router.add_post()` etc. instead of - `app.router.add_route()` (`#986 <https://github.com/aio-libs/aiohttp/pull/986>`_) - -- Support SSL connections for gunicorn worker (`#1003 <https://github.com/aio-libs/aiohttp/pull/1003>`_) - -- Move obsolete examples to legacy folder - -- Switch to multidict 2.0 and title-cased strings (`#1015 <https://github.com/aio-libs/aiohttp/pull/1015>`_) - -- `{FOO}e` logger format is case-sensitive now - -- Fix logger report for unix socket 8e8469b - -- Rename aiohttp.websocket to aiohttp._ws_impl - -- Rename aiohttp.MsgType to aiohttp.WSMsgType - -- Introduce aiohttp.WSMessage officially - -- Rename Message -> WSMessage - -- Remove deprecated decode param from resp.read(decode=True) - -- Use 5min default client timeout (`#1028 <https://github.com/aio-libs/aiohttp/pull/1028>`_) - -- Relax HTTP method validation in UrlDispatcher (`#1037 <https://github.com/aio-libs/aiohttp/pull/1037>`_) - -- Pin minimal supported asyncio version to 3.4.2+ (`loop.is_close()` - should be present) - -- Remove aiohttp.websocket module (BACKWARD INCOMPATIBLE) - 
Please use high-level client and server approaches - -- Link header for 451 status code is mandatory - -- Fix test_client fixture to allow multiple clients per test (`#1072 <https://github.com/aio-libs/aiohttp/pull/1072>`_) - -- make_mocked_request now accepts dict as headers (`#1073 <https://github.com/aio-libs/aiohttp/pull/1073>`_) - -- Add Python 3.5.2/3.6+ compatibility patch for async generator - protocol change (`#1082 <https://github.com/aio-libs/aiohttp/pull/1082>`_) - -- Improvement test_client can accept instance object (`#1083 <https://github.com/aio-libs/aiohttp/pull/1083>`_) - -- Simplify ServerHttpProtocol implementation (`#1060 <https://github.com/aio-libs/aiohttp/pull/1060>`_) - -- Add a flag for optional showing directory index for static file - handling (`#921 <https://github.com/aio-libs/aiohttp/pull/921>`_) - -- Define `web.Application.on_startup()` signal handler (`#1103 <https://github.com/aio-libs/aiohttp/pull/1103>`_) - -- Drop ChunkedParser and LinesParser (`#1111 <https://github.com/aio-libs/aiohttp/pull/1111>`_) - -- Call `Application.startup` in GunicornWebWorker (`#1105 <https://github.com/aio-libs/aiohttp/pull/1105>`_) - -- Fix client handling hostnames with 63 bytes when a port is given in - the url (`#1044 <https://github.com/aio-libs/aiohttp/pull/1044>`_) - -- Implement proxy support for ClientSession.ws_connect (`#1025 <https://github.com/aio-libs/aiohttp/pull/1025>`_) - -- Return named tuple from WebSocketResponse.can_prepare (`#1016 <https://github.com/aio-libs/aiohttp/pull/1016>`_) - -- Fix access_log_format in `GunicornWebWorker` (`#1117 <https://github.com/aio-libs/aiohttp/pull/1117>`_) - -- Setup Content-Type to application/octet-stream by default (`#1124 <https://github.com/aio-libs/aiohttp/pull/1124>`_) - -- Deprecate debug parameter from app.make_handler(), use - `Application(debug=True)` instead (`#1121 <https://github.com/aio-libs/aiohttp/pull/1121>`_) - -- Remove fragment string in request path (`#846 
<https://github.com/aio-libs/aiohttp/pull/846>`_) - -- Use aiodns.DNSResolver.gethostbyname() if available (`#1136 <https://github.com/aio-libs/aiohttp/pull/1136>`_) - -- Fix static file sending on uvloop when sendfile is available (`#1093 <https://github.com/aio-libs/aiohttp/pull/1093>`_) - -- Make prettier urls if query is empty dict (`#1143 <https://github.com/aio-libs/aiohttp/pull/1143>`_) - -- Fix redirects for HEAD requests (`#1147 <https://github.com/aio-libs/aiohttp/pull/1147>`_) - -- Default value for `StreamReader.read_nowait` is -1 from now (`#1150 <https://github.com/aio-libs/aiohttp/pull/1150>`_) - -- `aiohttp.StreamReader` is not inherited from `asyncio.StreamReader` from now - (BACKWARD INCOMPATIBLE) (`#1150 <https://github.com/aio-libs/aiohttp/pull/1150>`_) - -- Streams documentation added (`#1150 <https://github.com/aio-libs/aiohttp/pull/1150>`_) - -- Add `multipart` coroutine method for web Request object (`#1067 <https://github.com/aio-libs/aiohttp/pull/1067>`_) - -- Publish ClientSession.loop property (`#1149 <https://github.com/aio-libs/aiohttp/pull/1149>`_) - -- Fix static file with spaces (`#1140 <https://github.com/aio-libs/aiohttp/pull/1140>`_) - -- Fix piling up asyncio loop by cookie expiration callbacks (`#1061 <https://github.com/aio-libs/aiohttp/pull/1061>`_) - -- Drop `Timeout` class for sake of `async_timeout` external library. 
- `aiohttp.Timeout` is an alias for `async_timeout.timeout` - -- `use_dns_cache` parameter of `aiohttp.TCPConnector` is `True` by - default (BACKWARD INCOMPATIBLE) (`#1152 <https://github.com/aio-libs/aiohttp/pull/1152>`_) - -- `aiohttp.TCPConnector` uses asynchronous DNS resolver if available by - default (BACKWARD INCOMPATIBLE) (`#1152 <https://github.com/aio-libs/aiohttp/pull/1152>`_) - -- Conform to RFC3986 - do not include url fragments in client requests (`#1174 <https://github.com/aio-libs/aiohttp/pull/1174>`_) - -- Drop `ClientSession.cookies` (BACKWARD INCOMPATIBLE) (`#1173 <https://github.com/aio-libs/aiohttp/pull/1173>`_) - -- Refactor `AbstractCookieJar` public API (BACKWARD INCOMPATIBLE) (`#1173 <https://github.com/aio-libs/aiohttp/pull/1173>`_) - -- Fix clashing cookies which have the same name but belong to different - domains (BACKWARD INCOMPATIBLE) (`#1125 <https://github.com/aio-libs/aiohttp/pull/1125>`_) - -- Support binary Content-Transfer-Encoding (`#1169 <https://github.com/aio-libs/aiohttp/pull/1169>`_) - - -0.22.5 (08-02-2016) -=================== - -- Pin multidict version to >=1.2.2 - -0.22.3 (07-26-2016) -=================== - -- Do not filter cookies if unsafe flag provided (`#1005 <https://github.com/aio-libs/aiohttp/pull/1005>`_) - - -0.22.2 (07-23-2016) -=================== - -- Suppress CancelledError when Timeout raises TimeoutError (`#970 <https://github.com/aio-libs/aiohttp/pull/970>`_) - -- Don't expose `aiohttp.__version__` - -- Add unsafe parameter to CookieJar (`#968 <https://github.com/aio-libs/aiohttp/pull/968>`_) - -- Use unsafe cookie jar in test client tools - -- Expose aiohttp.CookieJar name - - -0.22.1 (07-16-2016) -=================== - -- Large cookie expiration/max-age does not break an event loop from now - (fixes (`#967 <https://github.com/aio-libs/aiohttp/pull/967>`_)) - - -0.22.0 (07-15-2016) -=================== - -- Fix bug in serving static directory (`#803 <https://github.com/aio-libs/aiohttp/pull/803>`_) - -- 
Fix command line arg parsing (`#797 <https://github.com/aio-libs/aiohttp/pull/797>`_) - -- Fix a documentation chapter about cookie usage (`#790 <https://github.com/aio-libs/aiohttp/pull/790>`_) - -- Handle empty body with gzipped encoding (`#758 <https://github.com/aio-libs/aiohttp/pull/758>`_) - -- Support 451 Unavailable For Legal Reasons http status (`#697 <https://github.com/aio-libs/aiohttp/pull/697>`_) - -- Fix Cookie share example and few small typos in docs (`#817 <https://github.com/aio-libs/aiohttp/pull/817>`_) - -- UrlDispatcher.add_route with partial coroutine handler (`#814 <https://github.com/aio-libs/aiohttp/pull/814>`_) - -- Optional support for aiodns (`#728 <https://github.com/aio-libs/aiohttp/pull/728>`_) - -- Add ServiceRestart and TryAgainLater websocket close codes (`#828 <https://github.com/aio-libs/aiohttp/pull/828>`_) - -- Fix prompt message for `web.run_app` (`#832 <https://github.com/aio-libs/aiohttp/pull/832>`_) - -- Allow to pass None as a timeout value to disable timeout logic (`#834 <https://github.com/aio-libs/aiohttp/pull/834>`_) - -- Fix leak of connection slot during connection error (`#835 <https://github.com/aio-libs/aiohttp/pull/835>`_) - -- Gunicorn worker with uvloop support - `aiohttp.worker.GunicornUVLoopWebWorker` (`#878 <https://github.com/aio-libs/aiohttp/pull/878>`_) - -- Don't send body in response to HEAD request (`#838 <https://github.com/aio-libs/aiohttp/pull/838>`_) - -- Skip the preamble in MultipartReader (`#881 <https://github.com/aio-libs/aiohttp/pull/881>`_) - -- Implement BasicAuth decode classmethod. 
(`#744 <https://github.com/aio-libs/aiohttp/pull/744>`_) - -- Don't crash logger when transport is None (`#889 <https://github.com/aio-libs/aiohttp/pull/889>`_) - -- Use a create_future compatibility wrapper instead of creating - Futures directly (`#896 <https://github.com/aio-libs/aiohttp/pull/896>`_) - -- Add test utilities to aiohttp (`#902 <https://github.com/aio-libs/aiohttp/pull/902>`_) - -- Improve Request.__repr__ (`#875 <https://github.com/aio-libs/aiohttp/pull/875>`_) - -- Skip DNS resolving if provided host is already an ip address (`#874 <https://github.com/aio-libs/aiohttp/pull/874>`_) - -- Add headers to ClientSession.ws_connect (`#785 <https://github.com/aio-libs/aiohttp/pull/785>`_) - -- Document that server can send pre-compressed data (`#906 <https://github.com/aio-libs/aiohttp/pull/906>`_) - -- Don't add Content-Encoding and Transfer-Encoding if no body (`#891 <https://github.com/aio-libs/aiohttp/pull/891>`_) - -- Add json() convenience methods to websocket message objects (`#897 <https://github.com/aio-libs/aiohttp/pull/897>`_) - -- Add client_resp.raise_for_status() (`#908 <https://github.com/aio-libs/aiohttp/pull/908>`_) - -- Implement cookie filter (`#799 <https://github.com/aio-libs/aiohttp/pull/799>`_) - -- Include an example of middleware to handle error pages (`#909 <https://github.com/aio-libs/aiohttp/pull/909>`_) - -- Fix error handling in StaticFileMixin (`#856 <https://github.com/aio-libs/aiohttp/pull/856>`_) - -- Add mocked request helper (`#900 <https://github.com/aio-libs/aiohttp/pull/900>`_) - -- Fix empty ALLOW Response header for cls based View (`#929 <https://github.com/aio-libs/aiohttp/pull/929>`_) - -- Respect CONNECT method to implement a proxy server (`#847 <https://github.com/aio-libs/aiohttp/pull/847>`_) - -- Add pytest_plugin (`#914 <https://github.com/aio-libs/aiohttp/pull/914>`_) - -- Add tutorial - -- Add backlog option to support more than 128 (default value in - "create_server" function) concurrent connections 
(`#892 <https://github.com/aio-libs/aiohttp/pull/892>`_) - -- Allow configuration of header size limits (`#912 <https://github.com/aio-libs/aiohttp/pull/912>`_) - -- Separate sending file logic from StaticRoute dispatcher (`#901 <https://github.com/aio-libs/aiohttp/pull/901>`_) - -- Drop deprecated share_cookies connector option (BACKWARD INCOMPATIBLE) - -- Drop deprecated support for tuple as auth parameter. - Use aiohttp.BasicAuth instead (BACKWARD INCOMPATIBLE) - -- Remove deprecated `request.payload` property, use `content` instead. - (BACKWARD INCOMPATIBLE) - -- Drop all mentions about api changes in documentation for versions - older than 0.16 - -- Allow to override default cookie jar (`#963 <https://github.com/aio-libs/aiohttp/pull/963>`_) - -- Add manylinux wheel builds - -- Dup a socket for sendfile usage (`#964 <https://github.com/aio-libs/aiohttp/pull/964>`_) - -0.21.6 (05-05-2016) -=================== - -- Drop initial query parameters on redirects (`#853 <https://github.com/aio-libs/aiohttp/pull/853>`_) - - -0.21.5 (03-22-2016) -=================== - -- Fix command line arg parsing (`#797 <https://github.com/aio-libs/aiohttp/pull/797>`_) - -0.21.4 (03-12-2016) -=================== - -- Fix ResourceAdapter: don't add method to allowed if resource is not - match (`#826 <https://github.com/aio-libs/aiohttp/pull/826>`_) - -- Fix Resource: append found method to returned allowed methods - -0.21.2 (02-16-2016) -=================== - -- Fix a regression: support for handling ~/path in static file routes was - broken (`#782 <https://github.com/aio-libs/aiohttp/pull/782>`_) - -0.21.1 (02-10-2016) -=================== - -- Make new resources classes public (`#767 <https://github.com/aio-libs/aiohttp/pull/767>`_) - -- Add `router.resources()` view - -- Fix cmd-line parameter names in doc - -0.21.0 (02-04-2016) -=================== - -- Introduce on_shutdown signal (`#722 <https://github.com/aio-libs/aiohttp/pull/722>`_) - -- Implement raw input headers (`#726 
<https://github.com/aio-libs/aiohttp/pull/726>`_) - -- Implement web.run_app utility function (`#734 <https://github.com/aio-libs/aiohttp/pull/734>`_) - -- Introduce on_cleanup signal - -- Deprecate Application.finish() / Application.register_on_finish() in favor of - on_cleanup. - -- Get rid of bare aiohttp.request(), aiohttp.get() and family in docs (`#729 <https://github.com/aio-libs/aiohttp/pull/729>`_) - -- Deprecate bare aiohttp.request(), aiohttp.get() and family (`#729 <https://github.com/aio-libs/aiohttp/pull/729>`_) - -- Refactor keep-alive support (`#737 <https://github.com/aio-libs/aiohttp/pull/737>`_): - - - Enable keepalive for HTTP 1.0 by default - - - Disable it for HTTP 0.9 (who cares about 0.9, BTW?) - - - For keepalived connections - - - Send `Connection: keep-alive` for HTTP 1.0 only - - - don't send `Connection` header for HTTP 1.1 - - - For non-keepalived connections - - - Send `Connection: close` for HTTP 1.1 only - - - don't send `Connection` header for HTTP 1.0 - -- Add version parameter to ClientSession constructor, - deprecate it for session.request() and family (`#736 <https://github.com/aio-libs/aiohttp/pull/736>`_) - -- Enable access log by default (`#735 <https://github.com/aio-libs/aiohttp/pull/735>`_) - -- Deprecate app.router.register_route() (the method was not documented - intentionally BTW). - -- Deprecate app.router.named_routes() in favor of app.router.named_resources() - -- route.add_static accepts pathlib.Path now (`#743 <https://github.com/aio-libs/aiohttp/pull/743>`_) - -- Add command line support: `$ python -m aiohttp.web package.main` (`#740 <https://github.com/aio-libs/aiohttp/pull/740>`_) - -- FAQ section was added to docs. 
Enjoy and feel free to contribute new topics - -- Add async context manager support to ClientSession - -- Document ClientResponse's host, method, url properties - -- Use CORK/NODELAY in client API (`#748 <https://github.com/aio-libs/aiohttp/pull/748>`_) - -- ClientSession.close and Connector.close are coroutines now - -- Close client connection on exception in ClientResponse.release() - -- Allow to read multipart parts without content-length specified (`#750 <https://github.com/aio-libs/aiohttp/pull/750>`_) - -- Add support for unix domain sockets to gunicorn worker (`#470 <https://github.com/aio-libs/aiohttp/pull/470>`_) - -- Add test for default Expect handler (`#601 <https://github.com/aio-libs/aiohttp/pull/601>`_) - -- Add the first demo project - -- Rename `loader` keyword argument in `web.Request.json` method. (`#646 <https://github.com/aio-libs/aiohttp/pull/646>`_) - -- Add local socket binding for TCPConnector (`#678 <https://github.com/aio-libs/aiohttp/pull/678>`_) - -0.20.2 (01-07-2016) -=================== - -- Enable use of `await` for a class based view (`#717 <https://github.com/aio-libs/aiohttp/pull/717>`_) - -- Check address family to fill wsgi env properly (`#718 <https://github.com/aio-libs/aiohttp/pull/718>`_) - -- Fix memory leak in headers processing (thanks to Marco Paolini) (`#723 <https://github.com/aio-libs/aiohttp/pull/723>`_) - -0.20.1 (12-30-2015) -=================== - -- Raise RuntimeError if Timeout context manager was used outside of - task context. 
- -- Add number of bytes to stream.read_nowait (`#700 <https://github.com/aio-libs/aiohttp/pull/700>`_) - -- Use X-FORWARDED-PROTO for wsgi.url_scheme when available - - -0.20.0 (12-28-2015) -=================== - -- Extend list of web exceptions, add HTTPMisdirectedRequest, - HTTPUpgradeRequired, HTTPPreconditionRequired, HTTPTooManyRequests, - HTTPRequestHeaderFieldsTooLarge, HTTPVariantAlsoNegotiates, - HTTPNotExtended, HTTPNetworkAuthenticationRequired status codes (`#644 <https://github.com/aio-libs/aiohttp/pull/644>`_) - -- Do not remove AUTHORIZATION header by WSGI handler (`#649 <https://github.com/aio-libs/aiohttp/pull/649>`_) - -- Fix broken support for https proxies with authentication (`#617 <https://github.com/aio-libs/aiohttp/pull/617>`_) - -- Get REMOTE_* and SERVER_* http vars from headers when listening on - unix socket (`#654 <https://github.com/aio-libs/aiohttp/pull/654>`_) - -- Add HTTP 308 support (`#663 <https://github.com/aio-libs/aiohttp/pull/663>`_) - -- Add Tf format (time to serve request in seconds, %06f format) to - access log (`#669 <https://github.com/aio-libs/aiohttp/pull/669>`_) - -- Remove one and a half years long deprecated - ClientResponse.read_and_close() method - -- Optimize chunked encoding: use a single syscall instead of 3 calls - on sending chunked encoded data - -- Use TCP_CORK and TCP_NODELAY to optimize network latency and - throughput (`#680 <https://github.com/aio-libs/aiohttp/pull/680>`_) - -- Websocket XOR performance improved (`#687 <https://github.com/aio-libs/aiohttp/pull/687>`_) - -- Avoid sending cookie attributes in Cookie header (`#613 <https://github.com/aio-libs/aiohttp/pull/613>`_) - -- Round server timeouts to seconds for grouping pending calls. That - leads to less amount of poller syscalls e.g. epoll.poll(). 
(`#702 <https://github.com/aio-libs/aiohttp/pull/702>`_) - -- Close connection on websocket handshake error (`#703 <https://github.com/aio-libs/aiohttp/pull/703>`_) - -- Implement class based views (`#684 <https://github.com/aio-libs/aiohttp/pull/684>`_) - -- Add *headers* parameter to ws_connect() (`#709 <https://github.com/aio-libs/aiohttp/pull/709>`_) - -- Drop unused function `parse_remote_addr()` (`#708 <https://github.com/aio-libs/aiohttp/pull/708>`_) - -- Close session on exception (`#707 <https://github.com/aio-libs/aiohttp/pull/707>`_) - -- Store http code and headers in WSServerHandshakeError (`#706 <https://github.com/aio-libs/aiohttp/pull/706>`_) - -- Make some low-level message properties readonly (`#710 <https://github.com/aio-libs/aiohttp/pull/710>`_) - - -0.19.0 (11-25-2015) -=================== - -- Memory leak in ParserBuffer (`#579 <https://github.com/aio-libs/aiohttp/pull/579>`_) - -- Support gunicorn's `max_requests` settings in gunicorn worker - -- Fix wsgi environment building (`#573 <https://github.com/aio-libs/aiohttp/pull/573>`_) - -- Improve access logging (`#572 <https://github.com/aio-libs/aiohttp/pull/572>`_) - -- Drop unused host and port from low-level server (`#586 <https://github.com/aio-libs/aiohttp/pull/586>`_) - -- Add Python 3.5 `async for` implementation to server websocket (`#543 <https://github.com/aio-libs/aiohttp/pull/543>`_) - -- Add Python 3.5 `async for` implementation to client websocket - -- Add Python 3.5 `async with` implementation to client websocket - -- Add charset parameter to web.Response constructor (`#593 <https://github.com/aio-libs/aiohttp/pull/593>`_) - -- Forbid passing both Content-Type header and content_type or charset - params into web.Response constructor - -- Forbid duplicating of web.Application and web.Request (`#602 <https://github.com/aio-libs/aiohttp/pull/602>`_) - -- Add an option to pass Origin header in ws_connect (`#607 <https://github.com/aio-libs/aiohttp/pull/607>`_) - -- Add 
json_response function (`#592 <https://github.com/aio-libs/aiohttp/pull/592>`_) - -- Make concurrent connections respect limits (`#581 <https://github.com/aio-libs/aiohttp/pull/581>`_) - -- Collect history of responses if redirects occur (`#614 <https://github.com/aio-libs/aiohttp/pull/614>`_) - -- Enable passing pre-compressed data in requests (`#621 <https://github.com/aio-libs/aiohttp/pull/621>`_) - -- Expose named routes via UrlDispatcher.named_routes() (`#622 <https://github.com/aio-libs/aiohttp/pull/622>`_) - -- Allow disabling sendfile by environment variable AIOHTTP_NOSENDFILE (`#629 <https://github.com/aio-libs/aiohttp/pull/629>`_) - -- Use ensure_future if available - -- Always quote params for Content-Disposition (`#641 <https://github.com/aio-libs/aiohttp/pull/641>`_) - -- Support async for in multipart reader (`#640 <https://github.com/aio-libs/aiohttp/pull/640>`_) - -- Add Timeout context manager (`#611 <https://github.com/aio-libs/aiohttp/pull/611>`_) - -0.18.4 (13-11-2015) -=================== - -- Relax rule for router names again by adding dash to allowed - characters: they may contain identifiers, dashes, dots and columns - -0.18.3 (25-10-2015) -=================== - -- Fix formatting for _RequestContextManager helper (`#590 <https://github.com/aio-libs/aiohttp/pull/590>`_) - -0.18.2 (22-10-2015) -=================== - -- Fix regression for OpenSSL < 1.0.0 (`#583 <https://github.com/aio-libs/aiohttp/pull/583>`_) - -0.18.1 (20-10-2015) -=================== - -- Relax rule for router names: they may contain dots and columns - starting from now - -0.18.0 (19-10-2015) -=================== - -- Use errors.HttpProcessingError.message as HTTP error reason and - message (`#459 <https://github.com/aio-libs/aiohttp/pull/459>`_) - -- Optimize cythonized multidict a bit - -- Change repr's of multidicts and multidict views - -- default headers in ClientSession are now case-insensitive - -- Make '=' char and 'wss://' schema safe in urls (`#477 
<https://github.com/aio-libs/aiohttp/pull/477>`_) - -- `ClientResponse.close()` forces connection closing by default from now (`#479 <https://github.com/aio-libs/aiohttp/pull/479>`_) - - N.B. Backward incompatible change: was `.close(force=False) Using - `force` parameter for the method is deprecated: use `.release()` - instead. - -- Properly requote URL's path (`#480 <https://github.com/aio-libs/aiohttp/pull/480>`_) - -- add `skip_auto_headers` parameter for client API (`#486 <https://github.com/aio-libs/aiohttp/pull/486>`_) - -- Properly parse URL path in aiohttp.web.Request (`#489 <https://github.com/aio-libs/aiohttp/pull/489>`_) - -- Raise RuntimeError when chunked enabled and HTTP is 1.0 (`#488 <https://github.com/aio-libs/aiohttp/pull/488>`_) - -- Fix a bug with processing io.BytesIO as data parameter for client API (`#500 <https://github.com/aio-libs/aiohttp/pull/500>`_) - -- Skip auto-generation of Content-Type header (`#507 <https://github.com/aio-libs/aiohttp/pull/507>`_) - -- Use sendfile facility for static file handling (`#503 <https://github.com/aio-libs/aiohttp/pull/503>`_) - -- Default `response_factory` in `app.router.add_static` now is - `StreamResponse`, not `None`. The functionality is not changed if - default is not specified. - -- Drop `ClientResponse.message` attribute, it was always implementation detail. - -- Streams are optimized for speed and mostly memory in case of a big - HTTP message sizes (`#496 <https://github.com/aio-libs/aiohttp/pull/496>`_) - -- Fix a bug for server-side cookies for dropping cookie and setting it - again without Max-Age parameter. 
- -- Don't trim redirect URL in client API (`#499 <https://github.com/aio-libs/aiohttp/pull/499>`_) - -- Extend precision of access log "D" to milliseconds (`#527 <https://github.com/aio-libs/aiohttp/pull/527>`_) - -- Deprecate `StreamResponse.start()` method in favor of - `StreamResponse.prepare()` coroutine (`#525 <https://github.com/aio-libs/aiohttp/pull/525>`_) - - `.start()` is still supported but responses begun with `.start()` - does not call signal for response preparing to be sent. - -- Add `StreamReader.__repr__` - -- Drop Python 3.3 support, from now minimal required version is Python - 3.4.1 (`#541 <https://github.com/aio-libs/aiohttp/pull/541>`_) - -- Add `async with` support for `ClientSession.request()` and family (`#536 <https://github.com/aio-libs/aiohttp/pull/536>`_) - -- Ignore message body on 204 and 304 responses (`#505 <https://github.com/aio-libs/aiohttp/pull/505>`_) - -- `TCPConnector` processed both IPv4 and IPv6 by default (`#559 <https://github.com/aio-libs/aiohttp/pull/559>`_) - -- Add `.routes()` view for urldispatcher (`#519 <https://github.com/aio-libs/aiohttp/pull/519>`_) - -- Route name should be a valid identifier name from now (`#567 <https://github.com/aio-libs/aiohttp/pull/567>`_) - -- Implement server signals (`#562 <https://github.com/aio-libs/aiohttp/pull/562>`_) - -- Drop a year-old deprecated *files* parameter from client API. 
- -- Added `async for` support for aiohttp stream (`#542 <https://github.com/aio-libs/aiohttp/pull/542>`_) - -0.17.4 (09-29-2015) -=================== - -- Properly parse URL path in aiohttp.web.Request (`#489 <https://github.com/aio-libs/aiohttp/pull/489>`_) - -- Add missing coroutine decorator, the client api is await-compatible now - -0.17.3 (08-28-2015) -=================== - -- Remove Content-Length header on compressed responses (`#450 <https://github.com/aio-libs/aiohttp/pull/450>`_) - -- Support Python 3.5 - -- Improve performance of transport in-use list (`#472 <https://github.com/aio-libs/aiohttp/pull/472>`_) - -- Fix connection pooling (`#473 <https://github.com/aio-libs/aiohttp/pull/473>`_) - -0.17.2 (08-11-2015) -=================== - -- Don't forget to pass `data` argument forward (`#462 <https://github.com/aio-libs/aiohttp/pull/462>`_) - -- Fix multipart read bytes count (`#463 <https://github.com/aio-libs/aiohttp/pull/463>`_) - -0.17.1 (08-10-2015) -=================== - -- Fix multidict comparison to arbitrary abc.Mapping - -0.17.0 (08-04-2015) -=================== - -- Make StaticRoute support Last-Modified and If-Modified-Since headers (`#386 <https://github.com/aio-libs/aiohttp/pull/386>`_) - -- Add Request.if_modified_since and Stream.Response.last_modified properties - -- Fix deflate compression when writing a chunked response (`#395 <https://github.com/aio-libs/aiohttp/pull/395>`_) - -- Request`s content-length header is cleared now after redirect from - POST method (`#391 <https://github.com/aio-libs/aiohttp/pull/391>`_) - -- Return a 400 if server received a non HTTP content (`#405 <https://github.com/aio-libs/aiohttp/pull/405>`_) - -- Fix keep-alive support for aiohttp clients (`#406 <https://github.com/aio-libs/aiohttp/pull/406>`_) - -- Allow gzip compression in high-level server response interface (`#403 <https://github.com/aio-libs/aiohttp/pull/403>`_) - -- Rename TCPConnector.resolve and family to dns_cache (`#415 
<https://github.com/aio-libs/aiohttp/pull/415>`_) - -- Make UrlDispatcher ignore quoted characters during url matching (`#414 <https://github.com/aio-libs/aiohttp/pull/414>`_) - Backward-compatibility warning: this may change the url matched by - your queries if they send quoted character (like %2F for /) (`#414 <https://github.com/aio-libs/aiohttp/pull/414>`_) - -- Use optional cchardet accelerator if present (`#418 <https://github.com/aio-libs/aiohttp/pull/418>`_) - -- Borrow loop from Connector in ClientSession if loop is not set - -- Add context manager support to ClientSession for session closing. - -- Add toplevel get(), post(), put(), head(), delete(), options(), - patch() coroutines. - -- Fix IPv6 support for client API (`#425 <https://github.com/aio-libs/aiohttp/pull/425>`_) - -- Pass SSL context through proxy connector (`#421 <https://github.com/aio-libs/aiohttp/pull/421>`_) - -- Make the rule: path for add_route should start with slash - -- Don't process request finishing by low-level server on closed event loop - -- Don't override data if multiple files are uploaded with same key (`#433 <https://github.com/aio-libs/aiohttp/pull/433>`_) - -- Ensure multipart.BodyPartReader.read_chunk read all the necessary data - to avoid false assertions about malformed multipart payload - -- Don't send body for 204, 205 and 304 http exceptions (`#442 <https://github.com/aio-libs/aiohttp/pull/442>`_) - -- Correctly skip Cython compilation if MSVC not found (`#453 <https://github.com/aio-libs/aiohttp/pull/453>`_) - -- Add response factory to StaticRoute (`#456 <https://github.com/aio-libs/aiohttp/pull/456>`_) - -- Don't append trailing CRLF for multipart.BodyPartReader (`#454 <https://github.com/aio-libs/aiohttp/pull/454>`_) - - -0.16.6 (07-15-2015) -=================== - -- Skip compilation on Windows if vcvarsall.bat cannot be found (`#438 <https://github.com/aio-libs/aiohttp/pull/438>`_) - -0.16.5 (06-13-2015) -=================== - -- Get rid of all comprehensions 
and yielding in _multidict (`#410 <https://github.com/aio-libs/aiohttp/pull/410>`_) - - -0.16.4 (06-13-2015) -=================== - -- Don't clear current exception in multidict's `__repr__` (cythonized - versions) (`#410 <https://github.com/aio-libs/aiohttp/pull/410>`_) - - -0.16.3 (05-30-2015) -=================== - -- Fix StaticRoute vulnerability to directory traversal attacks (`#380 <https://github.com/aio-libs/aiohttp/pull/380>`_) - - -0.16.2 (05-27-2015) -=================== - -- Update python version required for `__del__` usage: it's actually - 3.4.1 instead of 3.4.0 - -- Add check for presence of loop.is_closed() method before call the - former (`#378 <https://github.com/aio-libs/aiohttp/pull/378>`_) - - -0.16.1 (05-27-2015) -=================== - -- Fix regression in static file handling (`#377 <https://github.com/aio-libs/aiohttp/pull/377>`_) - -0.16.0 (05-26-2015) -=================== - -- Unset waiter future after cancellation (`#363 <https://github.com/aio-libs/aiohttp/pull/363>`_) - -- Update request url with query parameters (`#372 <https://github.com/aio-libs/aiohttp/pull/372>`_) - -- Support new `fingerprint` param of TCPConnector to enable verifying - SSL certificates via MD5, SHA1, or SHA256 digest (`#366 <https://github.com/aio-libs/aiohttp/pull/366>`_) - -- Setup uploaded filename if field value is binary and transfer - encoding is not specified (`#349 <https://github.com/aio-libs/aiohttp/pull/349>`_) - -- Implement `ClientSession.close()` method - -- Implement `connector.closed` readonly property - -- Implement `ClientSession.closed` readonly property - -- Implement `ClientSession.connector` readonly property - -- Implement `ClientSession.detach` method - -- Add `__del__` to client-side objects: sessions, connectors, - connections, requests, responses. 
- -- Refactor connections cleanup by connector (`#357 <https://github.com/aio-libs/aiohttp/pull/357>`_) - -- Add `limit` parameter to connector constructor (`#358 <https://github.com/aio-libs/aiohttp/pull/358>`_) - -- Add `request.has_body` property (`#364 <https://github.com/aio-libs/aiohttp/pull/364>`_) - -- Add `response_class` parameter to `ws_connect()` (`#367 <https://github.com/aio-libs/aiohttp/pull/367>`_) - -- `ProxyConnector` does not support keep-alive requests by default - starting from now (`#368 <https://github.com/aio-libs/aiohttp/pull/368>`_) - -- Add `connector.force_close` property - -- Add ws_connect to ClientSession (`#374 <https://github.com/aio-libs/aiohttp/pull/374>`_) - -- Support optional `chunk_size` parameter in `router.add_static()` - - -0.15.3 (04-22-2015) -=================== - -- Fix graceful shutdown handling - -- Fix `Expect` header handling for not found and not allowed routes (`#340 <https://github.com/aio-libs/aiohttp/pull/340>`_) - - -0.15.2 (04-19-2015) -=================== - -- Flow control subsystem refactoring - -- HTTP server performance optimizations - -- Allow to match any request method with `*` - -- Explicitly call drain on transport (`#316 <https://github.com/aio-libs/aiohttp/pull/316>`_) - -- Make chardet module dependency mandatory (`#318 <https://github.com/aio-libs/aiohttp/pull/318>`_) - -- Support keep-alive for HTTP 1.0 (`#325 <https://github.com/aio-libs/aiohttp/pull/325>`_) - -- Do not chunk single file during upload (`#327 <https://github.com/aio-libs/aiohttp/pull/327>`_) - -- Add ClientSession object for cookie storage and default headers (`#328 <https://github.com/aio-libs/aiohttp/pull/328>`_) - -- Add `keep_alive_on` argument for HTTP server handler. 
- - -0.15.1 (03-31-2015) -=================== - -- Pass Autobahn Testsuite tests - -- Fixed websocket fragmentation - -- Fixed websocket close procedure - -- Fixed parser buffer limits - -- Added `timeout` parameter to WebSocketResponse ctor - -- Added `WebSocketResponse.close_code` attribute - - -0.15.0 (03-27-2015) -=================== - -- Client WebSockets support - -- New Multipart system (`#273 <https://github.com/aio-libs/aiohttp/pull/273>`_) - -- Support for "Except" header (`#287 <https://github.com/aio-libs/aiohttp/pull/287>`_) (`#267 <https://github.com/aio-libs/aiohttp/pull/267>`_) - -- Set default Content-Type for post requests (`#184 <https://github.com/aio-libs/aiohttp/pull/184>`_) - -- Fix issue with construction dynamic route with regexps and trailing slash (`#266 <https://github.com/aio-libs/aiohttp/pull/266>`_) - -- Add repr to web.Request - -- Add repr to web.Response - -- Add repr for NotFound and NotAllowed match infos - -- Add repr for web.Application - -- Add repr to UrlMappingMatchInfo (`#217 <https://github.com/aio-libs/aiohttp/pull/217>`_) - -- Gunicorn 19.2.x compatibility - - -0.14.4 (01-29-2015) -=================== - -- Fix issue with error during constructing of url with regex parts (`#264 <https://github.com/aio-libs/aiohttp/pull/264>`_) - - -0.14.3 (01-28-2015) -=================== - -- Use path='/' by default for cookies (`#261 <https://github.com/aio-libs/aiohttp/pull/261>`_) - - -0.14.2 (01-23-2015) -=================== - -- Connections leak in BaseConnector (`#253 <https://github.com/aio-libs/aiohttp/pull/253>`_) - -- Do not swallow websocket reader exceptions (`#255 <https://github.com/aio-libs/aiohttp/pull/255>`_) - -- web.Request's read, text, json are memorized (`#250 <https://github.com/aio-libs/aiohttp/pull/250>`_) - - -0.14.1 (01-15-2015) -=================== - -- HttpMessage._add_default_headers does not overwrite existing headers (`#216 <https://github.com/aio-libs/aiohttp/pull/216>`_) - -- Expose multidict classes at 
package level - -- add `aiohttp.web.WebSocketResponse` - -- According to RFC 6455 websocket subprotocol preference order is - provided by client, not by server - -- websocket's ping and pong accept optional message parameter - -- multidict views do not accept `getall` parameter anymore, it - returns the full body anyway. - -- multidicts have optional Cython optimization, cythonized version of - multidicts is about 5 times faster than pure Python. - -- multidict.getall() returns `list`, not `tuple`. - -- Backward incompatible change: now there are two mutable multidicts - (`MultiDict`, `CIMultiDict`) and two immutable multidict proxies - (`MultiDictProxy` and `CIMultiDictProxy`). Previous edition of - multidicts was not a part of public API BTW. - -- Router refactoring to push Not Allowed and Not Found in middleware processing - -- Convert `ConnectionError` to `aiohttp.DisconnectedError` and don't - eat `ConnectionError` exceptions from web handlers. - -- Remove hop headers from Response class, wsgi response still uses hop headers. - -- Allow to send raw chunked encoded response. - -- Allow to encode output bytes stream into chunked encoding. - -- Allow to compress output bytes stream with `deflate` encoding. - -- Server has 75 seconds keepalive timeout now, was non-keepalive by default. - -- Application does not accept `**kwargs` anymore ((`#243 <https://github.com/aio-libs/aiohttp/pull/243>`_)). - -- Request is inherited from dict now for making per-request storage to - middlewares ((`#242 <https://github.com/aio-libs/aiohttp/pull/242>`_)). 
- - -0.13.1 (12-31-2014) -=================== - -- Add `aiohttp.web.StreamResponse.started` property (`#213 <https://github.com/aio-libs/aiohttp/pull/213>`_) - -- HTML escape traceback text in `ServerHttpProtocol.handle_error` - -- Mention handler and middlewares in `aiohttp.web.RequestHandler.handle_request` - on error ((`#218 <https://github.com/aio-libs/aiohttp/pull/218>`_)) - - -0.13.0 (12-29-2014) -=================== - -- `StreamResponse.charset` converts value to lower-case on assigning. - -- Chain exceptions when raise `ClientRequestError`. - -- Support custom regexps in route variables (`#204 <https://github.com/aio-libs/aiohttp/pull/204>`_) - -- Fixed graceful shutdown, disable keep-alive on connection closing. - -- Decode HTTP message with `utf-8` encoding, some servers send headers - in utf-8 encoding (`#207 <https://github.com/aio-libs/aiohttp/pull/207>`_) - -- Support `aiohttp.web` middlewares (`#209 <https://github.com/aio-libs/aiohttp/pull/209>`_) - -- Add ssl_context to TCPConnector (`#206 <https://github.com/aio-libs/aiohttp/pull/206>`_) - - -0.12.0 (12-12-2014) -=================== - -- Deep refactoring of `aiohttp.web` in backward-incompatible manner. - Sorry, we have to do this. - -- Automatically force aiohttp.web handlers to coroutines in - `UrlDispatcher.add_route()` (`#186 <https://github.com/aio-libs/aiohttp/pull/186>`_) - -- Rename `Request.POST()` function to `Request.post()` - -- Added POST attribute - -- Response processing refactoring: constructor does not accept Request - instance anymore. 
- -- Pass application instance to finish callback - -- Exceptions refactoring - -- Do not unquote query string in `aiohttp.web.Request` - -- Fix concurrent access to payload in `RequestHandle.handle_request()` - -- Add access logging to `aiohttp.web` - -- Gunicorn worker for `aiohttp.web` - -- Removed deprecated `AsyncGunicornWorker` - -- Removed deprecated HttpClient - - -0.11.0 (11-29-2014) -=================== - -- Support named routes in `aiohttp.web.UrlDispatcher` (`#179 <https://github.com/aio-libs/aiohttp/pull/179>`_) - -- Make websocket subprotocols conform to spec (`#181 <https://github.com/aio-libs/aiohttp/pull/181>`_) - - -0.10.2 (11-19-2014) -=================== - -- Don't unquote `environ['PATH_INFO']` in wsgi.py (`#177 <https://github.com/aio-libs/aiohttp/pull/177>`_) - - -0.10.1 (11-17-2014) -=================== - -- aiohttp.web.HTTPException and descendants now fills response body - with string like `404: NotFound` - -- Fix multidict `__iter__`, the method should iterate over keys, not - (key, value) pairs. - - -0.10.0 (11-13-2014) -=================== - -- Add aiohttp.web subpackage for highlevel HTTP server support. - -- Add *reason* optional parameter to aiohttp.protocol.Response ctor. - -- Fix aiohttp.client bug for sending file without content-type. 
- -- Change error text for connection closed between server responses - from 'Can not read status line' to explicit 'Connection closed by - server' - -- Drop closed connections from connector (`#173 <https://github.com/aio-libs/aiohttp/pull/173>`_) - -- Set server.transport to None on .closing() (`#172 <https://github.com/aio-libs/aiohttp/pull/172>`_) - - -0.9.3 (10-30-2014) -================== - -- Fix compatibility with asyncio 3.4.1+ (`#170 <https://github.com/aio-libs/aiohttp/pull/170>`_) - - -0.9.2 (10-16-2014) -================== - -- Improve redirect handling (`#157 <https://github.com/aio-libs/aiohttp/pull/157>`_) - -- Send raw files as is (`#153 <https://github.com/aio-libs/aiohttp/pull/153>`_) - -- Better websocket support (`#150 <https://github.com/aio-libs/aiohttp/pull/150>`_) - - -0.9.1 (08-30-2014) -================== - -- Added MultiDict support for client request params and data (`#114 <https://github.com/aio-libs/aiohttp/pull/114>`_). - -- Fixed parameter type for IncompleteRead exception (`#118 <https://github.com/aio-libs/aiohttp/pull/118>`_). - -- Strictly require ASCII headers names and values (`#137 <https://github.com/aio-libs/aiohttp/pull/137>`_) - -- Keep port in ProxyConnector (`#128 <https://github.com/aio-libs/aiohttp/pull/128>`_). - -- Python 3.4.1 compatibility (`#131 <https://github.com/aio-libs/aiohttp/pull/131>`_). - - -0.9.0 (07-08-2014) -================== - -- Better client basic authentication support (`#112 <https://github.com/aio-libs/aiohttp/pull/112>`_). - -- Fixed incorrect line splitting in HttpRequestParser (`#97 <https://github.com/aio-libs/aiohttp/pull/97>`_). - -- Support StreamReader and DataQueue as request data. - -- Client files handling refactoring (`#20 <https://github.com/aio-libs/aiohttp/pull/20>`_). - -- Backward incompatible: Replace DataQueue with StreamReader for - request payload (`#87 <https://github.com/aio-libs/aiohttp/pull/87>`_). 
- - -0.8.4 (07-04-2014) -================== - -- Change ProxyConnector authorization parameters. - - -0.8.3 (07-03-2014) -================== - -- Publish TCPConnector properties: verify_ssl, family, resolve, resolved_hosts. - -- Don't parse message body for HEAD responses. - -- Refactor client response decoding. - - -0.8.2 (06-22-2014) -================== - -- Make ProxyConnector.proxy immutable property. - -- Make UnixConnector.path immutable property. - -- Fix resource leak for aiohttp.request() with implicit connector. - -- Rename Connector's reuse_timeout to keepalive_timeout. - - -0.8.1 (06-18-2014) -================== - -- Use case insensitive multidict for server request/response headers. - -- MultiDict.getall() accepts default value. - -- Catch server ConnectionError. - -- Accept MultiDict (and derived) instances in aiohttp.request header argument. - -- Proxy 'CONNECT' support. - - -0.8.0 (06-06-2014) -================== - -- Add support for utf-8 values in HTTP headers - -- Allow to use custom response class instead of HttpResponse - -- Use MultiDict for client request headers - -- Use MultiDict for server request/response headers - -- Store response headers in ClientResponse.headers attribute - -- Get rid of timeout parameter in aiohttp.client API - -- Exceptions refactoring - - -0.7.3 (05-20-2014) -================== - -- Simple HTTP proxy support. - - -0.7.2 (05-14-2014) -================== - -- Get rid of `__del__` methods - -- Use ResourceWarning instead of logging warning record. - - -0.7.1 (04-28-2014) -================== - -- Do not unquote client request urls. - -- Allow multiple waiters on transport drain. - -- Do not return client connection to pool in case of exceptions. - -- Rename SocketConnector to TCPConnector and UnixSocketConnector to - UnixConnector. - - -0.7.0 (04-16-2014) -================== - -- Connection flow control. - -- HTTP client session/connection pool refactoring. - -- Better handling for bad server requests. 
- - -0.6.5 (03-29-2014) -================== - -- Added client session reuse timeout. - -- Better client request cancellation support. - -- Better handling responses without content length. - -- Added HttpClient verify_ssl parameter support. - - -0.6.4 (02-27-2014) -================== - -- Log content-length missing warning only for put and post requests. - - -0.6.3 (02-27-2014) -================== - -- Better support for server exit. - -- Read response body until EOF if content-length is not defined (`#14 <https://github.com/aio-libs/aiohttp/pull/14>`_) - - -0.6.2 (02-18-2014) -================== - -- Fix trailing char in allowed_methods. - -- Start slow request timer for first request. - - -0.6.1 (02-17-2014) -================== - -- Added utility method HttpResponse.read_and_close() - -- Added slow request timeout. - -- Enable socket SO_KEEPALIVE if available. - - -0.6.0 (02-12-2014) -================== - -- Better handling for process exit. - - -0.5.0 (01-29-2014) -================== - -- Allow to use custom HttpRequest client class. - -- Use gunicorn keepalive setting for asynchronous worker. - -- Log leaking responses. - -- python 3.4 compatibility - - -0.4.4 (11-15-2013) -================== - -- Resolve only AF_INET family, because it is not clear how to pass - extra info to asyncio. - - -0.4.3 (11-15-2013) -================== - -- Allow to wait completion of request with `HttpResponse.wait_for_close()` - - -0.4.2 (11-14-2013) -================== - -- Handle exception in client request stream. - -- Prevent host resolving for each client request. - - -0.4.1 (11-12-2013) -================== - -- Added client support for `expect: 100-continue` header. 
- - -0.4 (11-06-2013) -================ - -- Added custom wsgi application close procedure - -- Fixed concurrent host failure in HttpClient - - -0.3 (11-04-2013) -================ - -- Added PortMapperWorker - -- Added HttpClient - -- Added TCP connection timeout to HTTP client - -- Better client connection errors handling - -- Gracefully handle process exit - - -0.2 -=== - -- Fix packaging diff --git a/docs/changes.rst b/docs/changes.rst index 089f67235a1..8c7a9bb0512 100644 --- a/docs/changes.rst +++ b/docs/changes.rst @@ -16,5 +16,3 @@ Changelog .. include:: ../CHANGES.rst :start-after: .. towncrier release notes start - -.. include:: ../HISTORY.rst From fe49d08c06c5ae50336355b635b12d6a62b099db Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 29 Oct 2024 20:57:08 +0000 Subject: [PATCH 0785/1511] [PR #9497/e23e036d backport][3.10] Add missing docs for timeout parameter in WebSocketResponse.__init__() (#9586) Co-authored-by: lenard-mosys <lenard@mo-sys.com> --- docs/web_reference.rst | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/docs/web_reference.rst b/docs/web_reference.rst index 06c1c03f598..4174bbd48b9 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -982,12 +982,18 @@ and :ref:`aiohttp-web-signals` handlers:: connection if `pong` response is not received. The timer is reset on any data reception. + :param float timeout: Timeout value for the ``close`` + operation. After sending the close websocket message, + ``close`` waits for ``timeout`` seconds for a response. + Default value is ``10.0`` (10 seconds for ``close`` + operation) + :param float receive_timeout: Timeout value for `receive` - operations. Default value is None + operations. Default value is :data:`None` (no timeout for receive operation) :param bool compress: Enable per-message deflate extension support. - False for disabled, default value is True. 
+ :data:`False` for disabled, default value is :data:`True`. :param int max_msg_size: maximum size of read websocket message, 4 MB by default. To disable the size limit use ``0``. From 09d86c7c88a23f2bbb18e40ba86b804e7d1f853f Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 29 Oct 2024 12:29:38 -1000 Subject: [PATCH 0786/1511] [PR #7942/0b71e1c backport][3.11] added server_hostname param to ws_connect method (#9589) Co-authored-by: tamir1400 <tamir1400@gmail.com> closes #9414 --- CHANGES/7941.feature | 1 + aiohttp/client.py | 4 ++++ 2 files changed, 5 insertions(+) create mode 100644 CHANGES/7941.feature diff --git a/CHANGES/7941.feature b/CHANGES/7941.feature new file mode 100644 index 00000000000..6f4530f103a --- /dev/null +++ b/CHANGES/7941.feature @@ -0,0 +1 @@ +Added ``server_hostname`` parameter to ``ws_connect``. diff --git a/aiohttp/client.py b/aiohttp/client.py index 34df772eba8..fbba62739a8 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -878,6 +878,7 @@ def ws_connect( verify_ssl: Optional[bool] = None, fingerprint: Optional[bytes] = None, ssl_context: Optional[SSLContext] = None, + server_hostname: Optional[str] = None, proxy_headers: Optional[LooseHeaders] = None, compress: int = 0, max_msg_size: int = 4 * 1024 * 1024, @@ -903,6 +904,7 @@ def ws_connect( verify_ssl=verify_ssl, fingerprint=fingerprint, ssl_context=ssl_context, + server_hostname=server_hostname, proxy_headers=proxy_headers, compress=compress, max_msg_size=max_msg_size, @@ -930,6 +932,7 @@ async def _ws_connect( verify_ssl: Optional[bool] = None, fingerprint: Optional[bytes] = None, ssl_context: Optional[SSLContext] = None, + server_hostname: Optional[str] = None, proxy_headers: Optional[LooseHeaders] = None, compress: int = 0, max_msg_size: int = 4 * 1024 * 1024, @@ -1004,6 +1007,7 @@ async def _ws_connect( proxy=proxy, proxy_auth=proxy_auth, ssl=ssl, + server_hostname=server_hostname, proxy_headers=proxy_headers, ) From 
0664a50bc7f52ce78a12205082df94db66bb9db2 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 29 Oct 2024 23:12:08 +0000 Subject: [PATCH 0787/1511] [PR #9585/0f2e9ab4 backport][3.10] Add client request creation benchmarks for headers and cookies (#9591) Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_benchmarks_client_request.py | 45 +++++++++++++++++++++++++ 1 file changed, 45 insertions(+) diff --git a/tests/test_benchmarks_client_request.py b/tests/test_benchmarks_client_request.py index 63c77dfcdc8..cf03dffdfdc 100644 --- a/tests/test_benchmarks_client_request.py +++ b/tests/test_benchmarks_client_request.py @@ -7,6 +7,7 @@ from yarl import URL from aiohttp.client_reqrep import ClientRequest +from aiohttp.http_writer import HttpVersion11 def test_client_request_update_cookies( @@ -20,3 +21,47 @@ def test_client_request_update_cookies( @benchmark def _run() -> None: req.update_cookies(cookies=morsel_cookie) + + +def test_create_client_request_with_cookies( + loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture +) -> None: + url = URL("http://python.org") + + @benchmark + def _run() -> None: + ClientRequest( + method="get", + url=url, + loop=loop, + headers=None, + data=None, + cookies={"cookie": "value"}, + auth=None, + version=HttpVersion11, + compress=False, + chunked=None, + expect100=False, + ) + + +def test_create_client_request_with_headers( + loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture +) -> None: + url = URL("http://python.org") + + @benchmark + def _run() -> None: + ClientRequest( + method="get", + url=url, + loop=loop, + headers={"header": "value", "another": "header"}, + data=None, + cookies=None, + auth=None, + version=HttpVersion11, + compress=False, + chunked=None, + expect100=False, + ) From a7ae51ad56a05d5d99fdfb2ff7b6bf0908a77e1c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 29 Oct 2024 
23:21:55 +0000 Subject: [PATCH 0788/1511] [PR #9585/0f2e9ab4 backport][3.11] Add client request creation benchmarks for headers and cookies (#9592) Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_benchmarks_client_request.py | 45 +++++++++++++++++++++++++ 1 file changed, 45 insertions(+) diff --git a/tests/test_benchmarks_client_request.py b/tests/test_benchmarks_client_request.py index 63c77dfcdc8..cf03dffdfdc 100644 --- a/tests/test_benchmarks_client_request.py +++ b/tests/test_benchmarks_client_request.py @@ -7,6 +7,7 @@ from yarl import URL from aiohttp.client_reqrep import ClientRequest +from aiohttp.http_writer import HttpVersion11 def test_client_request_update_cookies( @@ -20,3 +21,47 @@ def test_client_request_update_cookies( @benchmark def _run() -> None: req.update_cookies(cookies=morsel_cookie) + + +def test_create_client_request_with_cookies( + loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture +) -> None: + url = URL("http://python.org") + + @benchmark + def _run() -> None: + ClientRequest( + method="get", + url=url, + loop=loop, + headers=None, + data=None, + cookies={"cookie": "value"}, + auth=None, + version=HttpVersion11, + compress=False, + chunked=None, + expect100=False, + ) + + +def test_create_client_request_with_headers( + loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture +) -> None: + url = URL("http://python.org") + + @benchmark + def _run() -> None: + ClientRequest( + method="get", + url=url, + loop=loop, + headers={"header": "value", "another": "header"}, + data=None, + cookies=None, + auth=None, + version=HttpVersion11, + compress=False, + chunked=None, + expect100=False, + ) From e86b72f663ab1918e588853bf5fe5f458f72dbf2 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 30 Oct 2024 02:37:54 +0000 Subject: [PATCH 0789/1511] [PR #9595/7d0f980a backport][3.11] Upgrade codspeed to 3.0.0 (#9596) --- .mypy.ini | 19 ------------------- 
requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- tests/test_benchmarks_client_request.py | 2 +- tests/test_benchmarks_cookiejar.py | 2 +- tests/test_benchmarks_http_websocket.py | 2 +- 8 files changed, 7 insertions(+), 26 deletions(-) diff --git a/.mypy.ini b/.mypy.ini index c4bc02d7b94..78001c36e8f 100644 --- a/.mypy.ini +++ b/.mypy.ini @@ -36,22 +36,3 @@ ignore_missing_imports = True [mypy-gunicorn.*] ignore_missing_imports = True - -# Benchmark configuration is because pytest_codspeed is missing -# a py.typed file. Can be removed once the following PR is merged -# and released: -# https://github.com/CodSpeedHQ/pytest-codspeed/pull/53 -[mypy-test_benchmarks_client_request] -disable_error_code = - no-any-unimported, - misc - -[mypy-test_benchmarks_cookiejar] -disable_error_code = - no-any-unimported, - misc - -[mypy-test_benchmarks_http_websocket] -disable_error_code = - no-any-unimported, - misc diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 087f7acba6f..e091346a5f3 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -174,7 +174,7 @@ pytest==8.3.3 # pytest-codspeed # pytest-cov # pytest-mock -pytest-codspeed==2.2.1 +pytest-codspeed==3.0.0 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index d1c84f46cfb..47bd3a020ff 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -169,7 +169,7 @@ pytest==8.3.3 # pytest-codspeed # pytest-cov # pytest-mock -pytest-codspeed==2.2.1 +pytest-codspeed==3.0.0 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 6d15e42d3e0..d7d97277bce 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -78,7 +78,7 @@ pytest==8.3.3 # -r requirements/lint.in # pytest-codspeed # pytest-mock -pytest-codspeed==2.2.1 +pytest-codspeed==3.0.0 # via -r 
requirements/lint.in pytest-mock==3.14.0 # via -r requirements/lint.in diff --git a/requirements/test.txt b/requirements/test.txt index e78e3f01ca9..ccb20bda665 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -96,7 +96,7 @@ pytest==8.3.3 # pytest-codspeed # pytest-cov # pytest-mock -pytest-codspeed==2.2.1 +pytest-codspeed==3.0.0 # via -r requirements/test.in pytest-cov==5.0.0 # via -r requirements/test.in diff --git a/tests/test_benchmarks_client_request.py b/tests/test_benchmarks_client_request.py index cf03dffdfdc..de019149076 100644 --- a/tests/test_benchmarks_client_request.py +++ b/tests/test_benchmarks_client_request.py @@ -3,7 +3,7 @@ import asyncio from http.cookies import Morsel -from pytest_codspeed import BenchmarkFixture # type: ignore[import-untyped] +from pytest_codspeed import BenchmarkFixture from yarl import URL from aiohttp.client_reqrep import ClientRequest diff --git a/tests/test_benchmarks_cookiejar.py b/tests/test_benchmarks_cookiejar.py index 508b49f68cb..78566151ef4 100644 --- a/tests/test_benchmarks_cookiejar.py +++ b/tests/test_benchmarks_cookiejar.py @@ -2,7 +2,7 @@ from http.cookies import BaseCookie -from pytest_codspeed import BenchmarkFixture # type: ignore[import-untyped] +from pytest_codspeed import BenchmarkFixture from yarl import URL from aiohttp.cookiejar import CookieJar diff --git a/tests/test_benchmarks_http_websocket.py b/tests/test_benchmarks_http_websocket.py index 8fbeb5f4835..11fe132879f 100644 --- a/tests/test_benchmarks_http_websocket.py +++ b/tests/test_benchmarks_http_websocket.py @@ -2,7 +2,7 @@ import asyncio -from pytest_codspeed import BenchmarkFixture # type: ignore[import-untyped] +from pytest_codspeed import BenchmarkFixture from aiohttp import DataQueue from aiohttp.base_protocol import BaseProtocol From c8165cf96ed244fb747a7449df1fc288d3d52302 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 1 Nov 2024 17:26:15 +0000 Subject: 
[PATCH 0790/1511] [PR #9602/033c4d09 backport][3.10] Remove unused cookies from `BaseConnector` (#9605) Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/connector.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 1bdd14b7e25..8ae588d6e44 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -8,7 +8,6 @@ from collections import defaultdict, deque from contextlib import suppress from http import HTTPStatus -from http.cookies import SimpleCookie from itertools import chain, cycle, islice from time import monotonic from types import TracebackType @@ -274,8 +273,6 @@ def __init__( self._loop = loop self._factory = functools.partial(ResponseHandler, loop=loop) - self.cookies = SimpleCookie() - # start keep-alive connection cleanup task self._cleanup_handle: Optional[asyncio.TimerHandle] = None From 3d011460b68a2c7448bd214586699c598d815913 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 1 Nov 2024 17:34:32 +0000 Subject: [PATCH 0791/1511] [PR #9602/033c4d09 backport][3.11] Remove unused cookies from `BaseConnector` (#9606) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/connector.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 1be9d6d1201..b40a462aeb4 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -8,7 +8,6 @@ from collections import defaultdict, deque from contextlib import suppress from http import HTTPStatus -from http.cookies import SimpleCookie from itertools import chain, cycle, islice from time import monotonic from types import TracebackType @@ -273,8 +272,6 @@ def __init__( self._loop = loop self._factory = functools.partial(ResponseHandler, loop=loop) - self.cookies = SimpleCookie() - # start keep-alive connection cleanup task self._cleanup_handle: Optional[asyncio.TimerHandle] = None From f892979bc71718663a255f0e2a6c5fcf80258954 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Fri, 1 Nov 2024 12:42:35 -0500 Subject: [PATCH 0792/1511] [PR #9600/e6187f6 backport][3.11] Avoid starting connection timeout when a connection is already available (#9607) --- CHANGES/9600.breaking.rst | 3 + aiohttp/client.py | 11 +--- aiohttp/connector.py | 109 ++++++++++++++++++++++--------------- docs/spelling_wordlist.txt | 1 + 4 files changed, 72 insertions(+), 52 deletions(-) create mode 100644 CHANGES/9600.breaking.rst diff --git a/CHANGES/9600.breaking.rst b/CHANGES/9600.breaking.rst new file mode 100644 index 00000000000..5997344e4cd --- /dev/null +++ b/CHANGES/9600.breaking.rst @@ -0,0 +1,3 @@ +Improved performance of the connector when a connection can be reused -- by :user:`bdraco`. + +If ``BaseConnector.connect`` has been subclassed and replaced with custom logic, the ``ceil_timeout`` must be added. 
diff --git a/aiohttp/client.py b/aiohttp/client.py index fbba62739a8..fc92fee7264 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -93,7 +93,6 @@ DEBUG, BasicAuth, TimeoutHandle, - ceil_timeout, get_env_proxy_for_url, method_must_be_empty_body, sentinel, @@ -692,13 +691,9 @@ async def _request( # connection timeout try: - async with ceil_timeout( - real_timeout.connect, - ceil_threshold=real_timeout.ceil_threshold, - ): - conn = await self._connector.connect( - req, traces=traces, timeout=real_timeout - ) + conn = await self._connector.connect( + req, traces=traces, timeout=real_timeout + ) except asyncio.TimeoutError as exc: raise ConnectionTimeoutError( f"Connection timeout to host {url}" diff --git a/aiohttp/connector.py b/aiohttp/connector.py index b40a462aeb4..4e5d03b83a1 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -512,41 +512,20 @@ async def connect( """Get from pool or create new connection.""" key = req.connection_key available = self._available_connections(key) + wait_for_conn = available <= 0 or key in self._waiters + if not wait_for_conn and (proto := self._get(key)) is not None: + # If we do not have to wait and we can get a connection from the pool + # we can avoid the timeout ceil logic and directly return the connection + return await self._reused_connection(key, proto, traces) + + async with ceil_timeout(timeout.connect, timeout.ceil_threshold): + # Wait if there are no available connections or if there are/were + # waiters (i.e. don't steal connection from a waiter about to wake up) + if wait_for_conn: + await self._wait_for_available_connection(key, traces) + if (proto := self._get(key)) is not None: + return await self._reused_connection(key, proto, traces) - # Wait if there are no available connections or if there are/were - # waiters (i.e. 
don't steal connection from a waiter about to wake up) - if available <= 0 or key in self._waiters: - fut: asyncio.Future[None] = self._loop.create_future() - - # This connection will now count towards the limit. - self._waiters[key].append(fut) - - if traces: - for trace in traces: - await trace.send_connection_queued_start() - - try: - await fut - except BaseException as e: - if key in self._waiters: - # remove a waiter even if it was cancelled, normally it's - # removed when it's notified - try: - self._waiters[key].remove(fut) - except ValueError: # fut may no longer be in list - pass - - raise e - finally: - if key in self._waiters and not self._waiters[key]: - del self._waiters[key] - - if traces: - for trace in traces: - await trace.send_connection_queued_end() - - proto = self._get(key) - if proto is None: placeholder = cast(ResponseHandler, _TransportPlaceholder()) self._acquired.add(placeholder) self._acquired_per_host[key].add(placeholder) @@ -574,21 +553,63 @@ async def connect( if traces: for trace in traces: await trace.send_connection_create_end() - else: - if traces: - # Acquire the connection to prevent race conditions with limits - placeholder = cast(ResponseHandler, _TransportPlaceholder()) - self._acquired.add(placeholder) - self._acquired_per_host[key].add(placeholder) - for trace in traces: - await trace.send_connection_reuseconn() - self._acquired.remove(placeholder) - self._drop_acquired_per_host(key, placeholder) + return self._acquired_connection(proto, key) + + async def _reused_connection( + self, key: "ConnectionKey", proto: ResponseHandler, traces: List["Trace"] + ) -> Connection: + if traces: + # Acquire the connection to prevent race conditions with limits + placeholder = cast(ResponseHandler, _TransportPlaceholder()) + self._acquired.add(placeholder) + self._acquired_per_host[key].add(placeholder) + for trace in traces: + await trace.send_connection_reuseconn() + self._acquired.remove(placeholder) + self._drop_acquired_per_host(key, 
placeholder) + return self._acquired_connection(proto, key) + + def _acquired_connection( + self, proto: ResponseHandler, key: "ConnectionKey" + ) -> Connection: + """Mark proto as acquired and wrap it in a Connection object.""" self._acquired.add(proto) self._acquired_per_host[key].add(proto) return Connection(self, key, proto, self._loop) + async def _wait_for_available_connection( + self, key: "ConnectionKey", traces: List["Trace"] + ) -> None: + """Wait until there is an available connection.""" + fut: asyncio.Future[None] = self._loop.create_future() + + # This connection will now count towards the limit. + self._waiters[key].append(fut) + + if traces: + for trace in traces: + await trace.send_connection_queued_start() + + try: + await fut + except BaseException as e: + if key in self._waiters: + # remove a waiter even if it was cancelled, normally it's + # removed when it's notified + with suppress(ValueError): + # fut may no longer be in list + self._waiters[key].remove(fut) + + raise e + finally: + if key in self._waiters and not self._waiters[key]: + del self._waiters[key] + + if traces: + for trace in traces: + await trace.send_connection_queued_end() + def _get(self, key: "ConnectionKey") -> Optional[ResponseHandler]: try: conns = self._conns[key] diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index 514477e8fcb..c135ebd2084 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -294,6 +294,7 @@ ssl SSLContext startup subapplication +subclassed subclasses subdirectory submodules From 5e704c49f3a5b22f22b4a56391233f14894da2d5 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 1 Nov 2024 18:32:38 +0000 Subject: [PATCH 0793/1511] [PR #9604/1e34a67e backport][3.10] Add benchmark for serializing headers (#9608) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- tests/test_benchmarks_http_writer.py | 33 ++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 tests/test_benchmarks_http_writer.py diff --git a/tests/test_benchmarks_http_writer.py b/tests/test_benchmarks_http_writer.py new file mode 100644 index 00000000000..0d52ca875e6 --- /dev/null +++ b/tests/test_benchmarks_http_writer.py @@ -0,0 +1,33 @@ +"""codspeed benchmarks for http writer.""" + +from multidict import CIMultiDict +from pytest_codspeed import BenchmarkFixture + +from aiohttp import hdrs +from aiohttp.http_writer import _serialize_headers + + +def test_serialize_headers(benchmark: BenchmarkFixture) -> None: + """Benchmark 100 calls to _serialize_headers.""" + status_line = "HTTP/1.1 200 OK" + headers = CIMultiDict( + { + hdrs.CONTENT_TYPE: "text/plain", + hdrs.CONTENT_LENGTH: "100", + hdrs.CONNECTION: "keep-alive", + hdrs.DATE: "Mon, 23 May 2005 22:38:34 GMT", + hdrs.SERVER: "Test/1.0", + hdrs.CONTENT_ENCODING: "gzip", + hdrs.VARY: "Accept-Encoding", + hdrs.CACHE_CONTROL: "no-cache", + hdrs.PRAGMA: "no-cache", + hdrs.EXPIRES: "0", + hdrs.LAST_MODIFIED: "Mon, 23 May 2005 22:38:34 GMT", + hdrs.ETAG: "1234567890", + } + ) + + @benchmark + def _run() -> None: + for _ in range(100): + _serialize_headers(status_line, headers) From 6873da1d98f2bf1b3d7b34274642761a5ea889a8 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 1 Nov 2024 18:39:27 +0000 Subject: [PATCH 0794/1511] [PR #9604/1e34a67e backport][3.11] Add benchmark for serializing headers (#9609) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- tests/test_benchmarks_http_writer.py | 33 ++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 tests/test_benchmarks_http_writer.py diff --git a/tests/test_benchmarks_http_writer.py b/tests/test_benchmarks_http_writer.py new file mode 100644 index 00000000000..0d52ca875e6 --- /dev/null +++ b/tests/test_benchmarks_http_writer.py @@ -0,0 +1,33 @@ +"""codspeed benchmarks for http writer.""" + +from multidict import CIMultiDict +from pytest_codspeed import BenchmarkFixture + +from aiohttp import hdrs +from aiohttp.http_writer import _serialize_headers + + +def test_serialize_headers(benchmark: BenchmarkFixture) -> None: + """Benchmark 100 calls to _serialize_headers.""" + status_line = "HTTP/1.1 200 OK" + headers = CIMultiDict( + { + hdrs.CONTENT_TYPE: "text/plain", + hdrs.CONTENT_LENGTH: "100", + hdrs.CONNECTION: "keep-alive", + hdrs.DATE: "Mon, 23 May 2005 22:38:34 GMT", + hdrs.SERVER: "Test/1.0", + hdrs.CONTENT_ENCODING: "gzip", + hdrs.VARY: "Accept-Encoding", + hdrs.CACHE_CONTROL: "no-cache", + hdrs.PRAGMA: "no-cache", + hdrs.EXPIRES: "0", + hdrs.LAST_MODIFIED: "Mon, 23 May 2005 22:38:34 GMT", + hdrs.ETAG: "1234567890", + } + ) + + @benchmark + def _run() -> None: + for _ in range(100): + _serialize_headers(status_line, headers) From 8902edd8cee75e16534072ac4a90b8a584ee758b Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 1 Nov 2024 20:34:27 +0000 Subject: [PATCH 0795/1511] [PR #9611/487d179b backport][3.10] Remove unused `_transport` from `Connection` (#9613) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/connector.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 8ae588d6e44..456ab89eb83 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -109,7 +109,6 @@ def __del__(self) -> None: class Connection: _source_traceback = None - _transport = None def __init__( self, @@ -1439,7 +1438,6 @@ async def _create_proxy_connection( raise else: conn._protocol = None - conn._transport = None try: if resp.status != 200: message = resp.reason From 4670b28552ab9b52fd07f1b0cecc345708c89360 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 1 Nov 2024 20:58:18 +0000 Subject: [PATCH 0796/1511] [PR #9611/487d179b backport][3.11] Remove unused `_transport` from `Connection` (#9614) Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/connector.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 4e5d03b83a1..3365d9b89ea 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -108,7 +108,6 @@ def __del__(self) -> None: class Connection: _source_traceback = None - _transport = None def __init__( self, @@ -1464,7 +1463,6 @@ async def _create_proxy_connection( raise else: conn._protocol = None - conn._transport = None try: if resp.status != 200: message = resp.reason From d31e0a869db6cd39f53e27ba0165f0e8619627f6 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Fri, 1 Nov 2024 15:59:34 -0500 Subject: [PATCH 0797/1511] [3.11] Add `__slots__` to `_TransportPlaceholder` (#9617) --- aiohttp/connector.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 3365d9b89ea..5149e886b1e 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -198,8 +198,10 @@ def closed(self) -> bool: class _TransportPlaceholder: """placeholder for BaseConnector.connect function""" + __slots__ = () + def close(self) -> None: - pass + """Close the placeholder transport.""" class BaseConnector: From b10292ce14d207b711a312702565e39ede10de95 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 1 Nov 2024 21:55:41 +0000 Subject: [PATCH 0798/1511] [PR #9612/31abf3f4 backport][3.10] Remove unnecessary call to `.keys()` in `BaseConnector._release_waiter` (#9618) Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/connector.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 456ab89eb83..b5143444ec4 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -632,7 +632,7 @@ def _release_waiter(self) -> None: # Having the dict keys ordered this avoids to iterate # at the same order at each call. - queues = list(self._waiters.keys()) + queues = list(self._waiters) random.shuffle(queues) for key in queues: From 0566b827e6e29dc7d3a7a3bc7b59816dd62665cd Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 1 Nov 2024 22:03:09 +0000 Subject: [PATCH 0799/1511] [PR #9612/31abf3f4 backport][3.11] Remove unnecessary call to `.keys()` in `BaseConnector._release_waiter` (#9619) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/connector.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 5149e886b1e..353e4c697d4 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -654,7 +654,7 @@ def _release_waiter(self) -> None: # Having the dict keys ordered this avoids to iterate # at the same order at each call. - queues = list(self._waiters.keys()) + queues = list(self._waiters) random.shuffle(queues) for key in queues: From 8f8c0e5d6d3c2c4b3580891c2b8babd073fb5095 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 1 Nov 2024 22:14:14 +0000 Subject: [PATCH 0800/1511] [PR #9610/82c32033 backport][3.10] Simplify `BaseConnector._available_connections` logic (#9620) Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/connector.py | 30 ++++++++---------------------- 1 file changed, 8 insertions(+), 22 deletions(-) diff --git a/aiohttp/connector.py b/aiohttp/connector.py index b5143444ec4..19ea6e9e49a 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -482,29 +482,15 @@ def _available_connections(self, key: "ConnectionKey") -> int: If it returns less than 1 means that there are no connections available. 
""" - if self._limit: - # total calc available connections - available = self._limit - len(self._acquired) - - # check limit per host - if ( - self._limit_per_host - and available > 0 - and key in self._acquired_per_host - ): - acquired = self._acquired_per_host.get(key) - assert acquired is not None - available = self._limit_per_host - len(acquired) - - elif self._limit_per_host and key in self._acquired_per_host: - # check limit per host - acquired = self._acquired_per_host.get(key) - assert acquired is not None - available = self._limit_per_host - len(acquired) - else: - available = 1 + # check total available connections + if self._limit and (available := self._limit - len(self._acquired)) <= 0: + return available + + # check limit per host + if self._limit_per_host and key in self._acquired_per_host: + return self._limit_per_host - len(self._acquired_per_host[key]) - return available + return 1 async def connect( self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" From 3aa963e6b99bc9fb875123a294a99ab053cc91b7 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 1 Nov 2024 22:14:17 +0000 Subject: [PATCH 0801/1511] [PR #9610/82c32033 backport][3.11] Simplify `BaseConnector._available_connections` logic (#9621) Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/connector.py | 30 ++++++++---------------------- 1 file changed, 8 insertions(+), 22 deletions(-) diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 353e4c697d4..13d7a45801c 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -483,29 +483,15 @@ def _available_connections(self, key: "ConnectionKey") -> int: If it returns less than 1 means that there are no connections available. 
""" - if self._limit: - # total calc available connections - available = self._limit - len(self._acquired) - - # check limit per host - if ( - self._limit_per_host - and available > 0 - and key in self._acquired_per_host - ): - acquired = self._acquired_per_host.get(key) - assert acquired is not None - available = self._limit_per_host - len(acquired) - - elif self._limit_per_host and key in self._acquired_per_host: - # check limit per host - acquired = self._acquired_per_host.get(key) - assert acquired is not None - available = self._limit_per_host - len(acquired) - else: - available = 1 + # check total available connections + if self._limit and (available := self._limit - len(self._acquired)) <= 0: + return available + + # check limit per host + if self._limit_per_host and key in self._acquired_per_host: + return self._limit_per_host - len(self._acquired_per_host[key]) - return available + return 1 async def connect( self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" From 8607d75aab599d8350eb11661fe4c99374b83f30 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 1 Nov 2024 22:50:32 +0000 Subject: [PATCH 0802/1511] [PR #9617/d31e0a86 backport][3.10] Add `__slots__` to `_TransportPlaceholder` (#9622) Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/connector.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 19ea6e9e49a..99d95344994 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -199,8 +199,10 @@ def closed(self) -> bool: class _TransportPlaceholder: """placeholder for BaseConnector.connect function""" + __slots__ = () + def close(self) -> None: - pass + """Close the placeholder transport.""" class BaseConnector: From d9c65b97afe64d2c27e46421dc115c80535930ca Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Fri, 1 Nov 2024 20:04:14 -0500 Subject: [PATCH 0803/1511] [PR #9623/898aa28 backport][3.11] Fix available connection calculation (#9625) --- aiohttp/connector.py | 16 +++++--- tests/test_connector.py | 82 ++++++++++++++++++++++++++++++++++++++++- 2 files changed, 92 insertions(+), 6 deletions(-) diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 13d7a45801c..3f0614109ca 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -484,14 +484,20 @@ def _available_connections(self, key: "ConnectionKey") -> int: available. """ # check total available connections - if self._limit and (available := self._limit - len(self._acquired)) <= 0: - return available + # If there are no limits, this will always return 1 + total_remain = 1 + + if self._limit and (total_remain := self._limit - len(self._acquired)) <= 0: + return total_remain # check limit per host - if self._limit_per_host and key in self._acquired_per_host: - return self._limit_per_host - len(self._acquired_per_host[key]) + if host_remain := self._limit_per_host: + if acquired := self._acquired_per_host.get(key): + host_remain -= len(acquired) + if total_remain > host_remain: + return host_remain - return 1 + return total_remain async def connect( self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" diff --git a/tests/test_connector.py b/tests/test_connector.py index 74713b74acd..02352ff498b 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -47,7 +47,13 @@ def key2(): @pytest.fixture -def ssl_key(): +def other_host_key2() -> ConnectionKey: + # Connection key + return ConnectionKey("otherhost", 80, False, True, None, None, None) + + +@pytest.fixture +def ssl_key() -> ConnectionKey: # Connection key return ConnectionKey("localhost", 80, True, True, None, None, None) @@ -3146,3 +3152,77 @@ def test_default_ssl_context_creation_without_ssl() -> None: with mock.patch.object(connector_module, "ssl", None): assert 
connector_module._make_ssl_context(False) is None assert connector_module._make_ssl_context(True) is None + + +async def test_available_connections_with_limit_per_host( + key: ConnectionKey, other_host_key2: ConnectionKey +) -> None: + """Verify expected values based on active connections with host limit.""" + conn = aiohttp.BaseConnector(limit=3, limit_per_host=2) + assert conn._available_connections(key) == 2 + assert conn._available_connections(other_host_key2) == 2 + proto1 = create_mocked_conn() + connection1 = conn._acquired_connection(proto1, key) + assert conn._available_connections(key) == 1 + assert conn._available_connections(other_host_key2) == 2 + proto2 = create_mocked_conn() + connection2 = conn._acquired_connection(proto2, key) + assert conn._available_connections(key) == 0 + assert conn._available_connections(other_host_key2) == 1 + connection1.close() + assert conn._available_connections(key) == 1 + assert conn._available_connections(other_host_key2) == 2 + connection2.close() + other_proto1 = create_mocked_conn() + other_connection1 = conn._acquired_connection(other_proto1, other_host_key2) + assert conn._available_connections(key) == 2 + assert conn._available_connections(other_host_key2) == 1 + other_connection1.close() + assert conn._available_connections(key) == 2 + assert conn._available_connections(other_host_key2) == 2 + + +@pytest.mark.parametrize("limit_per_host", [0, 10]) +async def test_available_connections_without_limit_per_host( + key: ConnectionKey, other_host_key2: ConnectionKey, limit_per_host: int +) -> None: + """Verify expected values based on active connections with higher host limit.""" + conn = aiohttp.BaseConnector(limit=3, limit_per_host=limit_per_host) + assert conn._available_connections(key) == 3 + assert conn._available_connections(other_host_key2) == 3 + proto1 = create_mocked_conn() + connection1 = conn._acquired_connection(proto1, key) + assert conn._available_connections(key) == 2 + assert 
conn._available_connections(other_host_key2) == 2 + proto2 = create_mocked_conn() + connection2 = conn._acquired_connection(proto2, key) + assert conn._available_connections(key) == 1 + assert conn._available_connections(other_host_key2) == 1 + connection1.close() + assert conn._available_connections(key) == 2 + assert conn._available_connections(other_host_key2) == 2 + connection2.close() + other_proto1 = create_mocked_conn() + other_connection1 = conn._acquired_connection(other_proto1, other_host_key2) + assert conn._available_connections(key) == 2 + assert conn._available_connections(other_host_key2) == 2 + other_connection1.close() + assert conn._available_connections(key) == 3 + assert conn._available_connections(other_host_key2) == 3 + + +async def test_available_connections_no_limits( + key: ConnectionKey, other_host_key2: ConnectionKey +) -> None: + """Verify expected values based on active connections with no limits.""" + # No limits is a special case where available connections should always be 1. + conn = aiohttp.BaseConnector(limit=0, limit_per_host=0) + assert conn._available_connections(key) == 1 + assert conn._available_connections(other_host_key2) == 1 + proto1 = create_mocked_conn() + connection1 = conn._acquired_connection(proto1, key) + assert conn._available_connections(key) == 1 + assert conn._available_connections(other_host_key2) == 1 + connection1.close() + assert conn._available_connections(key) == 1 + assert conn._available_connections(other_host_key2) == 1 From 210bcbb8ee0c3ce34664836403a07a0b97123696 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Fri, 1 Nov 2024 22:04:41 -0500 Subject: [PATCH 0804/1511] [PR #9623/898aa28 backport][3.10] Fix available connection calculation (#9624) --- aiohttp/connector.py | 16 +++++--- tests/test_connector.py | 91 ++++++++++++++++++++++++++++++++++++++++- 2 files changed, 101 insertions(+), 6 deletions(-) diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 99d95344994..efc926d7f48 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -485,14 +485,20 @@ def _available_connections(self, key: "ConnectionKey") -> int: available. """ # check total available connections - if self._limit and (available := self._limit - len(self._acquired)) <= 0: - return available + # If there are no limits, this will always return 1 + total_remain = 1 + + if self._limit and (total_remain := self._limit - len(self._acquired)) <= 0: + return total_remain # check limit per host - if self._limit_per_host and key in self._acquired_per_host: - return self._limit_per_host - len(self._acquired_per_host[key]) + if host_remain := self._limit_per_host: + if acquired := self._acquired_per_host.get(key): + host_remain -= len(acquired) + if total_remain > host_remain: + return host_remain - return 1 + return total_remain async def connect( self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" diff --git a/tests/test_connector.py b/tests/test_connector.py index 94eeb3ca85b..3ad2b1e346a 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -52,7 +52,13 @@ def key2(): @pytest.fixture -def ssl_key(): +def other_host_key2() -> ConnectionKey: + # Connection key + return ConnectionKey("otherhost", 80, False, True, None, None, None) + + +@pytest.fixture +def ssl_key() -> ConnectionKey: # Connection key return ConnectionKey("localhost", 80, True, True, None, None, None) @@ -3151,3 +3157,86 @@ def test_default_ssl_context_creation_without_ssl() -> None: with mock.patch.object(connector_module, "ssl", None): assert 
connector_module._make_ssl_context(False) is None assert connector_module._make_ssl_context(True) is None + + +def _acquired_connection( + conn: aiohttp.BaseConnector, proto: ResponseHandler, key: "ConnectionKey" +) -> Connection: + """Mark proto as acquired and wrap it in a Connection object.""" + conn._acquired.add(proto) + conn._acquired_per_host[key].add(proto) + return Connection(conn, key, proto, conn._loop) + + +async def test_available_connections_with_limit_per_host( + key: ConnectionKey, other_host_key2: ConnectionKey +) -> None: + """Verify expected values based on active connections with host limit.""" + conn = aiohttp.BaseConnector(limit=3, limit_per_host=2) + assert conn._available_connections(key) == 2 + assert conn._available_connections(other_host_key2) == 2 + proto1 = create_mocked_conn() + connection1 = _acquired_connection(conn, proto1, key) + assert conn._available_connections(key) == 1 + assert conn._available_connections(other_host_key2) == 2 + proto2 = create_mocked_conn() + connection2 = _acquired_connection(conn, proto2, key) + assert conn._available_connections(key) == 0 + assert conn._available_connections(other_host_key2) == 1 + connection1.close() + assert conn._available_connections(key) == 1 + assert conn._available_connections(other_host_key2) == 2 + connection2.close() + other_proto1 = create_mocked_conn() + other_connection1 = _acquired_connection(conn, other_proto1, other_host_key2) + assert conn._available_connections(key) == 2 + assert conn._available_connections(other_host_key2) == 1 + other_connection1.close() + assert conn._available_connections(key) == 2 + assert conn._available_connections(other_host_key2) == 2 + + +@pytest.mark.parametrize("limit_per_host", [0, 10]) +async def test_available_connections_without_limit_per_host( + key: ConnectionKey, other_host_key2: ConnectionKey, limit_per_host: int +) -> None: + """Verify expected values based on active connections with higher host limit.""" + conn = 
aiohttp.BaseConnector(limit=3, limit_per_host=limit_per_host) + assert conn._available_connections(key) == 3 + assert conn._available_connections(other_host_key2) == 3 + proto1 = create_mocked_conn() + connection1 = _acquired_connection(conn, proto1, key) + assert conn._available_connections(key) == 2 + assert conn._available_connections(other_host_key2) == 2 + proto2 = create_mocked_conn() + connection2 = _acquired_connection(conn, proto2, key) + assert conn._available_connections(key) == 1 + assert conn._available_connections(other_host_key2) == 1 + connection1.close() + assert conn._available_connections(key) == 2 + assert conn._available_connections(other_host_key2) == 2 + connection2.close() + other_proto1 = create_mocked_conn() + other_connection1 = _acquired_connection(conn, other_proto1, other_host_key2) + assert conn._available_connections(key) == 2 + assert conn._available_connections(other_host_key2) == 2 + other_connection1.close() + assert conn._available_connections(key) == 3 + assert conn._available_connections(other_host_key2) == 3 + + +async def test_available_connections_no_limits( + key: ConnectionKey, other_host_key2: ConnectionKey +) -> None: + """Verify expected values based on active connections with no limits.""" + # No limits is a special case where available connections should always be 1. 
+ conn = aiohttp.BaseConnector(limit=0, limit_per_host=0) + assert conn._available_connections(key) == 1 + assert conn._available_connections(other_host_key2) == 1 + proto1 = create_mocked_conn() + connection1 = _acquired_connection(conn, proto1, key) + assert conn._available_connections(key) == 1 + assert conn._available_connections(other_host_key2) == 1 + connection1.close() + assert conn._available_connections(key) == 1 + assert conn._available_connections(other_host_key2) == 1 From fab525d68369a88d3febf0fc8e74bc88887da265 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 2 Nov 2024 16:11:03 +0000 Subject: [PATCH 0805/1511] [PR #9603/4108ca51 backport][3.10] Improve performance of _serialize_headers (#9626) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/9603.misc.rst | 1 + aiohttp/_http_writer.pyx | 7 +++---- 2 files changed, 4 insertions(+), 4 deletions(-) create mode 100644 CHANGES/9603.misc.rst diff --git a/CHANGES/9603.misc.rst b/CHANGES/9603.misc.rst new file mode 100644 index 00000000000..8a27657cdb9 --- /dev/null +++ b/CHANGES/9603.misc.rst @@ -0,0 +1 @@ +Improved performance of serializing HTTP headers -- by :user:`bdraco`. 
diff --git a/aiohttp/_http_writer.pyx b/aiohttp/_http_writer.pyx index eff85219586..d19c20d76cc 100644 --- a/aiohttp/_http_writer.pyx +++ b/aiohttp/_http_writer.pyx @@ -127,10 +127,6 @@ def _serialize_headers(str status_line, headers): _init_writer(&writer) - for key, val in headers.items(): - _safe_header(to_str(key)) - _safe_header(to_str(val)) - try: if _write_str(&writer, status_line) < 0: raise @@ -140,6 +136,9 @@ def _serialize_headers(str status_line, headers): raise for key, val in headers.items(): + _safe_header(to_str(key)) + _safe_header(to_str(val)) + if _write_str(&writer, to_str(key)) < 0: raise if _write_byte(&writer, b':') < 0: From 0692581592ed08f13e090df421d1109927f7b084 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 2 Nov 2024 16:18:31 +0000 Subject: [PATCH 0806/1511] [PR #9603/4108ca51 backport][3.11] Improve performance of _serialize_headers (#9627) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/9603.misc.rst | 1 + aiohttp/_http_writer.pyx | 7 +++---- 2 files changed, 4 insertions(+), 4 deletions(-) create mode 100644 CHANGES/9603.misc.rst diff --git a/CHANGES/9603.misc.rst b/CHANGES/9603.misc.rst new file mode 100644 index 00000000000..8a27657cdb9 --- /dev/null +++ b/CHANGES/9603.misc.rst @@ -0,0 +1 @@ +Improved performance of serializing HTTP headers -- by :user:`bdraco`. 
diff --git a/aiohttp/_http_writer.pyx b/aiohttp/_http_writer.pyx index eff85219586..d19c20d76cc 100644 --- a/aiohttp/_http_writer.pyx +++ b/aiohttp/_http_writer.pyx @@ -127,10 +127,6 @@ def _serialize_headers(str status_line, headers): _init_writer(&writer) - for key, val in headers.items(): - _safe_header(to_str(key)) - _safe_header(to_str(val)) - try: if _write_str(&writer, status_line) < 0: raise @@ -140,6 +136,9 @@ def _serialize_headers(str status_line, headers): raise for key, val in headers.items(): + _safe_header(to_str(key)) + _safe_header(to_str(val)) + if _write_str(&writer, to_str(key)) < 0: raise if _write_byte(&writer, b':') < 0: From 0550d8f845f0ddd89fecf8e0722227c2c3663f87 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 2 Nov 2024 18:05:38 +0000 Subject: [PATCH 0807/1511] [PR #9629/c95c0251 backport][3.10] Simplify branching in the connector (#9630) Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/connector.py | 54 +++++++++++++++++++------------------------- 1 file changed, 23 insertions(+), 31 deletions(-) diff --git a/aiohttp/connector.py b/aiohttp/connector.py index efc926d7f48..0cdb0ecb9f0 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -364,21 +364,15 @@ def _cleanup(self) -> None: connections = {} deadline = now - timeout for key, conns in self._conns.items(): - alive = [] + alive: List[Tuple[ResponseHandler, float]] = [] for proto, use_time in conns: - if proto.is_connected(): - if use_time - deadline < 0: - transport = proto.transport - proto.close() - if key.is_ssl and not self._cleanup_closed_disabled: - self._cleanup_closed_transports.append(transport) - else: - alive.append((proto, use_time)) - else: - transport = proto.transport - proto.close() - if key.is_ssl and not self._cleanup_closed_disabled: - self._cleanup_closed_transports.append(transport) + if proto.is_connected() and use_time - deadline >= 0: + alive.append((proto, use_time)) + 
continue + transport = proto.transport + proto.close() + if not self._cleanup_closed_disabled and key.is_ssl: + self._cleanup_closed_transports.append(transport) if alive: connections[key] = alive @@ -584,6 +578,7 @@ async def connect( return Connection(self, key, proto, self._loop) def _get(self, key: "ConnectionKey") -> Optional[ResponseHandler]: + """Get next reusable connection for the key or None.""" try: conns = self._conns[key] except KeyError: @@ -592,23 +587,20 @@ def _get(self, key: "ConnectionKey") -> Optional[ResponseHandler]: t1 = self._loop.time() while conns: proto, t0 = conns.pop() - if proto.is_connected(): - if t1 - t0 > self._keepalive_timeout: - transport = proto.transport - proto.close() - # only for SSL transports - if key.is_ssl and not self._cleanup_closed_disabled: - self._cleanup_closed_transports.append(transport) - else: - if not conns: - # The very last connection was reclaimed: drop the key - del self._conns[key] - return proto - else: - transport = proto.transport - proto.close() - if key.is_ssl and not self._cleanup_closed_disabled: - self._cleanup_closed_transports.append(transport) + # We will reuse the connection if it's connected and + # the keepalive timeout has not been exceeded + if proto.is_connected() and t1 - t0 <= self._keepalive_timeout: + if not conns: + # The very last connection was reclaimed: drop the key + del self._conns[key] + return proto + + # Connection cannot be reused, close it + transport = proto.transport + proto.close() + # only for SSL transports + if not self._cleanup_closed_disabled and key.is_ssl: + self._cleanup_closed_transports.append(transport) # No more connections: drop the key del self._conns[key] From 594ceaf5a431a5e1df1b1dd9bbe7a0186300045e Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 2 Nov 2024 18:13:51 +0000 Subject: [PATCH 0808/1511] [PR #9629/c95c0251 backport][3.11] Simplify branching in the connector (#9631) 
Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/connector.py | 54 +++++++++++++++++++------------------------- 1 file changed, 23 insertions(+), 31 deletions(-) diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 3f0614109ca..9ea1a63fd35 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -363,21 +363,15 @@ def _cleanup(self) -> None: connections = {} deadline = now - timeout for key, conns in self._conns.items(): - alive = [] + alive: List[Tuple[ResponseHandler, float]] = [] for proto, use_time in conns: - if proto.is_connected(): - if use_time - deadline < 0: - transport = proto.transport - proto.close() - if key.is_ssl and not self._cleanup_closed_disabled: - self._cleanup_closed_transports.append(transport) - else: - alive.append((proto, use_time)) - else: - transport = proto.transport - proto.close() - if key.is_ssl and not self._cleanup_closed_disabled: - self._cleanup_closed_transports.append(transport) + if proto.is_connected() and use_time - deadline >= 0: + alive.append((proto, use_time)) + continue + transport = proto.transport + proto.close() + if not self._cleanup_closed_disabled and key.is_ssl: + self._cleanup_closed_transports.append(transport) if alive: connections[key] = alive @@ -604,6 +598,7 @@ async def _wait_for_available_connection( await trace.send_connection_queued_end() def _get(self, key: "ConnectionKey") -> Optional[ResponseHandler]: + """Get next reusable connection for the key or None.""" try: conns = self._conns[key] except KeyError: @@ -612,23 +607,20 @@ def _get(self, key: "ConnectionKey") -> Optional[ResponseHandler]: t1 = self._loop.time() while conns: proto, t0 = conns.pop() - if proto.is_connected(): - if t1 - t0 > self._keepalive_timeout: - transport = proto.transport - proto.close() - # only for SSL transports - if key.is_ssl and not self._cleanup_closed_disabled: - self._cleanup_closed_transports.append(transport) - else: - if not conns: - # The very last connection was reclaimed: drop the 
key - del self._conns[key] - return proto - else: - transport = proto.transport - proto.close() - if key.is_ssl and not self._cleanup_closed_disabled: - self._cleanup_closed_transports.append(transport) + # We will reuse the connection if it's connected and + # the keepalive timeout has not been exceeded + if proto.is_connected() and t1 - t0 <= self._keepalive_timeout: + if not conns: + # The very last connection was reclaimed: drop the key + del self._conns[key] + return proto + + # Connection cannot be reused, close it + transport = proto.transport + proto.close() + # only for SSL transports + if not self._cleanup_closed_disabled and key.is_ssl: + self._cleanup_closed_transports.append(transport) # No more connections: drop the key del self._conns[key] From e503f7afc8e130cbb69837fb4d80637a42619289 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sat, 2 Nov 2024 13:34:25 -0500 Subject: [PATCH 0809/1511] Release 3.11.0b1 (#9632) --- CHANGES.rst | 37 ++++++++++++++++++++++++++++++++++++- aiohttp/__init__.py | 2 +- 2 files changed, 37 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index dc8b711eb42..ce7308508cf 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,7 +10,7 @@ .. towncrier release notes start -3.11.0b0 (2024-10-28) +3.11.0b1 (2024-11-02) ===================== Bug fixes @@ -55,6 +55,14 @@ Features +- Added ``server_hostname`` parameter to ``ws_connect``. + + + *Related issues and pull requests on GitHub:* + :issue:`7941`. + + + - Exported :py:class:`~aiohttp.ClientWSTimeout` to top-level namespace -- by :user:`Dreamsorcerer`. @@ -103,6 +111,15 @@ Features +- Updated :py:class:`~aiohttp.ClientSession` to support paths in ``base_url`` parameter. + ``base_url`` paths must end with a ``/`` -- by :user:`Cycloctane`. + + + *Related issues and pull requests on GitHub:* + :issue:`9530`. + + + - Improved performance of reading WebSocket messages with a Cython implementation -- by :user:`bdraco`. 
@@ -167,6 +184,16 @@ Removals and backward incompatible breaking changes +- Improved performance of the connector when a connection can be reused -- by :user:`bdraco`. + + If ``BaseConnector.connect`` has been subclassed and replaced with custom logic, the ``ceil_timeout`` must be added. + + + *Related issues and pull requests on GitHub:* + :issue:`9600`. + + + Packaging updates and notes for downstreams ------------------------------------------- @@ -274,6 +301,14 @@ Miscellaneous internal changes +- Improved performance of serializing HTTP headers -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9603`. + + + ---- diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 83eabcc3acc..e4408409113 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.0b0" +__version__ = "3.11.0b1" from typing import TYPE_CHECKING, Tuple From 4cc55e317d03977772d47d1ffc17e419223277e3 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 3 Nov 2024 03:57:25 +0000 Subject: [PATCH 0810/1511] [PR #9636/42a69afc backport][3.11] Avoid multiple slice calls in the WebSocket reader (#9637) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/9636.feature.rst | 1 + aiohttp/_websocket/reader_c.pxd | 1 - aiohttp/_websocket/reader_py.py | 7 +++---- 3 files changed, 4 insertions(+), 5 deletions(-) create mode 120000 CHANGES/9636.feature.rst diff --git a/CHANGES/9636.feature.rst b/CHANGES/9636.feature.rst new file mode 120000 index 00000000000..a93584bccd8 --- /dev/null +++ b/CHANGES/9636.feature.rst @@ -0,0 +1 @@ +9543.feature.rst \ No newline at end of file diff --git a/aiohttp/_websocket/reader_c.pxd b/aiohttp/_websocket/reader_c.pxd index 74eb07073ec..af26d350db3 100644 --- a/aiohttp/_websocket/reader_c.pxd +++ b/aiohttp/_websocket/reader_c.pxd @@ -74,7 +74,6 @@ cdef class WebSocketReader: chunk_size="unsigned int", chunk_len="unsigned int", buf_length="unsigned int", - data=bytes, payload=bytearray, first_byte="unsigned char", second_byte="unsigned char", diff --git a/aiohttp/_websocket/reader_py.py b/aiohttp/_websocket/reader_py.py index b4b57195a96..2c77cde4c72 100644 --- a/aiohttp/_websocket/reader_py.py +++ b/aiohttp/_websocket/reader_py.py @@ -243,10 +243,9 @@ def parse_frame( if self._state == READ_HEADER: if buf_length - start_pos < 2: break - data = buf[start_pos : start_pos + 2] + first_byte = buf[start_pos] + second_byte = buf[start_pos + 1] start_pos += 2 - first_byte = data[0] - second_byte = data[1] fin = (first_byte >> 7) & 1 rsv1 = (first_byte >> 6) & 1 @@ -360,6 +359,6 @@ def parse_frame( self._frame_payload = bytearray() self._state = READ_HEADER - self._tail = buf[start_pos:] + self._tail = buf[start_pos:] if start_pos < buf_length else b"" return frames From ef53ab19a9b683fb49f543dbca8fdfe54acfc083 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 3 Nov 2024 04:09:33 +0000 Subject: [PATCH 0811/1511] [PR #9635/23d2966b backport][3.11] Add benchmark for sending large WebSocket messages (#9639) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- tests/test_benchmarks_http_websocket.py | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/tests/test_benchmarks_http_websocket.py b/tests/test_benchmarks_http_websocket.py index 11fe132879f..05379b6e38f 100644 --- a/tests/test_benchmarks_http_websocket.py +++ b/tests/test_benchmarks_http_websocket.py @@ -1,10 +1,12 @@ """codspeed benchmarks for http websocket.""" import asyncio +from typing import Union from pytest_codspeed import BenchmarkFixture from aiohttp import DataQueue +from aiohttp._websocket.helpers import MSG_SIZE from aiohttp.base_protocol import BaseProtocol from aiohttp.http_websocket import ( WebSocketReader, @@ -41,7 +43,7 @@ def is_closing(self) -> bool: """Swallow is_closing.""" return False - def write(self, data: bytes) -> None: + def write(self, data: Union[bytes, bytearray, memoryview]) -> None: """Swallow writes.""" @@ -67,6 +69,22 @@ def _run() -> None: loop.run_until_complete(_send_one_hundred_websocket_text_messages()) +def test_send_one_hundred_large_websocket_text_messages( + loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture +) -> None: + """Benchmark sending 100 WebSocket text messages.""" + writer = WebSocketWriter(MockProtocol(loop=loop), MockTransport()) + raw_message = b"x" * MSG_SIZE * 4 + + async def _send_one_hundred_websocket_text_messages() -> None: + for _ in range(100): + await writer.send_frame(raw_message, WSMsgType.TEXT) + + @benchmark + def _run() -> None: + loop.run_until_complete(_send_one_hundred_websocket_text_messages()) + + def test_send_one_hundred_websocket_text_messages_with_mask( loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture ) -> None: From 771542a66b8a04963e1b6dfaca2aeee6b67a89cd Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 3 Nov 2024 05:31:55 +0000 Subject: [PATCH 0812/1511] [PR #9635/23d2966b backport][3.10] Add benchmark for sending large WebSocket 
messages (#9638) Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_benchmarks_http_websocket.py | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/tests/test_benchmarks_http_websocket.py b/tests/test_benchmarks_http_websocket.py index c48239a8c7a..7d773d18bb7 100644 --- a/tests/test_benchmarks_http_websocket.py +++ b/tests/test_benchmarks_http_websocket.py @@ -1,12 +1,14 @@ """codspeed benchmarks for http websocket.""" import asyncio +from typing import Union from pytest_codspeed import BenchmarkFixture # type: ignore[import-untyped] from aiohttp import DataQueue from aiohttp.base_protocol import BaseProtocol from aiohttp.http_websocket import ( + MSG_SIZE, WebSocketReader, WebSocketWriter, WSMessage, @@ -41,7 +43,7 @@ def is_closing(self) -> bool: """Swallow is_closing.""" return False - def write(self, data: bytes) -> None: + def write(self, data: Union[bytes, bytearray, memoryview]) -> None: """Swallow writes.""" @@ -67,6 +69,22 @@ def _run() -> None: loop.run_until_complete(_send_one_hundred_websocket_text_messages()) +def test_send_one_hundred_large_websocket_text_messages( + loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture +) -> None: + """Benchmark sending 100 WebSocket text messages.""" + writer = WebSocketWriter(MockProtocol(loop=loop), MockTransport()) + raw_message = b"x" * MSG_SIZE * 4 + + async def _send_one_hundred_websocket_text_messages() -> None: + for _ in range(100): + await writer._send_frame(raw_message, WSMsgType.TEXT) + + @benchmark + def _run() -> None: + loop.run_until_complete(_send_one_hundred_websocket_text_messages()) + + def test_send_one_hundred_websocket_text_messages_with_mask( loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture ) -> None: From 6fff131cadedb8f72004578fdc466a856e94cdc2 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 3 Nov 2024 07:05:18 +0000 Subject: [PATCH 0813/1511] [PR #9628/70a2c346 
backport][3.10] Add benchmark for client WebSocket round trip (#9641) Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_benchmarks_client_ws.py | 39 ++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 tests/test_benchmarks_client_ws.py diff --git a/tests/test_benchmarks_client_ws.py b/tests/test_benchmarks_client_ws.py new file mode 100644 index 00000000000..29d6047b3fa --- /dev/null +++ b/tests/test_benchmarks_client_ws.py @@ -0,0 +1,39 @@ +"""codspeed benchmarks for websocket client.""" + +import asyncio + +from pytest_codspeed import BenchmarkFixture + +from aiohttp import web +from aiohttp.pytest_plugin import AiohttpClient + + +def test_one_thousand_round_trip_websocket_text_messages( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark round trip of 1000 WebSocket text messages.""" + message_count = 1000 + + async def handler(request: web.Request) -> web.WebSocketResponse: + ws = web.WebSocketResponse() + await ws.prepare(request) + for _ in range(message_count): + await ws.send_str("answer") + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_websocket_benchmark() -> None: + client = await aiohttp_client(app) + resp = await client.ws_connect("/") + for _ in range(message_count): + await resp.receive() + await resp.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_websocket_benchmark()) From ad630bbbe8d0604a3dc14f3b7488ac29ae2e0d17 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 3 Nov 2024 07:06:34 +0000 Subject: [PATCH 0814/1511] [PR #9628/70a2c346 backport][3.11] Add benchmark for client WebSocket round trip (#9642) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- tests/test_benchmarks_client_ws.py | 39 ++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 tests/test_benchmarks_client_ws.py diff --git a/tests/test_benchmarks_client_ws.py b/tests/test_benchmarks_client_ws.py new file mode 100644 index 00000000000..29d6047b3fa --- /dev/null +++ b/tests/test_benchmarks_client_ws.py @@ -0,0 +1,39 @@ +"""codspeed benchmarks for websocket client.""" + +import asyncio + +from pytest_codspeed import BenchmarkFixture + +from aiohttp import web +from aiohttp.pytest_plugin import AiohttpClient + + +def test_one_thousand_round_trip_websocket_text_messages( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark round trip of 1000 WebSocket text messages.""" + message_count = 1000 + + async def handler(request: web.Request) -> web.WebSocketResponse: + ws = web.WebSocketResponse() + await ws.prepare(request) + for _ in range(message_count): + await ws.send_str("answer") + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_websocket_benchmark() -> None: + client = await aiohttp_client(app) + resp = await client.ws_connect("/") + for _ in range(message_count): + await resp.receive() + await resp.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_websocket_benchmark()) From e63d7e9f1780cbf448429dbdb524d241b68ff849 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 3 Nov 2024 08:02:03 +0000 Subject: [PATCH 0815/1511] [PR #9645/541b1496 backport][3.10] Add benchmark for roundtrip of 100 large WebSocket messages (#9647) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- tests/test_benchmarks_client_ws.py | 33 ++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/tests/test_benchmarks_client_ws.py b/tests/test_benchmarks_client_ws.py index 29d6047b3fa..88f14173aca 100644 --- a/tests/test_benchmarks_client_ws.py +++ b/tests/test_benchmarks_client_ws.py @@ -5,6 +5,7 @@ from pytest_codspeed import BenchmarkFixture from aiohttp import web +from aiohttp.http_websocket import MSG_SIZE from aiohttp.pytest_plugin import AiohttpClient @@ -37,3 +38,35 @@ async def run_websocket_benchmark() -> None: @benchmark def _run() -> None: loop.run_until_complete(run_websocket_benchmark()) + + +def test_one_thousand_large_round_trip_websocket_text_messages( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark round trip of 100 large WebSocket text messages.""" + message_count = 100 + raw_message = "x" * MSG_SIZE * 4 + + async def handler(request: web.Request) -> web.WebSocketResponse: + ws = web.WebSocketResponse() + await ws.prepare(request) + for _ in range(message_count): + await ws.send_str(raw_message) + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_websocket_benchmark() -> None: + client = await aiohttp_client(app) + resp = await client.ws_connect("/") + for _ in range(message_count): + await resp.receive() + await resp.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_websocket_benchmark()) From 4e398d71261dbee830fbc4615d0167d672104f36 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 3 Nov 2024 08:08:32 +0000 Subject: [PATCH 0816/1511] [PR #9645/541b1496 backport][3.11] Add benchmark for roundtrip of 100 large WebSocket messages (#9648) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- tests/test_benchmarks_client_ws.py | 33 ++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/tests/test_benchmarks_client_ws.py b/tests/test_benchmarks_client_ws.py index 29d6047b3fa..f93e84a9cba 100644 --- a/tests/test_benchmarks_client_ws.py +++ b/tests/test_benchmarks_client_ws.py @@ -5,6 +5,7 @@ from pytest_codspeed import BenchmarkFixture from aiohttp import web +from aiohttp._websocket.helpers import MSG_SIZE from aiohttp.pytest_plugin import AiohttpClient @@ -37,3 +38,35 @@ async def run_websocket_benchmark() -> None: @benchmark def _run() -> None: loop.run_until_complete(run_websocket_benchmark()) + + +def test_one_thousand_large_round_trip_websocket_text_messages( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark round trip of 100 large WebSocket text messages.""" + message_count = 100 + raw_message = "x" * MSG_SIZE * 4 + + async def handler(request: web.Request) -> web.WebSocketResponse: + ws = web.WebSocketResponse() + await ws.prepare(request) + for _ in range(message_count): + await ws.send_str(raw_message) + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_websocket_benchmark() -> None: + client = await aiohttp_client(app) + resp = await client.ws_connect("/") + for _ in range(message_count): + await resp.receive() + await resp.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_websocket_benchmark()) From bd4e31ea9fc31e5bfae3d16828bbe7f59c63fb9d Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 3 Nov 2024 10:08:22 -0600 Subject: [PATCH 0817/1511] [PR #9649/f2f5b056 backport][3.11] Avoid memory copy in the WebSocket reader for small payloads (#9650) **This is a backport of PR #9649 as merged into master (f2f5b056e895e11eddd054132a20f83d2d6a7a07).** Only convert to 
`bytearray` when we know we are going to append messages or unmask. If the message comes in without being fragmented, we can avoid many conversions from `bytes` to `bytearray` which is the common case for small messages Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/9649.feature.rst | 1 + aiohttp/_websocket/reader_c.pxd | 5 ++-- aiohttp/_websocket/reader_py.py | 50 +++++++++++++++++++++++---------- tests/test_websocket_parser.py | 14 +++++++++ 4 files changed, 53 insertions(+), 17 deletions(-) create mode 120000 CHANGES/9649.feature.rst diff --git a/CHANGES/9649.feature.rst b/CHANGES/9649.feature.rst new file mode 120000 index 00000000000..a93584bccd8 --- /dev/null +++ b/CHANGES/9649.feature.rst @@ -0,0 +1 @@ +9543.feature.rst \ No newline at end of file diff --git a/aiohttp/_websocket/reader_c.pxd b/aiohttp/_websocket/reader_c.pxd index af26d350db3..2a60f327061 100644 --- a/aiohttp/_websocket/reader_c.pxd +++ b/aiohttp/_websocket/reader_c.pxd @@ -45,7 +45,8 @@ cdef class WebSocketReader: cdef object _opcode cdef object _frame_fin cdef object _frame_opcode - cdef bytearray _frame_payload + cdef object _frame_payload + cdef unsigned int _frame_payload_len cdef bytes _tail cdef bint _has_mask @@ -74,9 +75,9 @@ cdef class WebSocketReader: chunk_size="unsigned int", chunk_len="unsigned int", buf_length="unsigned int", - payload=bytearray, first_byte="unsigned char", second_byte="unsigned char", + end_pos="unsigned int", has_mask=bint, fin=bint, ) diff --git a/aiohttp/_websocket/reader_py.py b/aiohttp/_websocket/reader_py.py index 2c77cde4c72..0910a340629 100644 --- a/aiohttp/_websocket/reader_py.py +++ b/aiohttp/_websocket/reader_py.py @@ -55,7 +55,8 @@ def __init__( self._opcode: Optional[int] = None self._frame_fin = False self._frame_opcode: Optional[int] = None - self._frame_payload = bytearray() + self._frame_payload: Union[bytes, bytearray] = b"" + self._frame_payload_len = 0 self._tail: bytes = b"" self._has_mask = False @@ -133,6 +134,7 @@ def 
_feed_data(self, data: bytes) -> None: "to be zero, got {!r}".format(opcode), ) + assembled_payload: Union[bytes, bytearray] if has_partial: assembled_payload = self._partial + payload self._partial.clear() @@ -165,6 +167,8 @@ def _feed_data(self, data: bytes) -> None: self._max_msg_size + left, self._max_msg_size ), ) + elif type(assembled_payload) is bytes: + payload_merged = assembled_payload else: payload_merged = bytes(assembled_payload) @@ -229,9 +233,11 @@ def _feed_data(self, data: bytes) -> None: def parse_frame( self, buf: bytes - ) -> List[Tuple[bool, Optional[int], bytearray, Optional[bool]]]: + ) -> List[Tuple[bool, Optional[int], Union[bytes, bytearray], Optional[bool]]]: """Return the next frame from the socket.""" - frames: List[Tuple[bool, Optional[int], bytearray, Optional[bool]]] = [] + frames: List[ + Tuple[bool, Optional[int], Union[bytes, bytearray], Optional[bool]] + ] = [] if self._tail: buf, self._tail = self._tail + buf, b"" @@ -333,30 +339,44 @@ def parse_frame( self._state = READ_PAYLOAD if self._state == READ_PAYLOAD: - length = self._payload_length - payload = self._frame_payload - chunk_len = buf_length - start_pos - if length >= chunk_len: - self._payload_length = length - chunk_len - payload += buf[start_pos:] - start_pos = buf_length + if self._payload_length >= chunk_len: + end_pos = buf_length + self._payload_length -= chunk_len else: + end_pos = start_pos + self._payload_length self._payload_length = 0 - payload += buf[start_pos : start_pos + length] - start_pos = start_pos + length + + if self._frame_payload_len: + if type(self._frame_payload) is not bytearray: + self._frame_payload = bytearray(self._frame_payload) + self._frame_payload += buf[start_pos:end_pos] + else: + # Fast path for the first frame + self._frame_payload = buf[start_pos:end_pos] + + self._frame_payload_len += end_pos - start_pos + start_pos = end_pos if self._payload_length != 0: break if self._has_mask: assert self._frame_mask is not None - 
websocket_mask(self._frame_mask, payload) + if type(self._frame_payload) is not bytearray: + self._frame_payload = bytearray(self._frame_payload) + websocket_mask(self._frame_mask, self._frame_payload) frames.append( - (self._frame_fin, self._frame_opcode, payload, self._compressed) + ( + self._frame_fin, + self._frame_opcode, + self._frame_payload, + self._compressed, + ) ) - self._frame_payload = bytearray() + self._frame_payload = b"" + self._frame_payload_len = 0 self._state = READ_HEADER self._tail = buf[start_pos:] if start_pos < buf_length else b"" diff --git a/tests/test_websocket_parser.py b/tests/test_websocket_parser.py index abddeadf5a1..d034245af7c 100644 --- a/tests/test_websocket_parser.py +++ b/tests/test_websocket_parser.py @@ -139,6 +139,20 @@ def test_parse_frame_length2_multi_byte(parser: WebSocketReader) -> None: assert (0, 1, expected_payload, False) == (fin, opcode, payload, not not compress) +def test_parse_frame_length2_multi_byte_multi_packet(parser: WebSocketReader) -> None: + """Ensure a multi-byte length with multiple packets is parsed correctly.""" + expected_payload = b"1" * 32768 + assert parser.parse_frame(struct.pack("!BB", 0b00000001, 126)) == [] + assert parser.parse_frame(struct.pack("!H", 32768)) == [] + assert parser.parse_frame(b"1" * 8192) == [] + assert parser.parse_frame(b"1" * 8192) == [] + assert parser.parse_frame(b"1" * 8192) == [] + res = parser.parse_frame(b"1" * 8192) + fin, opcode, payload, compress = res[0] + assert len(payload) == 32768 + assert (0, 1, expected_payload, False) == (fin, opcode, payload, not not compress) + + def test_parse_frame_length4(parser: WebSocketReader) -> None: parser.parse_frame(struct.pack("!BB", 0b00000001, 127)) parser.parse_frame(struct.pack("!Q", 4)) From d722b2bfeb71b9b357003d9e854afa9e4fd5d1e7 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 3 Nov 2024 17:44:38 +0000 Subject: [PATCH 0818/1511] [PR #9651/8cf9caab 
backport][3.10] Add a simple client benchmark (#9652) Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_benchmarks_client.py | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 tests/test_benchmarks_client.py diff --git a/tests/test_benchmarks_client.py b/tests/test_benchmarks_client.py new file mode 100644 index 00000000000..fdddef462ca --- /dev/null +++ b/tests/test_benchmarks_client.py @@ -0,0 +1,33 @@ +"""codspeed benchmarks for HTTP client.""" + +import asyncio + +from pytest_codspeed import BenchmarkFixture + +from aiohttp import web +from aiohttp.pytest_plugin import AiohttpClient + + +def test_one_hundred_simple_get_requests( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 100 simple GET requests.""" + message_count = 100 + + async def handler(request: web.Request) -> web.Response: + return web.Response() + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(message_count): + await client.get("/") + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) From 2fe3390a56b4966839eb4a3a367b8a79cdce5052 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 3 Nov 2024 17:44:48 +0000 Subject: [PATCH 0819/1511] [PR #9651/8cf9caab backport][3.11] Add a simple client benchmark (#9653) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- tests/test_benchmarks_client.py | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 tests/test_benchmarks_client.py diff --git a/tests/test_benchmarks_client.py b/tests/test_benchmarks_client.py new file mode 100644 index 00000000000..fdddef462ca --- /dev/null +++ b/tests/test_benchmarks_client.py @@ -0,0 +1,33 @@ +"""codspeed benchmarks for HTTP client.""" + +import asyncio + +from pytest_codspeed import BenchmarkFixture + +from aiohttp import web +from aiohttp.pytest_plugin import AiohttpClient + + +def test_one_hundred_simple_get_requests( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 100 simple GET requests.""" + message_count = 100 + + async def handler(request: web.Request) -> web.Response: + return web.Response() + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(message_count): + await client.get("/") + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) From b3a1d034c162c0baa3cd2fcc3bbd748da87d4bce Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 3 Nov 2024 18:29:41 +0000 Subject: [PATCH 0820/1511] [PR #9655/fe8a2ecd backport][3.11] Bump yarl to 1.17.1 (#9656) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 87f8eb686c9..ec2e6399590 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -44,5 +44,5 @@ typing-extensions==4.12.2 # via multidict uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in -yarl==1.17.0 +yarl==1.17.1 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index e091346a5f3..ac846b0b100 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -285,7 +285,7 @@ wait-for-it==2.2.2 # via -r requirements/test.in wheel==0.44.0 # via pip-tools -yarl==1.17.0 +yarl==1.17.1 # via -r requirements/runtime-deps.in zipp==3.20.2 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 47bd3a020ff..2c7da214ab2 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -277,7 +277,7 @@ wait-for-it==2.2.2 # via -r requirements/test.in wheel==0.44.0 # via pip-tools -yarl==1.17.0 +yarl==1.17.1 # via -r requirements/runtime-deps.in zipp==3.20.2 # via diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 246c5934f50..553347e35dd 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -38,5 +38,5 @@ pycparser==2.22 # via cffi typing-extensions==4.12.2 # via multidict -yarl==1.17.0 +yarl==1.17.1 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index ccb20bda665..1d28725481e 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -147,5 +147,5 @@ uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpytho # via -r requirements/base.in wait-for-it==2.2.2 # via -r 
requirements/test.in -yarl==1.17.0 +yarl==1.17.1 # via -r requirements/runtime-deps.in From 8ffa384c3d3eeb184fcc416c28e824af5d0e17f6 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 3 Nov 2024 13:00:33 -0600 Subject: [PATCH 0821/1511] Release 3.11.0b2 (#9657) --- CHANGES.rst | 4 ++-- aiohttp/__init__.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index ce7308508cf..01e67e6c317 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,7 +10,7 @@ .. towncrier release notes start -3.11.0b1 (2024-11-02) +3.11.0b2 (2024-11-03) ===================== Bug fixes @@ -124,7 +124,7 @@ Features *Related issues and pull requests on GitHub:* - :issue:`9543`, :issue:`9554`, :issue:`9556`, :issue:`9558`. + :issue:`9543`, :issue:`9554`, :issue:`9556`, :issue:`9558`, :issue:`9636`, :issue:`9649`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index e4408409113..874e3d83b26 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.0b1" +__version__ = "3.11.0b2" from typing import TYPE_CHECKING, Tuple From 8f6d7b53681cb0eccf7bfa50d08c794adc694245 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 4 Nov 2024 00:38:36 +0000 Subject: [PATCH 0822/1511] [PR #9658/6c932dd7 backport][3.11] Add benchmark for sending binary WebSocket messages (#9662) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- tests/test_benchmarks_client_ws.py | 31 ++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/tests/test_benchmarks_client_ws.py b/tests/test_benchmarks_client_ws.py index f93e84a9cba..6d4cf309cad 100644 --- a/tests/test_benchmarks_client_ws.py +++ b/tests/test_benchmarks_client_ws.py @@ -40,6 +40,37 @@ def _run() -> None: loop.run_until_complete(run_websocket_benchmark()) +def test_one_thousand_round_trip_websocket_binary_messages( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark round trip of 1000 WebSocket binary messages.""" + message_count = 1000 + + async def handler(request: web.Request) -> web.WebSocketResponse: + ws = web.WebSocketResponse() + await ws.prepare(request) + for _ in range(message_count): + await ws.send_bytes(b"answer") + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_websocket_benchmark() -> None: + client = await aiohttp_client(app) + resp = await client.ws_connect("/") + for _ in range(message_count): + await resp.receive() + await resp.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_websocket_benchmark()) + + def test_one_thousand_large_round_trip_websocket_text_messages( loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, From cbb6eb340a4b4954c48b1daf7d01ab60f89833ef Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 4 Nov 2024 00:45:12 +0000 Subject: [PATCH 0823/1511] [PR #9658/6c932dd7 backport][3.10] Add benchmark for sending binary WebSocket messages (#9661) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- tests/test_benchmarks_client_ws.py | 31 ++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/tests/test_benchmarks_client_ws.py b/tests/test_benchmarks_client_ws.py index 88f14173aca..9c8ab2dd78a 100644 --- a/tests/test_benchmarks_client_ws.py +++ b/tests/test_benchmarks_client_ws.py @@ -40,6 +40,37 @@ def _run() -> None: loop.run_until_complete(run_websocket_benchmark()) +def test_one_thousand_round_trip_websocket_binary_messages( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark round trip of 1000 WebSocket binary messages.""" + message_count = 1000 + + async def handler(request: web.Request) -> web.WebSocketResponse: + ws = web.WebSocketResponse() + await ws.prepare(request) + for _ in range(message_count): + await ws.send_bytes(b"answer") + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_websocket_benchmark() -> None: + client = await aiohttp_client(app) + resp = await client.ws_connect("/") + for _ in range(message_count): + await resp.receive() + await resp.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_websocket_benchmark()) + + def test_one_thousand_large_round_trip_websocket_text_messages( loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, From aa42f602b04451b16a6deed2b6824473a0bae40f Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sun, 3 Nov 2024 21:13:15 -0600 Subject: [PATCH 0824/1511] [PR #9659/1bb146a backport][3.11] Refactor `FlowControlDataQueue` to improve performances (#9665) --- CHANGES/9659.misc.rst | 1 + aiohttp/streams.py | 65 ++++++++++++++++++++++--------------------- 2 files changed, 34 insertions(+), 32 deletions(-) create mode 100644 CHANGES/9659.misc.rst diff --git a/CHANGES/9659.misc.rst b/CHANGES/9659.misc.rst new file mode 100644 index 00000000000..b121e2f7485 --- /dev/null +++ b/CHANGES/9659.misc.rst @@ -0,0 +1 @@ +Improved performance of the internal ``DataQueue`` -- by :user:`bdraco`. diff --git a/aiohttp/streams.py b/aiohttp/streams.py index 6b805973754..bec547aa10f 100644 --- a/aiohttp/streams.py +++ b/aiohttp/streams.py @@ -622,7 +622,6 @@ def __init__(self, loop: asyncio.AbstractEventLoop) -> None: self._eof = False self._waiter: Optional[asyncio.Future[None]] = None self._exception: Optional[BaseException] = None - self._size = 0 self._buffer: Deque[Tuple[_T, int]] = collections.deque() def __len__(self) -> int: @@ -644,48 +643,40 @@ def set_exception( ) -> None: self._eof = True self._exception = exc - - waiter = self._waiter - if waiter is not None: + if (waiter := self._waiter) is not None: self._waiter = None set_exception(waiter, exc, exc_cause) def feed_data(self, data: _T, size: int = 0) -> None: - self._size += size self._buffer.append((data, size)) - - waiter = self._waiter - if waiter is not None: + if (waiter := self._waiter) is not None: self._waiter = None set_result(waiter, None) def feed_eof(self) -> None: self._eof = True - - waiter = self._waiter - if waiter is not None: + if (waiter := self._waiter) is not None: self._waiter = None set_result(waiter, None) + async def _wait_for_data(self) -> None: + assert not self._waiter + self._waiter = self._loop.create_future() + try: + await self._waiter + except (asyncio.CancelledError, asyncio.TimeoutError): + self._waiter = None + raise + async def read(self) -> _T: 
if not self._buffer and not self._eof: - assert not self._waiter - self._waiter = self._loop.create_future() - try: - await self._waiter - except (asyncio.CancelledError, asyncio.TimeoutError): - self._waiter = None - raise - + await self._wait_for_data() if self._buffer: - data, size = self._buffer.popleft() - self._size -= size + data, _ = self._buffer.popleft() return data - else: - if self._exception is not None: - raise self._exception - else: - raise EofStream + if self._exception is not None: + raise self._exception + raise EofStream def __aiter__(self) -> AsyncStreamIterator[_T]: return AsyncStreamIterator(self.read) @@ -701,19 +692,29 @@ def __init__( self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop ) -> None: super().__init__(loop=loop) - + self._size = 0 self._protocol = protocol self._limit = limit * 2 + self._buffer: Deque[Tuple[_T, int]] = collections.deque() def feed_data(self, data: _T, size: int = 0) -> None: - super().feed_data(data, size) - + self._size += size + self._buffer.append((data, size)) + if (waiter := self._waiter) is not None: + self._waiter = None + set_result(waiter, None) if self._size > self._limit and not self._protocol._reading_paused: self._protocol.pause_reading() async def read(self) -> _T: - try: - return await super().read() - finally: + if not self._buffer and not self._eof: + await self._wait_for_data() + if self._buffer: + data, size = self._buffer.popleft() + self._size -= size if self._size < self._limit and self._protocol._reading_paused: self._protocol.resume_reading() + return data + if self._exception is not None: + raise self._exception + raise EofStream From 262fb12b1f2e584e3f0f46da4b47786b4b0ecdc6 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 4 Nov 2024 04:21:39 +0000 Subject: [PATCH 0825/1511] [PR #9667/5b654d57 backport][3.11] Simplify ``close`` logic in client ``WebSocketResponse`` (#9668) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/client_ws.py | 50 ++++++++++++++++++++++---------------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/aiohttp/client_ws.py b/aiohttp/client_ws.py index 5eb9e7415ad..70ba21f7627 100644 --- a/aiohttp/client_ws.py +++ b/aiohttp/client_ws.py @@ -267,10 +267,30 @@ async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bo self._reader.feed_data(WS_CLOSING_MESSAGE, 0) await self._close_wait - if not self._closed: - self._set_closed() + if self._closed: + return False + + self._set_closed() + try: + await self._writer.close(code, message) + except asyncio.CancelledError: + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._response.close() + raise + except Exception as exc: + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._exception = exc + self._response.close() + return True + + if self._close_code: + self._response.close() + return True + + while True: try: - await self._writer.close(code, message) + async with async_timeout.timeout(self._timeout.ws_close): + msg = await self._reader.read() except asyncio.CancelledError: self._close_code = WSCloseCode.ABNORMAL_CLOSURE self._response.close() @@ -281,31 +301,11 @@ async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bo self._response.close() return True - if self._close_code: + if msg.type is WSMsgType.CLOSE: + self._close_code = msg.data self._response.close() return True - while True: - try: - async with async_timeout.timeout(self._timeout.ws_close): - msg = await self._reader.read() - except asyncio.CancelledError: - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - self._response.close() - raise - except Exception as exc: - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - self._exception = exc - self._response.close() - return True - - if msg.type is WSMsgType.CLOSE: - self._close_code = msg.data - self._response.close() - return True - else: - return False - async def receive(self, timeout: 
Optional[float] = None) -> WSMessage: receive_timeout = timeout or self._timeout.ws_receive From a103e618e59e8f2502e3925c63da8ff7c45017bd Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 3 Nov 2024 22:28:08 -0600 Subject: [PATCH 0826/1511] [PR #9667/5b654d57 backport][3.10] Simplify ``close`` logic in client ``WebSocketResponse`` (#9669) --- aiohttp/client_ws.py | 50 ++++++++++++++++++++++---------------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/aiohttp/client_ws.py b/aiohttp/client_ws.py index c6b5da5103b..96e2c59715c 100644 --- a/aiohttp/client_ws.py +++ b/aiohttp/client_ws.py @@ -249,10 +249,30 @@ async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bo self._reader.feed_data(WS_CLOSING_MESSAGE, 0) await self._close_wait - if not self._closed: - self._set_closed() + if self._closed: + return False + + self._set_closed() + try: + await self._writer.close(code, message) + except asyncio.CancelledError: + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._response.close() + raise + except Exception as exc: + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._exception = exc + self._response.close() + return True + + if self._close_code: + self._response.close() + return True + + while True: try: - await self._writer.close(code, message) + async with async_timeout.timeout(self._timeout): + msg = await self._reader.read() except asyncio.CancelledError: self._close_code = WSCloseCode.ABNORMAL_CLOSURE self._response.close() @@ -263,31 +283,11 @@ async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bo self._response.close() return True - if self._close_code: + if msg.type is WSMsgType.CLOSE: + self._close_code = msg.data self._response.close() return True - while True: - try: - async with async_timeout.timeout(self._timeout): - msg = await self._reader.read() - except asyncio.CancelledError: - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - 
self._response.close() - raise - except Exception as exc: - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - self._exception = exc - self._response.close() - return True - - if msg.type is WSMsgType.CLOSE: - self._close_code = msg.data - self._response.close() - return True - else: - return False - async def receive(self, timeout: Optional[float] = None) -> WSMessage: receive_timeout = timeout or self._receive_timeout From e141032a4c4609ff8b728e6c4b03d7138675da5f Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 5 Nov 2024 09:24:24 -0600 Subject: [PATCH 0827/1511] [PR #9600/e6187f6 backport][3.10] Avoid starting connection timeout when a connection is already available (#9673) --- CHANGES/9600.breaking.rst | 3 + aiohttp/client.py | 11 +--- aiohttp/connector.py | 109 ++++++++++++++++++++++--------------- docs/spelling_wordlist.txt | 1 + 4 files changed, 72 insertions(+), 52 deletions(-) create mode 100644 CHANGES/9600.breaking.rst diff --git a/CHANGES/9600.breaking.rst b/CHANGES/9600.breaking.rst new file mode 100644 index 00000000000..5997344e4cd --- /dev/null +++ b/CHANGES/9600.breaking.rst @@ -0,0 +1,3 @@ +Improved performance of the connector when a connection can be reused -- by :user:`bdraco`. + +If ``BaseConnector.connect`` has been subclassed and replaced with custom logic, the ``ceil_timeout`` must be added. 
diff --git a/aiohttp/client.py b/aiohttp/client.py index a6d279718ae..28228dd8030 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -89,7 +89,6 @@ DEBUG, BasicAuth, TimeoutHandle, - ceil_timeout, get_env_proxy_for_url, method_must_be_empty_body, sentinel, @@ -661,13 +660,9 @@ async def _request( # connection timeout try: - async with ceil_timeout( - real_timeout.connect, - ceil_threshold=real_timeout.ceil_threshold, - ): - conn = await self._connector.connect( - req, traces=traces, timeout=real_timeout - ) + conn = await self._connector.connect( + req, traces=traces, timeout=real_timeout + ) except asyncio.TimeoutError as exc: raise ConnectionTimeoutError( f"Connection timeout to host {url}" diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 0cdb0ecb9f0..f374478b3ea 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -500,41 +500,20 @@ async def connect( """Get from pool or create new connection.""" key = req.connection_key available = self._available_connections(key) + wait_for_conn = available <= 0 or key in self._waiters + if not wait_for_conn and (proto := self._get(key)) is not None: + # If we do not have to wait and we can get a connection from the pool + # we can avoid the timeout ceil logic and directly return the connection + return await self._reused_connection(key, proto, traces) + + async with ceil_timeout(timeout.connect, timeout.ceil_threshold): + # Wait if there are no available connections or if there are/were + # waiters (i.e. don't steal connection from a waiter about to wake up) + if wait_for_conn: + await self._wait_for_available_connection(key, traces) + if (proto := self._get(key)) is not None: + return await self._reused_connection(key, proto, traces) - # Wait if there are no available connections or if there are/were - # waiters (i.e. 
don't steal connection from a waiter about to wake up) - if available <= 0 or key in self._waiters: - fut: asyncio.Future[None] = self._loop.create_future() - - # This connection will now count towards the limit. - self._waiters[key].append(fut) - - if traces: - for trace in traces: - await trace.send_connection_queued_start() - - try: - await fut - except BaseException as e: - if key in self._waiters: - # remove a waiter even if it was cancelled, normally it's - # removed when it's notified - try: - self._waiters[key].remove(fut) - except ValueError: # fut may no longer be in list - pass - - raise e - finally: - if key in self._waiters and not self._waiters[key]: - del self._waiters[key] - - if traces: - for trace in traces: - await trace.send_connection_queued_end() - - proto = self._get(key) - if proto is None: placeholder = cast(ResponseHandler, _TransportPlaceholder()) self._acquired.add(placeholder) self._acquired_per_host[key].add(placeholder) @@ -562,21 +541,63 @@ async def connect( if traces: for trace in traces: await trace.send_connection_create_end() - else: - if traces: - # Acquire the connection to prevent race conditions with limits - placeholder = cast(ResponseHandler, _TransportPlaceholder()) - self._acquired.add(placeholder) - self._acquired_per_host[key].add(placeholder) - for trace in traces: - await trace.send_connection_reuseconn() - self._acquired.remove(placeholder) - self._drop_acquired_per_host(key, placeholder) + return self._acquired_connection(proto, key) + + async def _reused_connection( + self, key: "ConnectionKey", proto: ResponseHandler, traces: List["Trace"] + ) -> Connection: + if traces: + # Acquire the connection to prevent race conditions with limits + placeholder = cast(ResponseHandler, _TransportPlaceholder()) + self._acquired.add(placeholder) + self._acquired_per_host[key].add(placeholder) + for trace in traces: + await trace.send_connection_reuseconn() + self._acquired.remove(placeholder) + self._drop_acquired_per_host(key, 
placeholder) + return self._acquired_connection(proto, key) + + def _acquired_connection( + self, proto: ResponseHandler, key: "ConnectionKey" + ) -> Connection: + """Mark proto as acquired and wrap it in a Connection object.""" self._acquired.add(proto) self._acquired_per_host[key].add(proto) return Connection(self, key, proto, self._loop) + async def _wait_for_available_connection( + self, key: "ConnectionKey", traces: List["Trace"] + ) -> None: + """Wait until there is an available connection.""" + fut: asyncio.Future[None] = self._loop.create_future() + + # This connection will now count towards the limit. + self._waiters[key].append(fut) + + if traces: + for trace in traces: + await trace.send_connection_queued_start() + + try: + await fut + except BaseException as e: + if key in self._waiters: + # remove a waiter even if it was cancelled, normally it's + # removed when it's notified + with suppress(ValueError): + # fut may no longer be in list + self._waiters[key].remove(fut) + + raise e + finally: + if key in self._waiters and not self._waiters[key]: + del self._waiters[key] + + if traces: + for trace in traces: + await trace.send_connection_queued_end() + def _get(self, key: "ConnectionKey") -> Optional[ResponseHandler]: """Get next reusable connection for the key or None.""" try: diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index 514477e8fcb..c135ebd2084 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -294,6 +294,7 @@ ssl SSLContext startup subapplication +subclassed subclasses subdirectory submodules From fb7295489c537b509e886f6480bf94048b70c9c3 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 5 Nov 2024 11:09:11 -0600 Subject: [PATCH 0828/1511] [PR #9511/75ae623 backport][3.11] Fix exceptions in websocket receive_* methods (#9676) Co-authored-by: J. 
Nick Koston <nick@koston.org> Co-authored-by: ara-25 <50909043+ara-25@users.noreply.github.com> --- CHANGES/6800.bugfix.rst | 1 + CONTRIBUTORS.txt | 1 + aiohttp/__init__.py | 2 + aiohttp/client.py | 2 + aiohttp/client_exceptions.py | 5 +++ aiohttp/client_ws.py | 10 +++-- aiohttp/web_ws.py | 11 ++--- docs/client_reference.rst | 10 ++++- docs/web_reference.rst | 4 +- tests/test_client_ws_functional.py | 59 +++++++++++++++++++++++--- tests/test_web_websocket.py | 35 ++++++++++++++- tests/test_web_websocket_functional.py | 47 +++++++++++++++++++- 12 files changed, 166 insertions(+), 21 deletions(-) create mode 100644 CHANGES/6800.bugfix.rst diff --git a/CHANGES/6800.bugfix.rst b/CHANGES/6800.bugfix.rst new file mode 100644 index 00000000000..880d70dc1fb --- /dev/null +++ b/CHANGES/6800.bugfix.rst @@ -0,0 +1 @@ +Modified websocket :meth:`aiohttp.ClientWebSocketResponse.receive_str`, :py:meth:`aiohttp.ClientWebSocketResponse.receive_bytes`, :py:meth:`aiohttp.web.WebSocketResponse.receive_str` & :py:meth:`aiohttp.web.WebSocketResponse.receive_bytes` methods to raise new :py:exc:`aiohttp.WSMessageTypeError` exception, instead of generic :py:exc:`TypeError`, when websocket messages of incorrect types are received -- by :user:`ara-25`. diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index ef0d7d81429..60ff22d14d2 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -1,6 +1,7 @@ - Contributors - ---------------- A. 
Jesse Jiryu Davis +Abdur Rehman Ali Adam Bannister Adam Cooper Adam Horacek diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 874e3d83b26..a26afd4f8c6 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -43,6 +43,7 @@ TCPConnector, TooManyRedirects, UnixConnector, + WSMessageTypeError, WSServerHandshakeError, request, ) @@ -238,6 +239,7 @@ # workers (imported lazily with __getattr__) "GunicornUVLoopWebWorker", "GunicornWebWorker", + "WSMessageTypeError", ) diff --git a/aiohttp/client.py b/aiohttp/client.py index fc92fee7264..f93558db29d 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -66,6 +66,7 @@ ServerTimeoutError, SocketTimeoutError, TooManyRedirects, + WSMessageTypeError, WSServerHandshakeError, ) from .client_reqrep import ( @@ -151,6 +152,7 @@ "ClientTimeout", "ClientWSTimeout", "request", + "WSMessageTypeError", ) diff --git a/aiohttp/client_exceptions.py b/aiohttp/client_exceptions.py index 2cf6cf88328..667da8d5084 100644 --- a/aiohttp/client_exceptions.py +++ b/aiohttp/client_exceptions.py @@ -50,6 +50,7 @@ "NonHttpUrlClientError", "InvalidUrlRedirectClientError", "NonHttpUrlRedirectClientError", + "WSMessageTypeError", ) @@ -410,3 +411,7 @@ def __str__(self) -> str: "[{0.certificate_error.__class__.__name__}: " "{0.certificate_error.args}]".format(self) ) + + +class WSMessageTypeError(TypeError): + """WebSocket message type is not valid.""" diff --git a/aiohttp/client_ws.py b/aiohttp/client_ws.py index 70ba21f7627..cd79dd01f91 100644 --- a/aiohttp/client_ws.py +++ b/aiohttp/client_ws.py @@ -7,7 +7,7 @@ import attr -from .client_exceptions import ClientError, ServerTimeoutError +from .client_exceptions import ClientError, ServerTimeoutError, WSMessageTypeError from .client_reqrep import ClientResponse from .helpers import calculate_timeout_when, set_result from .http import ( @@ -377,13 +377,17 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: async def receive_str(self, *, timeout: Optional[float] = 
None) -> str: msg = await self.receive(timeout) if msg.type is not WSMsgType.TEXT: - raise TypeError(f"Received message {msg.type}:{msg.data!r} is not str") + raise WSMessageTypeError( + f"Received message {msg.type}:{msg.data!r} is not WSMsgType.TEXT" + ) return cast(str, msg.data) async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes: msg = await self.receive(timeout) if msg.type is not WSMsgType.BINARY: - raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes") + raise WSMessageTypeError( + f"Received message {msg.type}:{msg.data!r} is not WSMsgType.BINARY" + ) return cast(bytes, msg.data) async def receive_json( diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index fa6d30276b5..66cd269420d 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -12,6 +12,7 @@ from . import hdrs from ._websocket.writer import DEFAULT_LIMIT from .abc import AbstractStreamWriter +from .client_exceptions import WSMessageTypeError from .helpers import calculate_timeout_when, set_exception, set_result from .http import ( WS_CLOSED_MESSAGE, @@ -578,17 +579,17 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: async def receive_str(self, *, timeout: Optional[float] = None) -> str: msg = await self.receive(timeout) if msg.type is not WSMsgType.TEXT: - raise TypeError( - "Received message {}:{!r} is not WSMsgType.TEXT".format( - msg.type, msg.data - ) + raise WSMessageTypeError( + f"Received message {msg.type}:{msg.data!r} is not WSMsgType.TEXT" ) return cast(str, msg.data) async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes: msg = await self.receive(timeout) if msg.type is not WSMsgType.BINARY: - raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes") + raise WSMessageTypeError( + f"Received message {msg.type}:{msg.data!r} is not WSMsgType.BINARY" + ) return cast(bytes, msg.data) async def receive_json( diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 
bb640e7ab0f..a7484a57c5a 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -1689,7 +1689,7 @@ manually. :return str: peer's message content. - :raise TypeError: if message is :const:`~aiohttp.WSMsgType.BINARY`. + :raise aiohttp.WSMessageTypeError: if message is not :const:`~aiohttp.WSMsgType.TEXT`. .. method:: receive_bytes() :async: @@ -1700,7 +1700,7 @@ manually. :return bytes: peer's message content. - :raise TypeError: if message is :const:`~aiohttp.WSMsgType.TEXT`. + :raise aiohttp.WSMessageTypeError: if message is not :const:`~aiohttp.WSMsgType.BINARY`. .. method:: receive_json(*, loads=json.loads) :async: @@ -2257,6 +2257,12 @@ Response errors Derived from :exc:`ClientResponseError` +.. exception:: WSMessageTypeError + + Received WebSocket message of unexpected type + + Derived from :exc:`TypeError` + Connection errors ^^^^^^^^^^^^^^^^^ diff --git a/docs/web_reference.rst b/docs/web_reference.rst index cffff89e01d..51f5c43d7e7 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -1279,7 +1279,7 @@ and :ref:`aiohttp-web-signals` handlers:: :return str: peer's message content. - :raise TypeError: if message is :const:`~aiohttp.WSMsgType.BINARY`. + :raise aiohttp.WSMessageTypeError: if message is not :const:`~aiohttp.WSMsgType.TEXT`. .. method:: receive_bytes(*, timeout=None) :async: @@ -1298,7 +1298,7 @@ and :ref:`aiohttp-web-signals` handlers:: :return bytes: peer's message content. - :raise TypeError: if message is :const:`~aiohttp.WSMsgType.TEXT`. + :raise aiohttp.WSMessageTypeError: if message is not :const:`~aiohttp.WSMsgType.BINARY`. .. 
method:: receive_json(*, loads=json.loads, timeout=None) :async: diff --git a/tests/test_client_ws_functional.py b/tests/test_client_ws_functional.py index f28db879037..9ee498e0075 100644 --- a/tests/test_client_ws_functional.py +++ b/tests/test_client_ws_functional.py @@ -6,7 +6,14 @@ import pytest import aiohttp -from aiohttp import ClientConnectionResetError, ServerTimeoutError, WSMsgType, hdrs, web +from aiohttp import ( + ClientConnectionResetError, + ServerTimeoutError, + WSMessageTypeError, + WSMsgType, + hdrs, + web, +) from aiohttp.client_ws import ClientWSTimeout from aiohttp.http import WSCloseCode from aiohttp.pytest_plugin import AiohttpClient @@ -58,7 +65,28 @@ async def handler(request): resp = await client.ws_connect("/") await resp.send_str("ask") - with pytest.raises(TypeError): + with pytest.raises(WSMessageTypeError): + await resp.receive_bytes() + await resp.close() + + +async def test_recv_bytes_after_close(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> NoReturn: + ws = web.WebSocketResponse() + await ws.prepare(request) + + await ws.close() + assert False + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + resp = await client.ws_connect("/") + + with pytest.raises( + WSMessageTypeError, + match=f"Received message {WSMsgType.CLOSE}:.+ is not WSMsgType.BINARY", + ): await resp.receive_bytes() await resp.close() @@ -103,14 +131,35 @@ async def handler(request): await resp.send_bytes(b"ask") - with pytest.raises(TypeError): + with pytest.raises(WSMessageTypeError): await resp.receive_str() await resp.close() -async def test_send_recv_json(aiohttp_client) -> None: - async def handler(request): +async def test_recv_text_after_close(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> NoReturn: + ws = web.WebSocketResponse() + await ws.prepare(request) + + await ws.close() + assert False + + app = web.Application() + 
app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + resp = await client.ws_connect("/") + + with pytest.raises( + WSMessageTypeError, + match=f"Received message {WSMsgType.CLOSE}:.+ is not WSMsgType.TEXT", + ): + await resp.receive_str() + await resp.close() + + +async def test_send_recv_json(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.WebSocketResponse: ws = web.WebSocketResponse() await ws.prepare(request) diff --git a/tests/test_web_websocket.py b/tests/test_web_websocket.py index 9bc28838530..e728c6a1504 100644 --- a/tests/test_web_websocket.py +++ b/tests/test_web_websocket.py @@ -7,11 +7,12 @@ import pytest from multidict import CIMultiDict -from aiohttp import WSMessage, WSMsgType +from aiohttp import WSMessage, WSMessageTypeError, WSMsgType, web +from aiohttp.http import WS_CLOSED_MESSAGE from aiohttp.streams import EofStream from aiohttp.test_utils import make_mocked_coro, make_mocked_request from aiohttp.web import HTTPBadRequest, WebSocketResponse -from aiohttp.web_ws import WS_CLOSED_MESSAGE, WebSocketReady +from aiohttp.web_ws import WebSocketReady @pytest.fixture @@ -273,6 +274,21 @@ async def test_send_str_closed(make_request) -> None: await ws.send_str("string") +async def test_recv_str_closed(make_request) -> None: + req = make_request("GET", "/") + ws = web.WebSocketResponse() + await ws.prepare(req) + assert ws._reader is not None + ws._reader.feed_data(WS_CLOSED_MESSAGE) + await ws.close() + + with pytest.raises( + WSMessageTypeError, + match=f"Received message {WSMsgType.CLOSED}:.+ is not WSMsgType.TEXT", + ): + await ws.receive_str() + + async def test_send_bytes_closed(make_request) -> None: req = make_request("GET", "/") ws = WebSocketResponse() @@ -284,6 +300,21 @@ async def test_send_bytes_closed(make_request) -> None: await ws.send_bytes(b"bytes") +async def test_recv_bytes_closed(make_request) -> None: + req = make_request("GET", "/") + ws = web.WebSocketResponse() 
+ await ws.prepare(req) + assert ws._reader is not None + ws._reader.feed_data(WS_CLOSED_MESSAGE) + await ws.close() + + with pytest.raises( + WSMessageTypeError, + match=f"Received message {WSMsgType.CLOSED}:.+ is not WSMsgType.BINARY", + ): + await ws.receive_bytes() + + async def test_send_json_closed(make_request) -> None: req = make_request("GET", "/") ws = WebSocketResponse() diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index 10f9279803e..188694c8dce 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -949,11 +949,54 @@ async def handler(request): assert data == "OK" -async def test_bug3380(loop, aiohttp_client) -> None: +async def test_receive_str_nonstring( + loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient +) -> None: + async def handler(request: web.Request) -> web.WebSocketResponse: + ws = web.WebSocketResponse() + assert ws.can_prepare(request) + + await ws.prepare(request) + await ws.send_bytes(b"answer") + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + + ws = await client.ws_connect("/") + with pytest.raises(TypeError): + await ws.receive_str() + + +async def test_receive_bytes_nonbytes( + loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient +) -> None: + async def handler(request: web.Request) -> NoReturn: + ws = web.WebSocketResponse() + assert ws.can_prepare(request) + + await ws.prepare(request) + await ws.send_str("answer") + assert False + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + + ws = await client.ws_connect("/") + with pytest.raises(TypeError): + await ws.receive_bytes() + + +async def test_bug3380( + loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient +) -> None: async def handle_null(request): return aiohttp.web.json_response({"err": None}) - async def 
ws_handler(request): + async def ws_handler(request: web.Request) -> web.Response: return web.Response(status=401) app = web.Application() From 37ef0fbd590e299c9e553a32ed441ecfd4081265 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 5 Nov 2024 11:35:14 -0600 Subject: [PATCH 0829/1511] [PR #9671/37d9fe6 backport][3.11] Refactor connection waiters to be cancellation safe (#9675) --- CHANGES/9670.bugfix.rst | 1 + CHANGES/9671.bugfix.rst | 3 + aiohttp/connector.py | 200 ++++++++++---------- tests/test_connector.py | 394 ++++++++++++++++++++++++++++++---------- 4 files changed, 401 insertions(+), 197 deletions(-) create mode 120000 CHANGES/9670.bugfix.rst create mode 100644 CHANGES/9671.bugfix.rst diff --git a/CHANGES/9670.bugfix.rst b/CHANGES/9670.bugfix.rst new file mode 120000 index 00000000000..b0411a405a0 --- /dev/null +++ b/CHANGES/9670.bugfix.rst @@ -0,0 +1 @@ +9671.bugfix.rst \ No newline at end of file diff --git a/CHANGES/9671.bugfix.rst b/CHANGES/9671.bugfix.rst new file mode 100644 index 00000000000..d2ca2e8ccb5 --- /dev/null +++ b/CHANGES/9671.bugfix.rst @@ -0,0 +1,3 @@ +Fixed a deadlock that could occur while attempting to get a new connection slot after a timeout -- by :user:`bdraco`. + +The connector was not cancellation-safe. 
diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 9ea1a63fd35..3307cb42ab4 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -5,7 +5,7 @@ import sys import traceback import warnings -from collections import defaultdict, deque +from collections import OrderedDict, defaultdict from contextlib import suppress from http import HTTPStatus from itertools import chain, cycle, islice @@ -266,9 +266,11 @@ def __init__( self._force_close = force_close # {host_key: FIFO list of waiters} - self._waiters: DefaultDict[ConnectionKey, deque[asyncio.Future[None]]] = ( - defaultdict(deque) - ) + # The FIFO is implemented with an OrderedDict with None keys because + # python does not have an ordered set. + self._waiters: DefaultDict[ + ConnectionKey, OrderedDict[asyncio.Future[None], None] + ] = defaultdict(OrderedDict) self._loop = loop self._factory = functools.partial(ResponseHandler, loop=loop) @@ -356,7 +358,7 @@ def _cleanup(self) -> None: # recreate it ever! self._cleanup_handle = None - now = self._loop.time() + now = monotonic() timeout = self._keepalive_timeout if self._conns: @@ -387,14 +389,6 @@ def _cleanup(self) -> None: timeout_ceil_threshold=self._timeout_ceil_threshold, ) - def _drop_acquired_per_host( - self, key: "ConnectionKey", val: ResponseHandler - ) -> None: - if conns := self._acquired_per_host.get(key): - conns.remove(val) - if not conns: - del self._acquired_per_host[key] - def _cleanup_closed(self) -> None: """Double confirmation for transport close. 
@@ -455,6 +449,9 @@ def _close(self) -> None: finally: self._conns.clear() self._acquired.clear() + for keyed_waiters in self._waiters.values(): + for keyed_waiter in keyed_waiters: + keyed_waiter.cancel() self._waiters.clear() self._cleanup_handle = None self._cleanup_closed_transports.clear() @@ -498,113 +495,107 @@ async def connect( ) -> Connection: """Get from pool or create new connection.""" key = req.connection_key - available = self._available_connections(key) - wait_for_conn = available <= 0 or key in self._waiters - if not wait_for_conn and (proto := self._get(key)) is not None: + if (conn := await self._get(key, traces)) is not None: # If we do not have to wait and we can get a connection from the pool # we can avoid the timeout ceil logic and directly return the connection - return await self._reused_connection(key, proto, traces) + return conn async with ceil_timeout(timeout.connect, timeout.ceil_threshold): - # Wait if there are no available connections or if there are/were - # waiters (i.e. don't steal connection from a waiter about to wake up) - if wait_for_conn: + if self._available_connections(key) <= 0: await self._wait_for_available_connection(key, traces) - if (proto := self._get(key)) is not None: - return await self._reused_connection(key, proto, traces) + if (conn := await self._get(key, traces)) is not None: + return conn placeholder = cast(ResponseHandler, _TransportPlaceholder()) self._acquired.add(placeholder) self._acquired_per_host[key].add(placeholder) - if traces: - for trace in traces: - await trace.send_connection_create_start() - try: + # Traces are done inside the try block to ensure that the + # that the placeholder is still cleaned up if an exception + # is raised. 
+ if traces: + for trace in traces: + await trace.send_connection_create_start() proto = await self._create_connection(req, traces, timeout) - if self._closed: - proto.close() - raise ClientConnectionError("Connector is closed.") + if traces: + for trace in traces: + await trace.send_connection_create_end() except BaseException: - if not self._closed: - self._acquired.remove(placeholder) - self._drop_acquired_per_host(key, placeholder) - self._release_waiter() + self._release_acquired(key, placeholder) raise else: - if not self._closed: - self._acquired.remove(placeholder) - self._drop_acquired_per_host(key, placeholder) - - if traces: - for trace in traces: - await trace.send_connection_create_end() - - return self._acquired_connection(proto, key) - - async def _reused_connection( - self, key: "ConnectionKey", proto: ResponseHandler, traces: List["Trace"] - ) -> Connection: - if traces: - # Acquire the connection to prevent race conditions with limits - placeholder = cast(ResponseHandler, _TransportPlaceholder()) - self._acquired.add(placeholder) - self._acquired_per_host[key].add(placeholder) - for trace in traces: - await trace.send_connection_reuseconn() - self._acquired.remove(placeholder) - self._drop_acquired_per_host(key, placeholder) - return self._acquired_connection(proto, key) + if self._closed: + proto.close() + raise ClientConnectionError("Connector is closed.") - def _acquired_connection( - self, proto: ResponseHandler, key: "ConnectionKey" - ) -> Connection: - """Mark proto as acquired and wrap it in a Connection object.""" + # The connection was successfully created, drop the placeholder + # and add the real connection to the acquired set. There should + # be no awaits after the proto is added to the acquired set + # to ensure that the connection is not left in the acquired set + # on cancellation. 
+ acquired_per_host = self._acquired_per_host[key] + self._acquired.remove(placeholder) + acquired_per_host.remove(placeholder) self._acquired.add(proto) - self._acquired_per_host[key].add(proto) + acquired_per_host.add(proto) return Connection(self, key, proto, self._loop) async def _wait_for_available_connection( self, key: "ConnectionKey", traces: List["Trace"] ) -> None: - """Wait until there is an available connection.""" - fut: asyncio.Future[None] = self._loop.create_future() - - # This connection will now count towards the limit. - self._waiters[key].append(fut) + """Wait for an available connection slot.""" + # We loop here because there is a race between + # the connection limit check and the connection + # being acquired. If the connection is acquired + # between the check and the await statement, we + # need to loop again to check if the connection + # slot is still available. + attempts = 0 + while True: + fut: asyncio.Future[None] = self._loop.create_future() + keyed_waiters = self._waiters[key] + keyed_waiters[fut] = None + if attempts: + # If we have waited before, we need to move the waiter + # to the front of the queue as otherwise we might get + # starved and hit the timeout. + keyed_waiters.move_to_end(fut, last=False) - if traces: - for trace in traces: - await trace.send_connection_queued_start() + try: + # Traces happen in the try block to ensure that the + # the waiter is still cleaned up if an exception is raised. 
+ if traces: + for trace in traces: + await trace.send_connection_queued_start() + await fut + if traces: + for trace in traces: + await trace.send_connection_queued_end() + finally: + # pop the waiter from the queue if its still + # there and not already removed by _release_waiter + keyed_waiters.pop(fut, None) + if not self._waiters.get(key, True): + del self._waiters[key] - try: - await fut - except BaseException as e: - if key in self._waiters: - # remove a waiter even if it was cancelled, normally it's - # removed when it's notified - with suppress(ValueError): - # fut may no longer be in list - self._waiters[key].remove(fut) - - raise e - finally: - if key in self._waiters and not self._waiters[key]: - del self._waiters[key] + if self._available_connections(key) > 0: + break + attempts += 1 - if traces: - for trace in traces: - await trace.send_connection_queued_end() + async def _get( + self, key: "ConnectionKey", traces: List["Trace"] + ) -> Optional[Connection]: + """Get next reusable connection for the key or None. - def _get(self, key: "ConnectionKey") -> Optional[ResponseHandler]: - """Get next reusable connection for the key or None.""" + The connection will be marked as acquired. 
+ """ try: conns = self._conns[key] except KeyError: return None - t1 = self._loop.time() + t1 = monotonic() while conns: proto, t0 = conns.pop() # We will we reuse the connection if its connected and @@ -613,7 +604,16 @@ def _get(self, key: "ConnectionKey") -> Optional[ResponseHandler]: if not conns: # The very last connection was reclaimed: drop the key del self._conns[key] - return proto + self._acquired.add(proto) + self._acquired_per_host[key].add(proto) + if traces: + for trace in traces: + try: + await trace.send_connection_reuseconn() + except BaseException: + self._release_acquired(key, proto) + raise + return Connection(self, key, proto, self._loop) # Connection cannot be reused, close it transport = proto.transport @@ -647,25 +647,23 @@ def _release_waiter(self) -> None: waiters = self._waiters[key] while waiters: - waiter = waiters.popleft() + waiter, _ = waiters.popitem(last=False) if not waiter.done(): waiter.set_result(None) return def _release_acquired(self, key: "ConnectionKey", proto: ResponseHandler) -> None: + """Release acquired connection.""" if self._closed: # acquired connection is already released on connector closing return - try: - self._acquired.remove(proto) - self._drop_acquired_per_host(key, proto) - except KeyError: # pragma: no cover - # this may be result of undetermenistic order of objects - # finalization due garbage collection. 
- pass - else: - self._release_waiter() + self._acquired.discard(proto) + if conns := self._acquired_per_host.get(key): + conns.discard(proto) + if not conns: + del self._acquired_per_host[key] + self._release_waiter() def _release( self, @@ -694,7 +692,7 @@ def _release( conns = self._conns.get(key) if conns is None: conns = self._conns[key] = [] - conns.append((protocol, self._loop.time())) + conns.append((protocol, monotonic())) if self._cleanup_handle is None: self._cleanup_handle = helpers.weakref_handle( diff --git a/tests/test_connector.py b/tests/test_connector.py index 02352ff498b..f542adfebfc 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -7,7 +7,6 @@ import ssl import sys import uuid -from collections import deque from concurrent import futures from contextlib import closing, suppress from typing import Any, List, Literal, Optional, Sequence, Tuple @@ -104,7 +103,7 @@ def create_mocked_conn(conn_closing_result=None, **kwargs): return proto -def test_connection_del(loop) -> None: +async def test_connection_del(loop: asyncio.AbstractEventLoop) -> None: connector = mock.Mock() key = mock.Mock() protocol = mock.Mock() @@ -117,6 +116,7 @@ def test_connection_del(loop) -> None: del conn gc.collect() + await asyncio.sleep(0) connector._release.assert_called_with(key, protocol, should_close=True) msg = { "message": mock.ANY, @@ -291,69 +291,78 @@ async def test_close(loop) -> None: assert conn.closed -async def test_get(loop) -> None: - conn = aiohttp.BaseConnector(loop=loop) - assert conn._get(1) is None +async def test_get(loop: asyncio.AbstractEventLoop, key: ConnectionKey) -> None: + conn = aiohttp.BaseConnector() + assert await conn._get(key, []) is None - proto = mock.Mock() - conn._conns[1] = [(proto, loop.time())] - assert conn._get(1) == proto + proto = create_mocked_conn(loop) + conn._conns[key] = [(proto, loop.time())] + connection = await conn._get(key, []) + assert connection is not None + assert connection.protocol == proto + 
connection.close() await conn.close() async def test_get_unconnected_proto(loop) -> None: conn = aiohttp.BaseConnector() - key = ConnectionKey("localhost", 80, False, None, None, None, None) - assert conn._get(key) is None + key = ConnectionKey("localhost", 80, False, False, None, None, None) + assert await conn._get(key, []) is None proto = create_mocked_conn(loop) conn._conns[key] = [(proto, loop.time())] - assert conn._get(key) == proto + connection = await conn._get(key, []) + assert connection is not None + assert connection.protocol == proto + connection.close() - assert conn._get(key) is None + assert await conn._get(key, []) is None conn._conns[key] = [(proto, loop.time())] proto.is_connected = lambda *args: False - assert conn._get(key) is None + assert await conn._get(key, []) is None await conn.close() async def test_get_unconnected_proto_ssl(loop) -> None: conn = aiohttp.BaseConnector() - key = ConnectionKey("localhost", 80, True, None, None, None, None) - assert conn._get(key) is None + key = ConnectionKey("localhost", 80, True, False, None, None, None) + assert await conn._get(key, []) is None proto = create_mocked_conn(loop) conn._conns[key] = [(proto, loop.time())] - assert conn._get(key) == proto + connection = await conn._get(key, []) + assert connection is not None + assert connection.protocol == proto + connection.close() - assert conn._get(key) is None + assert await conn._get(key, []) is None conn._conns[key] = [(proto, loop.time())] proto.is_connected = lambda *args: False - assert conn._get(key) is None + assert await conn._get(key, []) is None await conn.close() -async def test_get_expired(loop) -> None: - conn = aiohttp.BaseConnector(loop=loop) - key = ConnectionKey("localhost", 80, False, None, None, None, None) - assert conn._get(key) is None +async def test_get_expired(loop: asyncio.AbstractEventLoop) -> None: + conn = aiohttp.BaseConnector() + key = ConnectionKey("localhost", 80, False, False, None, None, None) + assert await 
conn._get(key, []) is None proto = mock.Mock() conn._conns[key] = [(proto, loop.time() - 1000)] - assert conn._get(key) is None + assert await conn._get(key, []) is None assert not conn._conns await conn.close() -async def test_get_expired_ssl(loop) -> None: - conn = aiohttp.BaseConnector(loop=loop, enable_cleanup_closed=True) - key = ConnectionKey("localhost", 80, True, None, None, None, None) - assert conn._get(key) is None +async def test_get_expired_ssl(loop: asyncio.AbstractEventLoop) -> None: + conn = aiohttp.BaseConnector(enable_cleanup_closed=True) + key = ConnectionKey("localhost", 80, True, False, None, None, None) + assert await conn._get(key, []) is None proto = mock.Mock() transport = proto.transport conn._conns[key] = [(proto, loop.time() - 1000)] - assert conn._get(key) is None + assert await conn._get(key, []) is None assert not conn._conns assert conn._cleanup_closed_transports == [transport] await conn.close() @@ -446,7 +455,7 @@ async def test_release_waiter_no_limit(loop, key, key2) -> None: conn = aiohttp.BaseConnector(limit=0, loop=loop) w = mock.Mock() w.done.return_value = False - conn._waiters[key].append(w) + conn._waiters[key][w] = None conn._release_waiter() assert len(conn._waiters[key]) == 0 assert w.done.called @@ -458,8 +467,8 @@ async def test_release_waiter_first_available(loop, key, key2) -> None: w1, w2 = mock.Mock(), mock.Mock() w1.done.return_value = False w2.done.return_value = False - conn._waiters[key].append(w2) - conn._waiters[key2].append(w1) + conn._waiters[key][w2] = None + conn._waiters[key2][w1] = None conn._release_waiter() assert ( w1.set_result.called @@ -475,7 +484,8 @@ async def test_release_waiter_release_first(loop, key, key2) -> None: w1, w2 = mock.Mock(), mock.Mock() w1.done.return_value = False w2.done.return_value = False - conn._waiters[key] = deque([w1, w2]) + conn._waiters[key][w1] = None + conn._waiters[key][w2] = None conn._release_waiter() assert w1.set_result.called assert not w2.set_result.called @@ 
-487,7 +497,8 @@ async def test_release_waiter_skip_done_waiter(loop, key, key2) -> None: w1, w2 = mock.Mock(), mock.Mock() w1.done.return_value = True w2.done.return_value = False - conn._waiters[key] = deque([w1, w2]) + conn._waiters[key][w1] = None + conn._waiters[key][w2] = None conn._release_waiter() assert not w1.set_result.called assert w2.set_result.called @@ -500,8 +511,8 @@ async def test_release_waiter_per_host(loop, key, key2) -> None: w1, w2 = mock.Mock(), mock.Mock() w1.done.return_value = False w2.done.return_value = False - conn._waiters[key] = deque([w1]) - conn._waiters[key2] = deque([w2]) + conn._waiters[key][w1] = None + conn._waiters[key2][w2] = None conn._release_waiter() assert (w1.set_result.called and not w2.set_result.called) or ( not w1.set_result.called and w2.set_result.called @@ -514,12 +525,14 @@ async def test_release_waiter_no_available(loop, key, key2) -> None: conn = aiohttp.BaseConnector(limit=0, loop=loop) w = mock.Mock() w.done.return_value = False - conn._waiters[key].append(w) - conn._available_connections = mock.Mock(return_value=0) - conn._release_waiter() - assert len(conn._waiters) == 1 - assert not w.done.called - await conn.close() + conn._waiters[key][w] = None + with mock.patch.object( + conn, "_available_connections", autospec=True, spec_set=True, return_value=0 + ): + conn._release_waiter() + assert len(conn._waiters) == 1 + assert not w.done.called + await conn.close() async def test_release_close(loop, key) -> None: @@ -532,26 +545,35 @@ async def test_release_close(loop, key) -> None: assert proto.close.called -async def test__drop_acquire_per_host1(loop) -> None: - conn = aiohttp.BaseConnector(loop=loop) - conn._drop_acquired_per_host(123, 456) +async def test__release_acquired_per_host1( + loop: asyncio.AbstractEventLoop, key: ConnectionKey +) -> None: + conn = aiohttp.BaseConnector() + conn._release_acquired(key, create_mocked_conn(loop)) assert len(conn._acquired_per_host) == 0 -async def 
test__drop_acquire_per_host2(loop) -> None: - conn = aiohttp.BaseConnector(loop=loop) - conn._acquired_per_host[123].add(456) - conn._drop_acquired_per_host(123, 456) +async def test__release_acquired_per_host2( + loop: asyncio.AbstractEventLoop, key: ConnectionKey +) -> None: + conn = aiohttp.BaseConnector() + handler = create_mocked_conn(loop) + conn._acquired_per_host[key].add(handler) + conn._release_acquired(key, handler) assert len(conn._acquired_per_host) == 0 -async def test__drop_acquire_per_host3(loop) -> None: - conn = aiohttp.BaseConnector(loop=loop) - conn._acquired_per_host[123].add(456) - conn._acquired_per_host[123].add(789) - conn._drop_acquired_per_host(123, 456) +async def test__release_acquired_per_host3( + loop: asyncio.AbstractEventLoop, key: ConnectionKey +) -> None: + conn = aiohttp.BaseConnector() + handler = create_mocked_conn(loop) + handler2 = create_mocked_conn(loop) + conn._acquired_per_host[key].add(handler) + conn._acquired_per_host[key].add(handler2) + conn._release_acquired(key, handler) assert len(conn._acquired_per_host) == 1 - assert conn._acquired_per_host[123] == {789} + assert conn._acquired_per_host[key] == {handler2} async def test_tcp_connector_certificate_error( @@ -1370,8 +1392,7 @@ async def test_get_pop_empty_conns(loop) -> None: conn = aiohttp.BaseConnector(loop=loop) key = ("127.0.0.1", 80, False) conn._conns[key] = [] - proto = conn._get(key) - assert proto is None + assert await conn._get(key, []) is None assert not conn._conns @@ -1478,8 +1499,164 @@ async def test_connect_tracing(loop) -> None: ) -async def test_close_during_connect(loop) -> None: - proto = mock.Mock() +@pytest.mark.parametrize( + "signal", + [ + "on_connection_create_start", + "on_connection_create_end", + ], +) +async def test_exception_during_connetion_create_tracing( + loop: asyncio.AbstractEventLoop, signal: str +) -> None: + session = mock.Mock() + trace_config_ctx = mock.Mock() + on_signal = 
mock.AsyncMock(side_effect=asyncio.CancelledError) + trace_config = aiohttp.TraceConfig( + trace_config_ctx_factory=mock.Mock(return_value=trace_config_ctx) + ) + getattr(trace_config, signal).append(on_signal) + trace_config.freeze() + traces = [Trace(session, trace_config, trace_config.trace_config_ctx())] + + proto = create_mocked_conn(loop) + proto.is_connected.return_value = True + + req = ClientRequest("GET", URL("http://host:80"), loop=loop) + key = req.connection_key + conn = aiohttp.BaseConnector() + assert not conn._acquired + assert key not in conn._acquired_per_host + + with pytest.raises(asyncio.CancelledError), mock.patch.object( + conn, "_create_connection", autospec=True, spec_set=True, return_value=proto + ): + await conn.connect(req, traces, ClientTimeout()) + + assert not conn._acquired + assert key not in conn._acquired_per_host + + +async def test_exception_during_connection_queued_tracing( + loop: asyncio.AbstractEventLoop, +) -> None: + session = mock.Mock() + trace_config_ctx = mock.Mock() + on_signal = mock.AsyncMock(side_effect=asyncio.CancelledError) + trace_config = aiohttp.TraceConfig( + trace_config_ctx_factory=mock.Mock(return_value=trace_config_ctx) + ) + trace_config.on_connection_queued_start.append(on_signal) + trace_config.freeze() + traces = [Trace(session, trace_config, trace_config.trace_config_ctx())] + + proto = create_mocked_conn(loop) + proto.is_connected.return_value = True + + req = ClientRequest("GET", URL("http://host:80"), loop=loop) + key = req.connection_key + conn = aiohttp.BaseConnector(limit=1) + assert not conn._acquired + assert key not in conn._acquired_per_host + + with pytest.raises(asyncio.CancelledError), mock.patch.object( + conn, "_create_connection", autospec=True, spec_set=True, return_value=proto + ): + resp1 = await conn.connect(req, traces, ClientTimeout()) + assert resp1 + # 2nd connect request will be queued + await conn.connect(req, traces, ClientTimeout()) + + resp1.close() + assert not 
conn._waiters + assert not conn._acquired + assert key not in conn._acquired_per_host + + +async def test_exception_during_connection_reuse_tracing( + loop: asyncio.AbstractEventLoop, +) -> None: + session = mock.Mock() + trace_config_ctx = mock.Mock() + on_signal = mock.AsyncMock(side_effect=asyncio.CancelledError) + trace_config = aiohttp.TraceConfig( + trace_config_ctx_factory=mock.Mock(return_value=trace_config_ctx) + ) + trace_config.on_connection_reuseconn.append(on_signal) + trace_config.freeze() + traces = [Trace(session, trace_config, trace_config.trace_config_ctx())] + + proto = create_mocked_conn(loop) + proto.is_connected.return_value = True + + req = ClientRequest("GET", URL("http://host:80"), loop=loop) + key = req.connection_key + conn = aiohttp.BaseConnector() + assert not conn._acquired + assert key not in conn._acquired_per_host + + with pytest.raises(asyncio.CancelledError), mock.patch.object( + conn, "_create_connection", autospec=True, spec_set=True, return_value=proto + ): + resp = await conn.connect(req, traces, ClientTimeout()) + with mock.patch.object(resp.protocol, "should_close", False): + resp.release() + assert not conn._acquired + assert key not in conn._acquired_per_host + assert key in conn._conns + + await conn.connect(req, traces, ClientTimeout()) + + assert not conn._acquired + assert key not in conn._acquired_per_host + + +async def test_cancellation_during_waiting_for_free_connection( + loop: asyncio.AbstractEventLoop, +) -> None: + session = mock.Mock() + trace_config_ctx = mock.Mock() + waiter_wait_stated_future = loop.create_future() + + async def on_connection_queued_start(*args: object, **kwargs: object) -> None: + waiter_wait_stated_future.set_result(None) + + trace_config = aiohttp.TraceConfig( + trace_config_ctx_factory=mock.Mock(return_value=trace_config_ctx) + ) + trace_config.on_connection_queued_start.append(on_connection_queued_start) + trace_config.freeze() + traces = [Trace(session, trace_config, 
trace_config.trace_config_ctx())] + + proto = create_mocked_conn(loop) + proto.is_connected.return_value = True + + req = ClientRequest("GET", URL("http://host:80"), loop=loop) + key = req.connection_key + conn = aiohttp.BaseConnector(limit=1) + assert not conn._acquired + assert key not in conn._acquired_per_host + + with mock.patch.object( + conn, "_create_connection", autospec=True, spec_set=True, return_value=proto + ): + resp1 = await conn.connect(req, traces, ClientTimeout()) + assert resp1 + # 2nd connect request will be queued + task = asyncio.create_task(conn.connect(req, traces, ClientTimeout())) + await waiter_wait_stated_future + list(conn._waiters[key])[0].cancel() + with pytest.raises(asyncio.CancelledError): + await task + + resp1.close() + assert not conn._waiters + assert not conn._acquired + assert key not in conn._acquired_per_host + + +async def test_close_during_connect(loop: asyncio.AbstractEventLoop) -> None: + proto = create_mocked_conn(loop) proto.is_connected.return_value = True fut = loop.create_future() @@ -1523,7 +1700,8 @@ async def test_cleanup(key) -> None: conn._conns = testset existing_handle = conn._cleanup_handle = mock.Mock() - conn._cleanup() + with mock.patch("aiohttp.connector.monotonic", return_value=300): + conn._cleanup() assert existing_handle.cancel.called assert conn._conns == {} assert conn._cleanup_handle is None @@ -1535,12 +1713,15 @@ async def test_cleanup_close_ssl_transport(ssl_key) -> None: testset = {ssl_key: [(proto, 10)]} loop = mock.Mock() - loop.time.return_value = 300 - conn = aiohttp.BaseConnector(loop=loop, enable_cleanup_closed=True) + new_time = asyncio.get_event_loop().time() + 300 + loop.time.return_value = new_time + conn = aiohttp.BaseConnector(enable_cleanup_closed=True) + conn._loop = loop conn._conns = testset existing_handle = conn._cleanup_handle = mock.Mock() - conn._cleanup() + with mock.patch("aiohttp.connector.monotonic", return_value=new_time): + conn._cleanup() assert 
existing_handle.cancel.called assert conn._conns == {} assert conn._cleanup_closed_transports == [transport] @@ -1550,16 +1731,16 @@ async def test_cleanup2() -> None: testset = {1: [(mock.Mock(), 300)]} testset[1][0][0].is_connected.return_value = True - loop = mock.Mock() - loop.time.return_value = 300 - - conn = aiohttp.BaseConnector(loop=loop, keepalive_timeout=10) - conn._conns = testset - conn._cleanup() + conn = aiohttp.BaseConnector(keepalive_timeout=10) + conn._loop = mock.Mock() + conn._loop.time.return_value = 300 + with mock.patch("aiohttp.connector.monotonic", return_value=300): + conn._conns = testset + conn._cleanup() assert conn._conns == testset assert conn._cleanup_handle is not None - loop.call_at.assert_called_with(310, mock.ANY, mock.ANY) + conn._loop.call_at.assert_called_with(310, mock.ANY, mock.ANY) await conn.close() @@ -1573,7 +1754,9 @@ async def test_cleanup3(key) -> None: conn = aiohttp.BaseConnector(loop=loop, keepalive_timeout=10) conn._conns = testset - conn._cleanup() + with mock.patch("aiohttp.connector.monotonic", return_value=308.5): + conn._cleanup() + assert conn._conns == {key: [testset[key][1]]} assert conn._cleanup_handle is not None @@ -3074,23 +3257,16 @@ async def send_dns_cache_hit(self, *args: object, **kwargs: object) -> None: assert await connector._resolve_host("", 0, traces) == [token] -async def test_connector_does_not_remove_needed_waiters(loop, key) -> None: +async def test_connector_does_not_remove_needed_waiters( + loop: asyncio.AbstractEventLoop, key: ConnectionKey +) -> None: proto = create_mocked_conn(loop) proto.is_connected.return_value = True req = ClientRequest("GET", URL("https://localhost:80"), loop=loop) connection_key = req.connection_key - connector = aiohttp.BaseConnector() - connector._available_connections = mock.Mock(return_value=0) - connector._conns[key] = [(proto, loop.time())] - connector._create_connection = create_mocked_conn(loop) - connector._create_connection.return_value = 
loop.create_future() - connector._create_connection.return_value.set_result(proto) - - dummy_waiter = loop.create_future() - - async def await_connection_and_check_waiters(): + async def await_connection_and_check_waiters() -> None: connection = await connector.connect(req, [], ClientTimeout()) try: assert connection_key in connector._waiters @@ -3098,21 +3274,39 @@ async def await_connection_and_check_waiters(): finally: connection.close() - async def allow_connection_and_add_dummy_waiter(): + async def allow_connection_and_add_dummy_waiter() -> None: # `asyncio.gather` may execute coroutines not in order. # Skip one event loop run cycle in such a case. if connection_key not in connector._waiters: await asyncio.sleep(0) - connector._waiters[connection_key].popleft().set_result(None) + list(connector._waiters[connection_key])[0].set_result(None) del connector._waiters[connection_key] - connector._waiters[connection_key].append(dummy_waiter) + connector._waiters[connection_key][dummy_waiter] = None - await asyncio.gather( - await_connection_and_check_waiters(), - allow_connection_and_add_dummy_waiter(), - ) + connector = aiohttp.BaseConnector() + with mock.patch.object( + connector, + "_available_connections", + autospec=True, + spec_set=True, + side_effect=[0, 1, 1, 1], + ): + connector._conns[key] = [(proto, loop.time())] + with mock.patch.object( + connector, + "_create_connection", + autospec=True, + spec_set=True, + return_value=proto, + ): + dummy_waiter = loop.create_future() + + await asyncio.gather( + await_connection_and_check_waiters(), + allow_connection_and_add_dummy_waiter(), + ) - await connector.close() + await connector.close() def test_connector_multiple_event_loop() -> None: @@ -3154,6 +3348,14 @@ def test_default_ssl_context_creation_without_ssl() -> None: assert connector_module._make_ssl_context(True) is None +def _acquired_connection( + conn: aiohttp.BaseConnector, proto: ResponseHandler, key: ConnectionKey +) -> Connection: + 
conn._acquired.add(proto) + conn._acquired_per_host[key].add(proto) + return Connection(conn, key, proto, conn._loop) + + async def test_available_connections_with_limit_per_host( key: ConnectionKey, other_host_key2: ConnectionKey ) -> None: @@ -3162,11 +3364,11 @@ async def test_available_connections_with_limit_per_host( assert conn._available_connections(key) == 2 assert conn._available_connections(other_host_key2) == 2 proto1 = create_mocked_conn() - connection1 = conn._acquired_connection(proto1, key) + connection1 = _acquired_connection(conn, proto1, key) assert conn._available_connections(key) == 1 assert conn._available_connections(other_host_key2) == 2 proto2 = create_mocked_conn() - connection2 = conn._acquired_connection(proto2, key) + connection2 = _acquired_connection(conn, proto2, key) assert conn._available_connections(key) == 0 assert conn._available_connections(other_host_key2) == 1 connection1.close() @@ -3174,7 +3376,7 @@ async def test_available_connections_with_limit_per_host( assert conn._available_connections(other_host_key2) == 2 connection2.close() other_proto1 = create_mocked_conn() - other_connection1 = conn._acquired_connection(other_proto1, other_host_key2) + other_connection1 = _acquired_connection(conn, other_proto1, other_host_key2) assert conn._available_connections(key) == 2 assert conn._available_connections(other_host_key2) == 1 other_connection1.close() @@ -3191,11 +3393,11 @@ async def test_available_connections_without_limit_per_host( assert conn._available_connections(key) == 3 assert conn._available_connections(other_host_key2) == 3 proto1 = create_mocked_conn() - connection1 = conn._acquired_connection(proto1, key) + connection1 = _acquired_connection(conn, proto1, key) assert conn._available_connections(key) == 2 assert conn._available_connections(other_host_key2) == 2 proto2 = create_mocked_conn() - connection2 = conn._acquired_connection(proto2, key) + connection2 = _acquired_connection(conn, proto2, key) assert 
conn._available_connections(key) == 1 assert conn._available_connections(other_host_key2) == 1 connection1.close() @@ -3203,7 +3405,7 @@ async def test_available_connections_without_limit_per_host( assert conn._available_connections(other_host_key2) == 2 connection2.close() other_proto1 = create_mocked_conn() - other_connection1 = conn._acquired_connection(other_proto1, other_host_key2) + other_connection1 = _acquired_connection(conn, other_proto1, other_host_key2) assert conn._available_connections(key) == 2 assert conn._available_connections(other_host_key2) == 2 other_connection1.close() @@ -3220,7 +3422,7 @@ async def test_available_connections_no_limits( assert conn._available_connections(key) == 1 assert conn._available_connections(other_host_key2) == 1 proto1 = create_mocked_conn() - connection1 = conn._acquired_connection(proto1, key) + connection1 = _acquired_connection(conn, proto1, key) assert conn._available_connections(key) == 1 assert conn._available_connections(other_host_key2) == 1 connection1.close() From e74abf6d989c176ead6c9f95f770b3f6b035ab00 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Tue, 5 Nov 2024 11:42:56 -0600 Subject: [PATCH 0830/1511] [PR #9671/37d9fe6 backport][3.10] Refactor connection waiters to be cancellation safe (#9674) --- CHANGES/9670.bugfix.rst | 1 + CHANGES/9671.bugfix.rst | 3 + aiohttp/connector.py | 200 +++++++++++---------- tests/test_connector.py | 374 ++++++++++++++++++++++++++++++---------- 4 files changed, 387 insertions(+), 191 deletions(-) create mode 120000 CHANGES/9670.bugfix.rst create mode 100644 CHANGES/9671.bugfix.rst diff --git a/CHANGES/9670.bugfix.rst b/CHANGES/9670.bugfix.rst new file mode 120000 index 00000000000..b0411a405a0 --- /dev/null +++ b/CHANGES/9670.bugfix.rst @@ -0,0 +1 @@ +9671.bugfix.rst \ No newline at end of file diff --git a/CHANGES/9671.bugfix.rst b/CHANGES/9671.bugfix.rst new file mode 100644 index 00000000000..d2ca2e8ccb5 --- /dev/null +++ b/CHANGES/9671.bugfix.rst @@ -0,0 +1,3 @@ +Fixed a deadlock that could occur while attempting to get a new connection slot after a timeout -- by :user:`bdraco`. + +The connector was not cancellation-safe. diff --git a/aiohttp/connector.py b/aiohttp/connector.py index f374478b3ea..773a949c46e 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -5,7 +5,7 @@ import sys import traceback import warnings -from collections import defaultdict, deque +from collections import OrderedDict, defaultdict from contextlib import suppress from http import HTTPStatus from itertools import chain, cycle, islice @@ -267,9 +267,11 @@ def __init__( self._force_close = force_close # {host_key: FIFO list of waiters} - self._waiters: DefaultDict[ConnectionKey, deque[asyncio.Future[None]]] = ( - defaultdict(deque) - ) + # The FIFO is implemented with an OrderedDict with None keys because + # python does not have an ordered set. 
+ self._waiters: DefaultDict[ + ConnectionKey, OrderedDict[asyncio.Future[None], None] + ] = defaultdict(OrderedDict) self._loop = loop self._factory = functools.partial(ResponseHandler, loop=loop) @@ -357,7 +359,7 @@ def _cleanup(self) -> None: # recreate it ever! self._cleanup_handle = None - now = self._loop.time() + now = monotonic() timeout = self._keepalive_timeout if self._conns: @@ -388,14 +390,6 @@ def _cleanup(self) -> None: timeout_ceil_threshold=self._timeout_ceil_threshold, ) - def _drop_acquired_per_host( - self, key: "ConnectionKey", val: ResponseHandler - ) -> None: - if conns := self._acquired_per_host.get(key): - conns.remove(val) - if not conns: - del self._acquired_per_host[key] - def _cleanup_closed(self) -> None: """Double confirmation for transport close. @@ -456,6 +450,9 @@ def _close(self) -> None: finally: self._conns.clear() self._acquired.clear() + for keyed_waiters in self._waiters.values(): + for keyed_waiter in keyed_waiters: + keyed_waiter.cancel() self._waiters.clear() self._cleanup_handle = None self._cleanup_closed_transports.clear() @@ -499,113 +496,107 @@ async def connect( ) -> Connection: """Get from pool or create new connection.""" key = req.connection_key - available = self._available_connections(key) - wait_for_conn = available <= 0 or key in self._waiters - if not wait_for_conn and (proto := self._get(key)) is not None: + if (conn := await self._get(key, traces)) is not None: # If we do not have to wait and we can get a connection from the pool # we can avoid the timeout ceil logic and directly return the connection - return await self._reused_connection(key, proto, traces) + return conn async with ceil_timeout(timeout.connect, timeout.ceil_threshold): - # Wait if there are no available connections or if there are/were - # waiters (i.e. 
don't steal connection from a waiter about to wake up) - if wait_for_conn: + if self._available_connections(key) <= 0: await self._wait_for_available_connection(key, traces) - if (proto := self._get(key)) is not None: - return await self._reused_connection(key, proto, traces) + if (conn := await self._get(key, traces)) is not None: + return conn placeholder = cast(ResponseHandler, _TransportPlaceholder()) self._acquired.add(placeholder) self._acquired_per_host[key].add(placeholder) - if traces: - for trace in traces: - await trace.send_connection_create_start() - try: + # Traces are done inside the try block to ensure that the + # that the placeholder is still cleaned up if an exception + # is raised. + if traces: + for trace in traces: + await trace.send_connection_create_start() proto = await self._create_connection(req, traces, timeout) - if self._closed: - proto.close() - raise ClientConnectionError("Connector is closed.") + if traces: + for trace in traces: + await trace.send_connection_create_end() except BaseException: - if not self._closed: - self._acquired.remove(placeholder) - self._drop_acquired_per_host(key, placeholder) - self._release_waiter() + self._release_acquired(key, placeholder) raise else: - if not self._closed: - self._acquired.remove(placeholder) - self._drop_acquired_per_host(key, placeholder) - - if traces: - for trace in traces: - await trace.send_connection_create_end() - - return self._acquired_connection(proto, key) - - async def _reused_connection( - self, key: "ConnectionKey", proto: ResponseHandler, traces: List["Trace"] - ) -> Connection: - if traces: - # Acquire the connection to prevent race conditions with limits - placeholder = cast(ResponseHandler, _TransportPlaceholder()) - self._acquired.add(placeholder) - self._acquired_per_host[key].add(placeholder) - for trace in traces: - await trace.send_connection_reuseconn() - self._acquired.remove(placeholder) - self._drop_acquired_per_host(key, placeholder) - return 
self._acquired_connection(proto, key) + if self._closed: + proto.close() + raise ClientConnectionError("Connector is closed.") - def _acquired_connection( - self, proto: ResponseHandler, key: "ConnectionKey" - ) -> Connection: - """Mark proto as acquired and wrap it in a Connection object.""" + # The connection was successfully created, drop the placeholder + # and add the real connection to the acquired set. There should + # be no awaits after the proto is added to the acquired set + # to ensure that the connection is not left in the acquired set + # on cancellation. + acquired_per_host = self._acquired_per_host[key] + self._acquired.remove(placeholder) + acquired_per_host.remove(placeholder) self._acquired.add(proto) - self._acquired_per_host[key].add(proto) + acquired_per_host.add(proto) return Connection(self, key, proto, self._loop) async def _wait_for_available_connection( self, key: "ConnectionKey", traces: List["Trace"] ) -> None: - """Wait until there is an available connection.""" - fut: asyncio.Future[None] = self._loop.create_future() - - # This connection will now count towards the limit. - self._waiters[key].append(fut) + """Wait for an available connection slot.""" + # We loop here because there is a race between + # the connection limit check and the connection + # being acquired. If the connection is acquired + # between the check and the await statement, we + # need to loop again to check if the connection + # slot is still available. + attempts = 0 + while True: + fut: asyncio.Future[None] = self._loop.create_future() + keyed_waiters = self._waiters[key] + keyed_waiters[fut] = None + if attempts: + # If we have waited before, we need to move the waiter + # to the front of the queue as otherwise we might get + # starved and hit the timeout. 
+ keyed_waiters.move_to_end(fut, last=False) - if traces: - for trace in traces: - await trace.send_connection_queued_start() + try: + # Traces happen in the try block to ensure that the + # the waiter is still cleaned up if an exception is raised. + if traces: + for trace in traces: + await trace.send_connection_queued_start() + await fut + if traces: + for trace in traces: + await trace.send_connection_queued_end() + finally: + # pop the waiter from the queue if its still + # there and not already removed by _release_waiter + keyed_waiters.pop(fut, None) + if not self._waiters.get(key, True): + del self._waiters[key] - try: - await fut - except BaseException as e: - if key in self._waiters: - # remove a waiter even if it was cancelled, normally it's - # removed when it's notified - with suppress(ValueError): - # fut may no longer be in list - self._waiters[key].remove(fut) - - raise e - finally: - if key in self._waiters and not self._waiters[key]: - del self._waiters[key] + if self._available_connections(key) > 0: + break + attempts += 1 - if traces: - for trace in traces: - await trace.send_connection_queued_end() + async def _get( + self, key: "ConnectionKey", traces: List["Trace"] + ) -> Optional[Connection]: + """Get next reusable connection for the key or None. - def _get(self, key: "ConnectionKey") -> Optional[ResponseHandler]: - """Get next reusable connection for the key or None.""" + The connection will be marked as acquired. 
+ """ try: conns = self._conns[key] except KeyError: return None - t1 = self._loop.time() + t1 = monotonic() while conns: proto, t0 = conns.pop() # We will we reuse the connection if its connected and @@ -614,7 +605,16 @@ def _get(self, key: "ConnectionKey") -> Optional[ResponseHandler]: if not conns: # The very last connection was reclaimed: drop the key del self._conns[key] - return proto + self._acquired.add(proto) + self._acquired_per_host[key].add(proto) + if traces: + for trace in traces: + try: + await trace.send_connection_reuseconn() + except BaseException: + self._release_acquired(key, proto) + raise + return Connection(self, key, proto, self._loop) # Connection cannot be reused, close it transport = proto.transport @@ -648,25 +648,23 @@ def _release_waiter(self) -> None: waiters = self._waiters[key] while waiters: - waiter = waiters.popleft() + waiter, _ = waiters.popitem(last=False) if not waiter.done(): waiter.set_result(None) return def _release_acquired(self, key: "ConnectionKey", proto: ResponseHandler) -> None: + """Release acquired connection.""" if self._closed: # acquired connection is already released on connector closing return - try: - self._acquired.remove(proto) - self._drop_acquired_per_host(key, proto) - except KeyError: # pragma: no cover - # this may be result of undetermenistic order of objects - # finalization due garbage collection. 
- pass - else: - self._release_waiter() + self._acquired.discard(proto) + if conns := self._acquired_per_host.get(key): + conns.discard(proto) + if not conns: + del self._acquired_per_host[key] + self._release_waiter() def _release( self, @@ -695,7 +693,7 @@ def _release( conns = self._conns.get(key) if conns is None: conns = self._conns[key] = [] - conns.append((protocol, self._loop.time())) + conns.append((protocol, monotonic())) if self._cleanup_handle is None: self._cleanup_handle = helpers.weakref_handle( diff --git a/tests/test_connector.py b/tests/test_connector.py index 3ad2b1e346a..ca302371ef0 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -7,7 +7,6 @@ import ssl import sys import uuid -from collections import deque from concurrent import futures from contextlib import closing, suppress from typing import Any, List, Literal, Optional, Sequence, Tuple @@ -109,7 +108,7 @@ def create_mocked_conn(conn_closing_result=None, **kwargs): return proto -def test_connection_del(loop) -> None: +async def test_connection_del(loop: asyncio.AbstractEventLoop) -> None: connector = mock.Mock() key = mock.Mock() protocol = mock.Mock() @@ -122,6 +121,7 @@ def test_connection_del(loop) -> None: del conn gc.collect() + await asyncio.sleep(0) connector._release.assert_called_with(key, protocol, should_close=True) msg = { "message": mock.ANY, @@ -296,69 +296,78 @@ async def test_close(loop) -> None: assert conn.closed -async def test_get(loop) -> None: - conn = aiohttp.BaseConnector(loop=loop) - assert conn._get(1) is None +async def test_get(loop: asyncio.AbstractEventLoop, key: ConnectionKey) -> None: + conn = aiohttp.BaseConnector() + assert await conn._get(key, []) is None - proto = mock.Mock() - conn._conns[1] = [(proto, loop.time())] - assert conn._get(1) == proto + proto = create_mocked_conn(loop) + conn._conns[key] = [(proto, loop.time())] + connection = await conn._get(key, []) + assert connection is not None + assert connection.protocol == proto + 
connection.close() await conn.close() async def test_get_unconnected_proto(loop) -> None: conn = aiohttp.BaseConnector() - key = ConnectionKey("localhost", 80, False, None, None, None, None) - assert conn._get(key) is None + key = ConnectionKey("localhost", 80, False, False, None, None, None) + assert await conn._get(key, []) is None proto = create_mocked_conn(loop) conn._conns[key] = [(proto, loop.time())] - assert conn._get(key) == proto + connection = await conn._get(key, []) + assert connection is not None + assert connection.protocol == proto + connection.close() - assert conn._get(key) is None + assert await conn._get(key, []) is None conn._conns[key] = [(proto, loop.time())] proto.is_connected = lambda *args: False - assert conn._get(key) is None + assert await conn._get(key, []) is None await conn.close() async def test_get_unconnected_proto_ssl(loop) -> None: conn = aiohttp.BaseConnector() - key = ConnectionKey("localhost", 80, True, None, None, None, None) - assert conn._get(key) is None + key = ConnectionKey("localhost", 80, True, False, None, None, None) + assert await conn._get(key, []) is None proto = create_mocked_conn(loop) conn._conns[key] = [(proto, loop.time())] - assert conn._get(key) == proto + connection = await conn._get(key, []) + assert connection is not None + assert connection.protocol == proto + connection.close() - assert conn._get(key) is None + assert await conn._get(key, []) is None conn._conns[key] = [(proto, loop.time())] proto.is_connected = lambda *args: False - assert conn._get(key) is None + assert await conn._get(key, []) is None await conn.close() -async def test_get_expired(loop) -> None: - conn = aiohttp.BaseConnector(loop=loop) - key = ConnectionKey("localhost", 80, False, None, None, None, None) - assert conn._get(key) is None +async def test_get_expired(loop: asyncio.AbstractEventLoop) -> None: + conn = aiohttp.BaseConnector() + key = ConnectionKey("localhost", 80, False, False, None, None, None) + assert await 
conn._get(key, []) is None proto = mock.Mock() conn._conns[key] = [(proto, loop.time() - 1000)] - assert conn._get(key) is None + assert await conn._get(key, []) is None assert not conn._conns await conn.close() -async def test_get_expired_ssl(loop) -> None: - conn = aiohttp.BaseConnector(loop=loop, enable_cleanup_closed=True) - key = ConnectionKey("localhost", 80, True, None, None, None, None) - assert conn._get(key) is None +async def test_get_expired_ssl(loop: asyncio.AbstractEventLoop) -> None: + conn = aiohttp.BaseConnector(enable_cleanup_closed=True) + key = ConnectionKey("localhost", 80, True, False, None, None, None) + assert await conn._get(key, []) is None proto = mock.Mock() transport = proto.transport conn._conns[key] = [(proto, loop.time() - 1000)] - assert conn._get(key) is None + assert await conn._get(key, []) is None assert not conn._conns assert conn._cleanup_closed_transports == [transport] await conn.close() @@ -451,7 +460,7 @@ async def test_release_waiter_no_limit(loop, key, key2) -> None: conn = aiohttp.BaseConnector(limit=0, loop=loop) w = mock.Mock() w.done.return_value = False - conn._waiters[key].append(w) + conn._waiters[key][w] = None conn._release_waiter() assert len(conn._waiters[key]) == 0 assert w.done.called @@ -463,8 +472,8 @@ async def test_release_waiter_first_available(loop, key, key2) -> None: w1, w2 = mock.Mock(), mock.Mock() w1.done.return_value = False w2.done.return_value = False - conn._waiters[key].append(w2) - conn._waiters[key2].append(w1) + conn._waiters[key][w2] = None + conn._waiters[key2][w1] = None conn._release_waiter() assert ( w1.set_result.called @@ -480,7 +489,8 @@ async def test_release_waiter_release_first(loop, key, key2) -> None: w1, w2 = mock.Mock(), mock.Mock() w1.done.return_value = False w2.done.return_value = False - conn._waiters[key] = deque([w1, w2]) + conn._waiters[key][w1] = None + conn._waiters[key][w2] = None conn._release_waiter() assert w1.set_result.called assert not w2.set_result.called @@ 
-492,7 +502,8 @@ async def test_release_waiter_skip_done_waiter(loop, key, key2) -> None: w1, w2 = mock.Mock(), mock.Mock() w1.done.return_value = True w2.done.return_value = False - conn._waiters[key] = deque([w1, w2]) + conn._waiters[key][w1] = None + conn._waiters[key][w2] = None conn._release_waiter() assert not w1.set_result.called assert w2.set_result.called @@ -505,8 +516,8 @@ async def test_release_waiter_per_host(loop, key, key2) -> None: w1, w2 = mock.Mock(), mock.Mock() w1.done.return_value = False w2.done.return_value = False - conn._waiters[key] = deque([w1]) - conn._waiters[key2] = deque([w2]) + conn._waiters[key][w1] = None + conn._waiters[key2][w2] = None conn._release_waiter() assert (w1.set_result.called and not w2.set_result.called) or ( not w1.set_result.called and w2.set_result.called @@ -519,12 +530,14 @@ async def test_release_waiter_no_available(loop, key, key2) -> None: conn = aiohttp.BaseConnector(limit=0, loop=loop) w = mock.Mock() w.done.return_value = False - conn._waiters[key].append(w) - conn._available_connections = mock.Mock(return_value=0) - conn._release_waiter() - assert len(conn._waiters) == 1 - assert not w.done.called - await conn.close() + conn._waiters[key][w] = None + with mock.patch.object( + conn, "_available_connections", autospec=True, spec_set=True, return_value=0 + ): + conn._release_waiter() + assert len(conn._waiters) == 1 + assert not w.done.called + await conn.close() async def test_release_close(loop, key) -> None: @@ -537,26 +550,35 @@ async def test_release_close(loop, key) -> None: assert proto.close.called -async def test__drop_acquire_per_host1(loop) -> None: - conn = aiohttp.BaseConnector(loop=loop) - conn._drop_acquired_per_host(123, 456) +async def test__release_acquired_per_host1( + loop: asyncio.AbstractEventLoop, key: ConnectionKey +) -> None: + conn = aiohttp.BaseConnector() + conn._release_acquired(key, create_mocked_conn(loop)) assert len(conn._acquired_per_host) == 0 -async def 
test__drop_acquire_per_host2(loop) -> None: - conn = aiohttp.BaseConnector(loop=loop) - conn._acquired_per_host[123].add(456) - conn._drop_acquired_per_host(123, 456) +async def test__release_acquired_per_host2( + loop: asyncio.AbstractEventLoop, key: ConnectionKey +) -> None: + conn = aiohttp.BaseConnector() + handler = create_mocked_conn(loop) + conn._acquired_per_host[key].add(handler) + conn._release_acquired(key, handler) assert len(conn._acquired_per_host) == 0 -async def test__drop_acquire_per_host3(loop) -> None: - conn = aiohttp.BaseConnector(loop=loop) - conn._acquired_per_host[123].add(456) - conn._acquired_per_host[123].add(789) - conn._drop_acquired_per_host(123, 456) +async def test__release_acquired_per_host3( + loop: asyncio.AbstractEventLoop, key: ConnectionKey +) -> None: + conn = aiohttp.BaseConnector() + handler = create_mocked_conn(loop) + handler2 = create_mocked_conn(loop) + conn._acquired_per_host[key].add(handler) + conn._acquired_per_host[key].add(handler2) + conn._release_acquired(key, handler) assert len(conn._acquired_per_host) == 1 - assert conn._acquired_per_host[123] == {789} + assert conn._acquired_per_host[key] == {handler2} async def test_tcp_connector_certificate_error( @@ -1375,8 +1397,7 @@ async def test_get_pop_empty_conns(loop) -> None: conn = aiohttp.BaseConnector(loop=loop) key = ("127.0.0.1", 80, False) conn._conns[key] = [] - proto = conn._get(key) - assert proto is None + assert await conn._get(key, []) is None assert not conn._conns @@ -1483,8 +1504,164 @@ async def test_connect_tracing(loop) -> None: ) -async def test_close_during_connect(loop) -> None: - proto = mock.Mock() +@pytest.mark.parametrize( + "signal", + [ + "on_connection_create_start", + "on_connection_create_end", + ], +) +async def test_exception_during_connetion_create_tracing( + loop: asyncio.AbstractEventLoop, signal: str +) -> None: + session = mock.Mock() + trace_config_ctx = mock.Mock() + on_signal = 
mock.AsyncMock(side_effect=asyncio.CancelledError) + trace_config = aiohttp.TraceConfig( + trace_config_ctx_factory=mock.Mock(return_value=trace_config_ctx) + ) + getattr(trace_config, signal).append(on_signal) + trace_config.freeze() + traces = [Trace(session, trace_config, trace_config.trace_config_ctx())] + + proto = create_mocked_conn(loop) + proto.is_connected.return_value = True + + req = ClientRequest("GET", URL("http://host:80"), loop=loop) + key = req.connection_key + conn = aiohttp.BaseConnector() + assert not conn._acquired + assert key not in conn._acquired_per_host + + with pytest.raises(asyncio.CancelledError), mock.patch.object( + conn, "_create_connection", autospec=True, spec_set=True, return_value=proto + ): + await conn.connect(req, traces, ClientTimeout()) + + assert not conn._acquired + assert key not in conn._acquired_per_host + + +async def test_exception_during_connection_queued_tracing( + loop: asyncio.AbstractEventLoop, +) -> None: + session = mock.Mock() + trace_config_ctx = mock.Mock() + on_signal = mock.AsyncMock(side_effect=asyncio.CancelledError) + trace_config = aiohttp.TraceConfig( + trace_config_ctx_factory=mock.Mock(return_value=trace_config_ctx) + ) + trace_config.on_connection_queued_start.append(on_signal) + trace_config.freeze() + traces = [Trace(session, trace_config, trace_config.trace_config_ctx())] + + proto = create_mocked_conn(loop) + proto.is_connected.return_value = True + + req = ClientRequest("GET", URL("http://host:80"), loop=loop) + key = req.connection_key + conn = aiohttp.BaseConnector(limit=1) + assert not conn._acquired + assert key not in conn._acquired_per_host + + with pytest.raises(asyncio.CancelledError), mock.patch.object( + conn, "_create_connection", autospec=True, spec_set=True, return_value=proto + ): + resp1 = await conn.connect(req, traces, ClientTimeout()) + assert resp1 + # 2nd connect request will be queued + await conn.connect(req, traces, ClientTimeout()) + + resp1.close() + assert not 
conn._waiters + assert not conn._acquired + assert key not in conn._acquired_per_host + + +async def test_exception_during_connection_reuse_tracing( + loop: asyncio.AbstractEventLoop, +) -> None: + session = mock.Mock() + trace_config_ctx = mock.Mock() + on_signal = mock.AsyncMock(side_effect=asyncio.CancelledError) + trace_config = aiohttp.TraceConfig( + trace_config_ctx_factory=mock.Mock(return_value=trace_config_ctx) + ) + trace_config.on_connection_reuseconn.append(on_signal) + trace_config.freeze() + traces = [Trace(session, trace_config, trace_config.trace_config_ctx())] + + proto = create_mocked_conn(loop) + proto.is_connected.return_value = True + + req = ClientRequest("GET", URL("http://host:80"), loop=loop) + key = req.connection_key + conn = aiohttp.BaseConnector() + assert not conn._acquired + assert key not in conn._acquired_per_host + + with pytest.raises(asyncio.CancelledError), mock.patch.object( + conn, "_create_connection", autospec=True, spec_set=True, return_value=proto + ): + resp = await conn.connect(req, traces, ClientTimeout()) + with mock.patch.object(resp.protocol, "should_close", False): + resp.release() + assert not conn._acquired + assert key not in conn._acquired_per_host + assert key in conn._conns + + await conn.connect(req, traces, ClientTimeout()) + + assert not conn._acquired + assert key not in conn._acquired_per_host + + +async def test_cancellation_during_waiting_for_free_connection( + loop: asyncio.AbstractEventLoop, +) -> None: + session = mock.Mock() + trace_config_ctx = mock.Mock() + waiter_wait_stated_future = loop.create_future() + + async def on_connection_queued_start(*args: object, **kwargs: object) -> None: + waiter_wait_stated_future.set_result(None) + + trace_config = aiohttp.TraceConfig( + trace_config_ctx_factory=mock.Mock(return_value=trace_config_ctx) + ) + trace_config.on_connection_queued_start.append(on_connection_queued_start) + trace_config.freeze() + traces = [Trace(session, trace_config, 
trace_config.trace_config_ctx())] + + proto = create_mocked_conn(loop) + proto.is_connected.return_value = True + + req = ClientRequest("GET", URL("http://host:80"), loop=loop) + key = req.connection_key + conn = aiohttp.BaseConnector(limit=1) + assert not conn._acquired + assert key not in conn._acquired_per_host + + with mock.patch.object( + conn, "_create_connection", autospec=True, spec_set=True, return_value=proto + ): + resp1 = await conn.connect(req, traces, ClientTimeout()) + assert resp1 + # 2nd connect request will be queued + task = asyncio.create_task(conn.connect(req, traces, ClientTimeout())) + await waiter_wait_stated_future + list(conn._waiters[key])[0].cancel() + with pytest.raises(asyncio.CancelledError): + await task + + resp1.close() + assert not conn._waiters + assert not conn._acquired + assert key not in conn._acquired_per_host + + +async def test_close_during_connect(loop: asyncio.AbstractEventLoop) -> None: + proto = create_mocked_conn(loop) proto.is_connected.return_value = True fut = loop.create_future() @@ -1528,7 +1705,8 @@ async def test_cleanup(key) -> None: conn._conns = testset existing_handle = conn._cleanup_handle = mock.Mock() - conn._cleanup() + with mock.patch("aiohttp.connector.monotonic", return_value=300): + conn._cleanup() assert existing_handle.cancel.called assert conn._conns == {} assert conn._cleanup_handle is None @@ -1540,12 +1718,15 @@ async def test_cleanup_close_ssl_transport(ssl_key) -> None: testset = {ssl_key: [(proto, 10)]} loop = mock.Mock() - loop.time.return_value = 300 - conn = aiohttp.BaseConnector(loop=loop, enable_cleanup_closed=True) + new_time = asyncio.get_event_loop().time() + 300 + loop.time.return_value = new_time + conn = aiohttp.BaseConnector(enable_cleanup_closed=True) + conn._loop = loop conn._conns = testset existing_handle = conn._cleanup_handle = mock.Mock() - conn._cleanup() + with mock.patch("aiohttp.connector.monotonic", return_value=new_time): + conn._cleanup() assert 
existing_handle.cancel.called assert conn._conns == {} assert conn._cleanup_closed_transports == [transport] @@ -1555,16 +1736,16 @@ async def test_cleanup2() -> None: testset = {1: [(mock.Mock(), 300)]} testset[1][0][0].is_connected.return_value = True - loop = mock.Mock() - loop.time.return_value = 300 - - conn = aiohttp.BaseConnector(loop=loop, keepalive_timeout=10) - conn._conns = testset - conn._cleanup() + conn = aiohttp.BaseConnector(keepalive_timeout=10) + conn._loop = mock.Mock() + conn._loop.time.return_value = 300 + with mock.patch("aiohttp.connector.monotonic", return_value=300): + conn._conns = testset + conn._cleanup() assert conn._conns == testset assert conn._cleanup_handle is not None - loop.call_at.assert_called_with(310, mock.ANY, mock.ANY) + conn._loop.call_at.assert_called_with(310, mock.ANY, mock.ANY) await conn.close() @@ -1578,7 +1759,9 @@ async def test_cleanup3(key) -> None: conn = aiohttp.BaseConnector(loop=loop, keepalive_timeout=10) conn._conns = testset - conn._cleanup() + with mock.patch("aiohttp.connector.monotonic", return_value=308.5): + conn._cleanup() + assert conn._conns == {key: [testset[key][1]]} assert conn._cleanup_handle is not None @@ -3079,23 +3262,16 @@ async def send_dns_cache_hit(self, *args: object, **kwargs: object) -> None: assert await connector._resolve_host("", 0, traces) == [token] -async def test_connector_does_not_remove_needed_waiters(loop, key) -> None: +async def test_connector_does_not_remove_needed_waiters( + loop: asyncio.AbstractEventLoop, key: ConnectionKey +) -> None: proto = create_mocked_conn(loop) proto.is_connected.return_value = True req = ClientRequest("GET", URL("https://localhost:80"), loop=loop) connection_key = req.connection_key - connector = aiohttp.BaseConnector() - connector._available_connections = mock.Mock(return_value=0) - connector._conns[key] = [(proto, loop.time())] - connector._create_connection = create_mocked_conn(loop) - connector._create_connection.return_value = 
loop.create_future() - connector._create_connection.return_value.set_result(proto) - - dummy_waiter = loop.create_future() - - async def await_connection_and_check_waiters(): + async def await_connection_and_check_waiters() -> None: connection = await connector.connect(req, [], ClientTimeout()) try: assert connection_key in connector._waiters @@ -3103,21 +3279,39 @@ async def await_connection_and_check_waiters(): finally: connection.close() - async def allow_connection_and_add_dummy_waiter(): + async def allow_connection_and_add_dummy_waiter() -> None: # `asyncio.gather` may execute coroutines not in order. # Skip one event loop run cycle in such a case. if connection_key not in connector._waiters: await asyncio.sleep(0) - connector._waiters[connection_key].popleft().set_result(None) + list(connector._waiters[connection_key])[0].set_result(None) del connector._waiters[connection_key] - connector._waiters[connection_key].append(dummy_waiter) + connector._waiters[connection_key][dummy_waiter] = None - await asyncio.gather( - await_connection_and_check_waiters(), - allow_connection_and_add_dummy_waiter(), - ) + connector = aiohttp.BaseConnector() + with mock.patch.object( + connector, + "_available_connections", + autospec=True, + spec_set=True, + side_effect=[0, 1, 1, 1], + ): + connector._conns[key] = [(proto, loop.time())] + with mock.patch.object( + connector, + "_create_connection", + autospec=True, + spec_set=True, + return_value=proto, + ): + dummy_waiter = loop.create_future() + + await asyncio.gather( + await_connection_and_check_waiters(), + allow_connection_and_add_dummy_waiter(), + ) - await connector.close() + await connector.close() def test_connector_multiple_event_loop() -> None: @@ -3160,7 +3354,7 @@ def test_default_ssl_context_creation_without_ssl() -> None: def _acquired_connection( - conn: aiohttp.BaseConnector, proto: ResponseHandler, key: "ConnectionKey" + conn: aiohttp.BaseConnector, proto: ResponseHandler, key: ConnectionKey ) -> Connection: 
"""Mark proto as acquired and wrap it in a Connection object.""" conn._acquired.add(proto) From ed81c03d1cb921e73cf8abef76e5443942589fff Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 5 Nov 2024 19:58:13 +0000 Subject: [PATCH 0831/1511] [PR #9678/7d2afcf4 backport][3.10] Remove dead code from ``WebSocketResponse.receive`` (#9681) Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/web_ws.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index bf35f3bb1f6..d6fab5f0d16 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -490,8 +490,6 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: if self._reader is None: raise RuntimeError("Call .prepare() first") - loop = self._loop - assert loop is not None receive_timeout = timeout or self._receive_timeout while True: if self._waiting: From f769cb3c81273fe8781df5b4858973bf164ef2c3 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 5 Nov 2024 20:06:57 +0000 Subject: [PATCH 0832/1511] [PR #9678/7d2afcf4 backport][3.11] Remove dead code from ``WebSocketResponse.receive`` (#9682) Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/web_ws.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index 66cd269420d..8be3d05cce7 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -509,8 +509,6 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: if self._reader is None: raise RuntimeError("Call .prepare() first") - loop = self._loop - assert loop is not None receive_timeout = timeout or self._receive_timeout while True: if self._waiting: From 130ca4d0c39c38f3028649938d137e81eea9db0c Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Tue, 5 Nov 2024 14:24:26 -0600 Subject: [PATCH 0833/1511] [PR #9679/3f2f4a7 backport][3.11] Return early in WebSocket `receive` if there is no processing to do (#9683) --- CHANGES/9679.misc.rst | 1 + aiohttp/client_ws.py | 7 ++++++- aiohttp/http_websocket.py | 6 ++++++ aiohttp/web_ws.py | 6 ++++++ 4 files changed, 19 insertions(+), 1 deletion(-) create mode 100644 CHANGES/9679.misc.rst diff --git a/CHANGES/9679.misc.rst b/CHANGES/9679.misc.rst new file mode 100644 index 00000000000..55969f6c662 --- /dev/null +++ b/CHANGES/9679.misc.rst @@ -0,0 +1 @@ +Improved performance of calling ``receive`` for WebSockets for the most common message types -- by :user:`bdraco`. diff --git a/aiohttp/client_ws.py b/aiohttp/client_ws.py index cd79dd01f91..1ed4c8ad039 100644 --- a/aiohttp/client_ws.py +++ b/aiohttp/client_ws.py @@ -18,7 +18,7 @@ WSMessage, WSMsgType, ) -from .http_websocket import WebSocketWriter # WSMessage +from .http_websocket import _INTERNAL_RECEIVE_TYPES, WebSocketWriter from .streams import EofStream, FlowControlDataQueue from .typedefs import ( DEFAULT_JSON_DECODER, @@ -359,6 +359,11 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: await self.close() return WSMessage(WSMsgType.ERROR, exc, None) + if msg.type not in _INTERNAL_RECEIVE_TYPES: + # If its not a close/closing/ping/pong message + # we can return it immediately + return msg + if msg.type is WSMsgType.CLOSE: self._set_closing() self._close_code = msg.data diff --git a/aiohttp/http_websocket.py b/aiohttp/http_websocket.py index 92b5f67f0c0..6b4b30e02b2 100644 --- a/aiohttp/http_websocket.py +++ b/aiohttp/http_websocket.py @@ -13,6 +13,12 @@ from ._websocket.reader import WebSocketReader from ._websocket.writer import WebSocketWriter +# Messages that the WebSocketResponse.receive needs to handle internally +_INTERNAL_RECEIVE_TYPES = frozenset( + (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.PING, WSMsgType.PONG) +) + + __all__ = ( 
"WS_CLOSED_MESSAGE", "WS_CLOSING_MESSAGE", diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index 8be3d05cce7..932cd0db0b0 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -27,6 +27,7 @@ ws_ext_gen, ws_ext_parse, ) +from .http_websocket import _INTERNAL_RECEIVE_TYPES from .log import ws_logger from .streams import EofStream, FlowControlDataQueue from .typedefs import JSONDecoder, JSONEncoder @@ -555,6 +556,11 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: await self.close() return WSMessage(WSMsgType.ERROR, exc, None) + if msg.type not in _INTERNAL_RECEIVE_TYPES: + # If its not a close/closing/ping/pong message + # we can return it immediately + return msg + if msg.type is WSMsgType.CLOSE: self._set_closing(msg.data) # Could be closed while awaiting reader. From 79437cd949ca97c4145d36b413a69fddfad4cd8f Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 5 Nov 2024 15:57:06 -0600 Subject: [PATCH 0834/1511] [PR #9672/afb5ebb backport][3.11] Reuse the oldest keep-alive connection first (#9680) --- CHANGES/9672.bugfix.rst | 3 ++ aiohttp/connector.py | 25 +++++----- tests/test_client_session.py | 3 +- tests/test_connector.py | 91 +++++++++++++++++++++--------------- 4 files changed, 71 insertions(+), 51 deletions(-) create mode 100644 CHANGES/9672.bugfix.rst diff --git a/CHANGES/9672.bugfix.rst b/CHANGES/9672.bugfix.rst new file mode 100644 index 00000000000..110e397e647 --- /dev/null +++ b/CHANGES/9672.bugfix.rst @@ -0,0 +1,3 @@ +Fixed the keep-alive connection pool to be FIFO instead of LIFO -- by :user:`bdraco`. + +Keep-alive connections are more likely to be reused before they disconnect. 
diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 3307cb42ab4..03147cb3aaf 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -5,7 +5,7 @@ import sys import traceback import warnings -from collections import OrderedDict, defaultdict +from collections import OrderedDict, defaultdict, deque from contextlib import suppress from http import HTTPStatus from itertools import chain, cycle, islice @@ -17,6 +17,7 @@ Awaitable, Callable, DefaultDict, + Deque, Dict, Iterator, List, @@ -255,7 +256,12 @@ def __init__( if loop.get_debug(): self._source_traceback = traceback.extract_stack(sys._getframe(1)) - self._conns: Dict[ConnectionKey, List[Tuple[ResponseHandler, float]]] = {} + # Connection pool of reusable connections. + # We use a deque to store connections because it has O(1) popleft() + # and O(1) append() operations to implement a FIFO queue. + self._conns: DefaultDict[ + ConnectionKey, Deque[Tuple[ResponseHandler, float]] + ] = defaultdict(deque) self._limit = limit self._limit_per_host = limit_per_host self._acquired: Set[ResponseHandler] = set() @@ -362,10 +368,10 @@ def _cleanup(self) -> None: timeout = self._keepalive_timeout if self._conns: - connections = {} + connections = defaultdict(deque) deadline = now - timeout for key, conns in self._conns.items(): - alive: List[Tuple[ResponseHandler, float]] = [] + alive: Deque[Tuple[ResponseHandler, float]] = deque() for proto, use_time in conns: if proto.is_connected() and use_time - deadline >= 0: alive.append((proto, use_time)) @@ -590,14 +596,12 @@ async def _get( The connection will be marked as acquired. 
""" - try: - conns = self._conns[key] - except KeyError: + if (conns := self._conns.get(key)) is None: return None t1 = monotonic() while conns: - proto, t0 = conns.pop() + proto, t0 = conns.popleft() # We will we reuse the connection if its connected and # the keepalive timeout has not been exceeded if proto.is_connected() and t1 - t0 <= self._keepalive_timeout: @@ -689,10 +693,7 @@ def _release( self._cleanup_closed_transports.append(transport) return - conns = self._conns.get(key) - if conns is None: - conns = self._conns[key] = [] - conns.append((protocol, monotonic())) + self._conns[key].append((protocol, monotonic())) if self._cleanup_handle is None: self._cleanup_handle = helpers.weakref_handle( diff --git a/tests/test_client_session.py b/tests/test_client_session.py index 3b442020898..783c9b11cc3 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -3,6 +3,7 @@ import gc import io import json +from collections import deque from http.cookies import SimpleCookie from typing import Any, Awaitable, Callable, List from unittest import mock @@ -32,7 +33,7 @@ async def make_conn(): conn = loop.run_until_complete(make_conn()) proto = mock.Mock() - conn._conns["a"] = [(proto, 123)] + conn._conns["a"] = deque([(proto, 123)]) yield conn loop.run_until_complete(conn.close()) diff --git a/tests/test_connector.py b/tests/test_connector.py index f542adfebfc..fe0d52eb09c 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -7,9 +7,10 @@ import ssl import sys import uuid +from collections import defaultdict, deque from concurrent import futures from contextlib import closing, suppress -from typing import Any, List, Literal, Optional, Sequence, Tuple +from typing import Any, DefaultDict, Deque, List, Literal, Optional, Sequence, Tuple from unittest import mock import pytest @@ -194,7 +195,7 @@ async def test_del_with_scheduled_cleanup(loop) -> None: loop.set_debug(True) conn = aiohttp.BaseConnector(loop=loop, 
keepalive_timeout=0.01) transp = mock.Mock() - conn._conns["a"] = [(transp, 123)] + conn._conns["a"] = deque([(transp, 123)]) conns_impl = conn._conns exc_handler = mock.Mock() @@ -224,7 +225,7 @@ async def make_conn(): conn = loop.run_until_complete(make_conn()) transp = mock.Mock() - conn._conns["a"] = [(transp, 123)] + conn._conns["a"] = deque([(transp, 123)]) conns_impl = conn._conns exc_handler = mock.Mock() @@ -283,7 +284,7 @@ async def test_close(loop) -> None: conn = aiohttp.BaseConnector(loop=loop) assert not conn.closed - conn._conns[("host", 8080, False)] = [(proto, object())] + conn._conns[("host", 8080, False)] = deque([(proto, object())]) await conn.close() assert not conn._conns @@ -296,7 +297,7 @@ async def test_get(loop: asyncio.AbstractEventLoop, key: ConnectionKey) -> None: assert await conn._get(key, []) is None proto = create_mocked_conn(loop) - conn._conns[key] = [(proto, loop.time())] + conn._conns[key] = deque([(proto, loop.time())]) connection = await conn._get(key, []) assert connection is not None assert connection.protocol == proto @@ -310,14 +311,14 @@ async def test_get_unconnected_proto(loop) -> None: assert await conn._get(key, []) is None proto = create_mocked_conn(loop) - conn._conns[key] = [(proto, loop.time())] + conn._conns[key] = deque([(proto, loop.time())]) connection = await conn._get(key, []) assert connection is not None assert connection.protocol == proto connection.close() assert await conn._get(key, []) is None - conn._conns[key] = [(proto, loop.time())] + conn._conns[key] = deque([(proto, loop.time())]) proto.is_connected = lambda *args: False assert await conn._get(key, []) is None await conn.close() @@ -329,14 +330,14 @@ async def test_get_unconnected_proto_ssl(loop) -> None: assert await conn._get(key, []) is None proto = create_mocked_conn(loop) - conn._conns[key] = [(proto, loop.time())] + conn._conns[key] = deque([(proto, loop.time())]) connection = await conn._get(key, []) assert connection is not None assert 
connection.protocol == proto connection.close() assert await conn._get(key, []) is None - conn._conns[key] = [(proto, loop.time())] + conn._conns[key] = deque([(proto, loop.time())]) proto.is_connected = lambda *args: False assert await conn._get(key, []) is None await conn.close() @@ -348,7 +349,7 @@ async def test_get_expired(loop: asyncio.AbstractEventLoop) -> None: assert await conn._get(key, []) is None proto = mock.Mock() - conn._conns[key] = [(proto, loop.time() - 1000)] + conn._conns[key] = deque([(proto, loop.time() - 1000)]) assert await conn._get(key, []) is None assert not conn._conns await conn.close() @@ -361,7 +362,7 @@ async def test_get_expired_ssl(loop: asyncio.AbstractEventLoop) -> None: proto = mock.Mock() transport = proto.transport - conn._conns[key] = [(proto, loop.time() - 1000)] + conn._conns[key] = deque([(proto, loop.time() - 1000)]) assert await conn._get(key, []) is None assert not conn._conns assert conn._cleanup_closed_transports == [transport] @@ -1411,12 +1412,12 @@ async def test_release_close_do_not_delete_existing_connections(key) -> None: proto1 = mock.Mock() conn = aiohttp.BaseConnector() - conn._conns[key] = [(proto1, 1)] + conn._conns[key] = deque([(proto1, 1)]) proto = mock.Mock(should_close=True) conn._acquired.add(proto) conn._release(key, proto) - assert conn._conns[key] == [(proto1, 1)] + assert conn._conns[key] == deque([(proto1, 1)]) assert proto.close.called await conn.close() @@ -1451,7 +1452,7 @@ async def test_connect(loop, key) -> None: req = ClientRequest("GET", URL("http://localhost:80"), loop=loop) conn = aiohttp.BaseConnector(loop=loop) - conn._conns[key] = [(proto, loop.time())] + conn._conns[key] = deque([(proto, loop.time())]) conn._create_connection = mock.Mock() conn._create_connection.return_value = loop.create_future() conn._create_connection.return_value.set_result(proto) @@ -1687,12 +1688,15 @@ async def test_ctor_cleanup() -> None: assert conn._cleanup_closed_handle is not None -async def 
test_cleanup(key) -> None: - testset = { - key: [(mock.Mock(), 10), (mock.Mock(), 300)], - } - testset[key][0][0].is_connected.return_value = True - testset[key][1][0].is_connected.return_value = False +async def test_cleanup(key: ConnectionKey) -> None: + m1 = mock.Mock() + m2 = mock.Mock() + m1.is_connected.return_value = True + m2.is_connected.return_value = False + testset: DefaultDict[ConnectionKey, Deque[Tuple[ResponseHandler, float]]] = ( + defaultdict(deque) + ) + testset[key] = deque([(m1, 10), (m2, 300)]) loop = mock.Mock() loop.time.return_value = 300 @@ -1710,7 +1714,10 @@ async def test_cleanup(key) -> None: async def test_cleanup_close_ssl_transport(ssl_key) -> None: proto = mock.Mock() transport = proto.transport - testset = {ssl_key: [(proto, 10)]} + testset: DefaultDict[ConnectionKey, Deque[Tuple[ResponseHandler, float]]] = ( + defaultdict(deque) + ) + testset[ssl_key] = deque([(proto, 10)]) loop = mock.Mock() new_time = asyncio.get_event_loop().time() + 300 @@ -1727,9 +1734,13 @@ async def test_cleanup_close_ssl_transport(ssl_key) -> None: assert conn._cleanup_closed_transports == [transport] -async def test_cleanup2() -> None: - testset = {1: [(mock.Mock(), 300)]} - testset[1][0][0].is_connected.return_value = True +async def test_cleanup2(loop: asyncio.AbstractEventLoop, key: ConnectionKey) -> None: + m = create_mocked_conn() + m.is_connected.return_value = True + testset: DefaultDict[ConnectionKey, Deque[Tuple[ResponseHandler, float]]] = ( + defaultdict(deque) + ) + testset[key] = deque([(m, 300)]) conn = aiohttp.BaseConnector(keepalive_timeout=10) conn._loop = mock.Mock() @@ -1744,9 +1755,13 @@ async def test_cleanup2() -> None: await conn.close() -async def test_cleanup3(key) -> None: - testset = {key: [(mock.Mock(), 290.1), (mock.Mock(), 305.1)]} - testset[key][0][0].is_connected.return_value = True +async def test_cleanup3(loop: asyncio.AbstractEventLoop, key: ConnectionKey) -> None: + m = create_mocked_conn(loop) + 
m.is_connected.return_value = True + testset: DefaultDict[ConnectionKey, Deque[Tuple[ResponseHandler, float]]] = ( + defaultdict(deque) + ) + testset[key] = deque([(m, 290.1), (create_mocked_conn(loop), 305.1)]) loop = mock.Mock() loop.time.return_value = 308.5 @@ -1757,7 +1772,7 @@ async def test_cleanup3(key) -> None: with mock.patch("aiohttp.connector.monotonic", return_value=308.5): conn._cleanup() - assert conn._conns == {key: [testset[key][1]]} + assert conn._conns == {key: deque([testset[key][1]])} assert conn._cleanup_handle is not None loop.call_at.assert_called_with(319, mock.ANY, mock.ANY) @@ -1927,7 +1942,7 @@ async def test_close_twice(loop) -> None: proto = mock.Mock() conn = aiohttp.BaseConnector(loop=loop) - conn._conns[1] = [(proto, object())] + conn._conns[1] = deque([(proto, object())]) await conn.close() assert not conn._conns @@ -2324,7 +2339,7 @@ async def test_connect_with_limit( ) conn = aiohttp.BaseConnector(loop=loop, limit=1) - conn._conns[key] = [(proto, loop.time())] + conn._conns[key] = deque([(proto, loop.time())]) conn._create_connection = mock.Mock() conn._create_connection.return_value = loop.create_future() conn._create_connection.return_value.set_result(proto) @@ -2380,7 +2395,7 @@ async def test_connect_queued_operation_tracing(loop, key) -> None: ) conn = aiohttp.BaseConnector(loop=loop, limit=1) - conn._conns[key] = [(proto, loop.time())] + conn._conns[key] = deque([(proto, loop.time())]) conn._create_connection = mock.Mock() conn._create_connection.return_value = loop.create_future() conn._create_connection.return_value.set_result(proto) @@ -2424,7 +2439,7 @@ async def test_connect_reuseconn_tracing(loop, key) -> None: ) conn = aiohttp.BaseConnector(loop=loop, limit=1) - conn._conns[key] = [(proto, loop.time())] + conn._conns[key] = deque([(proto, loop.time())]) conn2 = await conn.connect(req, traces, ClientTimeout()) conn2.release() @@ -2441,7 +2456,7 @@ async def test_connect_with_limit_and_limit_per_host(loop, key) -> 
None: req = ClientRequest("GET", URL("http://localhost:80"), loop=loop) conn = aiohttp.BaseConnector(loop=loop, limit=1000, limit_per_host=1) - conn._conns[key] = [(proto, loop.time())] + conn._conns[key] = deque([(proto, loop.time())]) conn._create_connection = mock.Mock() conn._create_connection.return_value = loop.create_future() conn._create_connection.return_value.set_result(proto) @@ -2475,7 +2490,7 @@ async def test_connect_with_no_limit_and_limit_per_host(loop, key) -> None: req = ClientRequest("GET", URL("http://localhost1:80"), loop=loop) conn = aiohttp.BaseConnector(loop=loop, limit=0, limit_per_host=1) - conn._conns[key] = [(proto, loop.time())] + conn._conns[key] = deque([(proto, loop.time())]) conn._create_connection = mock.Mock() conn._create_connection.return_value = loop.create_future() conn._create_connection.return_value.set_result(proto) @@ -2507,7 +2522,7 @@ async def test_connect_with_no_limits(loop, key) -> None: req = ClientRequest("GET", URL("http://localhost:80"), loop=loop) conn = aiohttp.BaseConnector(loop=loop, limit=0, limit_per_host=0) - conn._conns[key] = [(proto, loop.time())] + conn._conns[key] = deque([(proto, loop.time())]) conn._create_connection = mock.Mock() conn._create_connection.return_value = loop.create_future() conn._create_connection.return_value.set_result(proto) @@ -2541,7 +2556,7 @@ async def test_connect_with_limit_cancelled(loop) -> None: conn = aiohttp.BaseConnector(loop=loop, limit=1) key = ("host", 80, False) - conn._conns[key] = [(proto, loop.time())] + conn._conns[key] = deque([(proto, loop.time())]) conn._create_connection = mock.Mock() conn._create_connection.return_value = loop.create_future() conn._create_connection.return_value.set_result(proto) @@ -2687,7 +2702,7 @@ async def test_close_with_acquired_connection(loop) -> None: conn = aiohttp.BaseConnector(loop=loop, limit=1) key = ("host", 80, False) - conn._conns[key] = [(proto, loop.time())] + conn._conns[key] = deque([(proto, loop.time())]) 
conn._create_connection = mock.Mock() conn._create_connection.return_value = loop.create_future() conn._create_connection.return_value.set_result(proto) @@ -3291,7 +3306,7 @@ async def allow_connection_and_add_dummy_waiter() -> None: spec_set=True, side_effect=[0, 1, 1, 1], ): - connector._conns[key] = [(proto, loop.time())] + connector._conns[key] = deque([(proto, loop.time())]) with mock.patch.object( connector, "_create_connection", From 49f65e6ccbacc39127072bc0fa35c2efc965096e Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 5 Nov 2024 16:21:39 -0600 Subject: [PATCH 0835/1511] Release 3.11.0b3 (#9684) --- CHANGES.rst | 46 ++++++++++++++++++++++++++++++++++++++++++++- aiohttp/__init__.py | 2 +- 2 files changed, 46 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 01e67e6c317..2763c4b086a 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,7 +10,7 @@ .. towncrier release notes start -3.11.0b2 (2024-11-03) +3.11.0b3 (2024-11-05) ===================== Bug fixes @@ -24,6 +24,14 @@ Bug fixes +- Modified websocket :meth:`aiohttp.ClientWebSocketResponse.receive_str`, :py:meth:`aiohttp.ClientWebSocketResponse.receive_bytes`, :py:meth:`aiohttp.web.WebSocketResponse.receive_str` & :py:meth:`aiohttp.web.WebSocketResponse.receive_bytes` methods to raise new :py:exc:`aiohttp.WSMessageTypeError` exception, instead of generic :py:exc:`TypeError`, when websocket messages of incorrect types are received -- by :user:`ara-25`. + + + *Related issues and pull requests on GitHub:* + :issue:`6800`. + + + - Made ``TestClient.app`` a ``Generic`` so type checkers will know the correct type (avoiding unneeded ``client.app is not None`` checks) -- by :user:`Dreamsorcerer`. @@ -40,6 +48,26 @@ Bug fixes +- Fixed a deadlock that could occur while attempting to get a new connection slot after a timeout -- by :user:`bdraco`. + + The connector was not cancellation-safe. 
+ + + *Related issues and pull requests on GitHub:* + :issue:`9670`, :issue:`9671`. + + + +- Fixed the keep-alive connection pool to be FIFO instead of LIFO -- by :user:`bdraco`. + + Keep-alive connections are more likely to be reused before they disconnect. + + + *Related issues and pull requests on GitHub:* + :issue:`9672`. + + + Features -------- @@ -309,6 +337,22 @@ Miscellaneous internal changes +- Improved performance of the internal ``DataQueue`` -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9659`. + + + +- Improved performance of calling ``receive`` for WebSockets for the most common message types -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9679`. + + + ---- diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index a26afd4f8c6..f7b55d52ca6 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.0b2" +__version__ = "3.11.0b3" from typing import TYPE_CHECKING, Tuple From 74d9355b6cf9581361668ffd2d38a01b53b58297 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Wed, 6 Nov 2024 14:55:17 -0600 Subject: [PATCH 0836/1511] [PR #9508/274c54e backport][3.11] Discard non-close messages in close timeout window (#9688) Co-authored-by: pre-commit-ci[bot] Co-authored-by: J. 
Nick Koston <nick@koston.org> Co-authored-by: lenard-mosys <lenard@mo-sys.com> --- CHANGES/9506.bugfix.rst | 1 + CONTRIBUTORS.txt | 1 + aiohttp/web_ws.py | 14 ++--- tests/test_web_websocket_functional.py | 73 +++++++++++++++++++++++++- 4 files changed, 79 insertions(+), 10 deletions(-) create mode 100644 CHANGES/9506.bugfix.rst diff --git a/CHANGES/9506.bugfix.rst b/CHANGES/9506.bugfix.rst new file mode 100644 index 00000000000..05ad6a59375 --- /dev/null +++ b/CHANGES/9506.bugfix.rst @@ -0,0 +1 @@ +Fixed :py:meth:`WebSocketResponse.close() <aiohttp.web.WebSocketResponse.close>` to discard non-close messages within its timeout window after sending close -- by :user:`lenard-mosys`. diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 60ff22d14d2..6adb3b97fb1 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -219,6 +219,7 @@ Lubomir Gelo Ludovic Gasc Luis Pedrosa Lukasz Marcin Dobrzanski +Lénárd Szolnoki Makc Belousow Manuel Miranda Marat Sharafutdinov diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index 932cd0db0b0..bf93e1885e3 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -473,7 +473,11 @@ async def close( try: async with async_timeout.timeout(self._timeout): - msg = await reader.read() + while True: + msg = await reader.read() + if msg.type is WSMsgType.CLOSE: + self._set_code_close_transport(msg.data) + return True except asyncio.CancelledError: self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) raise @@ -482,14 +486,6 @@ async def close( self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) return True - if msg.type is WSMsgType.CLOSE: - self._set_code_close_transport(msg.data) - return True - - self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) - self._exception = asyncio.TimeoutError() - return True - def _set_closing(self, code: WSCloseCode) -> None: """Set the close code and mark the connection as closing.""" self._closing = True diff --git a/tests/test_web_websocket_functional.py 
b/tests/test_web_websocket_functional.py index 188694c8dce..ebd94607f24 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -1192,7 +1192,11 @@ async def test_abnormal_closure_when_server_does_not_receive( """Test abnormal closure when the server closes and a message is pending.""" async def handler(request: web.Request) -> web.WebSocketResponse: - ws = web.WebSocketResponse() + # Setting close timeout to 0, otherwise the server waits for a + # close response for 10 seconds by default. + # This would make the client's autoclose in resp.receive() to succeed, + # closing the connection cleanly from both sides. + ws = web.WebSocketResponse(timeout=0) await ws.prepare(request) await ws.close() return ws @@ -1206,3 +1210,70 @@ async def handler(request: web.Request) -> web.WebSocketResponse: msg = await resp.receive() assert msg.type is aiohttp.WSMsgType.CLOSE assert resp.close_code == WSCloseCode.ABNORMAL_CLOSURE + + +async def test_abnormal_closure_when_client_does_not_close( + aiohttp_client: AiohttpClient, +) -> None: + """Test abnormal closure when the server closes and the client doesn't respond.""" + close_code: Optional[WSCloseCode] = None + + async def handler(request: web.Request) -> web.WebSocketResponse: + # Setting a short close timeout + ws = web.WebSocketResponse(timeout=0.1) + await ws.prepare(request) + await ws.close() + + nonlocal close_code + assert ws.close_code is not None + close_code = WSCloseCode(ws.close_code) + + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + async with client.ws_connect("/", autoclose=False): + await asyncio.sleep(0.2) + await client.server.close() + assert close_code == WSCloseCode.ABNORMAL_CLOSURE + + +async def test_normal_closure_while_client_sends_msg( + aiohttp_client: AiohttpClient, +) -> None: + """Test abnormal closure when the server closes and the client doesn't respond.""" + close_code: 
Optional[WSCloseCode] = None + got_close_code = asyncio.Event() + + async def handler(request: web.Request) -> web.WebSocketResponse: + # Setting a short close timeout + ws = web.WebSocketResponse(timeout=0.2) + await ws.prepare(request) + await ws.close() + + nonlocal close_code + assert ws.close_code is not None + close_code = WSCloseCode(ws.close_code) + got_close_code.set() + + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + async with client.ws_connect("/", autoclose=False) as ws: + # send text and close message during server close timeout + await asyncio.sleep(0.1) + await ws.send_str("Hello") + await ws.close() + # wait for close code to be received by server + await asyncio.wait( + [ + asyncio.create_task(asyncio.sleep(0.5)), + asyncio.create_task(got_close_code.wait()), + ], + return_when=asyncio.FIRST_COMPLETED, + ) + await client.server.close() + assert close_code == WSCloseCode.OK From b1dda8709de19b94e745dc780f41e4c5e5122d91 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Wed, 6 Nov 2024 14:55:27 -0600 Subject: [PATCH 0837/1511] [PR #9508/274c54e backport][3.10] Discard non-close messages in close timeout window (#9687) Co-authored-by: pre-commit-ci[bot] Co-authored-by: J. Nick Koston <nick@koston.org> Co-authored-by: lenard-mosys <lenard@mo-sys.com> --- CHANGES/9506.bugfix.rst | 1 + CONTRIBUTORS.txt | 1 + aiohttp/web_ws.py | 14 ++--- tests/test_web_websocket_functional.py | 73 +++++++++++++++++++++++++- 4 files changed, 79 insertions(+), 10 deletions(-) create mode 100644 CHANGES/9506.bugfix.rst diff --git a/CHANGES/9506.bugfix.rst b/CHANGES/9506.bugfix.rst new file mode 100644 index 00000000000..05ad6a59375 --- /dev/null +++ b/CHANGES/9506.bugfix.rst @@ -0,0 +1 @@ +Fixed :py:meth:`WebSocketResponse.close() <aiohttp.web.WebSocketResponse.close>` to discard non-close messages within its timeout window after sending close -- by :user:`lenard-mosys`. 
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 3fb6686c322..9c0978c79a2 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -216,6 +216,7 @@ Lubomir Gelo Ludovic Gasc Luis Pedrosa Lukasz Marcin Dobrzanski +Lénárd Szolnoki Makc Belousow Manuel Miranda Marat Sharafutdinov diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index d6fab5f0d16..0e8adef6f8d 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -453,7 +453,11 @@ async def close( try: async with async_timeout.timeout(self._timeout): - msg = await reader.read() + while True: + msg = await reader.read() + if msg.type is WSMsgType.CLOSE: + self._set_code_close_transport(msg.data) + return True except asyncio.CancelledError: self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) raise @@ -462,14 +466,6 @@ async def close( self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) return True - if msg.type is WSMsgType.CLOSE: - self._set_code_close_transport(msg.data) - return True - - self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) - self._exception = asyncio.TimeoutError() - return True - def _set_closing(self, code: WSCloseCode) -> None: """Set the close code and mark the connection as closing.""" self._closing = True diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index 42faff8e517..64604c076c8 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -1147,7 +1147,11 @@ async def test_abnormal_closure_when_server_does_not_receive( """Test abnormal closure when the server closes and a message is pending.""" async def handler(request: web.Request) -> web.WebSocketResponse: - ws = web.WebSocketResponse() + # Setting close timeout to 0, otherwise the server waits for a + # close response for 10 seconds by default. + # This would make the client's autoclose in resp.receive() to succeed, + # closing the connection cleanly from both sides. 
+ ws = web.WebSocketResponse(timeout=0) await ws.prepare(request) await ws.close() return ws @@ -1161,3 +1165,70 @@ async def handler(request: web.Request) -> web.WebSocketResponse: msg = await resp.receive() assert msg.type is aiohttp.WSMsgType.CLOSE assert resp.close_code == WSCloseCode.ABNORMAL_CLOSURE + + +async def test_abnormal_closure_when_client_does_not_close( + aiohttp_client: AiohttpClient, +) -> None: + """Test abnormal closure when the server closes and the client doesn't respond.""" + close_code: Optional[WSCloseCode] = None + + async def handler(request: web.Request) -> web.WebSocketResponse: + # Setting a short close timeout + ws = web.WebSocketResponse(timeout=0.1) + await ws.prepare(request) + await ws.close() + + nonlocal close_code + assert ws.close_code is not None + close_code = WSCloseCode(ws.close_code) + + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + async with client.ws_connect("/", autoclose=False): + await asyncio.sleep(0.2) + await client.server.close() + assert close_code == WSCloseCode.ABNORMAL_CLOSURE + + +async def test_normal_closure_while_client_sends_msg( + aiohttp_client: AiohttpClient, +) -> None: + """Test abnormal closure when the server closes and the client doesn't respond.""" + close_code: Optional[WSCloseCode] = None + got_close_code = asyncio.Event() + + async def handler(request: web.Request) -> web.WebSocketResponse: + # Setting a short close timeout + ws = web.WebSocketResponse(timeout=0.2) + await ws.prepare(request) + await ws.close() + + nonlocal close_code + assert ws.close_code is not None + close_code = WSCloseCode(ws.close_code) + got_close_code.set() + + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + async with client.ws_connect("/", autoclose=False) as ws: + # send text and close message during server close timeout + await asyncio.sleep(0.1) + await 
ws.send_str("Hello") + await ws.close() + # wait for close code to be received by server + await asyncio.wait( + [ + asyncio.create_task(asyncio.sleep(0.5)), + asyncio.create_task(got_close_code.wait()), + ], + return_when=asyncio.FIRST_COMPLETED, + ) + await client.server.close() + assert close_code == WSCloseCode.OK From 686b49e98d49bee3ee49627443a7ca4b527e6fbf Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 7 Nov 2024 03:56:29 +0000 Subject: [PATCH 0838/1511] [PR #9689/753460da backport][3.10] Add benchmark for sending client requests (#9690) Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_benchmarks_client_request.py | 44 +++++++++++++++++++++++++ 1 file changed, 44 insertions(+) diff --git a/tests/test_benchmarks_client_request.py b/tests/test_benchmarks_client_request.py index cf03dffdfdc..3f132d04d14 100644 --- a/tests/test_benchmarks_client_request.py +++ b/tests/test_benchmarks_client_request.py @@ -2,6 +2,7 @@ import asyncio from http.cookies import Morsel +from typing import Union from pytest_codspeed import BenchmarkFixture # type: ignore[import-untyped] from yarl import URL @@ -65,3 +66,46 @@ def _run() -> None: chunked=None, expect100=False, ) + + +def test_send_client_request_one_hundred( + loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture +) -> None: + url = URL("http://python.org") + req = ClientRequest("get", url, loop=loop) + + class MockTransport(asyncio.Transport): + """Mock transport for testing that do no real I/O.""" + + def is_closing(self) -> bool: + """Swallow is_closing.""" + return False + + def write(self, data: Union[bytes, bytearray, memoryview]) -> None: + """Swallow writes.""" + + class MockProtocol(asyncio.BaseProtocol): + + def __init__(self) -> None: + self.transport = MockTransport() + + async def _drain_helper(self) -> None: + """Swallow drain.""" + + def start_timeout(self) -> None: + """Swallow start_timeout.""" + + class MockConnection: 
+ def __init__(self) -> None: + self.transport = None + self.protocol = MockProtocol() + + conn = MockConnection() + + async def send_requests() -> None: + for _ in range(100): + await req.send(conn) # type: ignore[arg-type] + + @benchmark + def _run() -> None: + loop.run_until_complete(send_requests()) From f3a5ea20908854373811724f530e456dd1c7829b Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 7 Nov 2024 04:04:38 +0000 Subject: [PATCH 0839/1511] [PR #9689/753460da backport][3.11] Add benchmark for sending client requests (#9691) Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_benchmarks_client_request.py | 44 +++++++++++++++++++++++++ 1 file changed, 44 insertions(+) diff --git a/tests/test_benchmarks_client_request.py b/tests/test_benchmarks_client_request.py index de019149076..17d4a6cb8c4 100644 --- a/tests/test_benchmarks_client_request.py +++ b/tests/test_benchmarks_client_request.py @@ -2,6 +2,7 @@ import asyncio from http.cookies import Morsel +from typing import Union from pytest_codspeed import BenchmarkFixture from yarl import URL @@ -65,3 +66,46 @@ def _run() -> None: chunked=None, expect100=False, ) + + +def test_send_client_request_one_hundred( + loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture +) -> None: + url = URL("http://python.org") + req = ClientRequest("get", url, loop=loop) + + class MockTransport(asyncio.Transport): + """Mock transport for testing that do no real I/O.""" + + def is_closing(self) -> bool: + """Swallow is_closing.""" + return False + + def write(self, data: Union[bytes, bytearray, memoryview]) -> None: + """Swallow writes.""" + + class MockProtocol(asyncio.BaseProtocol): + + def __init__(self) -> None: + self.transport = MockTransport() + + async def _drain_helper(self) -> None: + """Swallow drain.""" + + def start_timeout(self) -> None: + """Swallow start_timeout.""" + + class MockConnection: + def __init__(self) -> None: + 
self.transport = None + self.protocol = MockProtocol() + + conn = MockConnection() + + async def send_requests() -> None: + for _ in range(100): + await req.send(conn) # type: ignore[arg-type] + + @benchmark + def _run() -> None: + loop.run_until_complete(send_requests()) From eb7811c7c05f7ddf7cc647414c60aa7157b2e480 Mon Sep 17 00:00:00 2001 From: Andrew Svetlov <andrew.svetlov@gmail.com> Date: Thu, 7 Nov 2024 16:32:45 +0100 Subject: [PATCH 0840/1511] Relax async-timeout version to >=4.0, <6.0 (#9695) (#9696) `async-timeout` 5.0+ is backward-compatible with 4.0; so it works with aiohttp well. While `aiohttp` itself uses the standard `asyncio.Timeout` on Python 3.11+, dependent project could require the latest async-timeout version on Python 3.9-3.10. --- requirements/runtime-deps.in | 2 +- setup.cfg | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/runtime-deps.in b/requirements/runtime-deps.in index 9a199453d55..2c1249c5717 100644 --- a/requirements/runtime-deps.in +++ b/requirements/runtime-deps.in @@ -3,7 +3,7 @@ aiodns >= 3.2.0; sys_platform=="linux" or sys_platform=="darwin" aiohappyeyeballs >= 2.3.0 aiosignal >= 1.1.2 -async-timeout >= 4.0, < 5.0 ; python_version < "3.11" +async-timeout >= 4.0, < 6.0 ; python_version < "3.11" attrs >= 17.3.0 Brotli; platform_python_implementation == 'CPython' brotlicffi; platform_python_implementation != 'CPython' diff --git a/setup.cfg b/setup.cfg index d998e736b45..9b697780a88 100644 --- a/setup.cfg +++ b/setup.cfg @@ -52,7 +52,7 @@ include_package_data = True install_requires = aiohappyeyeballs >= 2.3.0 aiosignal >= 1.1.2 - async-timeout >= 4.0, < 5.0 ; python_version < "3.11" + async-timeout >= 4.0, < 6.0 ; python_version < "3.11" attrs >= 17.3.0 frozenlist >= 1.1.1 multidict >=4.5, < 7.0 From edd44a2d38e3776b118b8ff7af5efb3a56829017 Mon Sep 17 00:00:00 2001 From: Andrew Svetlov <andrew.svetlov@gmail.com> Date: Thu, 7 Nov 2024 17:16:40 +0100 Subject: [PATCH 0841/1511] Relax async-timeout 
version to >=4.0, <6.0 (#9695) (#9697) --- requirements/runtime-deps.in | 2 +- setup.cfg | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/runtime-deps.in b/requirements/runtime-deps.in index 7d0f5ca3a62..50c6e41f9e4 100644 --- a/requirements/runtime-deps.in +++ b/requirements/runtime-deps.in @@ -3,7 +3,7 @@ aiodns >= 3.2.0; sys_platform=="linux" or sys_platform=="darwin" aiohappyeyeballs >= 2.3.0 aiosignal >= 1.1.2 -async-timeout >= 4.0, < 5.0 ; python_version < "3.11" +async-timeout >= 4.0, < 6.0 ; python_version < "3.11" attrs >= 17.3.0 Brotli; platform_python_implementation == 'CPython' brotlicffi; platform_python_implementation != 'CPython' diff --git a/setup.cfg b/setup.cfg index 5f78750679f..f23e477035f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -53,7 +53,7 @@ include_package_data = True install_requires = aiohappyeyeballs >= 2.3.0 aiosignal >= 1.1.2 - async-timeout >= 4.0, < 5.0 ; python_version < "3.11" + async-timeout >= 4.0, < 6.0 ; python_version < "3.11" attrs >= 17.3.0 frozenlist >= 1.1.1 multidict >=4.5, < 7.0 From 685768a44dc6c1796757c6d1435831ae5cc4f6c3 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Thu, 7 Nov 2024 11:28:48 -0600 Subject: [PATCH 0842/1511] [PR #9698/422ba0c backport][3.11] Add flow control tests for WebSocket (#9701) Co-authored-by: pre-commit-ci[bot] --- tests/test_websocket_parser.py | 70 ++++++++++++++++++++++++++++++++-- 1 file changed, 67 insertions(+), 3 deletions(-) diff --git a/tests/test_websocket_parser.py b/tests/test_websocket_parser.py index d034245af7c..990aafce723 100644 --- a/tests/test_websocket_parser.py +++ b/tests/test_websocket_parser.py @@ -18,6 +18,7 @@ websocket_mask, ) from aiohttp._websocket.models import WS_DEFLATE_TRAILING +from aiohttp.base_protocol import BaseProtocol from aiohttp.http import WebSocketError, WSCloseCode, WSMessage, WSMsgType from aiohttp.http_websocket import WebSocketReader @@ -83,12 +84,36 @@ def build_close_frame(code=1000, message=b"", noheader=False): @pytest.fixture() -def out(loop): - return aiohttp.DataQueue(loop) +def protocol(loop: asyncio.AbstractEventLoop) -> BaseProtocol: + transport = mock.Mock(spec_set=asyncio.Transport) + protocol = BaseProtocol(loop) + protocol.connection_made(transport) + return protocol @pytest.fixture() -def parser(out: aiohttp.DataQueue[WSMessage]) -> PatchableWebSocketReader: +def out( + loop: asyncio.AbstractEventLoop, protocol: BaseProtocol +) -> aiohttp.FlowControlDataQueue[WSMessage]: + return aiohttp.FlowControlDataQueue(protocol, 2**16, loop=loop) + + +@pytest.fixture() +def out_low_limit( + loop: asyncio.AbstractEventLoop, protocol: BaseProtocol +) -> aiohttp.FlowControlDataQueue[WSMessage]: + return aiohttp.FlowControlDataQueue(protocol, 16, loop=loop) + + +@pytest.fixture() +def parser_low_limit( + out_low_limit: aiohttp.FlowControlDataQueue[WSMessage], +) -> PatchableWebSocketReader: + return PatchableWebSocketReader(out_low_limit, 4 * 1024 * 1024) + + +@pytest.fixture() +def parser(out: aiohttp.FlowControlDataQueue[WSMessage]) -> PatchableWebSocketReader: return PatchableWebSocketReader(out, 4 * 1024 * 1024) @@ 
-569,3 +594,42 @@ def test_pickle(self) -> None: assert err2.code == WSCloseCode.PROTOCOL_ERROR assert str(err2) == "Something invalid" assert err2.foo == "bar" + + +def test_flow_control_binary( + protocol: BaseProtocol, + out_low_limit: aiohttp.FlowControlDataQueue[WSMessage], + parser_low_limit: WebSocketReader, +) -> None: + large_payload = b"b" * (1 + 16 * 2) + large_payload_len = len(large_payload) + with mock.patch.object(parser_low_limit, "parse_frame", autospec=True) as m: + m.return_value = [(1, WSMsgType.BINARY, large_payload, False)] + + parser_low_limit.feed_data(b"") + + res = out_low_limit._buffer[0] + assert res == (WSMessage(WSMsgType.BINARY, large_payload, ""), large_payload_len) + assert protocol._reading_paused is True + + +@pytest.mark.xfail( + reason="Flow control is currently broken on master branch; see #9686" +) +def test_flow_control_multi_byte_text( + protocol: BaseProtocol, + out_low_limit: aiohttp.FlowControlDataQueue[WSMessage], + parser_low_limit: WebSocketReader, +) -> None: + large_payload_text = "𒀁" * (1 + 16 * 2) + large_payload = large_payload_text.encode("utf-8") + large_payload_len = len(large_payload) + + with mock.patch.object(parser_low_limit, "parse_frame", autospec=True) as m: + m.return_value = [(1, WSMsgType.TEXT, large_payload, False)] + + parser_low_limit.feed_data(b"") + + res = out_low_limit._buffer[0] + assert res == (WSMessage(WSMsgType.TEXT, large_payload_text, ""), large_payload_len) + assert protocol._reading_paused is True From 99d09b55d0fd08756fd5a6070478641f3d06d17c Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Thu, 7 Nov 2024 11:36:25 -0600 Subject: [PATCH 0843/1511] [PR #9698/422ba0c backport][3.10] Add flow control tests for WebSocket (#9700) Co-authored-by: pre-commit-ci[bot] --- tests/test_websocket_parser.py | 69 +++++++++++++++++++++++++++++++++- 1 file changed, 67 insertions(+), 2 deletions(-) diff --git a/tests/test_websocket_parser.py b/tests/test_websocket_parser.py index 3bdd8108e35..01ee78ef02b 100644 --- a/tests/test_websocket_parser.py +++ b/tests/test_websocket_parser.py @@ -1,3 +1,4 @@ +import asyncio import pickle import random import struct @@ -8,6 +9,7 @@ import aiohttp from aiohttp import http_websocket +from aiohttp.base_protocol import BaseProtocol from aiohttp.http import WebSocketError, WSCloseCode, WSMessage, WSMsgType from aiohttp.http_websocket import ( _WS_DEFLATE_TRAILING, @@ -77,8 +79,32 @@ def build_close_frame(code=1000, message=b"", noheader=False): @pytest.fixture() -def out(loop): - return aiohttp.DataQueue(loop) +def protocol(loop: asyncio.AbstractEventLoop) -> BaseProtocol: + transport = mock.Mock(spec_set=asyncio.Transport) + protocol = BaseProtocol(loop) + protocol.connection_made(transport) + return protocol + + +@pytest.fixture() +def out( + loop: asyncio.AbstractEventLoop, protocol: BaseProtocol +) -> aiohttp.FlowControlDataQueue[WSMessage]: + return aiohttp.FlowControlDataQueue(protocol, 2**16, loop=loop) + + +@pytest.fixture() +def out_low_limit( + loop: asyncio.AbstractEventLoop, protocol: BaseProtocol +) -> aiohttp.FlowControlDataQueue[WSMessage]: + return aiohttp.FlowControlDataQueue(protocol, 16, loop=loop) + + +@pytest.fixture() +def parser_low_limit( + out_low_limit: aiohttp.FlowControlDataQueue[WSMessage], +) -> WebSocketReader: + return WebSocketReader(out_low_limit, 4 * 1024 * 1024) @pytest.fixture() @@ -513,3 +539,42 @@ def test_pickle(self) -> None: assert err2.code == WSCloseCode.PROTOCOL_ERROR assert str(err2) == "Something invalid" assert err2.foo == "bar" + + +def 
test_flow_control_binary( + protocol: BaseProtocol, + out_low_limit: aiohttp.FlowControlDataQueue[WSMessage], + parser_low_limit: WebSocketReader, +) -> None: + large_payload = b"b" * (1 + 16 * 2) + large_payload_len = len(large_payload) + with mock.patch.object(parser_low_limit, "parse_frame", autospec=True) as m: + m.return_value = [(1, WSMsgType.BINARY, large_payload, False)] + + parser_low_limit.feed_data(b"") + + res = out_low_limit._buffer[0] + assert res == (WSMessage(WSMsgType.BINARY, large_payload, ""), large_payload_len) + assert protocol._reading_paused is True + + +@pytest.mark.xfail( + reason="Flow control is currently broken on master branch; see #9686" +) +def test_flow_control_multi_byte_text( + protocol: BaseProtocol, + out_low_limit: aiohttp.FlowControlDataQueue[WSMessage], + parser_low_limit: WebSocketReader, +) -> None: + large_payload_text = "𒀁" * (1 + 16 * 2) + large_payload = large_payload_text.encode("utf-8") + large_payload_len = len(large_payload) + + with mock.patch.object(parser_low_limit, "parse_frame", autospec=True) as m: + m.return_value = [(1, WSMsgType.TEXT, large_payload, False)] + + parser_low_limit.feed_data(b"") + + res = out_low_limit._buffer[0] + assert res == (WSMessage(WSMsgType.TEXT, large_payload_text, ""), large_payload_len) + assert protocol._reading_paused is True From b9af73d105bf93cac3c0264a47bf0c30f4bbdce4 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 7 Nov 2024 18:35:14 +0000 Subject: [PATCH 0844/1511] [PR #9693/dd0b6e37 backport][3.10] Avoid duplicate `content_length` fetch to prepare web response headers (#9702) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/web_response.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 2036a8d088b..57bce1b06ad 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -466,7 +466,7 @@ async def _prepare_headers(self) -> None: headers[hdrs.TRANSFER_ENCODING] = "chunked" if hdrs.CONTENT_LENGTH in headers: del headers[hdrs.CONTENT_LENGTH] - elif self._length_check: + elif self._length_check: # Disabled for WebSockets writer.length = self.content_length if writer.length is None: if version >= HttpVersion11: @@ -487,7 +487,7 @@ async def _prepare_headers(self) -> None: # https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-13 if hdrs.TRANSFER_ENCODING in headers: del headers[hdrs.TRANSFER_ENCODING] - elif self.content_length != 0: + elif (writer.length if self._length_check else self.content_length) != 0: # https://www.rfc-editor.org/rfc/rfc9110#section-8.3-5 headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream") headers.setdefault(hdrs.DATE, rfc822_formatted_time()) From d4c09143937dfe92f2762976a8f0b912d7edeb52 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 7 Nov 2024 18:43:26 +0000 Subject: [PATCH 0845/1511] [PR #9693/dd0b6e37 backport][3.11] Avoid duplicate `content_length` fetch to prepare web response headers (#9703) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/web_response.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index fda2137be67..229adf4e4d7 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -483,7 +483,7 @@ async def _prepare_headers(self) -> None: headers[hdrs.TRANSFER_ENCODING] = "chunked" if hdrs.CONTENT_LENGTH in headers: del headers[hdrs.CONTENT_LENGTH] - elif self._length_check: + elif self._length_check: # Disabled for WebSockets writer.length = self.content_length if writer.length is None: if version >= HttpVersion11: @@ -504,7 +504,7 @@ async def _prepare_headers(self) -> None: # https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-13 if hdrs.TRANSFER_ENCODING in headers: del headers[hdrs.TRANSFER_ENCODING] - elif self.content_length != 0: + elif (writer.length if self._length_check else self.content_length) != 0: # https://www.rfc-editor.org/rfc/rfc9110#section-8.3-5 headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream") headers.setdefault(hdrs.DATE, rfc822_formatted_time()) From ea1c084d9f49e91feb2e92fea307260ad3f47376 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Thu, 7 Nov 2024 13:18:07 -0600 Subject: [PATCH 0846/1511] [3.11] Fix WebSocket reader flow control size calculation for multi-byte data (#9686) --- CHANGES/9686.bugfix.rst | 1 + aiohttp/_websocket/reader_py.py | 10 +++++++--- docs/spelling_wordlist.txt | 1 + tests/test_websocket_parser.py | 3 --- 4 files changed, 9 insertions(+), 6 deletions(-) create mode 100644 CHANGES/9686.bugfix.rst diff --git a/CHANGES/9686.bugfix.rst b/CHANGES/9686.bugfix.rst new file mode 100644 index 00000000000..397fb75ba77 --- /dev/null +++ b/CHANGES/9686.bugfix.rst @@ -0,0 +1 @@ +Fixed the WebSocket flow control calculation undercounting with multi-byte data -- by :user:`bdraco`. 
diff --git a/aiohttp/_websocket/reader_py.py b/aiohttp/_websocket/reader_py.py index 0910a340629..a3993220b70 100644 --- a/aiohttp/_websocket/reader_py.py +++ b/aiohttp/_websocket/reader_py.py @@ -4,7 +4,7 @@ from ..compression_utils import ZLibDecompressor from ..helpers import set_exception -from ..streams import DataQueue +from ..streams import FlowControlDataQueue from .helpers import UNPACK_CLOSE_CODE, UNPACK_LEN3, websocket_mask from .models import ( WS_DEFLATE_TRAILING, @@ -42,7 +42,10 @@ class WebSocketReader: def __init__( - self, queue: DataQueue[WSMessage], max_msg_size: int, compress: bool = True + self, + queue: FlowControlDataQueue[WSMessage], + max_msg_size: int, + compress: bool = True, ) -> None: self.queue = queue self._queue_feed_data = queue.feed_data @@ -185,7 +188,8 @@ def _feed_data(self, data: bytes) -> None: # This is not type safe, but many tests should fail in # test_client_ws_functional.py if this is wrong. self._queue_feed_data( - TUPLE_NEW(WSMessage, (WS_MSG_TYPE_TEXT, text, "")), len(text) + TUPLE_NEW(WSMessage, (WS_MSG_TYPE_TEXT, text, "")), + len(payload_merged), ) else: self._queue_feed_data( diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index c135ebd2084..a1f3d944584 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -330,6 +330,7 @@ UI un unawaited unclosed +undercounting unhandled unicode unittest diff --git a/tests/test_websocket_parser.py b/tests/test_websocket_parser.py index 990aafce723..5151d87b210 100644 --- a/tests/test_websocket_parser.py +++ b/tests/test_websocket_parser.py @@ -613,9 +613,6 @@ def test_flow_control_binary( assert protocol._reading_paused is True -@pytest.mark.xfail( - reason="Flow control is currently broken on master branch; see #9686" -) def test_flow_control_multi_byte_text( protocol: BaseProtocol, out_low_limit: aiohttp.FlowControlDataQueue[WSMessage], From a2c75b2427969c4b24cec9e8175d4a09a00b59c4 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" 
<45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 7 Nov 2024 20:10:09 +0000 Subject: [PATCH 0847/1511] [PR #9704/26d96107 backport][3.10] Simplify keep-alive logic for HTTP/1.0 in ClientRequest (#9706) --- aiohttp/client_reqrep.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 0c29e0d4594..978fd150544 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -645,10 +645,8 @@ def keep_alive(self) -> bool: # keep alive not supported at all return False if self.version == HttpVersion10: - if self.headers.get(hdrs.CONNECTION) == "keep-alive": - return True - else: # no headers means we close for Http 1.0 - return False + # no headers means we close for Http 1.0 + return self.headers.get(hdrs.CONNECTION) == "keep-alive" elif self.headers.get(hdrs.CONNECTION) == "close": return False From 0babc346a2788d05c59d14c54e6cde417dced90a Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 7 Nov 2024 20:17:17 +0000 Subject: [PATCH 0848/1511] [PR #9704/26d96107 backport][3.11] Simplify keep-alive logic for HTTP/1.0 in ClientRequest (#9707) --- aiohttp/client_reqrep.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index fbccfa48eb0..5aa08b81a0d 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -625,10 +625,8 @@ def keep_alive(self) -> bool: # keep alive not supported at all return False if self.version == HttpVersion10: - if self.headers.get(hdrs.CONNECTION) == "keep-alive": - return True - else: # no headers means we close for Http 1.0 - return False + # no headers means we close for Http 1.0 + return self.headers.get(hdrs.CONNECTION) == "keep-alive" elif self.headers.get(hdrs.CONNECTION) == "close": return False From 210d36fc7ec4c0ddefc2cefdc4c8262152a0000e Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Thu, 7 Nov 2024 14:32:44 -0600 Subject: [PATCH 0849/1511] [PR #9686/ea1c084 backport][3.10] Fix WebSocket reader flow control size calculation for multi-byte data (#9709) --- CHANGES/9686.bugfix.rst | 1 + aiohttp/http_websocket.py | 2 +- docs/spelling_wordlist.txt | 1 + tests/test_websocket_parser.py | 3 --- 4 files changed, 3 insertions(+), 4 deletions(-) create mode 100644 CHANGES/9686.bugfix.rst diff --git a/CHANGES/9686.bugfix.rst b/CHANGES/9686.bugfix.rst new file mode 100644 index 00000000000..397fb75ba77 --- /dev/null +++ b/CHANGES/9686.bugfix.rst @@ -0,0 +1 @@ +Fixed the WebSocket flow control calculation undercounting with multi-byte data -- by :user:`bdraco`. diff --git a/aiohttp/http_websocket.py b/aiohttp/http_websocket.py index c9a6d49176e..0916530dbe8 100644 --- a/aiohttp/http_websocket.py +++ b/aiohttp/http_websocket.py @@ -417,7 +417,7 @@ def _feed_data(self, data: bytes) -> None: # tuple.__new__ is used to avoid the overhead of the lambda msg = tuple.__new__(WSMessage, (WSMsgType.TEXT, text, "")) - self.queue.feed_data(msg, len(text)) + self.queue.feed_data(msg, len(payload_merged)) continue # tuple.__new__ is used to avoid the overhead of the lambda diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index c135ebd2084..a1f3d944584 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -330,6 +330,7 @@ UI un unawaited unclosed +undercounting unhandled unicode unittest diff --git a/tests/test_websocket_parser.py b/tests/test_websocket_parser.py index 01ee78ef02b..91f09554633 100644 --- a/tests/test_websocket_parser.py +++ b/tests/test_websocket_parser.py @@ -558,9 +558,6 @@ def test_flow_control_binary( assert protocol._reading_paused is True -@pytest.mark.xfail( - reason="Flow control is currently broken on master branch; see #9686" -) def test_flow_control_multi_byte_text( protocol: BaseProtocol, out_low_limit: aiohttp.FlowControlDataQueue[WSMessage], From 
76f7722d5df35cf9e449f7175eed62994c70ef78 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Thu, 7 Nov 2024 14:44:01 -0600 Subject: [PATCH 0850/1511] [PR #9692/e85db24 backport][3.11] Change `RequestInfo` to be a `NamedTuple` to improve performances (#9708) --- CHANGES/9692.breaking.rst | 1 + aiohttp/client_reqrep.py | 15 +++++++-------- docs/client_reference.rst | 4 ++-- tests/test_client_request.py | 2 +- tests/test_client_response.py | 10 +++++----- 5 files changed, 16 insertions(+), 16 deletions(-) create mode 100644 CHANGES/9692.breaking.rst diff --git a/CHANGES/9692.breaking.rst b/CHANGES/9692.breaking.rst new file mode 100644 index 00000000000..e0fdae11416 --- /dev/null +++ b/CHANGES/9692.breaking.rst @@ -0,0 +1 @@ +Changed ``ClientRequest.request_info`` to be a `NamedTuple` to improve client performance -- by :user:`bdraco`. diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 5aa08b81a0d..7a78aa86b49 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -105,16 +105,11 @@ class ContentDisposition: filename: Optional[str] -@attr.s(auto_attribs=True, frozen=True, slots=True) -class RequestInfo: +class RequestInfo(NamedTuple): url: URL method: str headers: "CIMultiDictProxy[str]" - real_url: URL = attr.ib() - - @real_url.default - def real_url_default(self) -> URL: - return self.url + real_url: URL class Fingerprint: @@ -401,7 +396,11 @@ def port(self) -> Optional[int]: @property def request_info(self) -> RequestInfo: headers: CIMultiDictProxy[str] = CIMultiDictProxy(self.headers) - return RequestInfo(self.url, self.method, headers, self.original_url) + # These are created on every request, so we use a NamedTuple + # for performance reasons. 
+ return tuple.__new__( + RequestInfo, (self.url, self.method, headers, self.original_url) + ) def update_host(self, url: URL) -> None: """Update destination host, port and connection type (ssl).""" diff --git a/docs/client_reference.rst b/docs/client_reference.rst index a7484a57c5a..caa38666405 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -1483,7 +1483,7 @@ Response object .. attribute:: request_info - A namedtuple with request URL and headers from :class:`~aiohttp.ClientRequest` + A :class:`typing.NamedTuple` with request URL and headers from :class:`~aiohttp.ClientRequest` object, :class:`aiohttp.RequestInfo` instance. .. method:: get_encoding() @@ -1842,7 +1842,7 @@ Utilities .. class:: RequestInfo() - A data class with request URL and headers from :class:`~aiohttp.ClientRequest` + A :class:`typing.NamedTuple` with request URL and headers from :class:`~aiohttp.ClientRequest` object, available as :attr:`ClientResponse.request_info` attribute. .. attribute:: url diff --git a/tests/test_client_request.py b/tests/test_client_request.py index d6e8b823bb6..9d4b3c46aa4 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -121,7 +121,7 @@ def test_version_default(make_request) -> None: def test_request_info(make_request) -> None: req = make_request("get", "http://python.org/") assert req.request_info == aiohttp.RequestInfo( - URL("http://python.org/"), "GET", req.headers + URL("http://python.org/"), "GET", req.headers, URL("http://python.org/") ) diff --git a/tests/test_client_response.py b/tests/test_client_response.py index ede3950a755..be25a87e425 100644 --- a/tests/test_client_response.py +++ b/tests/test_client_response.py @@ -987,7 +987,7 @@ def test_response_request_info() -> None: response = ClientResponse( "get", URL(url), - request_info=RequestInfo(url, "get", headers), + request_info=RequestInfo(url, "get", headers, url), writer=WriterMock(), continue100=None, timer=TimerNoop(), @@ -1006,7 +1006,7 @@ 
def test_request_info_in_exception() -> None: response = ClientResponse( "get", URL(url), - request_info=RequestInfo(url, "get", headers), + request_info=RequestInfo(url, "get", headers, url), writer=WriterMock(), continue100=None, timer=TimerNoop(), @@ -1027,7 +1027,7 @@ def test_no_redirect_history_in_exception() -> None: response = ClientResponse( "get", URL(url), - request_info=RequestInfo(url, "get", headers), + request_info=RequestInfo(url, "get", headers, url), writer=WriterMock(), continue100=None, timer=TimerNoop(), @@ -1050,7 +1050,7 @@ def test_redirect_history_in_exception() -> None: response = ClientResponse( "get", URL(url), - request_info=RequestInfo(url, "get", headers), + request_info=RequestInfo(url, "get", headers, url), writer=WriterMock(), continue100=None, timer=TimerNoop(), @@ -1064,7 +1064,7 @@ def test_redirect_history_in_exception() -> None: hist_response = ClientResponse( "get", URL(hist_url), - request_info=RequestInfo(url, "get", headers), + request_info=RequestInfo(url, "get", headers, url), writer=WriterMock(), continue100=None, timer=TimerNoop(), From b1bd55bde57654e0eec9c27b8745ddb2e345b1ba Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Thu, 7 Nov 2024 15:55:56 -0600 Subject: [PATCH 0851/1511] [PR #9710/b40265e backport][3.101] Remove redundant `protocol.should_close` check in `Connection._release` (#9711) --- aiohttp/connector.py | 4 +--- tests/test_client_connection.py | 4 ++-- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 773a949c46e..4665b952e0f 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -186,9 +186,7 @@ def release(self) -> None: self._notify_release() if self._protocol is not None: - self._connector._release( - self._key, self._protocol, should_close=self._protocol.should_close - ) + self._connector._release(self._key, self._protocol) self._protocol = None @property diff --git a/tests/test_client_connection.py b/tests/test_client_connection.py index 5a0739b6b0c..1be3f019b0c 100644 --- a/tests/test_client_connection.py +++ b/tests/test_client_connection.py @@ -112,7 +112,7 @@ def test_release(connector, key, protocol, loop) -> None: conn.release() assert not protocol.transport.close.called assert conn._protocol is None - connector._release.assert_called_with(key, protocol, should_close=False) + connector._release.assert_called_with(key, protocol) assert conn.closed @@ -123,7 +123,7 @@ def test_release_proto_should_close(connector, key, protocol, loop) -> None: conn.release() assert not protocol.transport.close.called assert conn._protocol is None - connector._release.assert_called_with(key, protocol, should_close=True) + connector._release.assert_called_with(key, protocol) assert conn.closed From d5bbb80195513268c604a908c31870db6b49310c Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Thu, 7 Nov 2024 16:03:31 -0600 Subject: [PATCH 0852/1511] [PR #9710/b40265e backport][3.11] Remove redundant `protocol.should_close` check in `Connection._release` (#9712) --- aiohttp/connector.py | 4 +--- tests/test_client_connection.py | 4 ++-- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 03147cb3aaf..282d0a97802 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -186,9 +186,7 @@ def release(self) -> None: self._notify_release() if self._protocol is not None: - self._connector._release( - self._key, self._protocol, should_close=self._protocol.should_close - ) + self._connector._release(self._key, self._protocol) self._protocol = None @property diff --git a/tests/test_client_connection.py b/tests/test_client_connection.py index 5a0739b6b0c..1be3f019b0c 100644 --- a/tests/test_client_connection.py +++ b/tests/test_client_connection.py @@ -112,7 +112,7 @@ def test_release(connector, key, protocol, loop) -> None: conn.release() assert not protocol.transport.close.called assert conn._protocol is None - connector._release.assert_called_with(key, protocol, should_close=False) + connector._release.assert_called_with(key, protocol) assert conn.closed @@ -123,7 +123,7 @@ def test_release_proto_should_close(connector, key, protocol, loop) -> None: conn.release() assert not protocol.transport.close.called assert conn._protocol is None - connector._release.assert_called_with(key, protocol, should_close=True) + connector._release.assert_called_with(key, protocol) assert conn.closed From edc0c07340ac70762b9f6af8b16cce30678df4f1 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Thu, 7 Nov 2024 16:58:58 -0600 Subject: [PATCH 0853/1511] [PR #9713/500a021 backport][3.10] Small cleanups to creating a ClientRequest (#9716) --- aiohttp/client_reqrep.py | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 978fd150544..91605f0e83d 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -296,19 +296,20 @@ def __init__( ): if loop is None: loop = asyncio.get_event_loop() - - match = _CONTAINS_CONTROL_CHAR_RE.search(method) - if match: + if match := _CONTAINS_CONTROL_CHAR_RE.search(method): raise ValueError( f"Method cannot contain non-token characters {method!r} " - "(found at least {match.group()!r})" + f"(found at least {match.group()!r})" ) - - assert isinstance(url, URL), url - assert isinstance(proxy, (URL, type(None))), proxy + # URL forbids subclasses, so a simple type check is enough. + assert type(url) is URL, url + if proxy is not None: + assert type(proxy) is URL, proxy # FIXME: session is None in tests only, need to fix tests # assert session is not None - self._session = cast("ClientSession", session) + if TYPE_CHECKING: + assert session is not None + self._session = session if params: url = url.extend_query(params) self.original_url = url @@ -343,9 +344,7 @@ def __init__( if data is not None or self.method not in self.GET_METHODS: self.update_transfer_encoding() self.update_expect_continue(expect100) - if traces is None: - traces = [] - self._traces = traces + self._traces = [] if traces is None else traces def __reset_writer(self, _: object = None) -> None: self.__writer = None From 5914dc59d5132c959df523c0df6bd188a8a875fa Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Thu, 7 Nov 2024 17:05:17 -0600 Subject: [PATCH 0854/1511] [PR #9713/500a021 backport][3.11] Small cleanups to creating a ClientRequest (#9717) --- aiohttp/client_reqrep.py | 22 ++++++++++------------ 1 file changed, 10 insertions(+), 12 deletions(-) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 7a78aa86b49..c450ebdb2a4 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -24,7 +24,6 @@ Tuple, Type, Union, - cast, ) import attr @@ -291,19 +290,20 @@ def __init__( ): if loop is None: loop = asyncio.get_event_loop() - - match = _CONTAINS_CONTROL_CHAR_RE.search(method) - if match: + if match := _CONTAINS_CONTROL_CHAR_RE.search(method): raise ValueError( f"Method cannot contain non-token characters {method!r} " - "(found at least {match.group()!r})" + f"(found at least {match.group()!r})" ) - - assert isinstance(url, URL), url - assert isinstance(proxy, (URL, type(None))), proxy + # URL forbids subclasses, so a simple type check is enough. + assert type(url) is URL, url + if proxy is not None: + assert type(proxy) is URL, proxy # FIXME: session is None in tests only, need to fix tests # assert session is not None - self._session = cast("ClientSession", session) + if TYPE_CHECKING: + assert session is not None + self._session = session if params: url = url.extend_query(params) self.original_url = url @@ -338,9 +338,7 @@ def __init__( if data is not None or self.method not in self.GET_METHODS: self.update_transfer_encoding() self.update_expect_continue(expect100) - if traces is None: - traces = [] - self._traces = traces + self._traces = [] if traces is None else traces def __reset_writer(self, _: object = None) -> None: self.__writer = None From e56a4fd45b813a8c41d09c88aaf3c73903935c71 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Thu, 7 Nov 2024 17:11:09 -0600 Subject: [PATCH 0855/1511] [PR #3753/401c256 backport][3.11] Enforce URL for `cookie_jar.filter_cookies()` call (#9715) Co-authored-by: Andrew Svetlov <andrew.svetlov@gmail.com> --- aiohttp/cookiejar.py | 10 +++++++++- tests/test_cookiejar.py | 7 +++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/aiohttp/cookiejar.py b/aiohttp/cookiejar.py index 3ffa4198fc7..ef04bda5ad6 100644 --- a/aiohttp/cookiejar.py +++ b/aiohttp/cookiejar.py @@ -9,6 +9,7 @@ import pickle import re import time +import warnings from collections import defaultdict from http.cookies import BaseCookie, Morsel, SimpleCookie from typing import ( @@ -309,7 +310,14 @@ def filter_cookies(self, request_url: URL = URL()) -> "BaseCookie[str]": if not self._cookies: # Skip rest of function if no non-expired cookies. return filtered - request_url = URL(request_url) + if type(request_url) is not URL: + warnings.warn( + "filter_cookies expects yarl.URL instances only," + f"and will stop working in 4.x, got {type(request_url)}", + DeprecationWarning, + stacklevel=2, + ) + request_url = URL(request_url) hostname = request_url.raw_host or "" is_not_secure = request_url.scheme not in ("https", "wss") diff --git a/tests/test_cookiejar.py b/tests/test_cookiejar.py index 248d0d419e3..bdcf54fa796 100644 --- a/tests/test_cookiejar.py +++ b/tests/test_cookiejar.py @@ -313,6 +313,13 @@ async def test_filter_cookies_with_domain_path_lookup_multilevelpath( assert c in expected_cookies +async def test_filter_cookies_str_deprecated(loop: asyncio.AbstractEventLoop) -> None: + jar = CookieJar() + jar.update_cookies(SimpleCookie("shared-cookie=first; Domain=example.com;")) + with pytest.warns(DeprecationWarning): + jar.filter_cookies("http://éé.com") + + async def test_domain_filter_ip_cookie_send() -> None: jar = CookieJar() cookies = SimpleCookie( From 740112b4e86adde43db86dcda49dc6591de0c5d5 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Thu, 7 Nov 2024 17:35:05 -0600 Subject: [PATCH 0856/1511] Release 3.11.0b4 (#9718) --- CHANGES.rst | 26 +++++++++++++++++++++++++- aiohttp/__init__.py | 2 +- 2 files changed, 26 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 2763c4b086a..07051f60ad4 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,7 +10,7 @@ .. towncrier release notes start -3.11.0b3 (2024-11-05) +3.11.0b4 (2024-11-07) ===================== Bug fixes @@ -48,6 +48,14 @@ Bug fixes +- Fixed :py:meth:`WebSocketResponse.close() <aiohttp.web.WebSocketResponse.close>` to discard non-close messages within its timeout window after sending close -- by :user:`lenard-mosys`. + + + *Related issues and pull requests on GitHub:* + :issue:`9506`. + + + - Fixed a deadlock that could occur while attempting to get a new connection slot after a timeout -- by :user:`bdraco`. The connector was not cancellation-safe. @@ -68,6 +76,14 @@ Bug fixes +- Fixed the WebSocket flow control calculation undercounting with multi-byte data -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9686`. + + + Features -------- @@ -222,6 +238,14 @@ Removals and backward incompatible breaking changes +- Changed ``ClientRequest.request_info`` to be a `NamedTuple` to improve client performance -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9692`. + + + Packaging updates and notes for downstreams ------------------------------------------- diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index f7b55d52ca6..e38894a14a6 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.0b3" +__version__ = "3.11.0b4" from typing import TYPE_CHECKING, Tuple From fd09b483e59c28ca009221cf4794ea280e4ab7e4 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sat, 9 Nov 2024 11:58:21 +0000 Subject: [PATCH 0857/1511] [PR #9722/fbf555c backport][3.11] Simplify header and method lookups (#9724) --- CHANGES/9722.misc.rst | 1 + aiohttp/client.py | 4 ++-- aiohttp/client_proto.py | 6 ++---- aiohttp/client_reqrep.py | 6 +----- aiohttp/hdrs.py | 13 +++++++++++++ aiohttp/helpers.py | 31 +++++++++++-------------------- aiohttp/http_parser.py | 8 ++++---- aiohttp/web_response.py | 2 +- tests/test_helpers.py | 6 +++--- 9 files changed, 38 insertions(+), 39 deletions(-) create mode 100644 CHANGES/9722.misc.rst diff --git a/CHANGES/9722.misc.rst b/CHANGES/9722.misc.rst new file mode 100644 index 00000000000..998db377fa1 --- /dev/null +++ b/CHANGES/9722.misc.rst @@ -0,0 +1 @@ +Replace internal helper methods ``method_must_be_empty_body`` and ``status_code_must_be_empty_body`` with simple `set` lookups -- by :user:`bdraco`. diff --git a/aiohttp/client.py b/aiohttp/client.py index f93558db29d..eaca1711e9f 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -92,10 +92,10 @@ from .helpers import ( _SENTINEL, DEBUG, + EMPTY_BODY_METHODS, BasicAuth, TimeoutHandle, get_env_proxy_for_url, - method_must_be_empty_body, sentinel, strip_auth_from_url, ) @@ -706,7 +706,7 @@ async def _request( assert conn.protocol is not None conn.protocol.set_response_params( timer=timer, - skip_payload=method_must_be_empty_body(method), + skip_payload=method in EMPTY_BODY_METHODS, read_until_eof=read_until_eof, auto_decompress=auto_decompress, read_timeout=real_timeout.sock_read, diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index a3e29c01cc6..3050ceaa54c 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -11,9 +11,9 @@ ) from .helpers import ( _EXC_SENTINEL, + EMPTY_BODY_STATUS_CODES, BaseTimerContext, set_exception, - status_code_must_be_empty_body, ) from .http import HttpResponseParser, RawResponseMessage from .http_exceptions import HttpProcessingError @@ -284,9 +284,7 @@ def 
data_received(self, data: bytes) -> None: self._payload = payload - if self._skip_payload or status_code_must_be_empty_body( - message.code - ): + if self._skip_payload or message.code in EMPTY_BODY_STATUS_CODES: self.feed_data((message, EMPTY_PAYLOAD), 0) else: self.feed_data((message, payload), 0) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index c450ebdb2a4..4a30c402266 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -3,7 +3,6 @@ import contextlib import functools import io -import itertools import re import sys import traceback @@ -239,9 +238,6 @@ class ClientRequest: } POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT} ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE}) - _HOST_STRINGS = frozenset( - map("".join, itertools.product(*zip("host".upper(), "host".lower()))) - ) DEFAULT_HEADERS = { hdrs.ACCEPT: "*/*", @@ -445,7 +441,7 @@ def update_headers(self, headers: Optional[LooseHeaders]) -> None: for key, value in headers: # type: ignore[misc] # A special case for Host header - if key in self._HOST_STRINGS: + if key in hdrs.HOST_ALL: self.headers[key] = value else: self.headers.add(key, value) diff --git a/aiohttp/hdrs.py b/aiohttp/hdrs.py index 2f1f5e02b77..c8d6b35f33a 100644 --- a/aiohttp/hdrs.py +++ b/aiohttp/hdrs.py @@ -2,6 +2,7 @@ # After changing the file content call ./tools/gen.py # to regenerate the headers parser +import itertools from typing import Final, Set from multidict import istr @@ -106,3 +107,15 @@ X_FORWARDED_FOR: Final[istr] = istr("X-Forwarded-For") X_FORWARDED_HOST: Final[istr] = istr("X-Forwarded-Host") X_FORWARDED_PROTO: Final[istr] = istr("X-Forwarded-Proto") + +# These are the upper/lower case variants of the headers/methods +# Example: {'hOst', 'host', 'HoST', 'HOSt', 'hOsT', 'HosT', 'hoSt', ...} +METH_HEAD_ALL: Final = frozenset( + map("".join, itertools.product(*zip(METH_HEAD.upper(), METH_HEAD.lower()))) +) +METH_CONNECT_ALL: Final = frozenset( + 
map("".join, itertools.product(*zip(METH_CONNECT.upper(), METH_CONNECT.lower()))) +) +HOST_ALL: Final = frozenset( + map("".join, itertools.product(*zip(HOST.upper(), HOST.lower()))) +) diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index 0435123a1a1..522cce2972b 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -75,6 +75,12 @@ NO_EXTENSIONS = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS")) +# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1 +EMPTY_BODY_STATUS_CODES = frozenset((204, 304, *range(100, 200))) +# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1 +# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.2 +EMPTY_BODY_METHODS = hdrs.METH_HEAD_ALL + DEBUG = sys.flags.dev_mode or ( not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG")) ) @@ -919,25 +925,12 @@ def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]: def must_be_empty_body(method: str, code: int) -> bool: """Check if a request must return an empty body.""" return ( - status_code_must_be_empty_body(code) - or method_must_be_empty_body(method) - or (200 <= code < 300 and method.upper() == hdrs.METH_CONNECT) + code in EMPTY_BODY_STATUS_CODES + or method in EMPTY_BODY_METHODS + or (200 <= code < 300 and method in hdrs.METH_CONNECT_ALL) ) -def method_must_be_empty_body(method: str) -> bool: - """Check if a method must return an empty body.""" - # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1 - # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.2 - return method.upper() == hdrs.METH_HEAD - - -def status_code_must_be_empty_body(code: int) -> bool: - """Check if a status code must return an empty body.""" - # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1 - return code in {204, 304} or 100 <= code < 200 - - def should_remove_content_length(method: str, code: int) -> bool: """Check if a Content-Length header should be removed. 
@@ -945,8 +938,6 @@ def should_remove_content_length(method: str, code: int) -> bool: """ # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-8 # https://www.rfc-editor.org/rfc/rfc9110.html#section-15.4.5-4 - return ( - code in {204, 304} - or 100 <= code < 200 - or (200 <= code < 300 and method.upper() == hdrs.METH_CONNECT) + return code in EMPTY_BODY_STATUS_CODES or ( + 200 <= code < 300 and method in hdrs.METH_CONNECT_ALL ) diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index 686a2d02e28..c20806841e7 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -30,11 +30,11 @@ from .helpers import ( _EXC_SENTINEL, DEBUG, + EMPTY_BODY_METHODS, + EMPTY_BODY_STATUS_CODES, NO_EXTENSIONS, BaseTimerContext, - method_must_be_empty_body, set_exception, - status_code_must_be_empty_body, ) from .http_exceptions import ( BadHttpMessage, @@ -376,8 +376,8 @@ def get_content_length() -> Optional[int]: assert self.protocol is not None # calculate payload - empty_body = status_code_must_be_empty_body(code) or bool( - method and method_must_be_empty_body(method) + empty_body = code in EMPTY_BODY_STATUS_CODES or bool( + method and method in EMPTY_BODY_METHODS ) if not empty_body and ( ((length is not None and length > 0) or msg.chunked) diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 229adf4e4d7..bbefa500efc 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -776,7 +776,7 @@ async def _start(self, request: "BaseRequest") -> AbstractStreamWriter: body_len = len(self._body) if self._body else "0" # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-7 if body_len != "0" or ( - self.status != 304 and request.method.upper() != hdrs.METH_HEAD + self.status != 304 and request.method not in hdrs.METH_HEAD_ALL ): self._headers[hdrs.CONTENT_LENGTH] = str(body_len) diff --git a/tests/test_helpers.py b/tests/test_helpers.py index 1aba1aae3bd..2a83032e557 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ 
-15,7 +15,7 @@ from aiohttp import helpers from aiohttp.helpers import ( - method_must_be_empty_body, + EMPTY_BODY_METHODS, must_be_empty_body, parse_http_date, should_remove_content_length, @@ -897,9 +897,9 @@ def test_read_basicauth_from_empty_netrc(): def test_method_must_be_empty_body(): """Test that HEAD is the only method that unequivocally must have an empty body.""" - assert method_must_be_empty_body("HEAD") is True + assert "HEAD" in EMPTY_BODY_METHODS # CONNECT is only empty on a successful response - assert method_must_be_empty_body("CONNECT") is False + assert "CONNECT" not in EMPTY_BODY_METHODS def test_should_remove_content_length_is_subset_of_must_be_empty_body(): From 1196d4de18db065c5feaea7ff56e91aa1cef210e Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 9 Nov 2024 12:01:18 +0000 Subject: [PATCH 0858/1511] [PR #9723/ab9b5661 backport][3.11] Fix missing `reader_c.c` file in Makefile `.install-deps` (#9725) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 2fae48f2635..b0a3ef3226b 100644 --- a/Makefile +++ b/Makefile @@ -81,7 +81,7 @@ generate-llhttp: .llhttp-gen .PHONY: cythonize cythonize: .install-cython $(PYXS:.pyx=.c) aiohttp/_websocket/reader_c.c -.install-deps: .install-cython $(PYXS:.pyx=.c) $(call to-hash,$(CYS) $(REQS)) +.install-deps: .install-cython $(PYXS:.pyx=.c) aiohttp/_websocket/reader_c.c $(call to-hash,$(CYS) $(REQS)) @python -m pip install -r requirements/dev.in -c requirements/dev.txt @touch .install-deps From 79ab3a49aa642590a8275a0e9b3440139e920a0c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 9 Nov 2024 13:00:52 +0000 Subject: [PATCH 0859/1511] [PR #9727/4d448aec backport][3.11] gitignore websocket mask and reader_c (#9729) Co-authored-by: Tambe Tabitha Achere <tambe.tabitha@socialfinance.org.uk> --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 62770ddc80a..33546191693 100644 --- a/.gitignore +++ b/.gitignore @@ -43,6 +43,8 @@ aiohttp/_http_writer.c aiohttp/_http_writer.html aiohttp/_websocket.c aiohttp/_websocket.html +aiohttp/_websocket/mask.c +aiohttp/_websocket/reader_c.c bin build coverage.xml From 821300ba592feb23de5be2e5c2bd35d813bcb537 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 9 Nov 2024 13:25:19 +0000 Subject: [PATCH 0860/1511] [PR #9728/78fcb2cc backport][3.10] Fixes issues with test_web_urldispatcher when it encounters OS AF_UNIX too long (#9730) --- tests/test_web_urldispatcher.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index eca365d2a25..92066f09b7d 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -553,14 +553,15 @@ async def 
test_access_compressed_file_as_symlink( async def test_access_special_resource( - tmp_path_factory: pytest.TempPathFactory, aiohttp_client: AiohttpClient + unix_sockname: str, aiohttp_client: AiohttpClient ) -> None: """Test access to non-regular files is forbidden using a UNIX domain socket.""" if not getattr(socket, "AF_UNIX", None): pytest.skip("UNIX domain sockets not supported") - tmp_path = tmp_path_factory.mktemp("special") - my_special = tmp_path / "sock" + my_special = pathlib.Path(unix_sockname) + tmp_path = my_special.parent + my_socket = socket.socket(socket.AF_UNIX) my_socket.bind(str(my_special)) assert my_special.is_socket() From b188b5431db3742b50c1731eb12cb51b0d63c09e Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 9 Nov 2024 13:51:45 +0000 Subject: [PATCH 0861/1511] [PR #9728/78fcb2cc backport][3.11] Fixes issues with test_web_urldispatcher when it encounters OS AF_UNIX too long (#9731) --- tests/test_web_urldispatcher.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index eca365d2a25..92066f09b7d 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -553,14 +553,15 @@ async def test_access_compressed_file_as_symlink( async def test_access_special_resource( - tmp_path_factory: pytest.TempPathFactory, aiohttp_client: AiohttpClient + unix_sockname: str, aiohttp_client: AiohttpClient ) -> None: """Test access to non-regular files is forbidden using a UNIX domain socket.""" if not getattr(socket, "AF_UNIX", None): pytest.skip("UNIX domain sockets not supported") - tmp_path = tmp_path_factory.mktemp("special") - my_special = tmp_path / "sock" + my_special = pathlib.Path(unix_sockname) + tmp_path = my_special.parent + my_socket = socket.socket(socket.AF_UNIX) my_socket.bind(str(my_special)) assert my_special.is_socket() From a406b18afebcd28cb136bd6de3a69ed957d6d5e7 Mon 
Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sat, 9 Nov 2024 14:18:22 +0000 Subject: [PATCH 0862/1511] [PR #9726/c3a9c3e backport][3.10] Make ``enable_cleanup_closed`` a NOOP for Python 3.12.7+ and 3.13.1+ (#9734) --- CHANGES/9726.misc.rst | 1 + aiohttp/connector.py | 19 +++++++++++++++ docs/client_reference.rst | 6 ++++- tests/conftest.py | 13 +++++++++- tests/test_connector.py | 50 +++++++++++++++++++++++++++++++-------- tests/test_proxy.py | 4 +++- 6 files changed, 80 insertions(+), 13 deletions(-) create mode 100644 CHANGES/9726.misc.rst diff --git a/CHANGES/9726.misc.rst b/CHANGES/9726.misc.rst new file mode 100644 index 00000000000..460c48b7995 --- /dev/null +++ b/CHANGES/9726.misc.rst @@ -0,0 +1 @@ +Passing ``enable_cleanup_closed`` to :py:class:`aiohttp.TCPConnector` is now ignored on Python 3.12.7+ and 3.13.1+ since the underlying bug that caused asyncio to leak SSL connections has been fixed upstream -- by :user:`bdraco`. diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 4665b952e0f..85862ac0097 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -76,6 +76,14 @@ HTTP_AND_EMPTY_SCHEMA_SET = HTTP_SCHEMA_SET | EMPTY_SCHEMA_SET HIGH_LEVEL_SCHEMA_SET = HTTP_AND_EMPTY_SCHEMA_SET | WS_SCHEMA_SET +NEEDS_CLEANUP_CLOSED = (3, 13, 0) <= sys.version_info < ( + 3, + 13, + 1, +) or sys.version_info < (3, 12, 7) +# Cleanup closed is no longer needed after https://github.com/python/cpython/pull/118960 +# which first appeared in Python 3.12.7 and 3.13.1 + __all__ = ("BaseConnector", "TCPConnector", "UnixConnector", "NamedPipeConnector") @@ -279,6 +287,17 @@ def __init__( # start cleanup closed transports task self._cleanup_closed_handle: Optional[asyncio.TimerHandle] = None + + if enable_cleanup_closed and not NEEDS_CLEANUP_CLOSED: + warnings.warn( + "enable_cleanup_closed ignored because " + "https://github.com/python/cpython/pull/118960 is fixed in " + f"in Python version {sys.version_info}", + DeprecationWarning, + 
stacklevel=2, + ) + enable_cleanup_closed = False + self._cleanup_closed_disabled = not enable_cleanup_closed self._cleanup_closed_transports: List[Optional[asyncio.Transport]] = [] self._cleanup_closed() diff --git a/docs/client_reference.rst b/docs/client_reference.rst index c48e87e14cb..2ba39e37594 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -967,10 +967,14 @@ is controlled by *force_close* constructor's parameter). connection releasing (optional). :param bool enable_cleanup_closed: some SSL servers do not properly complete - SSL shutdown process, in that case asyncio leaks ssl connections. + SSL shutdown process, in that case asyncio leaks SSL connections. If this parameter is set to True, aiohttp additionally aborts underlining transport after 2 seconds. It is off by default. + For Python version 3.12.7+, or 3.13.1 and later, + this parameter is ignored because the asyncio SSL connection + leak is fixed in these versions of Python. + :param loop: :ref:`event loop<asyncio-event-loop>` used for handling connections. diff --git a/tests/conftest.py b/tests/conftest.py index 85fcac94138..cbce50c3c13 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -7,7 +7,7 @@ from hashlib import md5, sha1, sha256 from pathlib import Path from tempfile import TemporaryDirectory -from typing import Any +from typing import Any, Generator from unittest import mock from uuid import uuid4 @@ -238,3 +238,14 @@ def key(key_data: Any): @pytest.fixture def ws_key(key: Any): return base64.b64encode(sha1(key + WS_KEY).digest()).decode() + + +@pytest.fixture +def enable_cleanup_closed() -> Generator[None, None, None]: + """Fixture to override the NEEDS_CLEANUP_CLOSED flag. + + On Python 3.12.7+ and 3.13.1+ enable_cleanup_closed is not needed, + however we still want to test that it works. 
+ """ + with mock.patch("aiohttp.connector.NEEDS_CLEANUP_CLOSED", True): + yield diff --git a/tests/test_connector.py b/tests/test_connector.py index ca302371ef0..930a7dc0f0f 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -14,6 +14,7 @@ import pytest from aiohappyeyeballs import AddrInfoType +from pytest_mock import MockerFixture from yarl import URL import aiohttp @@ -359,6 +360,7 @@ async def test_get_expired(loop: asyncio.AbstractEventLoop) -> None: await conn.close() +@pytest.mark.usefixtures("enable_cleanup_closed") async def test_get_expired_ssl(loop: asyncio.AbstractEventLoop) -> None: conn = aiohttp.BaseConnector(enable_cleanup_closed=True) key = ConnectionKey("localhost", 80, True, False, None, None, None) @@ -425,9 +427,16 @@ async def test_release(loop, key) -> None: await conn.close() -async def test_release_ssl_transport(loop, ssl_key) -> None: - conn = aiohttp.BaseConnector(loop=loop, enable_cleanup_closed=True) - conn._release_waiter = mock.Mock() +@pytest.mark.usefixtures("enable_cleanup_closed") +async def test_release_ssl_transport( + loop: asyncio.AbstractEventLoop, ssl_key: ConnectionKey +) -> None: + conn = aiohttp.BaseConnector(enable_cleanup_closed=True) + with mock.patch.object(conn, "_release_waiter", autospec=True, spec_set=True): + proto = create_mocked_conn(loop) + transport = proto.transport + conn._acquired.add(proto) + conn._acquired_per_host[ssl_key].add(proto) proto = mock.Mock() transport = proto.transport @@ -1682,6 +1691,7 @@ async def test_close_during_connect(loop: asyncio.AbstractEventLoop) -> None: assert proto.close.called +@pytest.mark.usefixtures("enable_cleanup_closed") async def test_ctor_cleanup() -> None: loop = mock.Mock() loop.time.return_value = 1.5 @@ -1712,8 +1722,11 @@ async def test_cleanup(key) -> None: assert conn._cleanup_handle is None -async def test_cleanup_close_ssl_transport(ssl_key) -> None: - proto = mock.Mock() +@pytest.mark.usefixtures("enable_cleanup_closed") +async def 
test_cleanup_close_ssl_transport( + loop: asyncio.AbstractEventLoop, ssl_key: ConnectionKey +) -> None: + proto = create_mocked_conn(loop) transport = proto.transport testset = {ssl_key: [(proto, 10)]} @@ -1769,7 +1782,10 @@ async def test_cleanup3(key) -> None: await conn.close() -async def test_cleanup_closed(loop, mocker) -> None: +@pytest.mark.usefixtures("enable_cleanup_closed") +async def test_cleanup_closed( + loop: asyncio.AbstractEventLoop, mocker: MockerFixture +) -> None: if not hasattr(loop, "__dict__"): pytest.skip("can not override loop attributes") @@ -1786,8 +1802,19 @@ async def test_cleanup_closed(loop, mocker) -> None: assert cleanup_closed_handle.cancel.called -async def test_cleanup_closed_disabled(loop, mocker) -> None: - conn = aiohttp.BaseConnector(loop=loop, enable_cleanup_closed=False) +async def test_cleanup_closed_is_noop_on_fixed_cpython() -> None: + """Ensure that enable_cleanup_closed is a noop on fixed Python versions.""" + with mock.patch("aiohttp.connector.NEEDS_CLEANUP_CLOSED", False), pytest.warns( + DeprecationWarning, match="cleanup_closed ignored" + ): + conn = aiohttp.BaseConnector(enable_cleanup_closed=True) + assert conn._cleanup_closed_disabled is True + + +async def test_cleanup_closed_disabled( + loop: asyncio.AbstractEventLoop, mocker: MockerFixture +) -> None: + conn = aiohttp.BaseConnector(enable_cleanup_closed=False) tr = mock.Mock() conn._cleanup_closed_transports = [tr] @@ -2293,8 +2320,11 @@ async def test_close_abort_closed_transports(loop: asyncio.AbstractEventLoop) -> assert conn.closed -async def test_close_cancels_cleanup_closed_handle(loop) -> None: - conn = aiohttp.BaseConnector(loop=loop, enable_cleanup_closed=True) +@pytest.mark.usefixtures("enable_cleanup_closed") +async def test_close_cancels_cleanup_closed_handle( + loop: asyncio.AbstractEventLoop, +) -> None: + conn = aiohttp.BaseConnector(enable_cleanup_closed=True) assert conn._cleanup_closed_handle is not None await conn.close() assert 
conn._cleanup_closed_handle is None diff --git a/tests/test_proxy.py b/tests/test_proxy.py index 4fa5e932098..f063eb84761 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -384,7 +384,9 @@ async def make_conn(): autospec=True, spec_set=True, ) - def test_https_connect(self, start_connection: Any, ClientRequestMock: Any) -> None: + def test_https_connect( + self, start_connection: mock.Mock, ClientRequestMock: mock.Mock + ) -> None: proxy_req = ClientRequest( "GET", URL("http://proxy.example.com"), loop=self.loop ) From 68b2e8222ff09b145119117d6fff0c61975c2b01 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sat, 9 Nov 2024 14:33:42 +0000 Subject: [PATCH 0863/1511] [PR #9726/c3a9c3e backport][3.11] Make ``enable_cleanup_closed`` a NOOP for Python 3.12.7+ and 3.13.1+ (#9735) --- CHANGES/9726.misc.rst | 1 + aiohttp/connector.py | 19 +++++++++++++++ docs/client_reference.rst | 6 ++++- tests/conftest.py | 13 +++++++++- tests/test_connector.py | 50 +++++++++++++++++++++++++++++++-------- tests/test_proxy.py | 1 + 6 files changed, 78 insertions(+), 12 deletions(-) create mode 100644 CHANGES/9726.misc.rst diff --git a/CHANGES/9726.misc.rst b/CHANGES/9726.misc.rst new file mode 100644 index 00000000000..460c48b7995 --- /dev/null +++ b/CHANGES/9726.misc.rst @@ -0,0 +1 @@ +Passing ``enable_cleanup_closed`` to :py:class:`aiohttp.TCPConnector` is now ignored on Python 3.12.7+ and 3.13.1+ since the underlying bug that caused asyncio to leak SSL connections has been fixed upstream -- by :user:`bdraco`. 
diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 282d0a97802..7f512363be2 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -76,6 +76,14 @@ HTTP_AND_EMPTY_SCHEMA_SET = HTTP_SCHEMA_SET | EMPTY_SCHEMA_SET HIGH_LEVEL_SCHEMA_SET = HTTP_AND_EMPTY_SCHEMA_SET | WS_SCHEMA_SET +NEEDS_CLEANUP_CLOSED = (3, 13, 0) <= sys.version_info < ( + 3, + 13, + 1, +) or sys.version_info < (3, 12, 7) +# Cleanup closed is no longer needed after https://github.com/python/cpython/pull/118960 +# which first appeared in Python 3.12.7 and 3.13.1 + __all__ = ("BaseConnector", "TCPConnector", "UnixConnector", "NamedPipeConnector") @@ -284,6 +292,17 @@ def __init__( # start cleanup closed transports task self._cleanup_closed_handle: Optional[asyncio.TimerHandle] = None + + if enable_cleanup_closed and not NEEDS_CLEANUP_CLOSED: + warnings.warn( + "enable_cleanup_closed ignored because " + "https://github.com/python/cpython/pull/118960 is fixed in " + f"in Python version {sys.version_info}", + DeprecationWarning, + stacklevel=2, + ) + enable_cleanup_closed = False + self._cleanup_closed_disabled = not enable_cleanup_closed self._cleanup_closed_transports: List[Optional[asyncio.Transport]] = [] self._cleanup_closed() diff --git a/docs/client_reference.rst b/docs/client_reference.rst index caa38666405..c9031de5383 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -979,10 +979,14 @@ is controlled by *force_close* constructor's parameter). connection releasing (optional). :param bool enable_cleanup_closed: some SSL servers do not properly complete - SSL shutdown process, in that case asyncio leaks ssl connections. + SSL shutdown process, in that case asyncio leaks SSL connections. If this parameter is set to True, aiohttp additionally aborts underlining transport after 2 seconds. It is off by default. 
+ For Python version 3.12.7+, or 3.13.1 and later, + this parameter is ignored because the asyncio SSL connection + leak is fixed in these versions of Python. + :param loop: :ref:`event loop<asyncio-event-loop>` used for handling connections. diff --git a/tests/conftest.py b/tests/conftest.py index 85fcac94138..cbce50c3c13 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -7,7 +7,7 @@ from hashlib import md5, sha1, sha256 from pathlib import Path from tempfile import TemporaryDirectory -from typing import Any +from typing import Any, Generator from unittest import mock from uuid import uuid4 @@ -238,3 +238,14 @@ def key(key_data: Any): @pytest.fixture def ws_key(key: Any): return base64.b64encode(sha1(key + WS_KEY).digest()).decode() + + +@pytest.fixture +def enable_cleanup_closed() -> Generator[None, None, None]: + """Fixture to override the NEEDS_CLEANUP_CLOSED flag. + + On Python 3.12.7+ and 3.13.1+ enable_cleanup_closed is not needed, + however we still want to test that it works. 
+ """ + with mock.patch("aiohttp.connector.NEEDS_CLEANUP_CLOSED", True): + yield diff --git a/tests/test_connector.py b/tests/test_connector.py index fe0d52eb09c..f5aefacf399 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -15,6 +15,7 @@ import pytest from aiohappyeyeballs import AddrInfoType +from pytest_mock import MockerFixture from yarl import URL import aiohttp @@ -355,6 +356,7 @@ async def test_get_expired(loop: asyncio.AbstractEventLoop) -> None: await conn.close() +@pytest.mark.usefixtures("enable_cleanup_closed") async def test_get_expired_ssl(loop: asyncio.AbstractEventLoop) -> None: conn = aiohttp.BaseConnector(enable_cleanup_closed=True) key = ConnectionKey("localhost", 80, True, False, None, None, None) @@ -421,9 +423,16 @@ async def test_release(loop, key) -> None: await conn.close() -async def test_release_ssl_transport(loop, ssl_key) -> None: - conn = aiohttp.BaseConnector(loop=loop, enable_cleanup_closed=True) - conn._release_waiter = mock.Mock() +@pytest.mark.usefixtures("enable_cleanup_closed") +async def test_release_ssl_transport( + loop: asyncio.AbstractEventLoop, ssl_key: ConnectionKey +) -> None: + conn = aiohttp.BaseConnector(enable_cleanup_closed=True) + with mock.patch.object(conn, "_release_waiter", autospec=True, spec_set=True): + proto = create_mocked_conn(loop) + transport = proto.transport + conn._acquired.add(proto) + conn._acquired_per_host[ssl_key].add(proto) proto = mock.Mock() transport = proto.transport @@ -1678,6 +1687,7 @@ async def test_close_during_connect(loop: asyncio.AbstractEventLoop) -> None: assert proto.close.called +@pytest.mark.usefixtures("enable_cleanup_closed") async def test_ctor_cleanup() -> None: loop = mock.Mock() loop.time.return_value = 1.5 @@ -1711,8 +1721,11 @@ async def test_cleanup(key: ConnectionKey) -> None: assert conn._cleanup_handle is None -async def test_cleanup_close_ssl_transport(ssl_key) -> None: - proto = mock.Mock() +@pytest.mark.usefixtures("enable_cleanup_closed") 
+async def test_cleanup_close_ssl_transport( + loop: asyncio.AbstractEventLoop, ssl_key: ConnectionKey +) -> None: + proto = create_mocked_conn(loop) transport = proto.transport testset: DefaultDict[ConnectionKey, Deque[Tuple[ResponseHandler, float]]] = ( defaultdict(deque) @@ -1779,7 +1792,10 @@ async def test_cleanup3(loop: asyncio.AbstractEventLoop, key: ConnectionKey) -> await conn.close() -async def test_cleanup_closed(loop, mocker) -> None: +@pytest.mark.usefixtures("enable_cleanup_closed") +async def test_cleanup_closed( + loop: asyncio.AbstractEventLoop, mocker: MockerFixture +) -> None: if not hasattr(loop, "__dict__"): pytest.skip("can not override loop attributes") @@ -1796,8 +1812,19 @@ async def test_cleanup_closed(loop, mocker) -> None: assert cleanup_closed_handle.cancel.called -async def test_cleanup_closed_disabled(loop, mocker) -> None: - conn = aiohttp.BaseConnector(loop=loop, enable_cleanup_closed=False) +async def test_cleanup_closed_is_noop_on_fixed_cpython() -> None: + """Ensure that enable_cleanup_closed is a noop on fixed Python versions.""" + with mock.patch("aiohttp.connector.NEEDS_CLEANUP_CLOSED", False), pytest.warns( + DeprecationWarning, match="cleanup_closed ignored" + ): + conn = aiohttp.BaseConnector(enable_cleanup_closed=True) + assert conn._cleanup_closed_disabled is True + + +async def test_cleanup_closed_disabled( + loop: asyncio.AbstractEventLoop, mocker: MockerFixture +) -> None: + conn = aiohttp.BaseConnector(enable_cleanup_closed=False) tr = mock.Mock() conn._cleanup_closed_transports = [tr] @@ -2303,8 +2330,11 @@ async def test_close_abort_closed_transports(loop: asyncio.AbstractEventLoop) -> assert conn.closed -async def test_close_cancels_cleanup_closed_handle(loop) -> None: - conn = aiohttp.BaseConnector(loop=loop, enable_cleanup_closed=True) +@pytest.mark.usefixtures("enable_cleanup_closed") +async def test_close_cancels_cleanup_closed_handle( + loop: asyncio.AbstractEventLoop, +) -> None: + conn = 
aiohttp.BaseConnector(enable_cleanup_closed=True) assert conn._cleanup_closed_handle is not None await conn.close() assert conn._cleanup_closed_handle is None diff --git a/tests/test_proxy.py b/tests/test_proxy.py index c98ae7c2653..2fedafb4595 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -384,6 +384,7 @@ async def make_conn(): autospec=True, spec_set=True, ) + @pytest.mark.usefixtures("enable_cleanup_closed") def test_https_connect_fingerprint_mismatch( self, start_connection: mock.Mock, ClientRequestMock: mock.Mock ) -> None: From d0057b5fe3bfe498c67f953025600b25f00ff59f Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 9 Nov 2024 15:39:16 +0100 Subject: [PATCH 0864/1511] =?UTF-8?q?[PR=20#9733/523d934e=20backport][3.10?= =?UTF-8?q?]=20=F0=9F=A7=AA=F0=9F=92=85=20Load=20`pytest-cov`=20early=20(#?= =?UTF-8?q?9737)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Sviatoslav Sydorenko (Святослав Сидоренко) <sviat@redhat.com> --- setup.cfg | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.cfg b/setup.cfg index 9b697780a88..445ca8670ae 100644 --- a/setup.cfg +++ b/setup.cfg @@ -131,6 +131,7 @@ addopts = --showlocals # `pytest-cov`: + -p pytest_cov --cov=aiohttp --cov=tests/ From b30bffb5065bad18a10792b94c0cfe32a8e6b33c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 9 Nov 2024 15:39:23 +0100 Subject: [PATCH 0865/1511] =?UTF-8?q?[PR=20#9733/523d934e=20backport][3.11?= =?UTF-8?q?]=20=F0=9F=A7=AA=F0=9F=92=85=20Load=20`pytest-cov`=20early=20(#?= =?UTF-8?q?9738)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Sviatoslav Sydorenko (Святослав Сидоренко) <sviat@redhat.com> --- setup.cfg | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.cfg b/setup.cfg index f23e477035f..5acb71fdf55 100644 --- a/setup.cfg +++ 
b/setup.cfg @@ -138,6 +138,7 @@ addopts = --showlocals # `pytest-cov`: + -p pytest_cov --cov=aiohttp --cov=tests/ From 5602ebb2bd37f94f5ef5fa7a3d213a5ca7f620fa Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 9 Nov 2024 15:12:45 +0000 Subject: [PATCH 0866/1511] [PR #9736/5ddeb06f backport][3.10] Fix typo in enable_cleanup_closed warning message (#9739) --- CHANGES/9736.misc.rst | 1 + aiohttp/connector.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 120000 CHANGES/9736.misc.rst diff --git a/CHANGES/9736.misc.rst b/CHANGES/9736.misc.rst new file mode 120000 index 00000000000..98c0ac8ac1d --- /dev/null +++ b/CHANGES/9736.misc.rst @@ -0,0 +1 @@ +9726.misc.rst \ No newline at end of file diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 85862ac0097..5eac5d63111 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -291,7 +291,7 @@ def __init__( if enable_cleanup_closed and not NEEDS_CLEANUP_CLOSED: warnings.warn( "enable_cleanup_closed ignored because " - "https://github.com/python/cpython/pull/118960 is fixed in " + "https://github.com/python/cpython/pull/118960 is fixed " f"in Python version {sys.version_info}", DeprecationWarning, stacklevel=2, From ffcf886a2c23f0f0d60fb6eea814eb82deb6b7e7 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 9 Nov 2024 15:37:06 +0000 Subject: [PATCH 0867/1511] [PR #9736/5ddeb06f backport][3.11] Fix typo in enable_cleanup_closed warning message (#9740) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/9736.misc.rst | 1 + aiohttp/connector.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 120000 CHANGES/9736.misc.rst diff --git a/CHANGES/9736.misc.rst b/CHANGES/9736.misc.rst new file mode 120000 index 00000000000..98c0ac8ac1d --- /dev/null +++ b/CHANGES/9736.misc.rst @@ -0,0 +1 @@ +9726.misc.rst \ No newline at end of file diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 7f512363be2..928651d80f3 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -296,7 +296,7 @@ def __init__( if enable_cleanup_closed and not NEEDS_CLEANUP_CLOSED: warnings.warn( "enable_cleanup_closed ignored because " - "https://github.com/python/cpython/pull/118960 is fixed in " + "https://github.com/python/cpython/pull/118960 is fixed " f"in Python version {sys.version_info}", DeprecationWarning, stacklevel=2, From 1986b0a6f1c4abd345b6786deb5242c19b4e7582 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sat, 9 Nov 2024 18:30:13 +0000 Subject: [PATCH 0868/1511] [PR #9741/acc2912 backport][3.10] Fix race getting an unused port when there are multiple test runners (#9743) --- tests/conftest.py | 17 ++++- tests/test_run_app.py | 132 +++++++++++++++++++++++++-------------- tests/test_test_utils.py | 14 ++++- tests/test_web_server.py | 21 ++++--- tests/test_worker.py | 14 ++--- 5 files changed, 128 insertions(+), 70 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index cbce50c3c13..44ae384b633 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -15,7 +15,7 @@ from aiohttp.client_proto import ResponseHandler from aiohttp.http import WS_KEY -from aiohttp.test_utils import loop_context +from aiohttp.test_utils import get_unused_port_socket, loop_context try: import trustme @@ -249,3 +249,18 @@ def enable_cleanup_closed() -> Generator[None, None, None]: """ with mock.patch("aiohttp.connector.NEEDS_CLEANUP_CLOSED", True): yield + + +@pytest.fixture +def unused_port_socket() 
-> Generator[socket.socket, None, None]: + """Return a socket that is unused on the current host. + + Unlike aiohttp_used_port, the socket is yielded so there is no + race condition between checking if the port is in use and + binding to it later in the test. + """ + s = get_unused_port_socket("127.0.0.1") + try: + yield s + finally: + s.close() diff --git a/tests/test_run_app.py b/tests/test_run_app.py index 74d8c79bf55..9332d4aa96c 100644 --- a/tests/test_run_app.py +++ b/tests/test_run_app.py @@ -9,7 +9,16 @@ import subprocess import sys import time -from typing import AsyncIterator, Callable, NoReturn, Set +from typing import ( + AsyncIterator, + Awaitable, + Callable, + Coroutine, + NoReturn, + Optional, + Set, + Tuple, +) from unittest import mock from uuid import uuid4 @@ -440,8 +449,10 @@ def test_run_app_https(patched_loop) -> None: ) -def test_run_app_nondefault_host_port(patched_loop, aiohttp_unused_port) -> None: - port = aiohttp_unused_port() +def test_run_app_nondefault_host_port( + patched_loop: asyncio.AbstractEventLoop, unused_port_socket: socket.socket +) -> None: + port = unused_port_socket.getsockname()[1] host = "127.0.0.1" app = web.Application() @@ -454,7 +465,24 @@ def test_run_app_nondefault_host_port(patched_loop, aiohttp_unused_port) -> None ) -def test_run_app_multiple_hosts(patched_loop) -> None: +def test_run_app_with_sock( + patched_loop: asyncio.AbstractEventLoop, unused_port_socket: socket.socket +) -> None: + sock = unused_port_socket + app = web.Application() + web.run_app( + app, + sock=sock, + print=stopper(patched_loop), + loop=patched_loop, + ) + + patched_loop.create_server.assert_called_with( # type: ignore[attr-defined] + mock.ANY, sock=sock, ssl=None, backlog=128 + ) + + +def test_run_app_multiple_hosts(patched_loop: asyncio.AbstractEventLoop) -> None: hosts = ("127.0.0.1", "127.0.0.2") app = web.Application() @@ -931,8 +959,16 @@ async def stop(self, request: web.Request) -> web.Response: 
asyncio.get_running_loop().call_soon(self.raiser) return web.Response() - def run_app(self, port: int, timeout: int, task, extra_test=None) -> asyncio.Task: + def run_app( + self, + sock: socket.socket, + timeout: int, + task: Callable[[], Coroutine[None, None, None]], + extra_test: Optional[Callable[[ClientSession], Awaitable[None]]] = None, + ) -> Tuple["asyncio.Task[None]", int]: num_connections = -1 + t = test_task = None + port = sock.getsockname()[1] class DictRecordClear(dict): def clear(self): @@ -956,7 +992,7 @@ async def test() -> None: try: with pytest.raises(asyncio.TimeoutError): async with sess.get( - f"http://localhost:{port}/", + f"http://127.0.0.1:{port}/", timeout=ClientTimeout(total=0.1), ): pass @@ -964,7 +1000,7 @@ async def test() -> None: await asyncio.sleep(0.5) else: break - async with sess.get(f"http://localhost:{port}/stop"): + async with sess.get(f"http://127.0.0.1:{port}/stop"): pass if extra_test: @@ -989,14 +1025,12 @@ async def handler(request: web.Request) -> web.Response: app.router.add_get("/stop", self.stop) with mock.patch("aiohttp.web_app.Server", ServerWithRecordClear): - web.run_app(app, port=port, shutdown_timeout=timeout) + web.run_app(app, sock=sock, shutdown_timeout=timeout) assert test_task.exception() is None return t, num_connections - def test_shutdown_wait_for_handler( - self, aiohttp_unused_port: Callable[[], int] - ) -> None: - port = aiohttp_unused_port() + def test_shutdown_wait_for_handler(self, unused_port_socket: socket.socket) -> None: + sock = unused_port_socket finished = False async def task(): @@ -1004,17 +1038,15 @@ async def task(): await asyncio.sleep(2) finished = True - t, connection_count = self.run_app(port, 3, task) + t, connection_count = self.run_app(sock, 3, task) assert finished is True assert t.done() assert not t.cancelled() assert connection_count == 0 - def test_shutdown_timeout_handler( - self, aiohttp_unused_port: Callable[[], int] - ) -> None: - port = aiohttp_unused_port() + def 
test_shutdown_timeout_handler(self, unused_port_socket: socket.socket) -> None: + sock = unused_port_socket finished = False async def task(): @@ -1022,7 +1054,7 @@ async def task(): await asyncio.sleep(2) finished = True - t, connection_count = self.run_app(port, 1, task) + t, connection_count = self.run_app(sock, 1, task) assert finished is False assert t.done() @@ -1030,9 +1062,9 @@ async def task(): assert connection_count == 1 def test_shutdown_timeout_not_reached( - self, aiohttp_unused_port: Callable[[], int] + self, unused_port_socket: socket.socket ) -> None: - port = aiohttp_unused_port() + sock = unused_port_socket finished = False async def task(): @@ -1041,7 +1073,8 @@ async def task(): finished = True start_time = time.time() - t, connection_count = self.run_app(port, 15, task) + + t, connection_count = self.run_app(sock, 15, task) assert finished is True assert t.done() @@ -1050,9 +1083,10 @@ async def task(): assert time.time() - start_time < 10 def test_shutdown_new_conn_rejected( - self, aiohttp_unused_port: Callable[[], int] + self, unused_port_socket: socket.socket ) -> None: - port = aiohttp_unused_port() + sock = unused_port_socket + port = sock.getsockname()[1] finished = False async def task() -> None: @@ -1066,25 +1100,26 @@ async def test(sess: ClientSession) -> None: with pytest.raises(ClientConnectorError): # Use a new session to try and open a new connection. 
async with ClientSession() as sess: - async with sess.get(f"http://localhost:{port}/"): + async with sess.get(f"http://127.0.0.1:{port}/"): pass assert finished is False - t, connection_count = self.run_app(port, 10, task, test) + t, connection_count = self.run_app(sock, 10, task, test) assert finished is True assert t.done() assert connection_count == 0 def test_shutdown_pending_handler_responds( - self, aiohttp_unused_port: Callable[[], int] + self, unused_port_socket: socket.socket ) -> None: - port = aiohttp_unused_port() + sock = unused_port_socket + port = sock.getsockname()[1] finished = False async def test() -> None: - async def test_resp(sess): - async with sess.get(f"http://localhost:{port}/") as resp: + async def test_resp(sess: ClientSession) -> None: + async with sess.get(f"http://127.0.0.1:{port}/") as resp: assert await resp.text() == "FOO" await asyncio.sleep(1) @@ -1092,7 +1127,7 @@ async def test_resp(sess): t = asyncio.create_task(test_resp(sess)) await asyncio.sleep(1) # Handler is in-progress while we trigger server shutdown. 
- async with sess.get(f"http://localhost:{port}/stop"): + async with sess.get(f"http://127.0.0.1:{port}/stop"): pass assert finished is False @@ -1117,19 +1152,22 @@ async def handler(request: web.Request) -> web.Response: app.router.add_get("/", handler) app.router.add_get("/stop", self.stop) - web.run_app(app, port=port, shutdown_timeout=5) + web.run_app(app, sock=sock, shutdown_timeout=5) + assert t is not None assert t.exception() is None assert finished is True def test_shutdown_close_idle_keepalive( - self, aiohttp_unused_port: Callable[[], int] + self, unused_port_socket: socket.socket ) -> None: - port = aiohttp_unused_port() + sock = unused_port_socket + port = sock.getsockname()[1] + t = None async def test() -> None: await asyncio.sleep(1) async with ClientSession() as sess: - async with sess.get(f"http://localhost:{port}/stop"): + async with sess.get(f"http://127.0.0.1:{port}/stop"): pass # Hold on to keep-alive connection. @@ -1148,15 +1186,14 @@ async def run_test(app: web.Application) -> None: app.cleanup_ctx.append(run_test) app.router.add_get("/stop", self.stop) - web.run_app(app, port=port, shutdown_timeout=10) + web.run_app(app, sock=sock, shutdown_timeout=10) # If connection closed, then test() will be cancelled in cleanup_ctx. # If not, then shutdown_timeout will allow it to sleep until complete. 
assert t.cancelled() - def test_shutdown_close_websockets( - self, aiohttp_unused_port: Callable[[], int] - ) -> None: - port = aiohttp_unused_port() + def test_shutdown_close_websockets(self, unused_port_socket: socket.socket) -> None: + sock = unused_port_socket + port = sock.getsockname()[1] WS = web.AppKey("ws", Set[web.WebSocketResponse]) client_finished = server_finished = False @@ -1177,8 +1214,8 @@ async def close_websockets(app: web.Application) -> None: async def test() -> None: await asyncio.sleep(1) async with ClientSession() as sess: - async with sess.ws_connect(f"http://localhost:{port}/ws") as ws: - async with sess.get(f"http://localhost:{port}/stop"): + async with sess.ws_connect(f"http://127.0.0.1:{port}/ws") as ws: + async with sess.get(f"http://127.0.0.1:{port}/stop"): pass async for msg in ws: @@ -1203,22 +1240,23 @@ async def run_test(app: web.Application) -> None: app.router.add_get("/stop", self.stop) start = time.time() - web.run_app(app, port=port, shutdown_timeout=10) + web.run_app(app, sock=sock, shutdown_timeout=10) assert time.time() - start < 5 assert client_finished assert server_finished def test_shutdown_handler_cancellation_suppressed( - self, aiohttp_unused_port: Callable[[], int] + self, unused_port_socket: socket.socket ) -> None: - port = aiohttp_unused_port() + sock = unused_port_socket + port = sock.getsockname()[1] actions = [] async def test() -> None: async def test_resp(sess): t = ClientTimeout(total=0.4) with pytest.raises(asyncio.TimeoutError): - async with sess.get(f"http://localhost:{port}/", timeout=t) as resp: + async with sess.get(f"http://127.0.0.1:{port}/", timeout=t) as resp: assert await resp.text() == "FOO" actions.append("CANCELLED") @@ -1227,7 +1265,7 @@ async def test_resp(sess): await asyncio.sleep(0.5) # Handler is in-progress while we trigger server shutdown. 
actions.append("PRESTOP") - async with sess.get(f"http://localhost:{port}/stop"): + async with sess.get(f"http://127.0.0.1:{port}/stop"): pass actions.append("STOPPING") @@ -1255,6 +1293,6 @@ async def handler(request: web.Request) -> web.Response: app.router.add_get("/", handler) app.router.add_get("/stop", self.stop) - web.run_app(app, port=port, shutdown_timeout=2, handler_cancellation=True) + web.run_app(app, sock=sock, shutdown_timeout=2, handler_cancellation=True) assert t.exception() is None assert actions == ["CANCELLED", "SUPPRESSED", "PRESTOP", "STOPPING", "DONE"] diff --git a/tests/test_test_utils.py b/tests/test_test_utils.py index a9c5179aedc..4bbcd2e9de2 100644 --- a/tests/test_test_utils.py +++ b/tests/test_test_utils.py @@ -1,3 +1,4 @@ +import asyncio import gzip from socket import socket from typing import Any @@ -340,9 +341,16 @@ async def test_client_context_manager_response(method, app, loop) -> None: assert "Hello, world" in text -async def test_custom_port(loop, app, aiohttp_unused_port) -> None: - port = aiohttp_unused_port() - client = _TestClient(_TestServer(app, loop=loop, port=port), loop=loop) +async def test_custom_port( + loop: asyncio.AbstractEventLoop, + app: web.Application, + unused_port_socket: socket, +) -> None: + sock = unused_port_socket + port = sock.getsockname()[1] + client = _TestClient( + _TestServer(app, port=port, socket_factory=lambda *args, **kwargs: sock) + ) await client.start_server() assert client.server.port == port diff --git a/tests/test_web_server.py b/tests/test_web_server.py index 14d78e23a85..9e2d078c1a0 100644 --- a/tests/test_web_server.py +++ b/tests/test_web_server.py @@ -1,4 +1,5 @@ import asyncio +import socket from contextlib import suppress from unittest import mock @@ -169,9 +170,10 @@ async def handler(request): logger.exception.assert_called_with("Error handling request", exc_info=exc) -async def test_handler_cancellation(aiohttp_unused_port) -> None: +async def 
test_handler_cancellation(unused_port_socket: socket.socket) -> None: event = asyncio.Event() - port = aiohttp_unused_port() + sock = unused_port_socket + port = sock.getsockname()[1] async def on_request(_: web.Request) -> web.Response: nonlocal event @@ -189,10 +191,9 @@ async def on_request(_: web.Request) -> web.Response: runner = web.AppRunner(app, handler_cancellation=True) await runner.setup() - site = web.TCPSite(runner, host="localhost", port=port) + site = web.SockSite(runner, sock=sock) await site.start() - try: assert runner.server.handler_cancellation, "Flag was not propagated" @@ -200,7 +201,7 @@ async def on_request(_: web.Request) -> web.Response: timeout=client.ClientTimeout(total=0.1) ) as sess: with pytest.raises(asyncio.TimeoutError): - await sess.get(f"http://localhost:{port}/") + await sess.get(f"http://127.0.0.1:{port}/") with suppress(asyncio.TimeoutError): await asyncio.wait_for(event.wait(), timeout=1) @@ -209,10 +210,11 @@ async def on_request(_: web.Request) -> web.Response: await asyncio.gather(runner.shutdown(), site.stop()) -async def test_no_handler_cancellation(aiohttp_unused_port) -> None: +async def test_no_handler_cancellation(unused_port_socket: socket.socket) -> None: timeout_event = asyncio.Event() done_event = asyncio.Event() - port = aiohttp_unused_port() + sock = unused_port_socket + port = sock.getsockname()[1] started = False async def on_request(_: web.Request) -> web.Response: @@ -228,16 +230,15 @@ async def on_request(_: web.Request) -> web.Response: runner = web.AppRunner(app) await runner.setup() - site = web.TCPSite(runner, host="localhost", port=port) + site = web.SockSite(runner, sock=sock) await site.start() - try: async with client.ClientSession( timeout=client.ClientTimeout(total=0.2) ) as sess: with pytest.raises(asyncio.TimeoutError): - await sess.get(f"http://localhost:{port}/") + await sess.get(f"http://127.0.0.1:{port}/") await asyncio.sleep(0.1) timeout_event.set() diff --git a/tests/test_worker.py 
b/tests/test_worker.py index 00d180707b1..60d1e8b088b 100644 --- a/tests/test_worker.py +++ b/tests/test_worker.py @@ -3,7 +3,7 @@ import os import socket import ssl -from typing import TYPE_CHECKING, Callable, Dict, Optional +from typing import TYPE_CHECKING, Dict, Optional from unittest import mock import pytest @@ -209,13 +209,11 @@ def test__get_valid_log_format_exc(worker: base_worker.GunicornWebWorker) -> Non async def test__run_ok_parent_changed( worker: base_worker.GunicornWebWorker, loop: asyncio.AbstractEventLoop, - aiohttp_unused_port: Callable[[], int], + unused_port_socket: socket.socket, ) -> None: worker.ppid = 0 worker.alive = True - sock = socket.socket() - addr = ("localhost", aiohttp_unused_port()) - sock.bind(addr) + sock = unused_port_socket worker.sockets = [sock] worker.log = mock.Mock() worker.loop = loop @@ -232,13 +230,11 @@ async def test__run_ok_parent_changed( async def test__run_exc( worker: base_worker.GunicornWebWorker, loop: asyncio.AbstractEventLoop, - aiohttp_unused_port: Callable[[], int], + unused_port_socket: socket.socket, ) -> None: worker.ppid = os.getppid() worker.alive = True - sock = socket.socket() - addr = ("localhost", aiohttp_unused_port()) - sock.bind(addr) + sock = unused_port_socket worker.sockets = [sock] worker.log = mock.Mock() worker.loop = loop From a59c96a33bb39b6bd5ace4ef1719e80a46f7a936 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sat, 9 Nov 2024 18:43:34 +0000 Subject: [PATCH 0869/1511] [PR #9741/acc2912 backport][3.11] Fix race getting an unused port when there are multiple test runners (#9744) --- tests/conftest.py | 17 ++++- tests/test_run_app.py | 132 +++++++++++++++++++++++++-------------- tests/test_test_utils.py | 13 +++- tests/test_web_server.py | 21 ++++--- tests/test_worker.py | 14 ++--- 5 files changed, 127 insertions(+), 70 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index cbce50c3c13..44ae384b633 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -15,7 +15,7 @@ from aiohttp.client_proto import ResponseHandler from aiohttp.http import WS_KEY -from aiohttp.test_utils import loop_context +from aiohttp.test_utils import get_unused_port_socket, loop_context try: import trustme @@ -249,3 +249,18 @@ def enable_cleanup_closed() -> Generator[None, None, None]: """ with mock.patch("aiohttp.connector.NEEDS_CLEANUP_CLOSED", True): yield + + +@pytest.fixture +def unused_port_socket() -> Generator[socket.socket, None, None]: + """Return a socket that is unused on the current host. + + Unlike aiohttp_used_port, the socket is yielded so there is no + race condition between checking if the port is in use and + binding to it later in the test. 
+ """ + s = get_unused_port_socket("127.0.0.1") + try: + yield s + finally: + s.close() diff --git a/tests/test_run_app.py b/tests/test_run_app.py index 74d8c79bf55..9332d4aa96c 100644 --- a/tests/test_run_app.py +++ b/tests/test_run_app.py @@ -9,7 +9,16 @@ import subprocess import sys import time -from typing import AsyncIterator, Callable, NoReturn, Set +from typing import ( + AsyncIterator, + Awaitable, + Callable, + Coroutine, + NoReturn, + Optional, + Set, + Tuple, +) from unittest import mock from uuid import uuid4 @@ -440,8 +449,10 @@ def test_run_app_https(patched_loop) -> None: ) -def test_run_app_nondefault_host_port(patched_loop, aiohttp_unused_port) -> None: - port = aiohttp_unused_port() +def test_run_app_nondefault_host_port( + patched_loop: asyncio.AbstractEventLoop, unused_port_socket: socket.socket +) -> None: + port = unused_port_socket.getsockname()[1] host = "127.0.0.1" app = web.Application() @@ -454,7 +465,24 @@ def test_run_app_nondefault_host_port(patched_loop, aiohttp_unused_port) -> None ) -def test_run_app_multiple_hosts(patched_loop) -> None: +def test_run_app_with_sock( + patched_loop: asyncio.AbstractEventLoop, unused_port_socket: socket.socket +) -> None: + sock = unused_port_socket + app = web.Application() + web.run_app( + app, + sock=sock, + print=stopper(patched_loop), + loop=patched_loop, + ) + + patched_loop.create_server.assert_called_with( # type: ignore[attr-defined] + mock.ANY, sock=sock, ssl=None, backlog=128 + ) + + +def test_run_app_multiple_hosts(patched_loop: asyncio.AbstractEventLoop) -> None: hosts = ("127.0.0.1", "127.0.0.2") app = web.Application() @@ -931,8 +959,16 @@ async def stop(self, request: web.Request) -> web.Response: asyncio.get_running_loop().call_soon(self.raiser) return web.Response() - def run_app(self, port: int, timeout: int, task, extra_test=None) -> asyncio.Task: + def run_app( + self, + sock: socket.socket, + timeout: int, + task: Callable[[], Coroutine[None, None, None]], + extra_test: 
Optional[Callable[[ClientSession], Awaitable[None]]] = None, + ) -> Tuple["asyncio.Task[None]", int]: num_connections = -1 + t = test_task = None + port = sock.getsockname()[1] class DictRecordClear(dict): def clear(self): @@ -956,7 +992,7 @@ async def test() -> None: try: with pytest.raises(asyncio.TimeoutError): async with sess.get( - f"http://localhost:{port}/", + f"http://127.0.0.1:{port}/", timeout=ClientTimeout(total=0.1), ): pass @@ -964,7 +1000,7 @@ async def test() -> None: await asyncio.sleep(0.5) else: break - async with sess.get(f"http://localhost:{port}/stop"): + async with sess.get(f"http://127.0.0.1:{port}/stop"): pass if extra_test: @@ -989,14 +1025,12 @@ async def handler(request: web.Request) -> web.Response: app.router.add_get("/stop", self.stop) with mock.patch("aiohttp.web_app.Server", ServerWithRecordClear): - web.run_app(app, port=port, shutdown_timeout=timeout) + web.run_app(app, sock=sock, shutdown_timeout=timeout) assert test_task.exception() is None return t, num_connections - def test_shutdown_wait_for_handler( - self, aiohttp_unused_port: Callable[[], int] - ) -> None: - port = aiohttp_unused_port() + def test_shutdown_wait_for_handler(self, unused_port_socket: socket.socket) -> None: + sock = unused_port_socket finished = False async def task(): @@ -1004,17 +1038,15 @@ async def task(): await asyncio.sleep(2) finished = True - t, connection_count = self.run_app(port, 3, task) + t, connection_count = self.run_app(sock, 3, task) assert finished is True assert t.done() assert not t.cancelled() assert connection_count == 0 - def test_shutdown_timeout_handler( - self, aiohttp_unused_port: Callable[[], int] - ) -> None: - port = aiohttp_unused_port() + def test_shutdown_timeout_handler(self, unused_port_socket: socket.socket) -> None: + sock = unused_port_socket finished = False async def task(): @@ -1022,7 +1054,7 @@ async def task(): await asyncio.sleep(2) finished = True - t, connection_count = self.run_app(port, 1, task) + t, 
connection_count = self.run_app(sock, 1, task) assert finished is False assert t.done() @@ -1030,9 +1062,9 @@ async def task(): assert connection_count == 1 def test_shutdown_timeout_not_reached( - self, aiohttp_unused_port: Callable[[], int] + self, unused_port_socket: socket.socket ) -> None: - port = aiohttp_unused_port() + sock = unused_port_socket finished = False async def task(): @@ -1041,7 +1073,8 @@ async def task(): finished = True start_time = time.time() - t, connection_count = self.run_app(port, 15, task) + + t, connection_count = self.run_app(sock, 15, task) assert finished is True assert t.done() @@ -1050,9 +1083,10 @@ async def task(): assert time.time() - start_time < 10 def test_shutdown_new_conn_rejected( - self, aiohttp_unused_port: Callable[[], int] + self, unused_port_socket: socket.socket ) -> None: - port = aiohttp_unused_port() + sock = unused_port_socket + port = sock.getsockname()[1] finished = False async def task() -> None: @@ -1066,25 +1100,26 @@ async def test(sess: ClientSession) -> None: with pytest.raises(ClientConnectorError): # Use a new session to try and open a new connection. 
async with ClientSession() as sess: - async with sess.get(f"http://localhost:{port}/"): + async with sess.get(f"http://127.0.0.1:{port}/"): pass assert finished is False - t, connection_count = self.run_app(port, 10, task, test) + t, connection_count = self.run_app(sock, 10, task, test) assert finished is True assert t.done() assert connection_count == 0 def test_shutdown_pending_handler_responds( - self, aiohttp_unused_port: Callable[[], int] + self, unused_port_socket: socket.socket ) -> None: - port = aiohttp_unused_port() + sock = unused_port_socket + port = sock.getsockname()[1] finished = False async def test() -> None: - async def test_resp(sess): - async with sess.get(f"http://localhost:{port}/") as resp: + async def test_resp(sess: ClientSession) -> None: + async with sess.get(f"http://127.0.0.1:{port}/") as resp: assert await resp.text() == "FOO" await asyncio.sleep(1) @@ -1092,7 +1127,7 @@ async def test_resp(sess): t = asyncio.create_task(test_resp(sess)) await asyncio.sleep(1) # Handler is in-progress while we trigger server shutdown. 
- async with sess.get(f"http://localhost:{port}/stop"): + async with sess.get(f"http://127.0.0.1:{port}/stop"): pass assert finished is False @@ -1117,19 +1152,22 @@ async def handler(request: web.Request) -> web.Response: app.router.add_get("/", handler) app.router.add_get("/stop", self.stop) - web.run_app(app, port=port, shutdown_timeout=5) + web.run_app(app, sock=sock, shutdown_timeout=5) + assert t is not None assert t.exception() is None assert finished is True def test_shutdown_close_idle_keepalive( - self, aiohttp_unused_port: Callable[[], int] + self, unused_port_socket: socket.socket ) -> None: - port = aiohttp_unused_port() + sock = unused_port_socket + port = sock.getsockname()[1] + t = None async def test() -> None: await asyncio.sleep(1) async with ClientSession() as sess: - async with sess.get(f"http://localhost:{port}/stop"): + async with sess.get(f"http://127.0.0.1:{port}/stop"): pass # Hold on to keep-alive connection. @@ -1148,15 +1186,14 @@ async def run_test(app: web.Application) -> None: app.cleanup_ctx.append(run_test) app.router.add_get("/stop", self.stop) - web.run_app(app, port=port, shutdown_timeout=10) + web.run_app(app, sock=sock, shutdown_timeout=10) # If connection closed, then test() will be cancelled in cleanup_ctx. # If not, then shutdown_timeout will allow it to sleep until complete. 
assert t.cancelled() - def test_shutdown_close_websockets( - self, aiohttp_unused_port: Callable[[], int] - ) -> None: - port = aiohttp_unused_port() + def test_shutdown_close_websockets(self, unused_port_socket: socket.socket) -> None: + sock = unused_port_socket + port = sock.getsockname()[1] WS = web.AppKey("ws", Set[web.WebSocketResponse]) client_finished = server_finished = False @@ -1177,8 +1214,8 @@ async def close_websockets(app: web.Application) -> None: async def test() -> None: await asyncio.sleep(1) async with ClientSession() as sess: - async with sess.ws_connect(f"http://localhost:{port}/ws") as ws: - async with sess.get(f"http://localhost:{port}/stop"): + async with sess.ws_connect(f"http://127.0.0.1:{port}/ws") as ws: + async with sess.get(f"http://127.0.0.1:{port}/stop"): pass async for msg in ws: @@ -1203,22 +1240,23 @@ async def run_test(app: web.Application) -> None: app.router.add_get("/stop", self.stop) start = time.time() - web.run_app(app, port=port, shutdown_timeout=10) + web.run_app(app, sock=sock, shutdown_timeout=10) assert time.time() - start < 5 assert client_finished assert server_finished def test_shutdown_handler_cancellation_suppressed( - self, aiohttp_unused_port: Callable[[], int] + self, unused_port_socket: socket.socket ) -> None: - port = aiohttp_unused_port() + sock = unused_port_socket + port = sock.getsockname()[1] actions = [] async def test() -> None: async def test_resp(sess): t = ClientTimeout(total=0.4) with pytest.raises(asyncio.TimeoutError): - async with sess.get(f"http://localhost:{port}/", timeout=t) as resp: + async with sess.get(f"http://127.0.0.1:{port}/", timeout=t) as resp: assert await resp.text() == "FOO" actions.append("CANCELLED") @@ -1227,7 +1265,7 @@ async def test_resp(sess): await asyncio.sleep(0.5) # Handler is in-progress while we trigger server shutdown. 
actions.append("PRESTOP") - async with sess.get(f"http://localhost:{port}/stop"): + async with sess.get(f"http://127.0.0.1:{port}/stop"): pass actions.append("STOPPING") @@ -1255,6 +1293,6 @@ async def handler(request: web.Request) -> web.Response: app.router.add_get("/", handler) app.router.add_get("/stop", self.stop) - web.run_app(app, port=port, shutdown_timeout=2, handler_cancellation=True) + web.run_app(app, sock=sock, shutdown_timeout=2, handler_cancellation=True) assert t.exception() is None assert actions == ["CANCELLED", "SUPPRESSED", "PRESTOP", "STOPPING", "DONE"] diff --git a/tests/test_test_utils.py b/tests/test_test_utils.py index 241e7e8cc64..a4866c73f3e 100644 --- a/tests/test_test_utils.py +++ b/tests/test_test_utils.py @@ -375,9 +375,16 @@ async def test_client_context_manager_response(method, app, loop) -> None: assert "Hello, world" in text -async def test_custom_port(loop, app, aiohttp_unused_port) -> None: - port = aiohttp_unused_port() - client = _TestClient(TestServer(app, loop=loop, port=port), loop=loop) +async def test_custom_port( + loop: asyncio.AbstractEventLoop, + app: web.Application, + unused_port_socket: socket, +) -> None: + sock = unused_port_socket + port = sock.getsockname()[1] + client = _TestClient( + TestServer(app, port=port, socket_factory=lambda *args, **kwargs: sock) + ) await client.start_server() assert client.server.port == port diff --git a/tests/test_web_server.py b/tests/test_web_server.py index 14d78e23a85..9e2d078c1a0 100644 --- a/tests/test_web_server.py +++ b/tests/test_web_server.py @@ -1,4 +1,5 @@ import asyncio +import socket from contextlib import suppress from unittest import mock @@ -169,9 +170,10 @@ async def handler(request): logger.exception.assert_called_with("Error handling request", exc_info=exc) -async def test_handler_cancellation(aiohttp_unused_port) -> None: +async def test_handler_cancellation(unused_port_socket: socket.socket) -> None: event = asyncio.Event() - port = aiohttp_unused_port() + 
sock = unused_port_socket + port = sock.getsockname()[1] async def on_request(_: web.Request) -> web.Response: nonlocal event @@ -189,10 +191,9 @@ async def on_request(_: web.Request) -> web.Response: runner = web.AppRunner(app, handler_cancellation=True) await runner.setup() - site = web.TCPSite(runner, host="localhost", port=port) + site = web.SockSite(runner, sock=sock) await site.start() - try: assert runner.server.handler_cancellation, "Flag was not propagated" @@ -200,7 +201,7 @@ async def on_request(_: web.Request) -> web.Response: timeout=client.ClientTimeout(total=0.1) ) as sess: with pytest.raises(asyncio.TimeoutError): - await sess.get(f"http://localhost:{port}/") + await sess.get(f"http://127.0.0.1:{port}/") with suppress(asyncio.TimeoutError): await asyncio.wait_for(event.wait(), timeout=1) @@ -209,10 +210,11 @@ async def on_request(_: web.Request) -> web.Response: await asyncio.gather(runner.shutdown(), site.stop()) -async def test_no_handler_cancellation(aiohttp_unused_port) -> None: +async def test_no_handler_cancellation(unused_port_socket: socket.socket) -> None: timeout_event = asyncio.Event() done_event = asyncio.Event() - port = aiohttp_unused_port() + sock = unused_port_socket + port = sock.getsockname()[1] started = False async def on_request(_: web.Request) -> web.Response: @@ -228,16 +230,15 @@ async def on_request(_: web.Request) -> web.Response: runner = web.AppRunner(app) await runner.setup() - site = web.TCPSite(runner, host="localhost", port=port) + site = web.SockSite(runner, sock=sock) await site.start() - try: async with client.ClientSession( timeout=client.ClientTimeout(total=0.2) ) as sess: with pytest.raises(asyncio.TimeoutError): - await sess.get(f"http://localhost:{port}/") + await sess.get(f"http://127.0.0.1:{port}/") await asyncio.sleep(0.1) timeout_event.set() diff --git a/tests/test_worker.py b/tests/test_worker.py index 00d180707b1..60d1e8b088b 100644 --- a/tests/test_worker.py +++ b/tests/test_worker.py @@ -3,7 +3,7 @@ 
import os import socket import ssl -from typing import TYPE_CHECKING, Callable, Dict, Optional +from typing import TYPE_CHECKING, Dict, Optional from unittest import mock import pytest @@ -209,13 +209,11 @@ def test__get_valid_log_format_exc(worker: base_worker.GunicornWebWorker) -> Non async def test__run_ok_parent_changed( worker: base_worker.GunicornWebWorker, loop: asyncio.AbstractEventLoop, - aiohttp_unused_port: Callable[[], int], + unused_port_socket: socket.socket, ) -> None: worker.ppid = 0 worker.alive = True - sock = socket.socket() - addr = ("localhost", aiohttp_unused_port()) - sock.bind(addr) + sock = unused_port_socket worker.sockets = [sock] worker.log = mock.Mock() worker.loop = loop @@ -232,13 +230,11 @@ async def test__run_ok_parent_changed( async def test__run_exc( worker: base_worker.GunicornWebWorker, loop: asyncio.AbstractEventLoop, - aiohttp_unused_port: Callable[[], int], + unused_port_socket: socket.socket, ) -> None: worker.ppid = os.getppid() worker.alive = True - sock = socket.socket() - addr = ("localhost", aiohttp_unused_port()) - sock.bind(addr) + sock = unused_port_socket worker.sockets = [sock] worker.log = mock.Mock() worker.loop = loop From 96d66a075d99ae19170b17f13972668fb14cbcdb Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sat, 9 Nov 2024 19:45:17 +0000 Subject: [PATCH 0870/1511] [PR #9745/0d10447 backport][3.10] Speed up preparing headers by avoiding populating cookies when there are no cookies (#9748) Co-authored-by: Emmanuel Okedele <emmanuel@coefficient.ai> Co-authored-by: pre-commit-ci[bot] Co-authored-by: J. 
Nick Koston <nick@koston.org> Co-authored-by: Emmanuel Okedele <80680311+Ok3ks@users.noreply.github.com> --- aiohttp/web_response.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 57bce1b06ad..49ca24ee21a 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -448,9 +448,10 @@ async def _prepare_headers(self) -> None: version = request.version headers = self._headers - for cookie in self._cookies.values(): - value = cookie.output(header="")[1:] - headers.add(hdrs.SET_COOKIE, value) + if self._cookies: + for cookie in self._cookies.values(): + value = cookie.output(header="")[1:] + headers.add(hdrs.SET_COOKIE, value) if self._compression: await self._start_compression(request) @@ -498,9 +499,8 @@ async def _prepare_headers(self) -> None: if keep_alive: if version == HttpVersion10: headers[hdrs.CONNECTION] = "keep-alive" - else: - if version == HttpVersion11: - headers[hdrs.CONNECTION] = "close" + elif version == HttpVersion11: + headers[hdrs.CONNECTION] = "close" async def _write_headers(self) -> None: request = self._req From 8482975cf9e9f1911a756820c150410af2d8456c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sat, 9 Nov 2024 19:45:26 +0000 Subject: [PATCH 0871/1511] [PR #5431/751c3c4 backport][3.11] Re-introduce `pytest-xdist` in supported envs (#9747) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: J. 
Nick Koston <nick@koston.org> Co-authored-by: Sviatoslav Sydorenko (Святослав Сидоренко) <sviat@redhat.com> --- .github/workflows/ci-cd.yml | 7 +- requirements/constraints.txt | 23 ++++- requirements/test.in | 3 +- requirements/test.txt | 10 +- setup.cfg | 3 + tests/test_client_request.py | 22 +++-- tests/test_connector.py | 165 ++++++++++++++++++++++++++++----- tests/test_imports.py | 13 ++- tests/test_proxy_functional.py | 4 + 9 files changed, 213 insertions(+), 37 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index fe266dd7e3b..ef59b56234e 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -204,14 +204,15 @@ jobs: COLOR: yes AIOHTTP_NO_EXTENSIONS: ${{ matrix.no-extensions }} run: >- # `exit 1` makes sure that the job remains red with flaky runs - pytest --no-cov -vvvvv --lf && exit 1 + pytest --no-cov --numprocesses=0 -vvvvv --lf && exit 1 shell: bash - name: Run dev_mode tests env: COLOR: yes AIOHTTP_NO_EXTENSIONS: ${{ matrix.no-extensions }} PIP_USER: 1 - run: python -X dev -m pytest -m dev_mode --cov-append + PYTHONDEVMODE: 1 + run: pytest -m dev_mode --cov-append --numprocesses=0 shell: bash - name: Turn coverage into xml env: @@ -276,7 +277,7 @@ jobs: uses: CodSpeedHQ/action@v3 with: token: ${{ secrets.CODSPEED_TOKEN }} - run: python -Im pytest --no-cov -vvvvv --codspeed + run: python -Im pytest --no-cov --numprocesses=0 -vvvvv --codspeed check: # This job does nothing and is only used for the branch protection diff --git a/requirements/constraints.txt b/requirements/constraints.txt index ac846b0b100..20b1705e6a7 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -18,6 +18,8 @@ alabaster==0.7.13 # via sphinx annotated-types==0.7.0 # via pydantic +apipkg==1.5 + # via execnet async-timeout==4.0.3 ; python_version < "3.11" # via # -r requirements/runtime-deps.in @@ -71,6 +73,8 @@ filelock==3.16.1 # via # pytest-codspeed # virtualenv +execnet==2.1.1 + # via 
pytest-xdist freezegun==1.5.1 # via # -r requirements/lint.in @@ -144,7 +148,11 @@ propcache==0.2.0 # -r requirements/runtime-deps.in # yarl proxy-py==2.4.9 - # via -r requirements/test.in + # via + # -r requirements/lint.in + # -r requirements/test.in +py==1.11.0 + # via pytest pycares==4.4.0 # via aiodns pycparser==2.22 @@ -174,12 +182,25 @@ pytest==8.3.3 # pytest-codspeed # pytest-cov # pytest-mock + # pytest-xdist pytest-codspeed==3.0.0 # via # -r requirements/lint.in # -r requirements/test.in pytest-cov==5.0.0 # via -r requirements/test.in +pytest-mock==3.14.0 + # via -r requirements/test.in +pytest-xdist==3.6.1 + # via -r requirements/test.txt +python-dateutil==2.8.2 + # via freezegun +python-on-whales==0.71.0 + # via + # -r requirements/lint.in + # -r requirements/test.in +pytest-cov==5.0.0 + # via -r requirements/test.in pytest-mock==3.14.0 # via # -r requirements/lint.in diff --git a/requirements/test.in b/requirements/test.in index e16e39fe9b3..6686b373758 100644 --- a/requirements/test.in +++ b/requirements/test.in @@ -3,10 +3,11 @@ coverage freezegun mypy; implementation_name == "cpython" -proxy.py >= 2.4.4rc4 +proxy.py >= 2.4.4rc5 pytest pytest-cov pytest-mock +pytest-xdist pytest_codspeed python-on-whales re-assert diff --git a/requirements/test.txt b/requirements/test.txt index 1d28725481e..95551d58600 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -39,6 +39,8 @@ cryptography==43.0.3 # via trustme exceptiongroup==1.2.2 # via pytest +execnet==2.1.1 + # via pytest-xdist filelock==3.16.1 # via pytest-codspeed freezegun==1.5.1 @@ -80,6 +82,8 @@ propcache==0.2.0 # yarl proxy-py==2.4.9 # via -r requirements/test.in +py==1.11.0 + # via pytest pycares==4.4.0 # via aiodns pycparser==2.22 @@ -97,11 +101,15 @@ pytest==8.3.3 # pytest-cov # pytest-mock pytest-codspeed==3.0.0 - # via -r requirements/test.in + # via + # -r requirements/test.in + # pytest-xdist pytest-cov==5.0.0 # via -r requirements/test.in pytest-mock==3.14.0 # via -r 
requirements/test.in +pytest-xdist==3.6.1 + # via -r requirements/test.in python-dateutil==2.9.0.post0 # via freezegun python-on-whales==0.73.0 diff --git a/setup.cfg b/setup.cfg index 5acb71fdf55..2e0421f6c5d 100644 --- a/setup.cfg +++ b/setup.cfg @@ -125,6 +125,9 @@ exclude_lines = [tool:pytest] addopts = + # `pytest-xdist`: + --numprocesses=auto + # show 10 slowest invocations: --durations=10 diff --git a/tests/test_client_request.py b/tests/test_client_request.py index 9d4b3c46aa4..8947aa38944 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -744,14 +744,15 @@ async def test_urlencoded_formdata_charset(loop, conn) -> None: data=aiohttp.FormData({"hey": "you"}, charset="koi8-r"), loop=loop, ) - await req.send(conn) + async with await req.send(conn): + await asyncio.sleep(0) assert "application/x-www-form-urlencoded; charset=koi8-r" == req.headers.get( "CONTENT-TYPE" ) await req.close() -async def test_post_data(loop, conn) -> None: +async def test_post_data(loop: asyncio.AbstractEventLoop, conn: mock.Mock) -> None: for meth in ClientRequest.POST_METHODS: req = ClientRequest( meth, URL("http://python.org/"), data={"life": "42"}, loop=loop @@ -1080,10 +1081,12 @@ async def throw_exc(): loop.create_task(throw_exc()) - await req.send(conn) - await req._writer - # assert conn.close.called - assert conn.protocol.set_exception.called + async with await req.send(conn): + assert req._writer is not None + await req._writer + # assert conn.close.called + assert conn.protocol is not None + assert conn.protocol.set_exception.called await req.close() @@ -1105,9 +1108,10 @@ async def throw_exc(): loop.create_task(throw_exc()) - await req.send(conn) - await req._writer - # assert connection.close.called + async with await req.send(conn): + assert req._writer is not None + await req._writer + # assert conn.close.called assert conn.protocol.set_exception.called outer_exc = conn.protocol.set_exception.call_args[0][0] assert isinstance(outer_exc, 
ClientConnectionError) diff --git a/tests/test_connector.py b/tests/test_connector.py index f5aefacf399..d956de2a2ed 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -258,6 +258,8 @@ async def test_create_conn(loop) -> None: with pytest.raises(NotImplementedError): await conn._create_connection(object(), [], object()) + await conn.close() + async def test_connector_context_manager(loop) -> None: conn = aiohttp.BaseConnector(loop=loop) @@ -554,6 +556,8 @@ async def test_release_close(loop, key) -> None: assert not conn._conns assert proto.close.called + await conn.close() + async def test__release_acquired_per_host1( loop: asyncio.AbstractEventLoop, key: ConnectionKey @@ -562,6 +566,8 @@ async def test__release_acquired_per_host1( conn._release_acquired(key, create_mocked_conn(loop)) assert len(conn._acquired_per_host) == 0 + await conn.close() + async def test__release_acquired_per_host2( loop: asyncio.AbstractEventLoop, key: ConnectionKey @@ -572,6 +578,8 @@ async def test__release_acquired_per_host2( conn._release_acquired(key, handler) assert len(conn._acquired_per_host) == 0 + await conn.close() + async def test__release_acquired_per_host3( loop: asyncio.AbstractEventLoop, key: ConnectionKey @@ -585,6 +593,8 @@ async def test__release_acquired_per_host3( assert len(conn._acquired_per_host) == 1 assert conn._acquired_per_host[key] == {handler2} + await conn.close() + async def test_tcp_connector_certificate_error( loop: Any, start_connection: mock.AsyncMock @@ -604,6 +614,8 @@ async def certificate_error(*args, **kwargs): assert isinstance(ctx.value.certificate_error, ssl.CertificateError) assert isinstance(ctx.value, aiohttp.ClientSSLError) + await conn.close() + async def test_tcp_connector_server_hostname_default( loop: Any, start_connection: mock.AsyncMock @@ -620,6 +632,8 @@ async def test_tcp_connector_server_hostname_default( with closing(await conn.connect(req, [], ClientTimeout())): assert 
create_connection.call_args.kwargs["server_hostname"] == "127.0.0.1" + await conn.close() + async def test_tcp_connector_server_hostname_override( loop: Any, start_connection: mock.AsyncMock @@ -638,6 +652,8 @@ async def test_tcp_connector_server_hostname_override( with closing(await conn.connect(req, [], ClientTimeout())): assert create_connection.call_args.kwargs["server_hostname"] == "localhost" + await conn.close() + async def test_tcp_connector_multiple_hosts_errors(loop) -> None: conn = aiohttp.TCPConnector(loop=loop) @@ -791,6 +807,8 @@ def get_extra_info(param): established_connection.close() + await conn.close() + @pytest.mark.parametrize( ("happy_eyeballs_delay"), @@ -865,6 +883,8 @@ async def create_connection(*args, **kwargs): established_connection.close() + await conn.close() + async def test_tcp_connector_interleave(loop: Any) -> None: conn = aiohttp.TCPConnector(interleave=2) @@ -942,6 +962,8 @@ async def create_connection(*args, **kwargs): assert interleave == 2 established_connection.close() + await conn.close() + async def test_tcp_connector_family_is_respected(loop: Any) -> None: conn = aiohttp.TCPConnector(family=socket.AF_INET) @@ -1112,6 +1134,8 @@ async def create_connection( established_connection.close() + await conn.close() + async def test_tcp_connector_resolve_host(loop: asyncio.AbstractEventLoop) -> None: conn = aiohttp.TCPConnector(use_dns_cache=True) @@ -1131,6 +1155,8 @@ async def test_tcp_connector_resolve_host(loop: asyncio.AbstractEventLoop) -> No else: assert rec["host"] == "::1" + await conn.close() + @pytest.fixture def dns_response(loop): @@ -1151,6 +1177,8 @@ async def test_tcp_connector_dns_cache_not_expired(loop, dns_response) -> None: await conn._resolve_host("localhost", 8080) m_resolver().resolve.assert_called_once_with("localhost", 8080, family=0) + await conn.close() + async def test_tcp_connector_dns_cache_forever(loop, dns_response) -> None: with mock.patch("aiohttp.connector.DefaultResolver") as m_resolver: @@ 
-1160,6 +1188,8 @@ async def test_tcp_connector_dns_cache_forever(loop, dns_response) -> None: await conn._resolve_host("localhost", 8080) m_resolver().resolve.assert_called_once_with("localhost", 8080, family=0) + await conn.close() + async def test_tcp_connector_use_dns_cache_disabled(loop, dns_response) -> None: with mock.patch("aiohttp.connector.DefaultResolver") as m_resolver: @@ -1174,6 +1204,8 @@ async def test_tcp_connector_use_dns_cache_disabled(loop, dns_response) -> None: ] ) + await conn.close() + async def test_tcp_connector_dns_throttle_requests(loop, dns_response) -> None: with mock.patch("aiohttp.connector.DefaultResolver") as m_resolver: @@ -1185,6 +1217,8 @@ async def test_tcp_connector_dns_throttle_requests(loop, dns_response) -> None: await asyncio.sleep(0) m_resolver().resolve.assert_called_once_with("localhost", 8080, family=0) + await conn.close() + async def test_tcp_connector_dns_throttle_requests_exception_spread(loop) -> None: with mock.patch("aiohttp.connector.DefaultResolver") as m_resolver: @@ -1200,6 +1234,8 @@ async def test_tcp_connector_dns_throttle_requests_exception_spread(loop) -> Non assert r1.exception() == e assert r2.exception() == e + await conn.close() + async def test_tcp_connector_dns_throttle_requests_cancelled_when_close( loop, dns_response @@ -1218,6 +1254,8 @@ async def test_tcp_connector_dns_throttle_requests_cancelled_when_close( with pytest.raises(asyncio.CancelledError): await f + await conn.close() + @pytest.fixture def dns_response_error(loop): @@ -1259,6 +1297,8 @@ def exception_handler(loop, context): gc.collect() assert exception_handler_called is False + await conn.close() + async def test_tcp_connector_dns_tracing(loop, dns_response) -> None: session = mock.Mock() @@ -1302,6 +1342,8 @@ async def test_tcp_connector_dns_tracing(loop, dns_response) -> None: session, trace_config_ctx, aiohttp.TraceDnsCacheHitParams("localhost") ) + await conn.close() + async def 
test_tcp_connector_dns_tracing_cache_disabled(loop, dns_response) -> None: session = mock.Mock() @@ -1355,6 +1397,8 @@ async def test_tcp_connector_dns_tracing_cache_disabled(loop, dns_response) -> N ] ) + await conn.close() + async def test_tcp_connector_dns_tracing_throttle_requests(loop, dns_response) -> None: session = mock.Mock() @@ -1384,6 +1428,8 @@ async def test_tcp_connector_dns_tracing_throttle_requests(loop, dns_response) - session, trace_config_ctx, aiohttp.TraceDnsCacheMissParams("localhost") ) + await conn.close() + async def test_dns_error(loop) -> None: connector = aiohttp.TCPConnector(loop=loop) @@ -1396,6 +1442,8 @@ async def test_dns_error(loop) -> None: with pytest.raises(aiohttp.ClientConnectorError): await connector.connect(req, [], ClientTimeout()) + await connector.close() + async def test_get_pop_empty_conns(loop) -> None: # see issue #473 @@ -1405,6 +1453,8 @@ async def test_get_pop_empty_conns(loop) -> None: assert await conn._get(key, []) is None assert not conn._conns + await conn.close() + async def test_release_close_do_not_add_to_pool(loop, key) -> None: # see issue #473 @@ -1416,6 +1466,8 @@ async def test_release_close_do_not_add_to_pool(loop, key) -> None: conn._release(key, proto) assert not conn._conns + await conn.close() + async def test_release_close_do_not_delete_existing_connections(key) -> None: proto1 = mock.Mock() @@ -1453,6 +1505,8 @@ async def test_release_not_opened(loop, key) -> None: conn._release(key, proto) assert proto.close.called + await conn.close() + async def test_connect(loop, key) -> None: proto = mock.Mock() @@ -1473,6 +1527,8 @@ async def test_connect(loop, key) -> None: assert isinstance(connection, Connection) connection.close() + await conn.close() + async def test_connect_tracing(loop) -> None: session = mock.Mock() @@ -1582,6 +1638,8 @@ async def test_exception_during_connection_queued_tracing( assert not conn._acquired assert key not in conn._acquired_per_host + await conn.close() + async def 
test_exception_during_connection_reuse_tracing( loop: asyncio.AbstractEventLoop, @@ -1697,8 +1755,18 @@ async def test_ctor_cleanup() -> None: assert conn._cleanup_handle is None assert conn._cleanup_closed_handle is not None + await conn.close() + async def test_cleanup(key: ConnectionKey) -> None: + # The test sets the clock to 300s. It starts with 2 connections in the + # pool. The first connection has use time of 10s. When cleanup reaches it, + # it computes the deadline = 300 - 15.0 = 285.0 (15s being the default + # keep-alive timeout value), then checks that it's overdue because + # 10 - 285.0 < 0, and releases it since it's in connected state. The second + # connection, though, is in disconnected state so it doesn't bother to + # check if it's past due and closes the underlying transport. + m1 = mock.Mock() m2 = mock.Mock() m1.is_connected.return_value = True @@ -1710,15 +1778,16 @@ async def test_cleanup(key: ConnectionKey) -> None: loop = mock.Mock() loop.time.return_value = 300 - conn = aiohttp.BaseConnector(loop=loop) - conn._conns = testset - existing_handle = conn._cleanup_handle = mock.Mock() + async with aiohttp.BaseConnector() as conn: + conn._loop = loop + conn._conns = testset + existing_handle = conn._cleanup_handle = mock.Mock() - with mock.patch("aiohttp.connector.monotonic", return_value=300): - conn._cleanup() - assert existing_handle.cancel.called - assert conn._conns == {} - assert conn._cleanup_handle is None + with mock.patch("aiohttp.connector.monotonic", return_value=300): + conn._cleanup() + assert existing_handle.cancel.called + assert conn._conns == {} + assert conn._cleanup_handle is None @pytest.mark.usefixtures("enable_cleanup_closed") @@ -1746,6 +1815,9 @@ async def test_cleanup_close_ssl_transport( assert conn._conns == {} assert conn._cleanup_closed_transports == [transport] + await conn.close() + await asyncio.sleep(0) # Give cleanup a chance to close transports + async def test_cleanup2(loop: asyncio.AbstractEventLoop, key: 
ConnectionKey) -> None: m = create_mocked_conn() @@ -1811,6 +1883,8 @@ async def test_cleanup_closed( assert loop.call_at.called assert cleanup_closed_handle.cancel.called + await conn.close() + async def test_cleanup_closed_is_noop_on_fixed_cpython() -> None: """Ensure that enable_cleanup_closed is a noop on fixed Python versions.""" @@ -1832,6 +1906,8 @@ async def test_cleanup_closed_disabled( assert tr.abort.called assert not conn._cleanup_closed_transports + await conn.close() + async def test_tcp_connector_ctor() -> None: conn = aiohttp.TCPConnector() @@ -1840,6 +1916,8 @@ async def test_tcp_connector_ctor() -> None: assert conn.use_dns_cache assert conn.family == 0 + await conn.close() + async def test_tcp_connector_allowed_protocols(loop: asyncio.AbstractEventLoop) -> None: conn = aiohttp.TCPConnector() @@ -1858,6 +1936,8 @@ async def test_tcp_connector_ctor_fingerprint_valid( conn = aiohttp.TCPConnector(ssl=valid, loop=loop) assert conn._ssl is valid + await conn.close() + async def test_insecure_fingerprint_md5(loop) -> None: with pytest.raises(ValueError): @@ -1893,12 +1973,16 @@ async def test_tcp_connector_clear_dns_cache(loop) -> None: with pytest.raises(KeyError): conn._cached_hosts.next_addrs(("localhost", 124)) + await conn.close() + async def test_tcp_connector_clear_dns_cache_bad_args(loop) -> None: conn = aiohttp.TCPConnector(loop=loop) with pytest.raises(ValueError): conn.clear_dns_cache("localhost") + await conn.close() + async def test___get_ssl_context1() -> None: conn = aiohttp.TCPConnector() @@ -1906,6 +1990,8 @@ async def test___get_ssl_context1() -> None: req.is_ssl.return_value = False assert conn._get_ssl_context(req) is None + await conn.close() + async def test___get_ssl_context2(loop) -> None: ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) @@ -1915,6 +2001,8 @@ async def test___get_ssl_context2(loop) -> None: req.ssl = ctx assert conn._get_ssl_context(req) is ctx + await conn.close() + async def test___get_ssl_context3(loop) -> None: 
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) @@ -1924,6 +2012,8 @@ async def test___get_ssl_context3(loop) -> None: req.ssl = True assert conn._get_ssl_context(req) is ctx + await conn.close() + async def test___get_ssl_context4(loop) -> None: ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) @@ -1933,6 +2023,8 @@ async def test___get_ssl_context4(loop) -> None: req.ssl = False assert conn._get_ssl_context(req) is _SSL_CONTEXT_UNVERIFIED + await conn.close() + async def test___get_ssl_context5(loop) -> None: ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) @@ -1942,6 +2034,8 @@ async def test___get_ssl_context5(loop) -> None: req.ssl = aiohttp.Fingerprint(hashlib.sha256(b"1").digest()) assert conn._get_ssl_context(req) is _SSL_CONTEXT_UNVERIFIED + await conn.close() + async def test___get_ssl_context6() -> None: conn = aiohttp.TCPConnector() @@ -1950,6 +2044,8 @@ async def test___get_ssl_context6() -> None: req.ssl = True assert conn._get_ssl_context(req) is _SSL_CONTEXT_VERIFIED + await conn.close() + async def test_ssl_context_once() -> None: """Test the ssl context is created only once and shared between connectors.""" @@ -2344,6 +2440,8 @@ async def test_ctor_with_default_loop(loop) -> None: conn = aiohttp.BaseConnector() assert loop is conn._loop + await conn.close() + async def test_base_connector_allows_high_level_protocols( loop: asyncio.AbstractEventLoop, @@ -2605,22 +2703,25 @@ async def test_connect_with_limit_cancelled(loop) -> None: await conn.close() -async def test_connect_with_capacity_release_waiters(loop) -> None: - def check_with_exc(err): - conn = aiohttp.BaseConnector(limit=1, loop=loop) - conn._create_connection = mock.Mock() - conn._create_connection.return_value = loop.create_future() - conn._create_connection.return_value.set_exception(err) +async def test_connect_with_capacity_release_waiters( + loop: asyncio.AbstractEventLoop, +) -> None: + async def check_with_exc(err: Exception) -> None: + conn = aiohttp.BaseConnector(limit=1) + with 
mock.patch.object( + conn, "_create_connection", autospec=True, spec_set=True, side_effect=err + ): + with pytest.raises(Exception): + req = mock.Mock() + await conn.connect(req, [], ClientTimeout()) - with pytest.raises(Exception): - req = mock.Mock() - yield from conn.connect(req, None, ClientTimeout()) + assert not conn._waiters - assert not conn._waiters + await conn.close() - check_with_exc(OSError(1, "permission error")) - check_with_exc(RuntimeError()) - check_with_exc(asyncio.TimeoutError()) + await check_with_exc(OSError(1, "permission error")) + await check_with_exc(RuntimeError()) + await check_with_exc(asyncio.TimeoutError()) async def test_connect_with_limit_concurrent(loop) -> None: @@ -2700,6 +2801,8 @@ async def test_connect_waiters_cleanup(loop) -> None: await asyncio.sleep(0) assert not conn._waiters.keys() + await conn.close() + async def test_connect_waiters_cleanup_key_error(loop) -> None: proto = mock.Mock() @@ -2723,6 +2826,8 @@ async def test_connect_waiters_cleanup_key_error(loop) -> None: await asyncio.sleep(0) assert not conn._waiters.keys() == [] + await conn.close() + async def test_close_with_acquired_connection(loop) -> None: proto = mock.Mock() @@ -2754,6 +2859,8 @@ async def test_default_force_close(loop) -> None: connector = aiohttp.BaseConnector(loop=loop) assert not connector.force_close + await connector.close() + async def test_limit_property(loop) -> None: conn = aiohttp.BaseConnector(loop=loop, limit=15) @@ -2834,6 +2941,8 @@ async def create_connection(req, traces, timeout): assert proto in conn._acquired ret.release() + await conn.close() + async def test_cancelled_waiter(loop) -> None: conn = aiohttp.BaseConnector(limit=1, loop=loop) @@ -2856,6 +2965,8 @@ async def create_connection(req, traces=None): with pytest.raises(asyncio.CancelledError): await conn2 + await conn.close() + async def test_error_on_connection_with_cancelled_waiter(loop, key) -> None: conn = aiohttp.BaseConnector(limit=1, loop=loop) @@ -2906,6 +3017,8 
@@ async def create_connection(req, traces, timeout): assert proto in conn._acquired ret.release() + await conn.close() + async def test_tcp_connector(aiohttp_client, loop) -> None: async def handler(request): @@ -2975,6 +3088,8 @@ async def test_default_use_dns_cache() -> None: conn = aiohttp.TCPConnector() assert conn.use_dns_cache + await conn.close() + async def test_ssl_none() -> None: conn = aiohttp.TCPConnector(ssl=None) @@ -2997,6 +3112,8 @@ async def test_resolver_not_called_with_address_is_ip(loop) -> None: resolver.resolve.assert_not_called() + await connector.close() + async def test_tcp_connector_raise_connector_ssl_error( aiohttp_server, @@ -3025,6 +3142,8 @@ async def handler(request): await session.close() + await conn.close() + @pytest.mark.parametrize( "host", @@ -3281,6 +3400,8 @@ async def send_dns_cache_hit(self, *args, **kwargs): traces = [DummyTracer()] assert await connector._resolve_host("", 0, traces) == [token] + await connector.close() + async def test_connector_throttle_trace_race(loop): key = ("", 0) @@ -3301,6 +3422,8 @@ async def send_dns_cache_hit(self, *args: object, **kwargs: object) -> None: traces = [DummyTracer()] assert await connector._resolve_host("", 0, traces) == [token] + await connector.close() + async def test_connector_does_not_remove_needed_waiters( loop: asyncio.AbstractEventLoop, key: ConnectionKey diff --git a/tests/test_imports.py b/tests/test_imports.py index 7f35f5b8cc2..2db32fa1488 100644 --- a/tests/test_imports.py +++ b/tests/test_imports.py @@ -28,12 +28,23 @@ def test_web___all__(pytester: pytest.Pytester) -> None: result.assert_outcomes(passed=0, errors=0) +_IS_CI_ENV = os.getenv("CI") == "true" +_XDIST_WORKER_COUNT = int(os.getenv("PYTEST_XDIST_WORKER_COUNT", 0)) +_IS_XDIST_RUN = _XDIST_WORKER_COUNT > 1 + _TARGET_TIMINGS_BY_PYTHON_VERSION = { - "3.12": 250, # 3.12 is expected to be a bit slower due to performance trade-offs + "3.12": ( + # 3.12 is expected to be a bit slower due to performance trade-offs, 
+ # and even slower under pytest-xdist, especially in CI + _XDIST_WORKER_COUNT * 100 * (1 if _IS_CI_ENV else 1.53) + if _IS_XDIST_RUN + else 250 + ), } @pytest.mark.internal +@pytest.mark.dev_mode @pytest.mark.skipif( not sys.platform.startswith("linux") or platform.python_implementation() == "PyPy", reason="Timing is more reliable on Linux", diff --git a/tests/test_proxy_functional.py b/tests/test_proxy_functional.py index c15ca326288..5283b375834 100644 --- a/tests/test_proxy_functional.py +++ b/tests/test_proxy_functional.py @@ -99,6 +99,7 @@ async def handler(*args, **kwargs): # Filter out the warning from # https://github.com/abhinavsingh/proxy.py/blob/30574fd0414005dfa8792a6e797023e862bdcf43/proxy/common/utils.py#L226 # otherwise this test will fail because the proxy will die with an error. +@pytest.mark.usefixtures("loop") async def test_secure_https_proxy_absolute_path( client_ssl_ctx: ssl.SSLContext, secure_proxy_url: URL, @@ -119,6 +120,7 @@ async def test_secure_https_proxy_absolute_path( await sess.close() await conn.close() + await asyncio.sleep(0.1) # https://docs.aiohttp.org/en/v3.8.0/client_advanced.html#graceful-shutdown await asyncio.sleep(0.1) @@ -194,6 +196,8 @@ async def test_https_proxy_unsupported_tls_in_tls( await sess.close() await conn.close() + await asyncio.sleep(0.1) + @pytest.fixture def proxy_test_server(aiohttp_raw_server, loop, monkeypatch): From 5a4a4da27ee4ff65c3196d7f3f65c574565103cb Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sat, 9 Nov 2024 19:55:37 +0000 Subject: [PATCH 0872/1511] [PR #5431/751c3c4 backport][3.10] Re-introduce `pytest-xdist` in supported envs (#9746) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: J. 
Nick Koston <nick@koston.org> Co-authored-by: Sviatoslav Sydorenko (Святослав Сидоренко) <sviat@redhat.com> --- .github/workflows/ci-cd.yml | 7 +- requirements/constraints.txt | 35 +++++-- requirements/test.in | 3 +- requirements/test.txt | 9 +- setup.cfg | 3 + tests/test_client_request.py | 22 ++-- tests/test_connector.py | 183 ++++++++++++++++++++++++++++----- tests/test_imports.py | 13 ++- tests/test_proxy_functional.py | 4 + 9 files changed, 227 insertions(+), 52 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 61ec5594616..d3d9a0ddfdb 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -206,14 +206,15 @@ jobs: COLOR: yes AIOHTTP_NO_EXTENSIONS: ${{ matrix.no-extensions }} run: >- # `exit 1` makes sure that the job remains red with flaky runs - pytest --no-cov -vvvvv --lf && exit 1 + pytest --no-cov --numprocesses=0 -vvvvv --lf && exit 1 shell: bash - name: Run dev_mode tests env: COLOR: yes AIOHTTP_NO_EXTENSIONS: ${{ matrix.no-extensions }} PIP_USER: 1 - run: python -X dev -m pytest -m dev_mode --cov-append + PYTHONDEVMODE: 1 + run: pytest -m dev_mode --cov-append --numprocesses=0 shell: bash - name: Turn coverage into xml env: @@ -278,7 +279,7 @@ jobs: uses: CodSpeedHQ/action@v3 with: token: ${{ secrets.CODSPEED_TOKEN }} - run: python -Im pytest --no-cov -vvvvv --codspeed + run: python -Im pytest --no-cov --numprocesses=0 -vvvvv --codspeed check: # This job does nothing and is only used for the branch protection diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 30356d04664..250cf6b978b 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -1,8 +1,8 @@ # -# This file is autogenerated by pip-compile with python 3.8 -# To update, run: +# This file is autogenerated by pip-compile with Python 3.8 +# by the following command: # -# pip-compile --allow-unsafe --output-file=requirements/constraints.txt --resolver=backtracking --strip-extras 
requirements/constraints.in +# pip-compile --allow-unsafe --output-file=requirements/constraints.txt --strip-extras requirements/constraints.in # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via @@ -38,10 +38,11 @@ build==1.0.3 # via pip-tools certifi==2023.7.22 # via requests -cffi==1.17.0 +cffi==1.17.1 # via # cryptography # pycares + # pytest-codspeed cfgv==3.3.1 # via pre-commit charset-normalizer==3.2.0 @@ -72,10 +73,16 @@ docutils==0.20.1 # via sphinx exceptiongroup==1.1.2 # via pytest -filelock==3.3.2 - # via virtualenv +execnet==2.1.1 + # via pytest-xdist +filelock==3.16.1 + # via + # pytest-codspeed + # virtualenv freezegun==1.5.1 - # via -r requirements/test.in + # via + # -r requirements/lint.in + # -r requirements/test.in frozenlist==1.4.1 # via # -r requirements/runtime-deps.in @@ -170,11 +177,21 @@ pytest==8.3.2 # via # -r requirements/lint.in # -r requirements/test.in + # pytest-codspeed # pytest-cov # pytest-mock + # pytest-xdist +pytest-codspeed==2.2.1 + # via + # -r requirements/lint.in + # -r requirements/test.in pytest-cov==5.0.0 # via -r requirements/test.in pytest-mock==3.14.0 + # via + # -r requirements/lint.in + # -r requirements/test.in +pytest-xdist==3.6.1 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun @@ -247,7 +264,9 @@ towncrier==23.11.0 tqdm==4.62.3 # via python-on-whales trustme==1.1.0 ; platform_machine != "i686" - # via -r requirements/test.in + # via + # -r requirements/lint.in + # -r requirements/test.in typer==0.6.1 # via python-on-whales typing-extensions==4.12.2 diff --git a/requirements/test.in b/requirements/test.in index 801189ea72c..64ebb1412d6 100644 --- a/requirements/test.in +++ b/requirements/test.in @@ -4,10 +4,11 @@ coverage freezegun mypy; implementation_name == "cpython" -proxy.py >= 2.4.4rc4 +proxy.py >= 2.4.4rc5 pytest pytest-cov pytest-mock +pytest-xdist pytest_codspeed python-on-whales re-assert diff --git a/requirements/test.txt b/requirements/test.txt index 
c03b33fb228..bb9c96feb3c 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,8 +1,8 @@ # -# This file is autogenerated by pip-compile with python 3.8 +# This file is autogenerated by pip-compile with Python 3.8 # by the following command: # -# pip-compile --allow-unsafe --output-file=requirements/test.txt --resolver=backtracking --strip-extras requirements/test.in +# pip-compile --allow-unsafe --output-file=requirements/test.txt --strip-extras requirements/test.in # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in @@ -39,6 +39,8 @@ cryptography==41.0.2 # via trustme exceptiongroup==1.1.2 # via pytest +execnet==2.1.1 + # via pytest-xdist filelock==3.16.1 # via pytest-codspeed freezegun==1.5.1 @@ -86,12 +88,15 @@ pytest==8.3.2 # pytest-codspeed # pytest-cov # pytest-mock + # pytest-xdist pytest-codspeed==2.2.1 # via -r requirements/test.in pytest-cov==5.0.0 # via -r requirements/test.in pytest-mock==3.14.0 # via -r requirements/test.in +pytest-xdist==3.6.1 + # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun python-on-whales==0.72.0 diff --git a/setup.cfg b/setup.cfg index 445ca8670ae..de3fb2e9e63 100644 --- a/setup.cfg +++ b/setup.cfg @@ -118,6 +118,9 @@ exclude_lines = [tool:pytest] addopts = + # `pytest-xdist`: + --numprocesses=auto + # show 10 slowest invocations: --durations=10 diff --git a/tests/test_client_request.py b/tests/test_client_request.py index c9d61bf1fb7..bf2fd4b7bc0 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -751,14 +751,15 @@ async def test_urlencoded_formdata_charset(loop, conn) -> None: data=aiohttp.FormData({"hey": "you"}, charset="koi8-r"), loop=loop, ) - await req.send(conn) + async with await req.send(conn): + await asyncio.sleep(0) assert "application/x-www-form-urlencoded; charset=koi8-r" == req.headers.get( "CONTENT-TYPE" ) await req.close() -async def test_post_data(loop, conn) -> None: +async def 
test_post_data(loop: asyncio.AbstractEventLoop, conn: mock.Mock) -> None: for meth in ClientRequest.POST_METHODS: req = ClientRequest( meth, URL("http://python.org/"), data={"life": "42"}, loop=loop @@ -1087,10 +1088,12 @@ async def throw_exc(): loop.create_task(throw_exc()) - await req.send(conn) - await req._writer - # assert conn.close.called - assert conn.protocol.set_exception.called + async with await req.send(conn): + assert req._writer is not None + await req._writer + # assert conn.close.called + assert conn.protocol is not None + assert conn.protocol.set_exception.called await req.close() @@ -1112,9 +1115,10 @@ async def throw_exc(): loop.create_task(throw_exc()) - await req.send(conn) - await req._writer - # assert connection.close.called + async with await req.send(conn): + assert req._writer is not None + await req._writer + # assert conn.close.called assert conn.protocol.set_exception.called outer_exc = conn.protocol.set_exception.call_args[0][0] assert isinstance(outer_exc, ClientConnectionError) diff --git a/tests/test_connector.py b/tests/test_connector.py index 930a7dc0f0f..403e6501f13 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -7,9 +7,10 @@ import ssl import sys import uuid +from collections import defaultdict, deque from concurrent import futures from contextlib import closing, suppress -from typing import Any, List, Literal, Optional, Sequence, Tuple +from typing import Any, DefaultDict, Deque, List, Literal, Optional, Sequence, Tuple from unittest import mock import pytest @@ -262,6 +263,8 @@ async def test_create_conn(loop) -> None: with pytest.raises(NotImplementedError): await conn._create_connection(object(), [], object()) + await conn.close() + async def test_connector_context_manager(loop) -> None: conn = aiohttp.BaseConnector(loop=loop) @@ -558,6 +561,8 @@ async def test_release_close(loop, key) -> None: assert not conn._conns assert proto.close.called + await conn.close() + async def 
test__release_acquired_per_host1( loop: asyncio.AbstractEventLoop, key: ConnectionKey @@ -566,6 +571,8 @@ async def test__release_acquired_per_host1( conn._release_acquired(key, create_mocked_conn(loop)) assert len(conn._acquired_per_host) == 0 + await conn.close() + async def test__release_acquired_per_host2( loop: asyncio.AbstractEventLoop, key: ConnectionKey @@ -576,6 +583,8 @@ async def test__release_acquired_per_host2( conn._release_acquired(key, handler) assert len(conn._acquired_per_host) == 0 + await conn.close() + async def test__release_acquired_per_host3( loop: asyncio.AbstractEventLoop, key: ConnectionKey @@ -589,6 +598,8 @@ async def test__release_acquired_per_host3( assert len(conn._acquired_per_host) == 1 assert conn._acquired_per_host[key] == {handler2} + await conn.close() + async def test_tcp_connector_certificate_error( loop: Any, start_connection: mock.AsyncMock @@ -608,6 +619,8 @@ async def certificate_error(*args, **kwargs): assert isinstance(ctx.value.certificate_error, ssl.CertificateError) assert isinstance(ctx.value, aiohttp.ClientSSLError) + await conn.close() + async def test_tcp_connector_server_hostname_default( loop: Any, start_connection: mock.AsyncMock @@ -624,6 +637,8 @@ async def test_tcp_connector_server_hostname_default( with closing(await conn.connect(req, [], ClientTimeout())): assert create_connection.call_args.kwargs["server_hostname"] == "127.0.0.1" + await conn.close() + async def test_tcp_connector_server_hostname_override( loop: Any, start_connection: mock.AsyncMock @@ -642,6 +657,8 @@ async def test_tcp_connector_server_hostname_override( with closing(await conn.connect(req, [], ClientTimeout())): assert create_connection.call_args.kwargs["server_hostname"] == "localhost" + await conn.close() + async def test_tcp_connector_multiple_hosts_errors(loop) -> None: conn = aiohttp.TCPConnector(loop=loop) @@ -795,6 +812,8 @@ def get_extra_info(param): established_connection.close() + await conn.close() + 
@pytest.mark.parametrize( ("happy_eyeballs_delay"), @@ -869,6 +888,8 @@ async def create_connection(*args, **kwargs): established_connection.close() + await conn.close() + async def test_tcp_connector_interleave(loop: Any) -> None: conn = aiohttp.TCPConnector(interleave=2) @@ -946,6 +967,8 @@ async def create_connection(*args, **kwargs): assert interleave == 2 established_connection.close() + await conn.close() + async def test_tcp_connector_family_is_respected(loop: Any) -> None: conn = aiohttp.TCPConnector(family=socket.AF_INET) @@ -1116,6 +1139,8 @@ async def create_connection( established_connection.close() + await conn.close() + async def test_tcp_connector_resolve_host(loop: asyncio.AbstractEventLoop) -> None: conn = aiohttp.TCPConnector(use_dns_cache=True) @@ -1135,6 +1160,8 @@ async def test_tcp_connector_resolve_host(loop: asyncio.AbstractEventLoop) -> No else: assert rec["host"] == "::1" + await conn.close() + @pytest.fixture def dns_response(loop): @@ -1155,6 +1182,8 @@ async def test_tcp_connector_dns_cache_not_expired(loop, dns_response) -> None: await conn._resolve_host("localhost", 8080) m_resolver().resolve.assert_called_once_with("localhost", 8080, family=0) + await conn.close() + async def test_tcp_connector_dns_cache_forever(loop, dns_response) -> None: with mock.patch("aiohttp.connector.DefaultResolver") as m_resolver: @@ -1164,6 +1193,8 @@ async def test_tcp_connector_dns_cache_forever(loop, dns_response) -> None: await conn._resolve_host("localhost", 8080) m_resolver().resolve.assert_called_once_with("localhost", 8080, family=0) + await conn.close() + async def test_tcp_connector_use_dns_cache_disabled(loop, dns_response) -> None: with mock.patch("aiohttp.connector.DefaultResolver") as m_resolver: @@ -1178,6 +1209,8 @@ async def test_tcp_connector_use_dns_cache_disabled(loop, dns_response) -> None: ] ) + await conn.close() + async def test_tcp_connector_dns_throttle_requests(loop, dns_response) -> None: with 
mock.patch("aiohttp.connector.DefaultResolver") as m_resolver: @@ -1189,6 +1222,8 @@ async def test_tcp_connector_dns_throttle_requests(loop, dns_response) -> None: await asyncio.sleep(0) m_resolver().resolve.assert_called_once_with("localhost", 8080, family=0) + await conn.close() + async def test_tcp_connector_dns_throttle_requests_exception_spread(loop) -> None: with mock.patch("aiohttp.connector.DefaultResolver") as m_resolver: @@ -1204,6 +1239,8 @@ async def test_tcp_connector_dns_throttle_requests_exception_spread(loop) -> Non assert r1.exception() == e assert r2.exception() == e + await conn.close() + async def test_tcp_connector_dns_throttle_requests_cancelled_when_close( loop, dns_response @@ -1222,6 +1259,8 @@ async def test_tcp_connector_dns_throttle_requests_cancelled_when_close( with pytest.raises(asyncio.CancelledError): await f + await conn.close() + @pytest.fixture def dns_response_error(loop): @@ -1263,6 +1302,8 @@ def exception_handler(loop, context): gc.collect() assert exception_handler_called is False + await conn.close() + async def test_tcp_connector_dns_tracing(loop, dns_response) -> None: session = mock.Mock() @@ -1306,6 +1347,8 @@ async def test_tcp_connector_dns_tracing(loop, dns_response) -> None: session, trace_config_ctx, aiohttp.TraceDnsCacheHitParams("localhost") ) + await conn.close() + async def test_tcp_connector_dns_tracing_cache_disabled(loop, dns_response) -> None: session = mock.Mock() @@ -1359,6 +1402,8 @@ async def test_tcp_connector_dns_tracing_cache_disabled(loop, dns_response) -> N ] ) + await conn.close() + async def test_tcp_connector_dns_tracing_throttle_requests(loop, dns_response) -> None: session = mock.Mock() @@ -1388,6 +1433,8 @@ async def test_tcp_connector_dns_tracing_throttle_requests(loop, dns_response) - session, trace_config_ctx, aiohttp.TraceDnsCacheMissParams("localhost") ) + await conn.close() + async def test_dns_error(loop) -> None: connector = aiohttp.TCPConnector(loop=loop) @@ -1400,6 +1447,8 @@ async 
def test_dns_error(loop) -> None: with pytest.raises(aiohttp.ClientConnectorError): await connector.connect(req, [], ClientTimeout()) + await connector.close() + async def test_get_pop_empty_conns(loop) -> None: # see issue #473 @@ -1409,6 +1458,8 @@ async def test_get_pop_empty_conns(loop) -> None: assert await conn._get(key, []) is None assert not conn._conns + await conn.close() + async def test_release_close_do_not_add_to_pool(loop, key) -> None: # see issue #473 @@ -1420,6 +1471,8 @@ async def test_release_close_do_not_add_to_pool(loop, key) -> None: conn._release(key, proto) assert not conn._conns + await conn.close() + async def test_release_close_do_not_delete_existing_connections(key) -> None: proto1 = mock.Mock() @@ -1457,6 +1510,8 @@ async def test_release_not_opened(loop, key) -> None: conn._release(key, proto) assert proto.close.called + await conn.close() + async def test_connect(loop, key) -> None: proto = mock.Mock() @@ -1477,6 +1532,8 @@ async def test_connect(loop, key) -> None: assert isinstance(connection, Connection) connection.close() + await conn.close() + async def test_connect_tracing(loop) -> None: session = mock.Mock() @@ -1586,6 +1643,8 @@ async def test_exception_during_connection_queued_tracing( assert not conn._acquired assert key not in conn._acquired_per_host + await conn.close() + async def test_exception_during_connection_reuse_tracing( loop: asyncio.AbstractEventLoop, @@ -1701,25 +1760,39 @@ async def test_ctor_cleanup() -> None: assert conn._cleanup_handle is None assert conn._cleanup_closed_handle is not None + await conn.close() -async def test_cleanup(key) -> None: - testset = { - key: [(mock.Mock(), 10), (mock.Mock(), 300)], - } - testset[key][0][0].is_connected.return_value = True - testset[key][1][0].is_connected.return_value = False + +async def test_cleanup(key: ConnectionKey) -> None: + # The test sets the clock to 300s. It starts with 2 connections in the + # pool. The first connection has use time of 10s. 
When cleanup reaches it, + # it computes the deadline = 300 - 15.0 = 285.0 (15s being the default + # keep-alive timeout value), then checks that it's overdue because + # 10 - 285.0 < 0, and releases it since it's in connected state. The second + # connection, though, is in disconnected state so it doesn't bother to + # check if it's past due and closes the underlying transport. + + m1 = mock.Mock() + m2 = mock.Mock() + m1.is_connected.return_value = True + m2.is_connected.return_value = False + testset: DefaultDict[ConnectionKey, Deque[Tuple[ResponseHandler, float]]] = ( + defaultdict(deque) + ) + testset[key] = deque([(m1, 10), (m2, 300)]) loop = mock.Mock() loop.time.return_value = 300 - conn = aiohttp.BaseConnector(loop=loop) - conn._conns = testset - existing_handle = conn._cleanup_handle = mock.Mock() + async with aiohttp.BaseConnector() as conn: + conn._loop = loop + conn._conns = testset + existing_handle = conn._cleanup_handle = mock.Mock() - with mock.patch("aiohttp.connector.monotonic", return_value=300): - conn._cleanup() - assert existing_handle.cancel.called - assert conn._conns == {} - assert conn._cleanup_handle is None + with mock.patch("aiohttp.connector.monotonic", return_value=300): + conn._cleanup() + assert existing_handle.cancel.called + assert conn._conns == {} + assert conn._cleanup_handle is None @pytest.mark.usefixtures("enable_cleanup_closed") @@ -1744,6 +1817,9 @@ async def test_cleanup_close_ssl_transport( assert conn._conns == {} assert conn._cleanup_closed_transports == [transport] + await conn.close() + await asyncio.sleep(0) # Give cleanup a chance to close transports + async def test_cleanup2() -> None: testset = {1: [(mock.Mock(), 300)]} @@ -1801,6 +1877,8 @@ async def test_cleanup_closed( assert loop.call_at.called assert cleanup_closed_handle.cancel.called + await conn.close() + async def test_cleanup_closed_is_noop_on_fixed_cpython() -> None: """Ensure that enable_cleanup_closed is a noop on fixed Python versions.""" @@ 
-1822,6 +1900,8 @@ async def test_cleanup_closed_disabled( assert tr.abort.called assert not conn._cleanup_closed_transports + await conn.close() + async def test_tcp_connector_ctor() -> None: conn = aiohttp.TCPConnector() @@ -1830,6 +1910,8 @@ async def test_tcp_connector_ctor() -> None: assert conn.use_dns_cache assert conn.family == 0 + await conn.close() + async def test_tcp_connector_allowed_protocols(loop: asyncio.AbstractEventLoop) -> None: conn = aiohttp.TCPConnector() @@ -1848,6 +1930,8 @@ async def test_tcp_connector_ctor_fingerprint_valid( conn = aiohttp.TCPConnector(ssl=valid, loop=loop) assert conn._ssl is valid + await conn.close() + async def test_insecure_fingerprint_md5(loop) -> None: with pytest.raises(ValueError): @@ -1883,12 +1967,16 @@ async def test_tcp_connector_clear_dns_cache(loop) -> None: with pytest.raises(KeyError): conn._cached_hosts.next_addrs(("localhost", 124)) + await conn.close() + async def test_tcp_connector_clear_dns_cache_bad_args(loop) -> None: conn = aiohttp.TCPConnector(loop=loop) with pytest.raises(ValueError): conn.clear_dns_cache("localhost") + await conn.close() + async def test___get_ssl_context1() -> None: conn = aiohttp.TCPConnector() @@ -1896,6 +1984,8 @@ async def test___get_ssl_context1() -> None: req.is_ssl.return_value = False assert conn._get_ssl_context(req) is None + await conn.close() + async def test___get_ssl_context2(loop) -> None: ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) @@ -1905,6 +1995,8 @@ async def test___get_ssl_context2(loop) -> None: req.ssl = ctx assert conn._get_ssl_context(req) is ctx + await conn.close() + async def test___get_ssl_context3(loop) -> None: ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) @@ -1914,6 +2006,8 @@ async def test___get_ssl_context3(loop) -> None: req.ssl = True assert conn._get_ssl_context(req) is ctx + await conn.close() + async def test___get_ssl_context4(loop) -> None: ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) @@ -1923,6 +2017,8 @@ async def 
test___get_ssl_context4(loop) -> None: req.ssl = False assert conn._get_ssl_context(req) is _SSL_CONTEXT_UNVERIFIED + await conn.close() + async def test___get_ssl_context5(loop) -> None: ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) @@ -1932,6 +2028,8 @@ async def test___get_ssl_context5(loop) -> None: req.ssl = aiohttp.Fingerprint(hashlib.sha256(b"1").digest()) assert conn._get_ssl_context(req) is _SSL_CONTEXT_UNVERIFIED + await conn.close() + async def test___get_ssl_context6() -> None: conn = aiohttp.TCPConnector() @@ -1940,6 +2038,8 @@ async def test___get_ssl_context6() -> None: req.ssl = True assert conn._get_ssl_context(req) is _SSL_CONTEXT_VERIFIED + await conn.close() + async def test_ssl_context_once() -> None: """Test the ssl context is created only once and shared between connectors.""" @@ -2334,6 +2434,8 @@ async def test_ctor_with_default_loop(loop) -> None: conn = aiohttp.BaseConnector() assert loop is conn._loop + await conn.close() + async def test_base_connector_allows_high_level_protocols( loop: asyncio.AbstractEventLoop, @@ -2595,22 +2697,25 @@ async def test_connect_with_limit_cancelled(loop) -> None: await conn.close() -async def test_connect_with_capacity_release_waiters(loop) -> None: - def check_with_exc(err): - conn = aiohttp.BaseConnector(limit=1, loop=loop) - conn._create_connection = mock.Mock() - conn._create_connection.return_value = loop.create_future() - conn._create_connection.return_value.set_exception(err) +async def test_connect_with_capacity_release_waiters( + loop: asyncio.AbstractEventLoop, +) -> None: + async def check_with_exc(err: Exception) -> None: + conn = aiohttp.BaseConnector(limit=1) + with mock.patch.object( + conn, "_create_connection", autospec=True, spec_set=True, side_effect=err + ): + with pytest.raises(Exception): + req = mock.Mock() + await conn.connect(req, [], ClientTimeout()) - with pytest.raises(Exception): - req = mock.Mock() - yield from conn.connect(req, None, ClientTimeout()) + assert not 
conn._waiters - assert not conn._waiters + await conn.close() - check_with_exc(OSError(1, "permission error")) - check_with_exc(RuntimeError()) - check_with_exc(asyncio.TimeoutError()) + await check_with_exc(OSError(1, "permission error")) + await check_with_exc(RuntimeError()) + await check_with_exc(asyncio.TimeoutError()) async def test_connect_with_limit_concurrent(loop) -> None: @@ -2690,6 +2795,8 @@ async def test_connect_waiters_cleanup(loop) -> None: await asyncio.sleep(0) assert not conn._waiters.keys() + await conn.close() + async def test_connect_waiters_cleanup_key_error(loop) -> None: proto = mock.Mock() @@ -2713,6 +2820,8 @@ async def test_connect_waiters_cleanup_key_error(loop) -> None: await asyncio.sleep(0) assert not conn._waiters.keys() == [] + await conn.close() + async def test_close_with_acquired_connection(loop) -> None: proto = mock.Mock() @@ -2744,6 +2853,8 @@ async def test_default_force_close(loop) -> None: connector = aiohttp.BaseConnector(loop=loop) assert not connector.force_close + await connector.close() + async def test_limit_property(loop) -> None: conn = aiohttp.BaseConnector(loop=loop, limit=15) @@ -2824,6 +2935,8 @@ async def create_connection(req, traces, timeout): assert proto in conn._acquired ret.release() + await conn.close() + async def test_cancelled_waiter(loop) -> None: conn = aiohttp.BaseConnector(limit=1, loop=loop) @@ -2846,6 +2959,8 @@ async def create_connection(req, traces=None): with pytest.raises(asyncio.CancelledError): await conn2 + await conn.close() + async def test_error_on_connection_with_cancelled_waiter(loop, key) -> None: conn = aiohttp.BaseConnector(limit=1, loop=loop) @@ -2896,6 +3011,8 @@ async def create_connection(req, traces, timeout): assert proto in conn._acquired ret.release() + await conn.close() + async def test_tcp_connector(aiohttp_client, loop) -> None: async def handler(request): @@ -2965,6 +3082,8 @@ async def test_default_use_dns_cache() -> None: conn = aiohttp.TCPConnector() assert 
conn.use_dns_cache + await conn.close() + async def test_ssl_none() -> None: conn = aiohttp.TCPConnector(ssl=None) @@ -2987,6 +3106,8 @@ async def test_resolver_not_called_with_address_is_ip(loop) -> None: resolver.resolve.assert_not_called() + await connector.close() + async def test_tcp_connector_raise_connector_ssl_error( aiohttp_server, @@ -3015,6 +3136,8 @@ async def handler(request): await session.close() + await conn.close() + @pytest.mark.parametrize( "host", @@ -3271,6 +3394,8 @@ async def send_dns_cache_hit(self, *args, **kwargs): traces = [DummyTracer()] assert await connector._resolve_host("", 0, traces) == [token] + await connector.close() + async def test_connector_throttle_trace_race(loop): key = ("", 0) @@ -3291,6 +3416,8 @@ async def send_dns_cache_hit(self, *args: object, **kwargs: object) -> None: traces = [DummyTracer()] assert await connector._resolve_host("", 0, traces) == [token] + await connector.close() + async def test_connector_does_not_remove_needed_waiters( loop: asyncio.AbstractEventLoop, key: ConnectionKey diff --git a/tests/test_imports.py b/tests/test_imports.py index 7f35f5b8cc2..ff63f8fb0f6 100644 --- a/tests/test_imports.py +++ b/tests/test_imports.py @@ -28,8 +28,18 @@ def test_web___all__(pytester: pytest.Pytester) -> None: result.assert_outcomes(passed=0, errors=0) +_IS_CI_ENV = os.getenv("CI") == "true" +_XDIST_WORKER_COUNT = int(os.getenv("PYTEST_XDIST_WORKER_COUNT", 0)) +_IS_XDIST_RUN = _XDIST_WORKER_COUNT > 1 + _TARGET_TIMINGS_BY_PYTHON_VERSION = { - "3.12": 250, # 3.12 is expected to be a bit slower due to performance trade-offs + "3.12": ( + # 3.12 is expected to be a bit slower due to performance trade-offs, + # and even slower under pytest-xdist, especially in CI + _XDIST_WORKER_COUNT * 100 * (1 if _IS_CI_ENV else 1.53) + if _IS_XDIST_RUN + else 250 + ), } @@ -38,6 +48,7 @@ def test_web___all__(pytester: pytest.Pytester) -> None: not sys.platform.startswith("linux") or platform.python_implementation() == "PyPy", 
reason="Timing is more reliable on Linux", ) +@pytest.mark.dev_mode def test_import_time(pytester: pytest.Pytester) -> None: """Check that importing aiohttp doesn't take too long. diff --git a/tests/test_proxy_functional.py b/tests/test_proxy_functional.py index 4b11d11e3a7..722fb51ae5c 100644 --- a/tests/test_proxy_functional.py +++ b/tests/test_proxy_functional.py @@ -100,6 +100,7 @@ async def handler(*args, **kwargs): # Filter out the warning from # https://github.com/abhinavsingh/proxy.py/blob/30574fd0414005dfa8792a6e797023e862bdcf43/proxy/common/utils.py#L226 # otherwise this test will fail because the proxy will die with an error. +@pytest.mark.usefixtures("loop") async def test_secure_https_proxy_absolute_path( client_ssl_ctx: ssl.SSLContext, secure_proxy_url: URL, @@ -127,6 +128,7 @@ async def test_secure_https_proxy_absolute_path( await sess.close() await conn.close() + await asyncio.sleep(0.1) # https://docs.aiohttp.org/en/v3.8.0/client_advanced.html#graceful-shutdown await asyncio.sleep(0.1) @@ -202,6 +204,8 @@ async def test_https_proxy_unsupported_tls_in_tls( await sess.close() await conn.close() + await asyncio.sleep(0.1) + @pytest.fixture def proxy_test_server(aiohttp_raw_server, loop, monkeypatch): From 8922ab87b30b2daa9e4c47fe72a0bf09bfc633b9 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sat, 9 Nov 2024 20:05:48 +0000 Subject: [PATCH 0873/1511] [PR #9745/0d10447 backport][3.11] Speed up preparing headers by avoiding populating cookies when there are no cookies (#9749) Co-authored-by: Emmanuel Okedele <emmanuel@coefficient.ai> Co-authored-by: pre-commit-ci[bot] Co-authored-by: J. 
Nick Koston <nick@koston.org> Co-authored-by: Emmanuel Okedele <80680311+Ok3ks@users.noreply.github.com> --- aiohttp/web_response.py | 12 ++++++------ tests/test_imports.py | 1 + 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index bbefa500efc..96f688e2ddc 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -465,9 +465,10 @@ async def _prepare_headers(self) -> None: version = request.version headers = self._headers - for cookie in self._cookies.values(): - value = cookie.output(header="")[1:] - headers.add(hdrs.SET_COOKIE, value) + if self._cookies: + for cookie in self._cookies.values(): + value = cookie.output(header="")[1:] + headers.add(hdrs.SET_COOKIE, value) if self._compression: await self._start_compression(request) @@ -515,9 +516,8 @@ async def _prepare_headers(self) -> None: if keep_alive: if version == HttpVersion10: headers[hdrs.CONNECTION] = "keep-alive" - else: - if version == HttpVersion11: - headers[hdrs.CONNECTION] = "close" + elif version == HttpVersion11: + headers[hdrs.CONNECTION] = "close" async def _write_headers(self) -> None: request = self._req diff --git a/tests/test_imports.py b/tests/test_imports.py index 2db32fa1488..fcd12c011b9 100644 --- a/tests/test_imports.py +++ b/tests/test_imports.py @@ -49,6 +49,7 @@ def test_web___all__(pytester: pytest.Pytester) -> None: not sys.platform.startswith("linux") or platform.python_implementation() == "PyPy", reason="Timing is more reliable on Linux", ) +@pytest.mark.dev_mode def test_import_time(pytester: pytest.Pytester) -> None: """Check that importing aiohttp doesn't take too long. From df9c65725afb220d9fa9d616feea3400203b16e1 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 9 Nov 2024 20:33:09 +0000 Subject: [PATCH 0874/1511] [PR #9751/7c0b4637 backport][3.11] Add codspeed badge (#9753) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- README.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.rst b/README.rst index 470ced9b29c..554627a42e7 100644 --- a/README.rst +++ b/README.rst @@ -17,6 +17,10 @@ Async http client/server framework :target: https://codecov.io/gh/aio-libs/aiohttp :alt: codecov.io status for master branch +.. image:: https://img.shields.io/endpoint?url=https://codspeed.io/badge.json + :target: https://codspeed.io/aio-libs/aiohttp + :alt: Codspeed.io status for aiohttp + .. image:: https://badge.fury.io/py/aiohttp.svg :target: https://pypi.org/project/aiohttp :alt: Latest PyPI package version From 0124f7ccb27f54011069219fa8b0b5ec72a555e8 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 9 Nov 2024 20:58:34 +0000 Subject: [PATCH 0875/1511] [PR #9751/7c0b4637 backport][3.10] Add codspeed badge (#9752) Co-authored-by: J. Nick Koston <nick@koston.org> --- README.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.rst b/README.rst index 470ced9b29c..554627a42e7 100644 --- a/README.rst +++ b/README.rst @@ -17,6 +17,10 @@ Async http client/server framework :target: https://codecov.io/gh/aio-libs/aiohttp :alt: codecov.io status for master branch +.. image:: https://img.shields.io/endpoint?url=https://codspeed.io/badge.json + :target: https://codspeed.io/aio-libs/aiohttp + :alt: Codspeed.io status for aiohttp + .. image:: https://badge.fury.io/py/aiohttp.svg :target: https://pypi.org/project/aiohttp :alt: Latest PyPI package version From 1d761e172bcb5314a9f9dbcf3e0fb1e1ea8c995b Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sun, 10 Nov 2024 00:13:06 +0000 Subject: [PATCH 0876/1511] [PR #9756/44d809f backport][3.11] Avoid tracking connections per host when there is no limit per host (#9758) --- CHANGES/9756.misc.rst | 1 + aiohttp/connector.py | 15 +++++++++------ tests/test_connector.py | 18 +++++++++--------- 3 files changed, 19 insertions(+), 15 deletions(-) create mode 100644 CHANGES/9756.misc.rst diff --git a/CHANGES/9756.misc.rst b/CHANGES/9756.misc.rst new file mode 100644 index 00000000000..54f232ac403 --- /dev/null +++ b/CHANGES/9756.misc.rst @@ -0,0 +1 @@ +Improved performance of :py:class:`aiohttp.BaseConnector` when there is no ``limit_per_host`` -- by :user:`bdraco`. diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 928651d80f3..f0e7ae86b13 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -531,7 +531,8 @@ async def connect( placeholder = cast(ResponseHandler, _TransportPlaceholder()) self._acquired.add(placeholder) - self._acquired_per_host[key].add(placeholder) + if self._limit_per_host: + self._acquired_per_host[key].add(placeholder) try: # Traces are done inside the try block to ensure that the @@ -557,11 +558,12 @@ async def connect( # be no awaits after the proto is added to the acquired set # to ensure that the connection is not left in the acquired set # on cancellation. 
- acquired_per_host = self._acquired_per_host[key] self._acquired.remove(placeholder) - acquired_per_host.remove(placeholder) self._acquired.add(proto) - acquired_per_host.add(proto) + if self._limit_per_host: + acquired_per_host = self._acquired_per_host[key] + acquired_per_host.remove(placeholder) + acquired_per_host.add(proto) return Connection(self, key, proto, self._loop) async def _wait_for_available_connection( @@ -626,7 +628,8 @@ async def _get( # The very last connection was reclaimed: drop the key del self._conns[key] self._acquired.add(proto) - self._acquired_per_host[key].add(proto) + if self._limit_per_host: + self._acquired_per_host[key].add(proto) if traces: for trace in traces: try: @@ -680,7 +683,7 @@ def _release_acquired(self, key: "ConnectionKey", proto: ResponseHandler) -> Non return self._acquired.discard(proto) - if conns := self._acquired_per_host.get(key): + if self._limit_per_host and (conns := self._acquired_per_host.get(key)): conns.discard(proto) if not conns: del self._acquired_per_host[key] diff --git a/tests/test_connector.py b/tests/test_connector.py index d956de2a2ed..32184130dda 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -375,7 +375,7 @@ async def test_get_expired_ssl(loop: asyncio.AbstractEventLoop) -> None: async def test_release_acquired(loop, key) -> None: proto = mock.Mock() - conn = aiohttp.BaseConnector(loop=loop, limit=5) + conn = aiohttp.BaseConnector(loop=loop, limit=5, limit_per_host=10) conn._release_waiter = mock.Mock() conn._acquired.add(proto) @@ -562,7 +562,7 @@ async def test_release_close(loop, key) -> None: async def test__release_acquired_per_host1( loop: asyncio.AbstractEventLoop, key: ConnectionKey ) -> None: - conn = aiohttp.BaseConnector() + conn = aiohttp.BaseConnector(limit_per_host=10) conn._release_acquired(key, create_mocked_conn(loop)) assert len(conn._acquired_per_host) == 0 @@ -572,7 +572,7 @@ async def test__release_acquired_per_host1( async def 
test__release_acquired_per_host2( loop: asyncio.AbstractEventLoop, key: ConnectionKey ) -> None: - conn = aiohttp.BaseConnector() + conn = aiohttp.BaseConnector(limit_per_host=10) handler = create_mocked_conn(loop) conn._acquired_per_host[key].add(handler) conn._release_acquired(key, handler) @@ -584,7 +584,7 @@ async def test__release_acquired_per_host2( async def test__release_acquired_per_host3( loop: asyncio.AbstractEventLoop, key: ConnectionKey ) -> None: - conn = aiohttp.BaseConnector() + conn = aiohttp.BaseConnector(limit_per_host=10) handler = create_mocked_conn(loop) handler2 = create_mocked_conn(loop) conn._acquired_per_host[key].add(handler) @@ -2466,7 +2466,7 @@ async def test_connect_with_limit( "GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock() ) - conn = aiohttp.BaseConnector(loop=loop, limit=1) + conn = aiohttp.BaseConnector(loop=loop, limit=1, limit_per_host=10) conn._conns[key] = deque([(proto, loop.time())]) conn._create_connection = mock.Mock() conn._create_connection.return_value = loop.create_future() @@ -2663,7 +2663,7 @@ async def f(): connection2 = await conn.connect(req, None, ClientTimeout()) acquired = True assert 1 == len(conn._acquired) - assert 1 == len(conn._acquired_per_host[key]) + assert not conn._acquired_per_host connection2.release() task = loop.create_task(f()) @@ -2810,7 +2810,7 @@ async def test_connect_waiters_cleanup_key_error(loop) -> None: req = ClientRequest("GET", URL("http://host:80"), loop=loop) - conn = aiohttp.BaseConnector(loop=loop, limit=1) + conn = aiohttp.BaseConnector(loop=loop, limit=1, limit_per_host=10) conn._available_connections = mock.Mock(return_value=0) t = loop.create_task(conn.connect(req, None, ClientTimeout())) @@ -2901,7 +2901,7 @@ async def test_force_close_and_explicit_keep_alive(loop) -> None: async def test_error_on_connection(loop, key) -> None: - conn = aiohttp.BaseConnector(limit=1, loop=loop) + conn = aiohttp.BaseConnector(limit=1, loop=loop, limit_per_host=10) req = 
mock.Mock() req.connection_key = key @@ -2969,7 +2969,7 @@ async def create_connection(req, traces=None): async def test_error_on_connection_with_cancelled_waiter(loop, key) -> None: - conn = aiohttp.BaseConnector(limit=1, loop=loop) + conn = aiohttp.BaseConnector(limit=1, loop=loop, limit_per_host=10) req = mock.Mock() req.connection_key = key From afb225fc0640fc86503c1428bb7fd60cc1a2c62d Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 10 Nov 2024 01:03:53 +0000 Subject: [PATCH 0877/1511] [PR #9757/b2c845bb backport][3.11] Avoid creating body writer task when there is no body (#9759) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/9757.misc.rst | 1 + aiohttp/base_protocol.py | 4 ++++ aiohttp/client_reqrep.py | 32 +++++++++++++++---------- aiohttp/http_writer.py | 4 ++++ tests/test_base_protocol.py | 2 ++ tests/test_benchmarks_client_request.py | 4 ++++ tests/test_client_functional.py | 27 ++++++++++++++++----- tests/test_client_session.py | 7 ++---- tests/test_http_writer.py | 18 ++++++++++++++ 9 files changed, 75 insertions(+), 24 deletions(-) create mode 100644 CHANGES/9757.misc.rst diff --git a/CHANGES/9757.misc.rst b/CHANGES/9757.misc.rst new file mode 100644 index 00000000000..824b1d88447 --- /dev/null +++ b/CHANGES/9757.misc.rst @@ -0,0 +1 @@ +Improved performance of sending HTTP requests when there is no body -- by :user:`bdraco`. 
diff --git a/aiohttp/base_protocol.py b/aiohttp/base_protocol.py index 2fc2fa65885..c8ec57786b9 100644 --- a/aiohttp/base_protocol.py +++ b/aiohttp/base_protocol.py @@ -29,6 +29,10 @@ def connected(self) -> bool: """Return True if the connection is open.""" return self.transport is not None + @property + def writing_paused(self) -> bool: + return self._paused + def pause_writing(self) -> None: assert not self._paused self._paused = True diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 4a30c402266..3226d19f9da 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -736,22 +736,28 @@ async def send(self, conn: "Connection") -> "ClientResponse": v = self.version status_line = f"{self.method} {path} HTTP/{v.major}.{v.minor}" await writer.write_headers(status_line, self.headers) - coro = self.write_bytes(writer, conn) - task: Optional["asyncio.Task[None]"] - if sys.version_info >= (3, 12): - # Optimization for Python 3.12, try to write - # bytes immediately to avoid having to schedule - # the task on the event loop. - task = asyncio.Task(coro, loop=self.loop, eager_start=True) + if self.body or self._continue is not None or protocol.writing_paused: + coro = self.write_bytes(writer, conn) + if sys.version_info >= (3, 12): + # Optimization for Python 3.12, try to write + # bytes immediately to avoid having to schedule + # the task on the event loop. 
+ task = asyncio.Task(coro, loop=self.loop, eager_start=True) + else: + task = self.loop.create_task(coro) + if task.done(): + task = None + else: + self._writer = task else: - task = self.loop.create_task(coro) - - if task.done(): + # We have nothing to write because + # - there is no body + # - the protocol does not have writing paused + # - we are not waiting for a 100-continue response + protocol.start_timeout() + writer.set_eof() task = None - else: - self._writer = task - response_class = self.response_class assert response_class is not None self.response = response_class( diff --git a/aiohttp/http_writer.py b/aiohttp/http_writer.py index dc07a358c70..4f1b2fec88e 100644 --- a/aiohttp/http_writer.py +++ b/aiohttp/http_writer.py @@ -130,6 +130,10 @@ async def write_headers( buf = _serialize_headers(status_line, headers) self._write(buf) + def set_eof(self) -> None: + """Indicate that the message is complete.""" + self._eof = True + async def write_eof(self, chunk: bytes = b"") -> None: if self._eof: return diff --git a/tests/test_base_protocol.py b/tests/test_base_protocol.py index 72c8c7c6b63..4866ea37576 100644 --- a/tests/test_base_protocol.py +++ b/tests/test_base_protocol.py @@ -18,8 +18,10 @@ async def test_pause_writing() -> None: loop = asyncio.get_event_loop() pr = BaseProtocol(loop) assert not pr._paused + assert pr.writing_paused is False pr.pause_writing() assert pr._paused + assert pr.writing_paused is True # type: ignore[unreachable] async def test_pause_reading_no_transport() -> None: diff --git a/tests/test_benchmarks_client_request.py b/tests/test_benchmarks_client_request.py index 17d4a6cb8c4..c08e7571c48 100644 --- a/tests/test_benchmarks_client_request.py +++ b/tests/test_benchmarks_client_request.py @@ -89,6 +89,10 @@ class MockProtocol(asyncio.BaseProtocol): def __init__(self) -> None: self.transport = MockTransport() + @property + def writing_paused(self) -> bool: + return False + async def _drain_helper(self) -> None: """Swallow 
drain.""" diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 082c4db4d89..73e5929ee3b 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -12,7 +12,7 @@ import tarfile import time import zipfile -from typing import Any, AsyncIterator, Awaitable, Callable, List, Optional, Type +from typing import Any, AsyncIterator, Awaitable, Callable, List, Type from unittest import mock import pytest @@ -1503,10 +1503,7 @@ async def handler(request): assert 200 == resp.status -@pytest.mark.parametrize("data", (None, b"")) -async def test_GET_DEFLATE( - aiohttp_client: AiohttpClient, data: Optional[bytes] -) -> None: +async def test_GET_DEFLATE(aiohttp_client: AiohttpClient) -> None: async def handler(request: web.Request) -> web.Response: return web.json_response({"ok": True}) @@ -1529,7 +1526,7 @@ async def write_bytes( app.router.add_get("/", handler) client = await aiohttp_client(app) - async with client.get("/", data=data, compress=True) as resp: + async with client.get("/", data=b"", compress=True) as resp: assert resp.status == 200 content = await resp.json() assert content == {"ok": True} @@ -1539,6 +1536,24 @@ async def write_bytes( write_mock.assert_not_called() +async def test_GET_DEFLATE_no_body(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: + return web.json_response({"ok": True}) + + with mock.patch.object(ClientRequest, "write_bytes") as mock_write_bytes: + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + async with client.get("/", data=None, compress=True) as resp: + assert resp.status == 200 + content = await resp.json() + assert content == {"ok": True} + + # No chunks should have been sent for an empty body. 
+ mock_write_bytes.assert_not_called() + + async def test_POST_DATA_DEFLATE(aiohttp_client: AiohttpClient) -> None: async def handler(request: web.Request) -> web.Response: data = await request.post() diff --git a/tests/test_client_session.py b/tests/test_client_session.py index 783c9b11cc3..d5752d0fd97 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -836,7 +836,7 @@ def to_url(path: str) -> URL: assert to_trace_urls(on_request_redirect) == [] assert to_trace_urls(on_request_end) == [to_url("/?x=0")] assert to_trace_urls(on_request_exception) == [] - assert to_trace_urls(on_request_chunk_sent) == [to_url("/?x=0")] + assert to_trace_urls(on_request_chunk_sent) == [] assert to_trace_urls(on_response_chunk_received) == [to_url("/?x=0")] assert to_trace_urls(on_request_headers_sent) == [to_url("/?x=0")] @@ -852,10 +852,7 @@ def to_url(path: str) -> URL: assert to_trace_urls(on_request_redirect) == [to_url("/redirect?x=0")] assert to_trace_urls(on_request_end) == [to_url("/")] assert to_trace_urls(on_request_exception) == [] - assert to_trace_urls(on_request_chunk_sent) == [ - to_url("/redirect?x=0"), - to_url("/"), - ] + assert to_trace_urls(on_request_chunk_sent) == [] assert to_trace_urls(on_response_chunk_received) == [to_url("/")] assert to_trace_urls(on_request_headers_sent) == [ to_url("/redirect?x=0"), diff --git a/tests/test_http_writer.py b/tests/test_http_writer.py index ed853c8744a..d330da48df7 100644 --- a/tests/test_http_writer.py +++ b/tests/test_http_writer.py @@ -1,11 +1,13 @@ # Tests for aiohttp/http_writer.py import array +import asyncio from unittest import mock import pytest from multidict import CIMultiDict from aiohttp import ClientConnectionResetError, http +from aiohttp.base_protocol import BaseProtocol from aiohttp.test_utils import make_mocked_coro @@ -275,3 +277,19 @@ async def test_write_headers_prevents_injection(protocol, transport, loop) -> No wrong_headers = CIMultiDict({"Content-Length": 
"256\r\nSet-Cookie: abc=123"}) with pytest.raises(ValueError): await msg.write_headers(status_line, wrong_headers) + + +async def test_set_eof_after_write_headers( + protocol: BaseProtocol, + transport: mock.Mock, + loop: asyncio.AbstractEventLoop, +) -> None: + msg = http.StreamWriter(protocol, loop) + status_line = "HTTP/1.1 200 OK" + good_headers = CIMultiDict({"Set-Cookie": "abc=123"}) + await msg.write_headers(status_line, good_headers) + assert transport.write.called + transport.write.reset_mock() + msg.set_eof() + await msg.write_eof() + assert not transport.write.called From f55822ef1fc1fa397e749c82d08bd4bbd4b0e629 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 10 Nov 2024 01:17:45 +0000 Subject: [PATCH 0878/1511] Release 3.11.0b5 (#9760) --- CHANGES.rst | 34 +++++++++++++++++++++++++++++++++- aiohttp/__init__.py | 2 +- 2 files changed, 34 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 07051f60ad4..9490fcc6133 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,7 +10,7 @@ .. towncrier release notes start -3.11.0b4 (2024-11-07) +3.11.0b5 (2024-11-10) ===================== Bug fixes @@ -377,6 +377,38 @@ Miscellaneous internal changes +- Replace internal helper methods ``method_must_be_empty_body`` and ``status_code_must_be_empty_body`` with simple `set` lookups -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9722`. + + + +- Passing ``enable_cleanup_closed`` to :py:class:`aiohttp.TCPConnector` is now ignored on Python 3.12.7+ and 3.13.1+ since the underlying bug that caused asyncio to leak SSL connections has been fixed upstream -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9726`, :issue:`9736`. + + + +- Improved performance of :py:class:`aiohttp.BaseConnector` when there is no ``limit_per_host`` -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9756`. 
+ + + +- Improved performance of sending HTTP requests when there is no body -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9757`. + + + ---- diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index e38894a14a6..6ca79aa5ef1 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.0b4" +__version__ = "3.11.0b5" from typing import TYPE_CHECKING, Tuple From c7674b0e0fa7e893d6102bd4b25e658aa354aeb0 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 10 Nov 2024 09:39:47 +0000 Subject: [PATCH 0879/1511] [PR #9761/50cccb38 backport][3.10] Add benchmarks for a simple POST request (#9763) --- tests/test_benchmarks_client.py | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/tests/test_benchmarks_client.py b/tests/test_benchmarks_client.py index fdddef462ca..77c9108a657 100644 --- a/tests/test_benchmarks_client.py +++ b/tests/test_benchmarks_client.py @@ -31,3 +31,28 @@ async def run_client_benchmark() -> None: @benchmark def _run() -> None: loop.run_until_complete(run_client_benchmark()) + + +def test_one_hundred_simple_post_requests( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 100 simple POST requests.""" + message_count = 100 + + async def handler(request: web.Request) -> web.Response: + return web.Response() + + app = web.Application() + app.router.add_route("POST", "/", handler) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(message_count): + await client.post("/", data=b"any") + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) From 51145aad138d03fc9f462e59b9c9398a75905899 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 10 Nov 2024 09:49:57 +0000 Subject: 
[PATCH 0880/1511] [PR #9761/50cccb38 backport][3.11] Add benchmarks for a simple POST request (#9764) --- tests/test_benchmarks_client.py | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/tests/test_benchmarks_client.py b/tests/test_benchmarks_client.py index fdddef462ca..77c9108a657 100644 --- a/tests/test_benchmarks_client.py +++ b/tests/test_benchmarks_client.py @@ -31,3 +31,28 @@ async def run_client_benchmark() -> None: @benchmark def _run() -> None: loop.run_until_complete(run_client_benchmark()) + + +def test_one_hundred_simple_post_requests( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 100 simple POST requests.""" + message_count = 100 + + async def handler(request: web.Request) -> web.Response: + return web.Response() + + app = web.Application() + app.router.add_route("POST", "/", handler) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(message_count): + await client.post("/", data=b"any") + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) From e39f20a042aea88be275db1186cec94ff88c95e9 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 10 Nov 2024 10:50:22 +0000 Subject: [PATCH 0881/1511] [PR #9762/c5d6b84f backport][3.11] Refactor payload registry to avoid linear searches for common types (#9767) Co-authored-by: J. 
Nick Koston <nick@koston.org> closes #9419 --- aiohttp/payload.py | 30 ++++++++++++++++++++++-------- 1 file changed, 22 insertions(+), 8 deletions(-) diff --git a/aiohttp/payload.py b/aiohttp/payload.py index 27636977774..151f9dd497b 100644 --- a/aiohttp/payload.py +++ b/aiohttp/payload.py @@ -101,6 +101,7 @@ def __init__(self) -> None: self._first: List[_PayloadRegistryItem] = [] self._normal: List[_PayloadRegistryItem] = [] self._last: List[_PayloadRegistryItem] = [] + self._normal_lookup: Dict[Any, PayloadType] = {} def get( self, @@ -109,12 +110,20 @@ def get( _CHAIN: "Type[chain[_PayloadRegistryItem]]" = chain, **kwargs: Any, ) -> "Payload": + if self._first: + for factory, type_ in self._first: + if isinstance(data, type_): + return factory(data, *args, **kwargs) + # Try the fast lookup first + if lookup_factory := self._normal_lookup.get(type(data)): + return lookup_factory(data, *args, **kwargs) + # Bail early if its already a Payload if isinstance(data, Payload): return data - for factory, type in _CHAIN(self._first, self._normal, self._last): - if isinstance(data, type): + # Fallback to the slower linear search + for factory, type_ in _CHAIN(self._normal, self._last): + if isinstance(data, type_): return factory(data, *args, **kwargs) - raise LookupError() def register( @@ -124,6 +133,11 @@ def register( self._first.append((factory, type)) elif order is Order.normal: self._normal.append((factory, type)) + if isinstance(type, Iterable): + for t in type: + self._normal_lookup[t] = factory + else: + self._normal_lookup[type] = factory elif order is Order.try_last: self._last.append((factory, type)) else: @@ -159,7 +173,8 @@ def __init__( self._headers[hdrs.CONTENT_TYPE] = content_type else: self._headers[hdrs.CONTENT_TYPE] = self._default_content_type - self._headers.update(headers or {}) + if headers: + self._headers.update(headers) @property def size(self) -> Optional[int]: @@ -228,9 +243,6 @@ class BytesPayload(Payload): def __init__( self, value: 
Union[bytes, bytearray, memoryview], *args: Any, **kwargs: Any ) -> None: - if not isinstance(value, (bytes, bytearray, memoryview)): - raise TypeError(f"value argument must be byte-ish, not {type(value)!r}") - if "content_type" not in kwargs: kwargs["content_type"] = "application/octet-stream" @@ -238,8 +250,10 @@ def __init__( if isinstance(value, memoryview): self._size = value.nbytes - else: + elif isinstance(value, (bytes, bytearray)): self._size = len(value) + else: + raise TypeError(f"value argument must be byte-ish, not {type(value)!r}") if self._size > TOO_LARGE_BYTES_BODY: kwargs = {"source": self} From cc9a14aa3a29e54e2da3045083cca865654e3ff9 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 10 Nov 2024 10:50:25 +0000 Subject: [PATCH 0882/1511] [PR #9765/e06dec37 backport][3.10] Avoid duplicate len calls in StreamReader.feed_data (#9768) Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/streams.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/aiohttp/streams.py b/aiohttp/streams.py index c927cfbb1b3..030c6735947 100644 --- a/aiohttp/streams.py +++ b/aiohttp/streams.py @@ -245,9 +245,10 @@ def feed_data(self, data: bytes, size: int = 0) -> None: if not data: return - self._size += len(data) + data_len = len(data) + self._size += data_len self._buffer.append(data) - self.total_bytes += len(data) + self.total_bytes += data_len waiter = self._waiter if waiter is not None: From 2e0721c812f4d49278346245cfe17eb3c003d393 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 10 Nov 2024 10:58:51 +0000 Subject: [PATCH 0883/1511] [PR #9765/e06dec37 backport][3.11] Avoid duplicate len calls in StreamReader.feed_data (#9769) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/streams.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/aiohttp/streams.py b/aiohttp/streams.py index bec547aa10f..d95dbb3590b 100644 --- a/aiohttp/streams.py +++ b/aiohttp/streams.py @@ -274,9 +274,10 @@ def feed_data(self, data: bytes, size: int = 0) -> None: if not data: return - self._size += len(data) + data_len = len(data) + self._size += data_len self._buffer.append(data) - self.total_bytes += len(data) + self.total_bytes += data_len waiter = self._waiter if waiter is not None: From 9d9d76b77b2e90c31bd74fd416565b58275ed32a Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 10 Nov 2024 11:18:28 +0000 Subject: [PATCH 0884/1511] [PR #9762/c5d6b84f backport][3.10] Refactor payload registry to avoid linear searches for common types (#9766) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **This is a backport of PR #9762 as merged into master (c5d6b84febe26ee1c5fdcd5f5adc61da0bb5a5fa).** closes #9419 Looks to be a ~2.4% speed up so probably not worth mentioning in a changelog message <img width="253" alt="Screenshot 2024-11-10 at 9 57 48 AM" src="https://github.com/user-attachments/assets/a2684638-9687-4883-bf1e-1e114b188363"> <img width="248" alt="Screenshot 2024-11-10 at 9 57 54 AM" src="https://github.com/user-attachments/assets/9c3b2d89-bf25-4ca2-bb2a-327131b78001"> <img width="250" alt="Screenshot 2024-11-10 at 9 58 09 AM" src="https://github.com/user-attachments/assets/5ef03a39-adeb-46e2-86d9-614a4c100fa9"> <img width="245" alt="Screenshot 2024-11-10 at 9 58 15 AM" src="https://github.com/user-attachments/assets/8ec7341e-cf2a-41a4-b8df-e5086e8d5df9"> Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/payload.py | 30 ++++++++++++++++++++++-------- 1 file changed, 22 insertions(+), 8 deletions(-) diff --git a/aiohttp/payload.py b/aiohttp/payload.py index 27636977774..151f9dd497b 100644 --- a/aiohttp/payload.py +++ b/aiohttp/payload.py @@ -101,6 +101,7 @@ def __init__(self) -> None: self._first: List[_PayloadRegistryItem] = [] self._normal: List[_PayloadRegistryItem] = [] self._last: List[_PayloadRegistryItem] = [] + self._normal_lookup: Dict[Any, PayloadType] = {} def get( self, @@ -109,12 +110,20 @@ def get( _CHAIN: "Type[chain[_PayloadRegistryItem]]" = chain, **kwargs: Any, ) -> "Payload": + if self._first: + for factory, type_ in self._first: + if isinstance(data, type_): + return factory(data, *args, **kwargs) + # Try the fast lookup first + if lookup_factory := self._normal_lookup.get(type(data)): + return lookup_factory(data, *args, **kwargs) + # Bail early if its already a Payload if isinstance(data, Payload): return data - for factory, type in _CHAIN(self._first, self._normal, self._last): - if isinstance(data, type): + # Fallback to the slower linear search + for factory, type_ in _CHAIN(self._normal, self._last): + if isinstance(data, type_): return factory(data, *args, **kwargs) - raise LookupError() def register( @@ -124,6 +133,11 @@ def register( self._first.append((factory, type)) elif order is Order.normal: self._normal.append((factory, type)) + if isinstance(type, Iterable): + for t in type: + self._normal_lookup[t] = factory + else: + self._normal_lookup[type] = factory elif order is Order.try_last: self._last.append((factory, type)) else: @@ -159,7 +173,8 @@ def __init__( self._headers[hdrs.CONTENT_TYPE] = content_type else: self._headers[hdrs.CONTENT_TYPE] = self._default_content_type - self._headers.update(headers or {}) + if headers: + self._headers.update(headers) @property def size(self) -> Optional[int]: @@ -228,9 +243,6 @@ class BytesPayload(Payload): def __init__( self, value: Union[bytes, 
bytearray, memoryview], *args: Any, **kwargs: Any ) -> None: - if not isinstance(value, (bytes, bytearray, memoryview)): - raise TypeError(f"value argument must be byte-ish, not {type(value)!r}") - if "content_type" not in kwargs: kwargs["content_type"] = "application/octet-stream" @@ -238,8 +250,10 @@ def __init__( if isinstance(value, memoryview): self._size = value.nbytes - else: + elif isinstance(value, (bytes, bytearray)): self._size = len(value) + else: + raise TypeError(f"value argument must be byte-ish, not {type(value)!r}") if self._size > TOO_LARGE_BYTES_BODY: kwargs = {"source": self} From 14fd4ca259f2d78e40761ef335fe1c5908acd11b Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 10 Nov 2024 11:43:57 +0000 Subject: [PATCH 0885/1511] Release 3.11.0rc0 (#9770) --- CHANGES.rst | 4 ++-- aiohttp/__init__.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 9490fcc6133..00e957fdcb4 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,8 +10,8 @@ .. towncrier release notes start -3.11.0b5 (2024-11-10) -===================== +3.11.0rc0 (2024-11-10) +====================== Bug fixes --------- diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 6ca79aa5ef1..8a75f048e41 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.0b5" +__version__ = "3.11.0rc0" from typing import TYPE_CHECKING, Tuple From 602552e85293bf799ab44f2532da122856f80839 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 10 Nov 2024 19:01:01 +0000 Subject: [PATCH 0886/1511] [PR #9772/eaa10860 backport][3.11] Combine duplicate logic in `Response.write_eof` (#9774) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/web_response.py | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 96f688e2ddc..63e92f0537f 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -753,16 +753,13 @@ async def write_eof(self, data: bytes = b"") -> None: assert not data, f"data arg is not supported, got {data!r}" assert self._req is not None assert self._payload_writer is not None - if body is not None: - if self._must_be_empty_body: - await super().write_eof() - elif isinstance(self._body, Payload): - await self._body.write(self._payload_writer) - await super().write_eof() - else: - await super().write_eof(cast(bytes, body)) - else: + if body is None or self._must_be_empty_body: await super().write_eof() + elif isinstance(self._body, Payload): + await self._body.write(self._payload_writer) + await super().write_eof() + else: + await super().write_eof(cast(bytes, body)) async def _start(self, request: "BaseRequest") -> AbstractStreamWriter: if hdrs.CONTENT_LENGTH in self._headers: From c206fafbdb7dc1b218b19a1262cab0a7de9a1988 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 10 Nov 2024 19:29:29 +0000 Subject: [PATCH 0887/1511] [PR #9771/e80d5854 backport][3.11] Small cleanups to enabling compression in web_response (#9775) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/web_response.py | 80 +++++++++++++++++++------------------- tests/test_web_response.py | 18 +++++++++ 2 files changed, 58 insertions(+), 40 deletions(-) diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 63e92f0537f..315224dddf9 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -43,6 +43,7 @@ from .typedefs import JSONEncoder, LooseHeaders REASON_PHRASES = {http_status.value: http_status.phrase for http_status in HTTPStatus} +LARGE_BODY_SIZE = 1024**2 __all__ = ("ContentCoding", "StreamResponse", "Response", "json_response") @@ -412,27 +413,28 @@ def _generate_content_type_header( self._headers[CONTENT_TYPE] = ctype async def _do_start_compression(self, coding: ContentCoding) -> None: - if coding != ContentCoding.identity: - assert self._payload_writer is not None - self._headers[hdrs.CONTENT_ENCODING] = coding.value - self._payload_writer.enable_compression( - coding.value, self._compression_strategy - ) - # Compressed payload may have different content length, - # remove the header - self._headers.popall(hdrs.CONTENT_LENGTH, None) + if coding is ContentCoding.identity: + return + assert self._payload_writer is not None + self._headers[hdrs.CONTENT_ENCODING] = coding.value + self._payload_writer.enable_compression( + coding.value, self._compression_strategy + ) + # Compressed payload may have different content length, + # remove the header + self._headers.popall(hdrs.CONTENT_LENGTH, None) async def _start_compression(self, request: "BaseRequest") -> None: if self._compression_force: await self._do_start_compression(self._compression_force) - else: - # Encoding comparisons should be case-insensitive - # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1 - accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() - for value, coding in CONTENT_CODINGS.items(): - if value in accept_encoding: - await self._do_start_compression(coding) - return + return + # Encoding 
comparisons should be case-insensitive + # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1 + accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() + for value, coding in CONTENT_CODINGS.items(): + if value in accept_encoding: + await self._do_start_compression(coding) + return async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: if self._eof_sent: @@ -782,30 +784,28 @@ async def _start(self, request: "BaseRequest") -> AbstractStreamWriter: async def _do_start_compression(self, coding: ContentCoding) -> None: if self._chunked or isinstance(self._body, Payload): return await super()._do_start_compression(coding) - - if coding != ContentCoding.identity: - # Instead of using _payload_writer.enable_compression, - # compress the whole body - compressor = ZLibCompressor( - encoding=str(coding.value), - max_sync_chunk_size=self._zlib_executor_size, - executor=self._zlib_executor, - ) - assert self._body is not None - if self._zlib_executor_size is None and len(self._body) > 1024 * 1024: - warnings.warn( - "Synchronous compression of large response bodies " - f"({len(self._body)} bytes) might block the async event loop. " - "Consider providing a custom value to zlib_executor_size/" - "zlib_executor response properties or disabling compression on it." - ) - self._compressed_body = ( - await compressor.compress(self._body) + compressor.flush() + if coding is ContentCoding.identity: + return + # Instead of using _payload_writer.enable_compression, + # compress the whole body + compressor = ZLibCompressor( + encoding=coding.value, + max_sync_chunk_size=self._zlib_executor_size, + executor=self._zlib_executor, + ) + assert self._body is not None + if self._zlib_executor_size is None and len(self._body) > LARGE_BODY_SIZE: + warnings.warn( + "Synchronous compression of large response bodies " + f"({len(self._body)} bytes) might block the async event loop. 
" + "Consider providing a custom value to zlib_executor_size/" + "zlib_executor response properties or disabling compression on it." ) - assert self._compressed_body is not None - - self._headers[hdrs.CONTENT_ENCODING] = coding.value - self._headers[hdrs.CONTENT_LENGTH] = str(len(self._compressed_body)) + self._compressed_body = ( + await compressor.compress(self._body) + compressor.flush() + ) + self._headers[hdrs.CONTENT_ENCODING] = coding.value + self._headers[hdrs.CONTENT_LENGTH] = str(len(self._compressed_body)) def json_response( diff --git a/tests/test_web_response.py b/tests/test_web_response.py index 080edaf57c3..330b4b33939 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -481,6 +481,24 @@ async def test_force_compression_deflate() -> None: assert "deflate" == resp.headers.get(hdrs.CONTENT_ENCODING) +async def test_force_compression_deflate_large_payload() -> None: + """Make sure a warning is thrown for large payloads compressed in the event loop.""" + req = make_request( + "GET", "/", headers=CIMultiDict({hdrs.ACCEPT_ENCODING: "gzip, deflate"}) + ) + resp = Response(body=b"large") + + resp.enable_compression(ContentCoding.deflate) + assert resp.compression + + with pytest.warns( + Warning, match="Synchronous compression of large response bodies" + ), mock.patch("aiohttp.web_response.LARGE_BODY_SIZE", 2): + msg = await resp.prepare(req) + assert msg is not None + assert "deflate" == resp.headers.get(hdrs.CONTENT_ENCODING) + + async def test_force_compression_no_accept_deflate() -> None: req = make_request("GET", "/") resp = StreamResponse() From 3426707e6d7cb4a2385850243fccb75acc8c3580 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 10 Nov 2024 19:34:32 +0000 Subject: [PATCH 0888/1511] [PR #9772/eaa10860 backport][3.10] Combine duplicate logic in `Response.write_eof` (#9773) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/web_response.py | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 49ca24ee21a..4aefa9c35c7 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -736,16 +736,13 @@ async def write_eof(self, data: bytes = b"") -> None: assert not data, f"data arg is not supported, got {data!r}" assert self._req is not None assert self._payload_writer is not None - if body is not None: - if self._must_be_empty_body: - await super().write_eof() - elif isinstance(self._body, Payload): - await self._body.write(self._payload_writer) - await super().write_eof() - else: - await super().write_eof(cast(bytes, body)) - else: + if body is None or self._must_be_empty_body: await super().write_eof() + elif isinstance(self._body, Payload): + await self._body.write(self._payload_writer) + await super().write_eof() + else: + await super().write_eof(cast(bytes, body)) async def _start(self, request: "BaseRequest") -> AbstractStreamWriter: if hdrs.CONTENT_LENGTH in self._headers: From d2ec42eafabc4f943b4d1cebfdaab3852a9b235c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 10 Nov 2024 19:55:50 +0000 Subject: [PATCH 0889/1511] [PR #9777/c5bf5772 backport][3.11] Collapse if block nesting in web.Response (#9779) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/web_response.py | 24 +++++++++++------------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 315224dddf9..73063890f9a 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -647,19 +647,17 @@ def __init__( real_headers[hdrs.CONTENT_TYPE] = content_type + "; charset=" + charset body = text.encode(charset) text = None - else: - if hdrs.CONTENT_TYPE in real_headers: - if content_type is not None or charset is not None: - raise ValueError( - "passing both Content-Type header and " - "content_type or charset params " - "is forbidden" - ) - else: - if content_type is not None: - if charset is not None: - content_type += "; charset=" + charset - real_headers[hdrs.CONTENT_TYPE] = content_type + elif hdrs.CONTENT_TYPE in real_headers: + if content_type is not None or charset is not None: + raise ValueError( + "passing both Content-Type header and " + "content_type or charset params " + "is forbidden" + ) + elif content_type is not None: + if charset is not None: + content_type += "; charset=" + charset + real_headers[hdrs.CONTENT_TYPE] = content_type super().__init__(status=status, reason=reason, headers=real_headers) From 5a9c2fb77b41278c81d69946fdc6305db47977ae Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sun, 10 Nov 2024 20:01:33 +0000 Subject: [PATCH 0890/1511] [PR #9771/e80d5854 backport][3.10] Small cleanups to enabling compression in web_response (#9776) --- aiohttp/web_response.py | 76 +++++++++++++++++++------------------- tests/test_web_response.py | 18 +++++++++ 2 files changed, 56 insertions(+), 38 deletions(-) diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 4aefa9c35c7..285e7ab99f3 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -42,6 +42,7 @@ from .typedefs import JSONEncoder, LooseHeaders REASON_PHRASES = {http_status.value: http_status.phrase for http_status in HTTPStatus} +LARGE_BODY_SIZE = 1024**2 __all__ = ("ContentCoding", "StreamResponse", "Response", "json_response") @@ -397,25 +398,26 @@ def _generate_content_type_header( self._headers[CONTENT_TYPE] = ctype async def _do_start_compression(self, coding: ContentCoding) -> None: - if coding != ContentCoding.identity: - assert self._payload_writer is not None - self._headers[hdrs.CONTENT_ENCODING] = coding.value - self._payload_writer.enable_compression(coding.value) - # Compressed payload may have different content length, - # remove the header - self._headers.popall(hdrs.CONTENT_LENGTH, None) + if coding is ContentCoding.identity: + return + assert self._payload_writer is not None + self._headers[hdrs.CONTENT_ENCODING] = coding.value + self._payload_writer.enable_compression(coding.value) + # Compressed payload may have different content length, + # remove the header + self._headers.popall(hdrs.CONTENT_LENGTH, None) async def _start_compression(self, request: "BaseRequest") -> None: if self._compression_force: await self._do_start_compression(self._compression_force) - else: - # Encoding comparisons should be case-insensitive - # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1 - accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() - for value, coding in CONTENT_CODINGS.items(): - if value in 
accept_encoding: - await self._do_start_compression(coding) - return + return + # Encoding comparisons should be case-insensitive + # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1 + accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() + for value, coding in CONTENT_CODINGS.items(): + if value in accept_encoding: + await self._do_start_compression(coding) + return async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: if self._eof_sent: @@ -765,30 +767,28 @@ async def _start(self, request: "BaseRequest") -> AbstractStreamWriter: async def _do_start_compression(self, coding: ContentCoding) -> None: if self._chunked or isinstance(self._body, Payload): return await super()._do_start_compression(coding) - - if coding != ContentCoding.identity: - # Instead of using _payload_writer.enable_compression, - # compress the whole body - compressor = ZLibCompressor( - encoding=str(coding.value), - max_sync_chunk_size=self._zlib_executor_size, - executor=self._zlib_executor, - ) - assert self._body is not None - if self._zlib_executor_size is None and len(self._body) > 1024 * 1024: - warnings.warn( - "Synchronous compression of large response bodies " - f"({len(self._body)} bytes) might block the async event loop. " - "Consider providing a custom value to zlib_executor_size/" - "zlib_executor response properties or disabling compression on it." 
- ) - self._compressed_body = ( - await compressor.compress(self._body) + compressor.flush() + if coding is ContentCoding.identity: + return + # Instead of using _payload_writer.enable_compression, + # compress the whole body + compressor = ZLibCompressor( + encoding=coding.value, + max_sync_chunk_size=self._zlib_executor_size, + executor=self._zlib_executor, + ) + assert self._body is not None + if self._zlib_executor_size is None and len(self._body) > LARGE_BODY_SIZE: + warnings.warn( + "Synchronous compression of large response bodies " + f"({len(self._body)} bytes) might block the async event loop. " + "Consider providing a custom value to zlib_executor_size/" + "zlib_executor response properties or disabling compression on it." ) - assert self._compressed_body is not None - - self._headers[hdrs.CONTENT_ENCODING] = coding.value - self._headers[hdrs.CONTENT_LENGTH] = str(len(self._compressed_body)) + self._compressed_body = ( + await compressor.compress(self._body) + compressor.flush() + ) + self._headers[hdrs.CONTENT_ENCODING] = coding.value + self._headers[hdrs.CONTENT_LENGTH] = str(len(self._compressed_body)) def json_response( diff --git a/tests/test_web_response.py b/tests/test_web_response.py index 25e464f7bed..d31d9f7c293 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -480,6 +480,24 @@ async def test_force_compression_deflate() -> None: assert "deflate" == resp.headers.get(hdrs.CONTENT_ENCODING) +async def test_force_compression_deflate_large_payload() -> None: + """Make sure a warning is thrown for large payloads compressed in the event loop.""" + req = make_request( + "GET", "/", headers=CIMultiDict({hdrs.ACCEPT_ENCODING: "gzip, deflate"}) + ) + resp = Response(body=b"large") + + resp.enable_compression(ContentCoding.deflate) + assert resp.compression + + with pytest.warns( + Warning, match="Synchronous compression of large response bodies" + ), mock.patch("aiohttp.web_response.LARGE_BODY_SIZE", 2): + msg = await 
resp.prepare(req) + assert msg is not None + assert "deflate" == resp.headers.get(hdrs.CONTENT_ENCODING) + + async def test_force_compression_no_accept_deflate() -> None: req = make_request("GET", "/") resp = StreamResponse() From e499d552eca4a32079440dc7e0865e42baf8fc7b Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 10 Nov 2024 21:05:15 +0000 Subject: [PATCH 0891/1511] [PR #9781/81162572 backport][3.11] Ensure Cython WebSocket parser can handle frames of 2**23 in size (#9782) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/9781.feature.rst | 1 + aiohttp/_websocket/reader_c.pxd | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) create mode 120000 CHANGES/9781.feature.rst diff --git a/CHANGES/9781.feature.rst b/CHANGES/9781.feature.rst new file mode 120000 index 00000000000..a93584bccd8 --- /dev/null +++ b/CHANGES/9781.feature.rst @@ -0,0 +1 @@ +9543.feature.rst \ No newline at end of file diff --git a/aiohttp/_websocket/reader_c.pxd b/aiohttp/_websocket/reader_c.pxd index 2a60f327061..ffd53a940f6 100644 --- a/aiohttp/_websocket/reader_c.pxd +++ b/aiohttp/_websocket/reader_c.pxd @@ -46,12 +46,12 @@ cdef class WebSocketReader: cdef object _frame_fin cdef object _frame_opcode cdef object _frame_payload - cdef unsigned int _frame_payload_len + cdef unsigned long long _frame_payload_len cdef bytes _tail cdef bint _has_mask cdef bytes _frame_mask - cdef unsigned int _payload_length + cdef unsigned long long _payload_length cdef unsigned int _payload_length_flag cdef object _compressed cdef object _decompressobj From ee14b331d74d631854b2b4ff765e2cbc80093208 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 10 Nov 2024 21:11:10 +0000 Subject: [PATCH 0892/1511] [PR #9777/c5bf5772 backport][3.10] Collapse if block nesting in web.Response (#9778) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/web_response.py | 24 +++++++++++------------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 285e7ab99f3..dbdbcda5d70 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -630,19 +630,17 @@ def __init__( real_headers[hdrs.CONTENT_TYPE] = content_type + "; charset=" + charset body = text.encode(charset) text = None - else: - if hdrs.CONTENT_TYPE in real_headers: - if content_type is not None or charset is not None: - raise ValueError( - "passing both Content-Type header and " - "content_type or charset params " - "is forbidden" - ) - else: - if content_type is not None: - if charset is not None: - content_type += "; charset=" + charset - real_headers[hdrs.CONTENT_TYPE] = content_type + elif hdrs.CONTENT_TYPE in real_headers: + if content_type is not None or charset is not None: + raise ValueError( + "passing both Content-Type header and " + "content_type or charset params " + "is forbidden" + ) + elif content_type is not None: + if charset is not None: + content_type += "; charset=" + charset + real_headers[hdrs.CONTENT_TYPE] = content_type super().__init__(status=status, reason=reason, headers=real_headers) From fb22e1eb583a191dc89081c63cfe7a128d07a0cc Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sun, 10 Nov 2024 22:29:13 +0000 Subject: [PATCH 0893/1511] [PR #9784/2f6ff6d backport][3.11] Add benchmark for reading WebSocket payloads larger than 2**16 (#9786) --- tests/test_benchmarks_http_websocket.py | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/tests/test_benchmarks_http_websocket.py b/tests/test_benchmarks_http_websocket.py index 05379b6e38f..5091085e40c 100644 --- a/tests/test_benchmarks_http_websocket.py +++ b/tests/test_benchmarks_http_websocket.py @@ -6,7 +6,7 @@ from pytest_codspeed import BenchmarkFixture from aiohttp import DataQueue -from aiohttp._websocket.helpers import MSG_SIZE +from aiohttp._websocket.helpers import MSG_SIZE, PACK_LEN3 from aiohttp.base_protocol import BaseProtocol from aiohttp.http_websocket import ( WebSocketReader, @@ -16,6 +16,27 @@ ) +def test_read_large_binary_websocket_messages( + loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture +) -> None: + """Read one hundred large binary websocket messages.""" + queue: DataQueue[WSMessage] = DataQueue(loop=loop) + reader = WebSocketReader(queue, max_msg_size=2**18) + + # PACK3 has a minimum message length of 2**16 bytes. + message = b"x" * ((2**16) + 1) + msg_length = len(message) + first_byte = 0x80 | 0 | WSMsgType.BINARY.value + header = PACK_LEN3(first_byte, 127, msg_length) + raw_message = header + message + feed_data = reader.feed_data + + @benchmark + def _run() -> None: + for _ in range(100): + feed_data(raw_message) + + def test_read_one_hundred_websocket_text_messages( loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture ) -> None: From bb1c0c430f8b3bda0fc9f3169e5697835c03612f Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 10 Nov 2024 22:43:05 +0000 Subject: [PATCH 0894/1511] [PR #9780/eac8fb84 backport][3.10] Replace get_event_loop with get_running_loop in the compressor (#9788) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/compression_utils.py | 22 ++++++++++++++++++---- tests/test_compression_utils.py | 23 +++++++++++++++++++++++ 2 files changed, 41 insertions(+), 4 deletions(-) create mode 100644 tests/test_compression_utils.py diff --git a/aiohttp/compression_utils.py b/aiohttp/compression_utils.py index ab4a2f1cc84..ebe8857f487 100644 --- a/aiohttp/compression_utils.py +++ b/aiohttp/compression_utils.py @@ -70,6 +70,14 @@ def compress_sync(self, data: bytes) -> bytes: return self._compressor.compress(data) async def compress(self, data: bytes) -> bytes: + """Compress the data and returned the compressed bytes. + + Note that flush() must be called after the last call to compress() + + If the data size is large than the max_sync_chunk_size, the compression + will be done in the executor. Otherwise, the compression will be done + in the event loop. + """ async with self._compress_lock: # To ensure the stream is consistent in the event # there are multiple writers, we need to lock @@ -79,8 +87,8 @@ async def compress(self, data: bytes) -> bytes: self._max_sync_chunk_size is not None and len(data) > self._max_sync_chunk_size ): - return await asyncio.get_event_loop().run_in_executor( - self._executor, self.compress_sync, data + return await asyncio.get_running_loop().run_in_executor( + self._executor, self._compressor.compress, data ) return self.compress_sync(data) @@ -107,12 +115,18 @@ def decompress_sync(self, data: bytes, max_length: int = 0) -> bytes: return self._decompressor.decompress(data, max_length) async def decompress(self, data: bytes, max_length: int = 0) -> bytes: + """Decompress the data and return the decompressed bytes. + + If the data size is large than the max_sync_chunk_size, the decompression + will be done in the executor. Otherwise, the decompression will be done + in the event loop. 
+ """ if ( self._max_sync_chunk_size is not None and len(data) > self._max_sync_chunk_size ): - return await asyncio.get_event_loop().run_in_executor( - self._executor, self.decompress_sync, data, max_length + return await asyncio.get_running_loop().run_in_executor( + self._executor, self._decompressor.decompress, data, max_length ) return self.decompress_sync(data, max_length) diff --git a/tests/test_compression_utils.py b/tests/test_compression_utils.py new file mode 100644 index 00000000000..047a4ff7cf0 --- /dev/null +++ b/tests/test_compression_utils.py @@ -0,0 +1,23 @@ +"""Tests for compression utils.""" + +from aiohttp.compression_utils import ZLibCompressor, ZLibDecompressor + + +async def test_compression_round_trip_in_executor() -> None: + """Ensure that compression and decompression work correctly in the executor.""" + compressor = ZLibCompressor(max_sync_chunk_size=1) + decompressor = ZLibDecompressor(max_sync_chunk_size=1) + data = b"Hi" * 100 + compressed_data = await compressor.compress(data) + compressor.flush() + decompressed_data = await decompressor.decompress(compressed_data) + assert data == decompressed_data + + +async def test_compression_round_trip_in_event_loop() -> None: + """Ensure that compression and decompression work correctly in the event loop.""" + compressor = ZLibCompressor(max_sync_chunk_size=10000) + decompressor = ZLibDecompressor(max_sync_chunk_size=10000) + data = b"Hi" * 100 + compressed_data = await compressor.compress(data) + compressor.flush() + decompressed_data = await decompressor.decompress(compressed_data) + assert data == decompressed_data From 5cf39d90d513be5fdfac1578b2e7d5f928faaebd Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 10 Nov 2024 22:43:09 +0000 Subject: [PATCH 0895/1511] [PR #9780/eac8fb84 backport][3.11] Replace get_event_loop with get_running_loop in the compressor (#9789) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/compression_utils.py | 22 ++++++++++++++++++---- tests/test_compression_utils.py | 23 +++++++++++++++++++++++ 2 files changed, 41 insertions(+), 4 deletions(-) create mode 100644 tests/test_compression_utils.py diff --git a/aiohttp/compression_utils.py b/aiohttp/compression_utils.py index ab4a2f1cc84..ebe8857f487 100644 --- a/aiohttp/compression_utils.py +++ b/aiohttp/compression_utils.py @@ -70,6 +70,14 @@ def compress_sync(self, data: bytes) -> bytes: return self._compressor.compress(data) async def compress(self, data: bytes) -> bytes: + """Compress the data and returned the compressed bytes. + + Note that flush() must be called after the last call to compress() + + If the data size is large than the max_sync_chunk_size, the compression + will be done in the executor. Otherwise, the compression will be done + in the event loop. + """ async with self._compress_lock: # To ensure the stream is consistent in the event # there are multiple writers, we need to lock @@ -79,8 +87,8 @@ async def compress(self, data: bytes) -> bytes: self._max_sync_chunk_size is not None and len(data) > self._max_sync_chunk_size ): - return await asyncio.get_event_loop().run_in_executor( - self._executor, self.compress_sync, data + return await asyncio.get_running_loop().run_in_executor( + self._executor, self._compressor.compress, data ) return self.compress_sync(data) @@ -107,12 +115,18 @@ def decompress_sync(self, data: bytes, max_length: int = 0) -> bytes: return self._decompressor.decompress(data, max_length) async def decompress(self, data: bytes, max_length: int = 0) -> bytes: + """Decompress the data and return the decompressed bytes. + + If the data size is large than the max_sync_chunk_size, the decompression + will be done in the executor. Otherwise, the decompression will be done + in the event loop. 
+ """ if ( self._max_sync_chunk_size is not None and len(data) > self._max_sync_chunk_size ): - return await asyncio.get_event_loop().run_in_executor( - self._executor, self.decompress_sync, data, max_length + return await asyncio.get_running_loop().run_in_executor( + self._executor, self._decompressor.decompress, data, max_length ) return self.decompress_sync(data, max_length) diff --git a/tests/test_compression_utils.py b/tests/test_compression_utils.py new file mode 100644 index 00000000000..047a4ff7cf0 --- /dev/null +++ b/tests/test_compression_utils.py @@ -0,0 +1,23 @@ +"""Tests for compression utils.""" + +from aiohttp.compression_utils import ZLibCompressor, ZLibDecompressor + + +async def test_compression_round_trip_in_executor() -> None: + """Ensure that compression and decompression work correctly in the executor.""" + compressor = ZLibCompressor(max_sync_chunk_size=1) + decompressor = ZLibDecompressor(max_sync_chunk_size=1) + data = b"Hi" * 100 + compressed_data = await compressor.compress(data) + compressor.flush() + decompressed_data = await decompressor.decompress(compressed_data) + assert data == decompressed_data + + +async def test_compression_round_trip_in_event_loop() -> None: + """Ensure that compression and decompression work correctly in the event loop.""" + compressor = ZLibCompressor(max_sync_chunk_size=10000) + decompressor = ZLibDecompressor(max_sync_chunk_size=10000) + data = b"Hi" * 100 + compressed_data = await compressor.compress(data) + compressor.flush() + decompressed_data = await decompressor.decompress(compressed_data) + assert data == decompressed_data From 4377d78e89139f690146eee9700f3da431331bfc Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 10 Nov 2024 22:43:17 +0000 Subject: [PATCH 0896/1511] [PR #9787/d1eeceab backport][3.10] Fix flakey test_import_time with Python 3.13 (#9791) --- tests/test_imports.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff 
--git a/tests/test_imports.py b/tests/test_imports.py index ff63f8fb0f6..f82eec41086 100644 --- a/tests/test_imports.py +++ b/tests/test_imports.py @@ -34,13 +34,14 @@ def test_web___all__(pytester: pytest.Pytester) -> None: _TARGET_TIMINGS_BY_PYTHON_VERSION = { "3.12": ( - # 3.12 is expected to be a bit slower due to performance trade-offs, + # 3.12+ is expected to be a bit slower due to performance trade-offs, # and even slower under pytest-xdist, especially in CI _XDIST_WORKER_COUNT * 100 * (1 if _IS_CI_ENV else 1.53) if _IS_XDIST_RUN else 250 ), } +_TARGET_TIMINGS_BY_PYTHON_VERSION["3.13"] = _TARGET_TIMINGS_BY_PYTHON_VERSION["3.12"] @pytest.mark.internal From bd3a3be90b04d3df2fed46aaa4b1e6e0c03ebd01 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 10 Nov 2024 22:53:18 +0000 Subject: [PATCH 0897/1511] [PR #9787/d1eeceab backport][3.11] Fix flakey test_import_time with Python 3.13 (#9792) --- tests/test_imports.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/test_imports.py b/tests/test_imports.py index fcd12c011b9..30d5a5d43f1 100644 --- a/tests/test_imports.py +++ b/tests/test_imports.py @@ -34,13 +34,14 @@ def test_web___all__(pytester: pytest.Pytester) -> None: _TARGET_TIMINGS_BY_PYTHON_VERSION = { "3.12": ( - # 3.12 is expected to be a bit slower due to performance trade-offs, + # 3.12+ is expected to be a bit slower due to performance trade-offs, # and even slower under pytest-xdist, especially in CI _XDIST_WORKER_COUNT * 100 * (1 if _IS_CI_ENV else 1.53) if _IS_XDIST_RUN else 250 ), } +_TARGET_TIMINGS_BY_PYTHON_VERSION["3.13"] = _TARGET_TIMINGS_BY_PYTHON_VERSION["3.12"] @pytest.mark.internal From 1ce99538efcbc03ca3a4d3f47d9b7fbb196a2261 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Mon, 11 Nov 2024 01:42:12 +0000 Subject: [PATCH 0898/1511] [PR #9784/2f6ff6d backport][3.10] Add benchmark for reading WebSocket payloads larger than 2**16 (#9785) --- tests/test_benchmarks_http_websocket.py | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/tests/test_benchmarks_http_websocket.py b/tests/test_benchmarks_http_websocket.py index 7d773d18bb7..becb0e568bd 100644 --- a/tests/test_benchmarks_http_websocket.py +++ b/tests/test_benchmarks_http_websocket.py @@ -9,6 +9,7 @@ from aiohttp.base_protocol import BaseProtocol from aiohttp.http_websocket import ( MSG_SIZE, + PACK_LEN3, WebSocketReader, WebSocketWriter, WSMessage, @@ -16,6 +17,27 @@ ) +def test_read_large_binary_websocket_messages( + loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture +) -> None: + """Read one hundred large binary websocket messages.""" + queue: DataQueue[WSMessage] = DataQueue(loop=loop) + reader = WebSocketReader(queue, max_msg_size=2**18) + + # PACK3 has a minimum message length of 2**16 bytes. + message = b"x" * ((2**16) + 1) + msg_length = len(message) + first_byte = 0x80 | 0 | WSMsgType.BINARY.value + header = PACK_LEN3(first_byte, 127, msg_length) + raw_message = header + message + feed_data = reader.feed_data + + @benchmark + def _run() -> None: + for _ in range(100): + feed_data(raw_message) + + def test_read_one_hundred_websocket_text_messages( loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture ) -> None: From 996c9292dadfe4dea2e1e014bb7f12d19d2e80d9 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Mon, 11 Nov 2024 02:05:14 +0000 Subject: [PATCH 0899/1511] [PR #9685/5241897 backport][3.11] Fix WebSocket reader flow control calculations (#9793) Co-authored-by: pre-commit-ci[bot] --- CHANGES/9685.breaking.rst | 1 + aiohttp/__init__.py | 2 - aiohttp/_websocket/reader.py | 14 ++- aiohttp/_websocket/reader_c.pxd | 22 +++- aiohttp/_websocket/reader_py.py | 103 ++++++++++++++-- aiohttp/client.py | 8 +- aiohttp/client_proto.py | 2 +- aiohttp/client_ws.py | 7 +- aiohttp/streams.py | 56 ++------- aiohttp/web_ws.py | 11 +- tests/test_benchmarks_http_websocket.py | 13 +- tests/test_client_ws_functional.py | 37 +++--- tests/test_flowcontrol_streams.py | 22 +--- tests/test_http_parser.py | 150 ++++++++++++++---------- tests/test_web_websocket.py | 8 +- tests/test_websocket_data_queue.py | 36 ++++++ tests/test_websocket_parser.py | 27 +++-- tests/test_websocket_writer.py | 6 +- 18 files changed, 318 insertions(+), 207 deletions(-) create mode 100644 CHANGES/9685.breaking.rst create mode 100644 tests/test_websocket_data_queue.py diff --git a/CHANGES/9685.breaking.rst b/CHANGES/9685.breaking.rst new file mode 100644 index 00000000000..7ea7cd68e76 --- /dev/null +++ b/CHANGES/9685.breaking.rst @@ -0,0 +1 @@ +``FlowControlDataQueue`` has been replaced with the ``WebSocketDataQueue`` -- by :user:`bdraco`. 
diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 8a75f048e41..1e906fe1751 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -93,7 +93,6 @@ EMPTY_PAYLOAD as EMPTY_PAYLOAD, DataQueue as DataQueue, EofStream as EofStream, - FlowControlDataQueue as FlowControlDataQueue, StreamReader as StreamReader, ) from .tracing import ( @@ -216,7 +215,6 @@ "DataQueue", "EMPTY_PAYLOAD", "EofStream", - "FlowControlDataQueue", "StreamReader", # tracing "TraceConfig", diff --git a/aiohttp/_websocket/reader.py b/aiohttp/_websocket/reader.py index 254288ac7e7..23f32265cfc 100644 --- a/aiohttp/_websocket/reader.py +++ b/aiohttp/_websocket/reader.py @@ -5,17 +5,27 @@ from ..helpers import NO_EXTENSIONS if TYPE_CHECKING or NO_EXTENSIONS: # pragma: no cover - from .reader_py import WebSocketReader as WebSocketReaderPython + from .reader_py import ( + WebSocketDataQueue as WebSocketDataQueuePython, + WebSocketReader as WebSocketReaderPython, + ) WebSocketReader = WebSocketReaderPython + WebSocketDataQueue = WebSocketDataQueuePython else: try: from .reader_c import ( # type: ignore[import-not-found] + WebSocketDataQueue as WebSocketDataQueueCython, WebSocketReader as WebSocketReaderCython, ) WebSocketReader = WebSocketReaderCython + WebSocketDataQueue = WebSocketDataQueueCython except ImportError: # pragma: no cover - from .reader_py import WebSocketReader as WebSocketReaderPython + from .reader_py import ( + WebSocketDataQueue as WebSocketDataQueuePython, + WebSocketReader as WebSocketReaderPython, + ) WebSocketReader = WebSocketReaderPython + WebSocketDataQueue = WebSocketDataQueuePython diff --git a/aiohttp/_websocket/reader_c.pxd b/aiohttp/_websocket/reader_c.pxd index ffd53a940f6..461e658e116 100644 --- a/aiohttp/_websocket/reader_c.pxd +++ b/aiohttp/_websocket/reader_c.pxd @@ -31,11 +31,29 @@ cdef set MESSAGE_TYPES_WITH_CONTENT cdef tuple EMPTY_FRAME cdef tuple EMPTY_FRAME_ERROR +cdef class WebSocketDataQueue: + + cdef unsigned int _size + cdef public object 
_protocol + cdef unsigned int _limit + cdef object _loop + cdef bint _eof + cdef object _waiter + cdef object _exception + cdef public object _buffer + cdef object _get_buffer + cdef object _put_buffer + + cdef void _release_waiter(self) + + cpdef void feed_data(self, object data, unsigned int size) + + @cython.locals(size="unsigned int") + cdef _read_from_buffer(self) cdef class WebSocketReader: - cdef object queue - cdef object _queue_feed_data + cdef WebSocketDataQueue queue cdef unsigned int _max_msg_size cdef Exception _exc diff --git a/aiohttp/_websocket/reader_py.py b/aiohttp/_websocket/reader_py.py index a3993220b70..2295a255148 100644 --- a/aiohttp/_websocket/reader_py.py +++ b/aiohttp/_websocket/reader_py.py @@ -1,10 +1,14 @@ """Reader for WebSocket protocol versions 13 and 8.""" -from typing import Final, List, Optional, Set, Tuple, Union +import asyncio +import builtins +from collections import deque +from typing import Deque, Final, List, Optional, Set, Tuple, Union +from ..base_protocol import BaseProtocol from ..compression_utils import ZLibDecompressor -from ..helpers import set_exception -from ..streams import FlowControlDataQueue +from ..helpers import _EXC_SENTINEL, set_exception +from ..streams import EofStream from .helpers import UNPACK_CLOSE_CODE, UNPACK_LEN3, websocket_mask from .models import ( WS_DEFLATE_TRAILING, @@ -39,16 +43,89 @@ TUPLE_NEW = tuple.__new__ +int_ = int # Prevent Cython from converting to PyInt + + +class WebSocketDataQueue: + """WebSocketDataQueue resumes and pauses an underlying stream. + + It is a destination for WebSocket data. 
+ """ -class WebSocketReader: def __init__( + self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop + ) -> None: + self._size = 0 + self._protocol = protocol + self._limit = limit * 2 + self._loop = loop + self._eof = False + self._waiter: Optional[asyncio.Future[None]] = None + self._exception: Union[BaseException, None] = None + self._buffer: Deque[Tuple[WSMessage, int]] = deque() + self._get_buffer = self._buffer.popleft + self._put_buffer = self._buffer.append + + def exception(self) -> Optional[BaseException]: + return self._exception + + def set_exception( self, - queue: FlowControlDataQueue[WSMessage], - max_msg_size: int, - compress: bool = True, + exc: "BaseException", + exc_cause: builtins.BaseException = _EXC_SENTINEL, + ) -> None: + self._eof = True + self._exception = exc + if (waiter := self._waiter) is not None: + self._waiter = None + set_exception(waiter, exc, exc_cause) + + def _release_waiter(self) -> None: + if (waiter := self._waiter) is None: + return + self._waiter = None + if not waiter.done(): + waiter.set_result(None) + + def feed_eof(self) -> None: + self._eof = True + self._release_waiter() + + def feed_data(self, data: "WSMessage", size: "int_") -> None: + self._size += size + self._put_buffer((data, size)) + self._release_waiter() + if self._size > self._limit and not self._protocol._reading_paused: + self._protocol.pause_reading() + + async def read(self) -> WSMessage: + if not self._buffer and not self._eof: + assert not self._waiter + self._waiter = self._loop.create_future() + try: + await self._waiter + except (asyncio.CancelledError, asyncio.TimeoutError): + self._waiter = None + raise + return self._read_from_buffer() + + def _read_from_buffer(self) -> WSMessage: + if self._buffer: + data, size = self._get_buffer() + self._size -= size + if self._size < self._limit and self._protocol._reading_paused: + self._protocol.resume_reading() + return data + if self._exception is not None: + raise self._exception + 
raise EofStream + + +class WebSocketReader: + def __init__( + self, queue: WebSocketDataQueue, max_msg_size: int, compress: bool = True ) -> None: self.queue = queue - self._queue_feed_data = queue.feed_data self._max_msg_size = max_msg_size self._exc: Optional[Exception] = None @@ -187,12 +264,12 @@ def _feed_data(self, data: bytes) -> None: # bottleneck, so we use tuple.__new__ to improve performance. # This is not type safe, but many tests should fail in # test_client_ws_functional.py if this is wrong. - self._queue_feed_data( + self.queue.feed_data( TUPLE_NEW(WSMessage, (WS_MSG_TYPE_TEXT, text, "")), len(payload_merged), ) else: - self._queue_feed_data( + self.queue.feed_data( TUPLE_NEW(WSMessage, (WS_MSG_TYPE_BINARY, payload_merged, "")), len(payload_merged), ) @@ -221,14 +298,14 @@ def _feed_data(self, data: bytes) -> None: else: msg = TUPLE_NEW(WSMessage, (WSMsgType.CLOSE, 0, "")) - self._queue_feed_data(msg, 0) + self.queue.feed_data(msg, 0) elif opcode == OP_CODE_PING: msg = TUPLE_NEW(WSMessage, (WSMsgType.PING, payload, "")) - self._queue_feed_data(msg, len(payload)) + self.queue.feed_data(msg, len(payload)) elif opcode == OP_CODE_PONG: msg = TUPLE_NEW(WSMessage, (WSMsgType.PONG, payload, "")) - self._queue_feed_data(msg, len(payload)) + self.queue.feed_data(msg, len(payload)) else: raise WebSocketError( diff --git a/aiohttp/client.py b/aiohttp/client.py index eaca1711e9f..31c76ff98af 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -37,6 +37,7 @@ from yarl import URL from . 
import hdrs, http, payload +from ._websocket.reader import WebSocketDataQueue from .abc import AbstractCookieJar from .client_exceptions import ( ClientConnectionError, @@ -100,8 +101,7 @@ strip_auth_from_url, ) from .http import WS_KEY, HttpVersion, WebSocketReader, WebSocketWriter -from .http_websocket import WSHandshakeError, WSMessage, ws_ext_gen, ws_ext_parse -from .streams import FlowControlDataQueue +from .http_websocket import WSHandshakeError, ws_ext_gen, ws_ext_parse from .tracing import Trace, TraceConfig from .typedefs import JSONEncoder, LooseCookies, LooseHeaders, Query, StrOrURL @@ -1098,9 +1098,7 @@ async def _ws_connect( transport = conn.transport assert transport is not None - reader: FlowControlDataQueue[WSMessage] = FlowControlDataQueue( - conn_proto, 2**16, loop=self._loop - ) + reader = WebSocketDataQueue(conn_proto, 2**16, loop=self._loop) conn_proto.set_parser(WebSocketReader(reader, max_msg_size), reader) writer = WebSocketWriter( conn_proto, diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index 3050ceaa54c..0eee826f255 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -151,7 +151,7 @@ def set_exception( def set_parser(self, parser: Any, payload: Any) -> None: # TODO: actual types are: # parser: WebSocketReader - # payload: FlowControlDataQueue + # payload: WebSocketDataQueue # but they are not generi enough # Need an ABC for both types self._payload = payload diff --git a/aiohttp/client_ws.py b/aiohttp/client_ws.py index 1ed4c8ad039..f4cfa1bffe8 100644 --- a/aiohttp/client_ws.py +++ b/aiohttp/client_ws.py @@ -7,6 +7,7 @@ import attr +from ._websocket.reader import WebSocketDataQueue from .client_exceptions import ClientError, ServerTimeoutError, WSMessageTypeError from .client_reqrep import ClientResponse from .helpers import calculate_timeout_when, set_result @@ -19,7 +20,7 @@ WSMsgType, ) from .http_websocket import _INTERNAL_RECEIVE_TYPES, WebSocketWriter -from .streams import EofStream, 
FlowControlDataQueue +from .streams import EofStream from .typedefs import ( DEFAULT_JSON_DECODER, DEFAULT_JSON_ENCODER, @@ -45,7 +46,7 @@ class ClientWSTimeout: class ClientWebSocketResponse: def __init__( self, - reader: "FlowControlDataQueue[WSMessage]", + reader: WebSocketDataQueue, writer: WebSocketWriter, protocol: Optional[str], response: ClientResponse, @@ -173,7 +174,7 @@ def _handle_ping_pong_exception(self, exc: BaseException) -> None: self._exception = exc self._response.close() if self._waiting and not self._closing: - self._reader.feed_data(WSMessage(WSMsgType.ERROR, exc, None)) + self._reader.feed_data(WSMessage(WSMsgType.ERROR, exc, None), 0) def _set_closed(self) -> None: """Set the connection to closed. diff --git a/aiohttp/streams.py b/aiohttp/streams.py index d95dbb3590b..59aa1dd0c3b 100644 --- a/aiohttp/streams.py +++ b/aiohttp/streams.py @@ -28,7 +28,6 @@ "EofStream", "StreamReader", "DataQueue", - "FlowControlDataQueue", ) _T = TypeVar("_T") @@ -660,18 +659,15 @@ def feed_eof(self) -> None: self._waiter = None set_result(waiter, None) - async def _wait_for_data(self) -> None: - assert not self._waiter - self._waiter = self._loop.create_future() - try: - await self._waiter - except (asyncio.CancelledError, asyncio.TimeoutError): - self._waiter = None - raise - async def read(self) -> _T: if not self._buffer and not self._eof: - await self._wait_for_data() + assert not self._waiter + self._waiter = self._loop.create_future() + try: + await self._waiter + except (asyncio.CancelledError, asyncio.TimeoutError): + self._waiter = None + raise if self._buffer: data, _ = self._buffer.popleft() return data @@ -681,41 +677,3 @@ async def read(self) -> _T: def __aiter__(self) -> AsyncStreamIterator[_T]: return AsyncStreamIterator(self.read) - - -class FlowControlDataQueue(DataQueue[_T]): - """FlowControlDataQueue resumes and pauses an underlying stream. - - It is a destination for parsed data. 
- """ - - def __init__( - self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop - ) -> None: - super().__init__(loop=loop) - self._size = 0 - self._protocol = protocol - self._limit = limit * 2 - self._buffer: Deque[Tuple[_T, int]] = collections.deque() - - def feed_data(self, data: _T, size: int = 0) -> None: - self._size += size - self._buffer.append((data, size)) - if (waiter := self._waiter) is not None: - self._waiter = None - set_result(waiter, None) - if self._size > self._limit and not self._protocol._reading_paused: - self._protocol.pause_reading() - - async def read(self) -> _T: - if not self._buffer and not self._eof: - await self._wait_for_data() - if self._buffer: - data, size = self._buffer.popleft() - self._size -= size - if self._size < self._limit and self._protocol._reading_paused: - self._protocol.resume_reading() - return data - if self._exception is not None: - raise self._exception - raise EofStream diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index bf93e1885e3..4a35fa90da1 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -10,6 +10,7 @@ from multidict import CIMultiDict from . 
import hdrs +from ._websocket.reader import WebSocketDataQueue from ._websocket.writer import DEFAULT_LIMIT from .abc import AbstractStreamWriter from .client_exceptions import WSMessageTypeError @@ -29,7 +30,7 @@ ) from .http_websocket import _INTERNAL_RECEIVE_TYPES from .log import ws_logger -from .streams import EofStream, FlowControlDataQueue +from .streams import EofStream from .typedefs import JSONDecoder, JSONEncoder from .web_exceptions import HTTPBadRequest, HTTPException from .web_request import BaseRequest @@ -79,7 +80,7 @@ def __init__( self._protocols = protocols self._ws_protocol: Optional[str] = None self._writer: Optional[WebSocketWriter] = None - self._reader: Optional[FlowControlDataQueue[WSMessage]] = None + self._reader: Optional[WebSocketDataQueue] = None self._closed = False self._closing = False self._conn_lost = 0 @@ -191,7 +192,7 @@ def _handle_ping_pong_exception(self, exc: BaseException) -> None: self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) self._exception = exc if self._waiting and not self._closing and self._reader is not None: - self._reader.feed_data(WSMessage(WSMsgType.ERROR, exc, None)) + self._reader.feed_data(WSMessage(WSMsgType.ERROR, exc, None), 0) def _set_closed(self) -> None: """Set the connection to closed. 
@@ -329,7 +330,7 @@ def _post_start( loop = self._loop assert loop is not None - self._reader = FlowControlDataQueue(request._protocol, 2**16, loop=loop) + self._reader = WebSocketDataQueue(request._protocol, 2**16, loop=loop) request.protocol.set_parser( WebSocketReader(self._reader, self._max_msg_size, compress=self._compress) ) @@ -464,7 +465,7 @@ async def close( assert self._loop is not None assert self._close_wait is None self._close_wait = self._loop.create_future() - reader.feed_data(WS_CLOSING_MESSAGE) + reader.feed_data(WS_CLOSING_MESSAGE, 0) await self._close_wait if self._closing: diff --git a/tests/test_benchmarks_http_websocket.py b/tests/test_benchmarks_http_websocket.py index 5091085e40c..cc1006c7381 100644 --- a/tests/test_benchmarks_http_websocket.py +++ b/tests/test_benchmarks_http_websocket.py @@ -5,22 +5,17 @@ from pytest_codspeed import BenchmarkFixture -from aiohttp import DataQueue from aiohttp._websocket.helpers import MSG_SIZE, PACK_LEN3 +from aiohttp._websocket.reader import WebSocketDataQueue from aiohttp.base_protocol import BaseProtocol -from aiohttp.http_websocket import ( - WebSocketReader, - WebSocketWriter, - WSMessage, - WSMsgType, -) +from aiohttp.http_websocket import WebSocketReader, WebSocketWriter, WSMsgType def test_read_large_binary_websocket_messages( loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture ) -> None: """Read one hundred large binary websocket messages.""" - queue: DataQueue[WSMessage] = DataQueue(loop=loop) + queue = WebSocketDataQueue(BaseProtocol(loop), 2**16, loop=loop) reader = WebSocketReader(queue, max_msg_size=2**18) # PACK3 has a minimum message length of 2**16 bytes. 
@@ -41,7 +36,7 @@ def test_read_one_hundred_websocket_text_messages( loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture ) -> None: """Benchmark reading 100 WebSocket text messages.""" - queue: DataQueue[WSMessage] = DataQueue(loop=loop) + queue = WebSocketDataQueue(BaseProtocol(loop), 2**16, loop=loop) reader = WebSocketReader(queue, max_msg_size=2**16) raw_message = ( b'\x81~\x01!{"id":1,"src":"shellyplugus-c049ef8c30e4","dst":"aios-1453812500' diff --git a/tests/test_client_ws_functional.py b/tests/test_client_ws_functional.py index 9ee498e0075..e4b57bd199d 100644 --- a/tests/test_client_ws_functional.py +++ b/tests/test_client_ws_functional.py @@ -14,6 +14,7 @@ hdrs, web, ) +from aiohttp._websocket.reader import WebSocketDataQueue from aiohttp.client_ws import ClientWSTimeout from aiohttp.http import WSCloseCode from aiohttp.pytest_plugin import AiohttpClient @@ -24,8 +25,12 @@ import async_timeout -async def test_send_recv_text(aiohttp_client) -> None: - async def handler(request): +class PatchableWebSocketDataQueue(WebSocketDataQueue): + """A WebSocketDataQueue that can be patched.""" + + +async def test_send_recv_text(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.WebSocketResponse: ws = web.WebSocketResponse() await ws.prepare(request) @@ -870,7 +875,10 @@ async def test_heartbeat_no_pong_concurrent_receive( async def handler(request): nonlocal ping_received ws = web.WebSocketResponse(autoping=False) - await ws.prepare(request) + with mock.patch( + "aiohttp.web_ws.WebSocketDataQueue", PatchableWebSocketDataQueue + ): + await ws.prepare(request) msg = await ws.receive() ping_received = msg.type is aiohttp.WSMsgType.PING ws._reader.feed_eof = lambda: None @@ -879,17 +887,18 @@ async def handler(request): app = web.Application() app.router.add_route("GET", "/", handler) - client = await aiohttp_client(app) - resp = await client.ws_connect("/", heartbeat=0.1) - resp._reader.feed_eof = lambda: None - - # 
Connection should be closed roughly after 1.5x heartbeat. - msg = await resp.receive(5.0) - assert ping_received - assert resp.close_code is WSCloseCode.ABNORMAL_CLOSURE - assert msg - assert msg.type is WSMsgType.ERROR - assert isinstance(msg.data, ServerTimeoutError) + with mock.patch("aiohttp.client.WebSocketDataQueue", PatchableWebSocketDataQueue): + client = await aiohttp_client(app) + resp = await client.ws_connect("/", heartbeat=0.1) + with mock.patch.object( + resp._reader, "feed_eof", autospec=True, spec_set=True, return_value=None + ): + # Connection should be closed roughly after 1.5x heartbeat. + msg = await resp.receive(5.0) + assert ping_received + assert resp.close_code is WSCloseCode.ABNORMAL_CLOSURE + assert msg.type is WSMsgType.ERROR + assert isinstance(msg.data, ServerTimeoutError) async def test_close_websocket_while_ping_inflight( diff --git a/tests/test_flowcontrol_streams.py b/tests/test_flowcontrol_streams.py index 70e9b9b012c..08f6be21a2c 100644 --- a/tests/test_flowcontrol_streams.py +++ b/tests/test_flowcontrol_streams.py @@ -15,11 +15,6 @@ def stream(loop, protocol): return streams.StreamReader(protocol, limit=1, loop=loop) -@pytest.fixture -def buffer(loop, protocol): - return streams.FlowControlDataQueue(protocol, limit=1, loop=loop) - - class TestFlowControlStreamReader: async def test_read(self, stream) -> None: stream.feed_data(b"da", 2) @@ -107,19 +102,4 @@ async def test_read_nowait(self, stream) -> None: stream._protocol._reading_paused = False res = stream.read_nowait(5) assert res == b"" - assert stream._protocol.resume_reading.call_count == 1 - - -class TestFlowControlDataQueue: - def test_feed_pause(self, buffer) -> None: - buffer._protocol._reading_paused = False - buffer.feed_data(object(), 100) - - assert buffer._protocol.pause_reading.called - - async def test_resume_on_read(self, buffer) -> None: - buffer.feed_data(object(), 100) - - buffer._protocol._reading_paused = True - await buffer.read() - assert 
buffer._protocol.resume_reading.called + assert stream._protocol.resume_reading.call_count == 1 # type: ignore[attr-defined] diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index edd676190f6..2524bf34b3a 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -13,6 +13,7 @@ import aiohttp from aiohttp import http_exceptions, streams +from aiohttp.base_protocol import BaseProtocol from aiohttp.http_parser import ( NO_EXTENSIONS, DeflateBuffer, @@ -1561,17 +1562,17 @@ def test_parse_bad_method_for_c_parser_raises(loop, protocol): class TestParsePayload: - async def test_parse_eof_payload(self, stream) -> None: - out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) + async def test_parse_eof_payload(self, protocol: BaseProtocol) -> None: + out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) p = HttpPayloadParser(out) p.feed_data(b"data") p.feed_eof() assert out.is_eof() - assert [(bytearray(b"data"), 4)] == list(out._buffer) + assert [(bytearray(b"data"))] == list(out._buffer) - async def test_parse_length_payload_eof(self, stream) -> None: - out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) + async def test_parse_length_payload_eof(self, protocol: BaseProtocol) -> None: + out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) p = HttpPayloadParser(out, length=4) p.feed_data(b"da") @@ -1579,15 +1580,19 @@ async def test_parse_length_payload_eof(self, stream) -> None: with pytest.raises(http_exceptions.ContentLengthError): p.feed_eof() - async def test_parse_chunked_payload_size_error(self, stream) -> None: - out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) + async def test_parse_chunked_payload_size_error( + self, protocol: BaseProtocol + ) -> None: + out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) p = HttpPayloadParser(out, chunked=True) with 
pytest.raises(http_exceptions.TransferEncodingError): p.feed_data(b"blah\r\n") assert isinstance(out.exception(), http_exceptions.TransferEncodingError) - async def test_parse_chunked_payload_split_end(self, protocol) -> None: - out = aiohttp.StreamReader(protocol, 2**16, loop=None) + async def test_parse_chunked_payload_split_end( + self, protocol: BaseProtocol + ) -> None: + out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) p = HttpPayloadParser(out, chunked=True) p.feed_data(b"4\r\nasdf\r\n0\r\n") p.feed_data(b"\r\n") @@ -1595,8 +1600,10 @@ async def test_parse_chunked_payload_split_end(self, protocol) -> None: assert out.is_eof() assert b"asdf" == b"".join(out._buffer) - async def test_parse_chunked_payload_split_end2(self, protocol) -> None: - out = aiohttp.StreamReader(protocol, 2**16, loop=None) + async def test_parse_chunked_payload_split_end2( + self, protocol: BaseProtocol + ) -> None: + out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) p = HttpPayloadParser(out, chunked=True) p.feed_data(b"4\r\nasdf\r\n0\r\n\r") p.feed_data(b"\n") @@ -1604,8 +1611,10 @@ async def test_parse_chunked_payload_split_end2(self, protocol) -> None: assert out.is_eof() assert b"asdf" == b"".join(out._buffer) - async def test_parse_chunked_payload_split_end_trailers(self, protocol) -> None: - out = aiohttp.StreamReader(protocol, 2**16, loop=None) + async def test_parse_chunked_payload_split_end_trailers( + self, protocol: BaseProtocol + ) -> None: + out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) p = HttpPayloadParser(out, chunked=True) p.feed_data(b"4\r\nasdf\r\n0\r\n") p.feed_data(b"Content-MD5: 912ec803b2ce49e4a541068d495ab570\r\n") @@ -1614,8 +1623,10 @@ async def test_parse_chunked_payload_split_end_trailers(self, protocol) -> None: assert out.is_eof() assert b"asdf" == b"".join(out._buffer) - async def test_parse_chunked_payload_split_end_trailers2(self, protocol) -> None: - out = 
aiohttp.StreamReader(protocol, 2**16, loop=None) + async def test_parse_chunked_payload_split_end_trailers2( + self, protocol: BaseProtocol + ) -> None: + out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) p = HttpPayloadParser(out, chunked=True) p.feed_data(b"4\r\nasdf\r\n0\r\n") p.feed_data(b"Content-MD5: 912ec803b2ce49e4a541068d495ab570\r\n\r") @@ -1624,8 +1635,10 @@ async def test_parse_chunked_payload_split_end_trailers2(self, protocol) -> None assert out.is_eof() assert b"asdf" == b"".join(out._buffer) - async def test_parse_chunked_payload_split_end_trailers3(self, protocol) -> None: - out = aiohttp.StreamReader(protocol, 2**16, loop=None) + async def test_parse_chunked_payload_split_end_trailers3( + self, protocol: BaseProtocol + ) -> None: + out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) p = HttpPayloadParser(out, chunked=True) p.feed_data(b"4\r\nasdf\r\n0\r\nContent-MD5: ") p.feed_data(b"912ec803b2ce49e4a541068d495ab570\r\n\r\n") @@ -1633,8 +1646,10 @@ async def test_parse_chunked_payload_split_end_trailers3(self, protocol) -> None assert out.is_eof() assert b"asdf" == b"".join(out._buffer) - async def test_parse_chunked_payload_split_end_trailers4(self, protocol) -> None: - out = aiohttp.StreamReader(protocol, 2**16, loop=None) + async def test_parse_chunked_payload_split_end_trailers4( + self, protocol: BaseProtocol + ) -> None: + out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) p = HttpPayloadParser(out, chunked=True) p.feed_data(b"4\r\nasdf\r\n0\r\nC") p.feed_data(b"ontent-MD5: 912ec803b2ce49e4a541068d495ab570\r\n\r\n") @@ -1642,88 +1657,99 @@ async def test_parse_chunked_payload_split_end_trailers4(self, protocol) -> None assert out.is_eof() assert b"asdf" == b"".join(out._buffer) - async def test_http_payload_parser_length(self, stream) -> None: - out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) + async def 
test_http_payload_parser_length(self, protocol: BaseProtocol) -> None: + out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) p = HttpPayloadParser(out, length=2) eof, tail = p.feed_data(b"1245") assert eof - assert b"12" == b"".join(d for d, _ in out._buffer) + assert b"12" == out._buffer[0] assert b"45" == tail - async def test_http_payload_parser_deflate(self, stream) -> None: + async def test_http_payload_parser_deflate(self, protocol: BaseProtocol) -> None: # c=compressobj(wbits=15); b''.join([c.compress(b'data'), c.flush()]) COMPRESSED = b"x\x9cKI,I\x04\x00\x04\x00\x01\x9b" length = len(COMPRESSED) - out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) + out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) p = HttpPayloadParser(out, length=length, compression="deflate") p.feed_data(COMPRESSED) - assert b"data" == b"".join(d for d, _ in out._buffer) + assert b"data" == out._buffer[0] assert out.is_eof() - async def test_http_payload_parser_deflate_no_hdrs(self, stream: Any) -> None: + async def test_http_payload_parser_deflate_no_hdrs( + self, protocol: BaseProtocol + ) -> None: """Tests incorrectly formed data (no zlib headers).""" # c=compressobj(wbits=-15); b''.join([c.compress(b'data'), c.flush()]) COMPRESSED = b"KI,I\x04\x00" length = len(COMPRESSED) - out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) + out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) p = HttpPayloadParser(out, length=length, compression="deflate") p.feed_data(COMPRESSED) - assert b"data" == b"".join(d for d, _ in out._buffer) + assert b"data" == out._buffer[0] assert out.is_eof() - async def test_http_payload_parser_deflate_light(self, stream) -> None: + async def test_http_payload_parser_deflate_light( + self, protocol: BaseProtocol + ) -> None: # c=compressobj(wbits=9); b''.join([c.compress(b'data'), c.flush()]) COMPRESSED = 
b"\x18\x95KI,I\x04\x00\x04\x00\x01\x9b" length = len(COMPRESSED) - out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) + out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) p = HttpPayloadParser(out, length=length, compression="deflate") p.feed_data(COMPRESSED) - assert b"data" == b"".join(d for d, _ in out._buffer) + + assert b"data" == out._buffer[0] assert out.is_eof() - async def test_http_payload_parser_deflate_split(self, stream) -> None: - out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) + async def test_http_payload_parser_deflate_split( + self, protocol: BaseProtocol + ) -> None: + out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) p = HttpPayloadParser(out, compression="deflate") # Feeding one correct byte should be enough to choose exact # deflate decompressor - p.feed_data(b"x", 1) - p.feed_data(b"\x9cKI,I\x04\x00\x04\x00\x01\x9b", 11) + p.feed_data(b"x") + p.feed_data(b"\x9cKI,I\x04\x00\x04\x00\x01\x9b") p.feed_eof() - assert b"data" == b"".join(d for d, _ in out._buffer) + assert b"data" == out._buffer[0] - async def test_http_payload_parser_deflate_split_err(self, stream) -> None: - out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) + async def test_http_payload_parser_deflate_split_err( + self, protocol: BaseProtocol + ) -> None: + out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) p = HttpPayloadParser(out, compression="deflate") # Feeding one wrong byte should be enough to choose exact # deflate decompressor - p.feed_data(b"K", 1) - p.feed_data(b"I,I\x04\x00", 5) + p.feed_data(b"K") + p.feed_data(b"I,I\x04\x00") p.feed_eof() - assert b"data" == b"".join(d for d, _ in out._buffer) + assert b"data" == out._buffer[0] - async def test_http_payload_parser_length_zero(self, stream) -> None: - out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) + async def 
test_http_payload_parser_length_zero( + self, protocol: BaseProtocol + ) -> None: + out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) p = HttpPayloadParser(out, length=0) assert p.done assert out.is_eof() @pytest.mark.skipif(brotli is None, reason="brotli is not installed") - async def test_http_payload_brotli(self, stream) -> None: + async def test_http_payload_brotli(self, protocol: BaseProtocol) -> None: compressed = brotli.compress(b"brotli data") - out = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) + out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) p = HttpPayloadParser(out, length=len(compressed), compression="br") p.feed_data(compressed) - assert b"brotli data" == b"".join(d for d, _ in out._buffer) + assert b"brotli data" == out._buffer[0] assert out.is_eof() class TestDeflateBuffer: async def test_feed_data(self, stream) -> None: - buf = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) + buf = aiohttp.StreamReader(stream, 2**16, loop=asyncio.get_event_loop()) dbuf = DeflateBuffer(buf, "deflate") dbuf.decompressor = mock.Mock() @@ -1731,10 +1757,10 @@ async def test_feed_data(self, stream) -> None: # First byte should be b'x' in order code not to change the decoder. dbuf.feed_data(b"xxxx", 4) - assert [b"line"] == list(d for d, _ in buf._buffer) + assert [b"line"] == list(buf._buffer) async def test_feed_data_err(self, stream) -> None: - buf = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) + buf = aiohttp.StreamReader(stream, 2**16, loop=asyncio.get_event_loop()) dbuf = DeflateBuffer(buf, "deflate") exc = ValueError() @@ -1746,19 +1772,19 @@ async def test_feed_data_err(self, stream) -> None: # Should start with b'x', otherwise code switch mocked decoder. 
dbuf.feed_data(b"xsomedata", 9) - async def test_feed_eof(self, stream) -> None: - buf = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) + async def test_feed_eof(self, protocol: BaseProtocol) -> None: + buf = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) dbuf = DeflateBuffer(buf, "deflate") dbuf.decompressor = mock.Mock() dbuf.decompressor.flush.return_value = b"line" dbuf.feed_eof() - assert [b"line"] == list(d for d, _ in buf._buffer) + assert [b"line"] == list(buf._buffer) assert buf._eof - async def test_feed_eof_err_deflate(self, stream) -> None: - buf = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) + async def test_feed_eof_err_deflate(self, protocol: BaseProtocol) -> None: + buf = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) dbuf = DeflateBuffer(buf, "deflate") dbuf.decompressor = mock.Mock() @@ -1768,8 +1794,8 @@ async def test_feed_eof_err_deflate(self, stream) -> None: with pytest.raises(http_exceptions.ContentEncodingError): dbuf.feed_eof() - async def test_feed_eof_no_err_gzip(self, stream) -> None: - buf = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) + async def test_feed_eof_no_err_gzip(self, protocol: BaseProtocol) -> None: + buf = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) dbuf = DeflateBuffer(buf, "gzip") dbuf.decompressor = mock.Mock() @@ -1777,10 +1803,10 @@ async def test_feed_eof_no_err_gzip(self, stream) -> None: dbuf.decompressor.eof = False dbuf.feed_eof() - assert [b"line"] == list(d for d, _ in buf._buffer) + assert [b"line"] == list(buf._buffer) - async def test_feed_eof_no_err_brotli(self, stream) -> None: - buf = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) + async def test_feed_eof_no_err_brotli(self, protocol: BaseProtocol) -> None: + buf = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) dbuf = DeflateBuffer(buf, "br") 
dbuf.decompressor = mock.Mock() @@ -1788,10 +1814,10 @@ async def test_feed_eof_no_err_brotli(self, stream) -> None: dbuf.decompressor.eof = False dbuf.feed_eof() - assert [b"line"] == list(d for d, _ in buf._buffer) + assert [b"line"] == list(buf._buffer) - async def test_empty_body(self, stream) -> None: - buf = aiohttp.FlowControlDataQueue(stream, 2**16, loop=asyncio.get_event_loop()) + async def test_empty_body(self, protocol: BaseProtocol) -> None: + buf = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) dbuf = DeflateBuffer(buf, "deflate") dbuf.feed_eof() diff --git a/tests/test_web_websocket.py b/tests/test_web_websocket.py index e728c6a1504..f9a92d0587f 100644 --- a/tests/test_web_websocket.py +++ b/tests/test_web_websocket.py @@ -258,7 +258,7 @@ async def test_raise_writer_limit(make_request) -> None: assert ws._reader is not None assert ws._writer is not None assert ws._writer._limit == 1234567 - ws._reader.feed_data(WS_CLOSED_MESSAGE) + ws._reader.feed_data(WS_CLOSED_MESSAGE, 0) await ws.close() @@ -279,7 +279,7 @@ async def test_recv_str_closed(make_request) -> None: ws = web.WebSocketResponse() await ws.prepare(req) assert ws._reader is not None - ws._reader.feed_data(WS_CLOSED_MESSAGE) + ws._reader.feed_data(WS_CLOSED_MESSAGE, 0) await ws.close() with pytest.raises( @@ -305,7 +305,7 @@ async def test_recv_bytes_closed(make_request) -> None: ws = web.WebSocketResponse() await ws.prepare(req) assert ws._reader is not None - ws._reader.feed_data(WS_CLOSED_MESSAGE) + ws._reader.feed_data(WS_CLOSED_MESSAGE, 0) await ws.close() with pytest.raises( @@ -331,7 +331,7 @@ async def test_send_frame_closed(make_request) -> None: ws = WebSocketResponse() await ws.prepare(req) assert ws._reader is not None - ws._reader.feed_data(WS_CLOSED_MESSAGE) + ws._reader.feed_data(WS_CLOSED_MESSAGE, 0) await ws.close() with pytest.raises(ConnectionError): diff --git a/tests/test_websocket_data_queue.py b/tests/test_websocket_data_queue.py new file mode 
100644 index 00000000000..105010ba2dd --- /dev/null +++ b/tests/test_websocket_data_queue.py @@ -0,0 +1,36 @@ +import asyncio +from unittest import mock + +import pytest + +from aiohttp._websocket.models import WSMessage, WSMsgType +from aiohttp._websocket.reader import WebSocketDataQueue +from aiohttp.base_protocol import BaseProtocol + + +@pytest.fixture +def protocol() -> BaseProtocol: + return mock.create_autospec(BaseProtocol, spec_set=True, instance=True, _reading_paused=False) # type: ignore[no-any-return] + + +@pytest.fixture +def buffer( + loop: asyncio.AbstractEventLoop, protocol: BaseProtocol +) -> WebSocketDataQueue: + return WebSocketDataQueue(protocol, limit=1, loop=loop) + + +class TestWebSocketDataQueue: + def test_feed_pause(self, buffer: WebSocketDataQueue) -> None: + buffer._protocol._reading_paused = False + for _ in range(3): + buffer.feed_data(WSMessage(data=b"x", type=WSMsgType.BINARY, extra=""), 1) + + assert buffer._protocol.pause_reading.called # type: ignore[attr-defined] + + async def test_resume_on_read(self, buffer: WebSocketDataQueue) -> None: + buffer.feed_data(WSMessage(data=b"x", type=WSMsgType.BINARY, extra=""), 1) + + buffer._protocol._reading_paused = True + await buffer.read() + assert buffer._protocol.resume_reading.called # type: ignore[attr-defined] diff --git a/tests/test_websocket_parser.py b/tests/test_websocket_parser.py index 5151d87b210..7f8b98d4566 100644 --- a/tests/test_websocket_parser.py +++ b/tests/test_websocket_parser.py @@ -18,6 +18,7 @@ websocket_mask, ) from aiohttp._websocket.models import WS_DEFLATE_TRAILING +from aiohttp._websocket.reader import WebSocketDataQueue from aiohttp.base_protocol import BaseProtocol from aiohttp.http import WebSocketError, WSCloseCode, WSMessage, WSMsgType from aiohttp.http_websocket import WebSocketReader @@ -92,28 +93,26 @@ def protocol(loop: asyncio.AbstractEventLoop) -> BaseProtocol: @pytest.fixture() -def out( - loop: asyncio.AbstractEventLoop, protocol: BaseProtocol -) -> 
aiohttp.FlowControlDataQueue[WSMessage]: - return aiohttp.FlowControlDataQueue(protocol, 2**16, loop=loop) +def out(loop: asyncio.AbstractEventLoop) -> WebSocketDataQueue: + return WebSocketDataQueue(mock.Mock(_reading_paused=False), 2**16, loop=loop) @pytest.fixture() def out_low_limit( loop: asyncio.AbstractEventLoop, protocol: BaseProtocol -) -> aiohttp.FlowControlDataQueue[WSMessage]: - return aiohttp.FlowControlDataQueue(protocol, 16, loop=loop) +) -> WebSocketDataQueue: + return WebSocketDataQueue(protocol, 16, loop=loop) @pytest.fixture() def parser_low_limit( - out_low_limit: aiohttp.FlowControlDataQueue[WSMessage], + out_low_limit: WebSocketDataQueue, ) -> PatchableWebSocketReader: return PatchableWebSocketReader(out_low_limit, 4 * 1024 * 1024) @pytest.fixture() -def parser(out: aiohttp.FlowControlDataQueue[WSMessage]) -> PatchableWebSocketReader: +def parser(out: WebSocketDataQueue) -> PatchableWebSocketReader: return PatchableWebSocketReader(out, 4 * 1024 * 1024) @@ -226,7 +225,7 @@ def test_parse_frame_header_payload_size(out, parser) -> None: ids=["bytes", "bytearray", "memoryview"], ) def test_ping_frame( - out: aiohttp.DataQueue[WSMessage], + out: WebSocketDataQueue, parser: WebSocketReader, data: Union[bytes, bytearray, memoryview], ) -> None: @@ -546,8 +545,10 @@ def test_parse_compress_error_frame(parser) -> None: assert ctx.value.code == WSCloseCode.PROTOCOL_ERROR -async def test_parse_no_compress_frame_single(loop: asyncio.AbstractEventLoop) -> None: - parser_no_compress = WebSocketReader(aiohttp.DataQueue(loop), 0, compress=False) +async def test_parse_no_compress_frame_single( + loop: asyncio.AbstractEventLoop, out: WebSocketDataQueue +) -> None: + parser_no_compress = WebSocketReader(out, 0, compress=False) with pytest.raises(WebSocketError) as ctx: parser_no_compress.parse_frame(struct.pack("!BB", 0b11000001, 0b00000001)) parser_no_compress.parse_frame(b"1") @@ -598,7 +599,7 @@ def test_pickle(self) -> None: def test_flow_control_binary( 
protocol: BaseProtocol, - out_low_limit: aiohttp.FlowControlDataQueue[WSMessage], + out_low_limit: WebSocketDataQueue, parser_low_limit: WebSocketReader, ) -> None: large_payload = b"b" * (1 + 16 * 2) @@ -615,7 +616,7 @@ def test_flow_control_binary( def test_flow_control_multi_byte_text( protocol: BaseProtocol, - out_low_limit: aiohttp.FlowControlDataQueue[WSMessage], + out_low_limit: WebSocketDataQueue, parser_low_limit: WebSocketReader, ) -> None: large_payload_text = "𒀁" * (1 + 16 * 2) diff --git a/tests/test_websocket_writer.py b/tests/test_websocket_writer.py index 93bd7064cb4..77eaa2f74b8 100644 --- a/tests/test_websocket_writer.py +++ b/tests/test_websocket_writer.py @@ -5,7 +5,8 @@ import pytest -from aiohttp import DataQueue, WSMessage, WSMsgType +from aiohttp import WSMsgType +from aiohttp._websocket.reader import WebSocketDataQueue from aiohttp.http import WebSocketReader, WebSocketWriter from aiohttp.test_utils import make_mocked_coro @@ -144,7 +145,8 @@ async def test_concurrent_messages( "aiohttp._websocket.writer.WEBSOCKET_MAX_SYNC_CHUNK_SIZE", max_sync_chunk_size ): writer = WebSocketWriter(protocol, transport, compress=15) - queue: DataQueue[WSMessage] = DataQueue(asyncio.get_running_loop()) + loop = asyncio.get_running_loop() + queue = WebSocketDataQueue(mock.Mock(_reading_paused=False), 2**16, loop=loop) reader = WebSocketReader(queue, 50000) writers = [] payloads = [] From 7db1b2651ce2fe5af3fe44573c46bf6fe490706d Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 11 Nov 2024 02:19:52 +0000 Subject: [PATCH 0900/1511] Release 3.11.0rc1 (#9794) --- CHANGES.rst | 12 ++++++++++-- aiohttp/__init__.py | 2 +- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 00e957fdcb4..30996a47fba 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,7 +10,7 @@ .. 
towncrier release notes start -3.11.0rc0 (2024-11-10) +3.11.0rc1 (2024-11-10) ====================== Bug fixes @@ -168,7 +168,7 @@ Features *Related issues and pull requests on GitHub:* - :issue:`9543`, :issue:`9554`, :issue:`9556`, :issue:`9558`, :issue:`9636`, :issue:`9649`. + :issue:`9543`, :issue:`9554`, :issue:`9556`, :issue:`9558`, :issue:`9636`, :issue:`9649`, :issue:`9781`. @@ -238,6 +238,14 @@ Removals and backward incompatible breaking changes +- ``FlowControlDataQueue`` has been replaced with the ``WebSocketDataQueue`` -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9685`. + + + - Changed ``ClientRequest.request_info`` to be a `NamedTuple` to improve client performance -- by :user:`bdraco`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 1e906fe1751..3efa6ffe54b 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.0rc0" +__version__ = "3.11.0rc1" from typing import TYPE_CHECKING, Tuple From c6270f2568635fe5af843b920c142df06e553743 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 11 Nov 2024 03:57:26 +0000 Subject: [PATCH 0901/1511] [PR #9796/b947e476 backport][3.11] Avoid calling the drain helper in `WebSocketWriter` if the protocol is not paused (#9797) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/9796.misc.rst | 1 + aiohttp/_websocket/writer.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 CHANGES/9796.misc.rst diff --git a/CHANGES/9796.misc.rst b/CHANGES/9796.misc.rst new file mode 100644 index 00000000000..8cf4d621fb8 --- /dev/null +++ b/CHANGES/9796.misc.rst @@ -0,0 +1 @@ +Improved performance of the ``WebsocketWriter`` when the protocol is not paused -- by :user:`bdraco`. 
diff --git a/aiohttp/_websocket/writer.py b/aiohttp/_websocket/writer.py index e49b6224aec..4935690f339 100644 --- a/aiohttp/_websocket/writer.py +++ b/aiohttp/_websocket/writer.py @@ -155,7 +155,8 @@ async def send_frame( # if the writer is not paused. if self._output_size > self._limit: self._output_size = 0 - await self.protocol._drain_helper() + if self.protocol._paused: + await self.protocol._drain_helper() def _make_compress_obj(self, compress: int) -> ZLibCompressor: return ZLibCompressor( From 569fa139a1bd04e3787eb26901b4079fb7e8bda1 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 11 Nov 2024 14:52:28 +0000 Subject: [PATCH 0902/1511] [PR #9799/d40b1839 backport][3.10] Add compressed WebSocketWriter benchmark (#9801) Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_benchmarks_http_websocket.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/tests/test_benchmarks_http_websocket.py b/tests/test_benchmarks_http_websocket.py index becb0e568bd..f6db82b6d29 100644 --- a/tests/test_benchmarks_http_websocket.py +++ b/tests/test_benchmarks_http_websocket.py @@ -121,3 +121,19 @@ async def _send_one_hundred_websocket_text_messages() -> None: @benchmark def _run() -> None: loop.run_until_complete(_send_one_hundred_websocket_text_messages()) + + +def test_send_one_hundred_websocket_compressed_messages( + loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture +) -> None: + """Benchmark sending 100 WebSocket compressed messages.""" + writer = WebSocketWriter(MockProtocol(loop=loop), MockTransport(), compress=15) + raw_message = b"Hello, World!" 
* 100 + + async def _send_one_hundred_websocket_compressed_messages() -> None: + for _ in range(100): + await writer._send_frame(raw_message, WSMsgType.BINARY) + + @benchmark + def _run() -> None: + loop.run_until_complete(_send_one_hundred_websocket_compressed_messages()) From 2490e4a9b7269d886183fb4ddcd00513e289eb24 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 11 Nov 2024 15:04:30 +0000 Subject: [PATCH 0903/1511] [PR #9799/d40b1839 backport][3.11] Add compressed WebSocketWriter benchmark (#9802) Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_benchmarks_http_websocket.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/tests/test_benchmarks_http_websocket.py b/tests/test_benchmarks_http_websocket.py index cc1006c7381..7ff04199d24 100644 --- a/tests/test_benchmarks_http_websocket.py +++ b/tests/test_benchmarks_http_websocket.py @@ -115,3 +115,19 @@ async def _send_one_hundred_websocket_text_messages() -> None: @benchmark def _run() -> None: loop.run_until_complete(_send_one_hundred_websocket_text_messages()) + + +def test_send_one_hundred_websocket_compressed_messages( + loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture +) -> None: + """Benchmark sending 100 WebSocket compressed messages.""" + writer = WebSocketWriter(MockProtocol(loop=loop), MockTransport(), compress=15) + raw_message = b"Hello, World!" 
* 100 + + async def _send_one_hundred_websocket_compressed_messages() -> None: + for _ in range(100): + await writer.send_frame(raw_message, WSMsgType.BINARY) + + @benchmark + def _run() -> None: + loop.run_until_complete(_send_one_hundred_websocket_compressed_messages()) From 255bf5c4bcacbec9527aa0666852b3411eeee09b Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 11 Nov 2024 15:29:07 +0000 Subject: [PATCH 0904/1511] [PR #9795/fcb790b1 backport][3.11] Switch WebSocketWriter to use `removesuffix` to drop compression trailer (#9805) Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/_websocket/writer.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/aiohttp/_websocket/writer.py b/aiohttp/_websocket/writer.py index 4935690f339..fc2cf32b934 100644 --- a/aiohttp/_websocket/writer.py +++ b/aiohttp/_websocket/writer.py @@ -92,16 +92,16 @@ async def send_frame( self._compressobj = self._make_compress_obj(self.compress) compressobj = self._compressobj - message = await compressobj.compress(message) + message = ( + await compressobj.compress(message) + + compressobj.flush( + zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH + ) + ).removesuffix(WS_DEFLATE_TRAILING) # Its critical that we do not return control to the event # loop until we have finished sending all the compressed # data. Otherwise we could end up mixing compressed frames # if there are multiple coroutines compressing data. 
- message += compressobj.flush( - zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH - ) - if message.endswith(WS_DEFLATE_TRAILING): - message = message[:-4] msg_length = len(message) From 61c7eefa454f7768d79ee9bfd2bb3447e2bba4ae Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 11 Nov 2024 16:17:44 +0000 Subject: [PATCH 0905/1511] [PR #9803/fc912352 backport][3.11] Small performance improvement to drain (#9807) Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/base_protocol.py | 2 +- aiohttp/http_writer.py | 5 +++-- tests/test_benchmarks_client_request.py | 1 + 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/aiohttp/base_protocol.py b/aiohttp/base_protocol.py index c8ec57786b9..b0a67ed6ff6 100644 --- a/aiohttp/base_protocol.py +++ b/aiohttp/base_protocol.py @@ -89,7 +89,7 @@ def connection_lost(self, exc: Optional[BaseException]) -> None: ) async def _drain_helper(self) -> None: - if not self.connected: + if self.transport is None: raise ClientConnectionResetError("Connection lost") if not self._paused: return diff --git a/aiohttp/http_writer.py b/aiohttp/http_writer.py index 4f1b2fec88e..a1a9860b48d 100644 --- a/aiohttp/http_writer.py +++ b/aiohttp/http_writer.py @@ -172,8 +172,9 @@ async def drain(self) -> None: await w.write(data) await w.drain() """ - if self._protocol.transport is not None: - await self._protocol._drain_helper() + protocol = self._protocol + if protocol.transport is not None and protocol._paused: + await protocol._drain_helper() def _safe_header(string: str) -> str: diff --git a/tests/test_benchmarks_client_request.py b/tests/test_benchmarks_client_request.py index c08e7571c48..0cdf1f2d776 100644 --- a/tests/test_benchmarks_client_request.py +++ b/tests/test_benchmarks_client_request.py @@ -88,6 +88,7 @@ class MockProtocol(asyncio.BaseProtocol): def __init__(self) -> None: self.transport = MockTransport() + self._paused = False @property def 
writing_paused(self) -> bool: From 4c4f3f271b0bc8682f757d498439f10dff96e858 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 11 Nov 2024 10:20:00 -0600 Subject: [PATCH 0906/1511] [PR #9800/f0e26eb backport][3.11] Fix incorrect typing on `WebSocketResponse._handshake` (#9811) --- aiohttp/web_ws.py | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index 4a35fa90da1..c18f88eaf00 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -4,7 +4,7 @@ import hashlib import json import sys -from typing import Any, Final, Iterable, Optional, Tuple, cast +from typing import Any, Final, Iterable, Optional, Tuple, Union, cast import attr from multidict import CIMultiDict @@ -99,7 +99,7 @@ def __init__( if heartbeat is not None: self._pong_heartbeat = heartbeat / 2.0 self._pong_response_cb: Optional[asyncio.TimerHandle] = None - self._compress = compress + self._compress: Union[bool, int] = compress self._max_msg_size = max_msg_size self._ping_task: Optional[asyncio.Task[None]] = None self._writer_limit = writer_limit @@ -216,7 +216,7 @@ async def prepare(self, request: BaseRequest) -> AbstractStreamWriter: def _handshake( self, request: BaseRequest - ) -> Tuple["CIMultiDict[str]", str, bool, bool]: + ) -> Tuple["CIMultiDict[str]", Optional[str], int, bool]: headers = request.headers if "websocket" != headers.get(hdrs.UPGRADE, "").lower().strip(): raise HTTPBadRequest( @@ -234,7 +234,7 @@ def _handshake( ) # find common sub-protocol between client and server - protocol = None + protocol: Optional[str] = None if hdrs.SEC_WEBSOCKET_PROTOCOL in headers: req_protocols = [ str(proto.strip()) @@ -297,9 +297,9 @@ def _handshake( protocol, compress, notakeover, - ) # type: ignore[return-value] + ) - def _pre_start(self, request: BaseRequest) -> Tuple[str, WebSocketWriter]: + def _pre_start(self, request: BaseRequest) -> Tuple[Optional[str], WebSocketWriter]: self._loop = 
request._loop headers, protocol, compress, notakeover = self._handshake(request) @@ -321,7 +321,7 @@ def _pre_start(self, request: BaseRequest) -> Tuple[str, WebSocketWriter]: return protocol, writer def _post_start( - self, request: BaseRequest, protocol: str, writer: WebSocketWriter + self, request: BaseRequest, protocol: Optional[str], writer: WebSocketWriter ) -> None: self._ws_protocol = protocol self._writer = writer @@ -332,7 +332,9 @@ def _post_start( assert loop is not None self._reader = WebSocketDataQueue(request._protocol, 2**16, loop=loop) request.protocol.set_parser( - WebSocketReader(self._reader, self._max_msg_size, compress=self._compress) + WebSocketReader( + self._reader, self._max_msg_size, compress=bool(self._compress) + ) ) # disable HTTP keepalive for WebSocket request.protocol.keep_alive(False) @@ -360,7 +362,7 @@ def ws_protocol(self) -> Optional[str]: return self._ws_protocol @property - def compress(self) -> bool: + def compress(self) -> Union[int, bool]: return self._compress def get_extra_info(self, name: str, default: Any = None) -> Any: From 28fba50047626fcb560121c3a40437c25c6a88eb Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 11 Nov 2024 16:32:46 +0000 Subject: [PATCH 0907/1511] [PR #9809/c9c08748 backport][3.11] Add `__slots__` to `UrlMappingMatchInfo` (#9813) --- aiohttp/abc.py | 3 +++ aiohttp/web_urldispatcher.py | 8 +++++++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/aiohttp/abc.py b/aiohttp/abc.py index 59a7976ec06..e56f18b528a 100644 --- a/aiohttp/abc.py +++ b/aiohttp/abc.py @@ -60,6 +60,9 @@ async def resolve(self, request: Request) -> "AbstractMatchInfo": class AbstractMatchInfo(ABC): + + __slots__ = () + @property # pragma: no branch @abstractmethod def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]: diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 8c1eef9094a..5153e5b73b0 100644 --- 
a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -249,7 +249,10 @@ async def handle_expect_header(self, request: Request) -> Optional[StreamRespons class UrlMappingMatchInfo(BaseDict, AbstractMatchInfo): - def __init__(self, match_dict: Dict[str, str], route: AbstractRoute): + + __slots__ = ("_route", "_apps", "_current_app", "_frozen") + + def __init__(self, match_dict: Dict[str, str], route: AbstractRoute) -> None: super().__init__(match_dict) self._route = route self._apps: List[Application] = [] @@ -311,6 +314,9 @@ def __repr__(self) -> str: class MatchInfoError(UrlMappingMatchInfo): + + __slots__ = ("_exception",) + def __init__(self, http_exception: HTTPException) -> None: self._exception = http_exception super().__init__({}, SystemRoute(self._exception)) From a6767a0dc029de3a0187122c32def7eb36b29414 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 11 Nov 2024 16:55:14 +0000 Subject: [PATCH 0908/1511] [PR #9812/3a1bd0df backport][3.11] Add `__slots__` to `PayloadRegistry` (#9814) Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/payload.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/aiohttp/payload.py b/aiohttp/payload.py index 151f9dd497b..c8c01814698 100644 --- a/aiohttp/payload.py +++ b/aiohttp/payload.py @@ -97,6 +97,8 @@ class PayloadRegistry: note: we need zope.interface for more efficient adapter search """ + __slots__ = ("_first", "_normal", "_last", "_normal_lookup") + def __init__(self) -> None: self._first: List[_PayloadRegistryItem] = [] self._normal: List[_PayloadRegistryItem] = [] From 0b647f60df5bb86195cd888460e62446ffde03e3 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 11 Nov 2024 17:44:46 +0000 Subject: [PATCH 0909/1511] [PR #9816/26f096d8 backport][3.11] Avoid duplicate bool cast in should_close (#9819) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/client_proto.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index 0eee826f255..2c1fc6af3ef 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -50,14 +50,14 @@ def upgraded(self) -> bool: @property def should_close(self) -> bool: - return ( + return bool( self._should_close or (self._payload is not None and not self._payload.is_eof()) or self._upgraded or self._exception is not None or self._payload_parser is not None - or bool(self._buffer) - or bool(self._tail) + or self._buffer + or self._tail ) def close(self) -> None: From 0ca67bb2573a09df6bb284a60b61f811efa03a18 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 11 Nov 2024 18:11:53 +0000 Subject: [PATCH 0910/1511] [PR #9817/5be7256c backport][3.11] Simplify keep_alive logic in `ClientRequest` (#9821) Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/client_reqrep.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 3226d19f9da..e33d3d4b1dd 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -614,16 +614,13 @@ def update_proxy( self.proxy_headers = proxy_headers def keep_alive(self) -> bool: - if self.version < HttpVersion10: - # keep alive not supported at all - return False + if self.version >= HttpVersion11: + return self.headers.get(hdrs.CONNECTION) != "close" if self.version == HttpVersion10: # no headers means we close for Http 1.0 return self.headers.get(hdrs.CONNECTION) == "keep-alive" - elif self.headers.get(hdrs.CONNECTION) == "close": - return False - - return True + # keep alive not supported at all + return False async def write_bytes( self, writer: AbstractStreamWriter, conn: "Connection" From 71aa84972f0a26f8f8338ee7a32e87e4e2a0c847 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" 
<45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 11 Nov 2024 18:15:42 +0000 Subject: [PATCH 0911/1511] [PR #9815/49dd7146 backport][3.11] Remove unneeded assignment in BaseConnector._release (#9820) Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/connector.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/aiohttp/connector.py b/aiohttp/connector.py index f0e7ae86b13..93bc2513b20 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -702,10 +702,7 @@ def _release( self._release_acquired(key, protocol) - if self._force_close: - should_close = True - - if should_close or protocol.should_close: + if self._force_close or should_close or protocol.should_close: transport = protocol.transport protocol.close() From ed343332915ef3ccfbc4faef42f54871b424410e Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 12 Nov 2024 04:26:46 +0000 Subject: [PATCH 0912/1511] [PR #9824/dc7eee65 backport][3.10] Add benchmark for GET requests with reading a payload (#9825) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- tests/test_benchmarks_client.py | 81 +++++++++++++++++++++++++++++++++ 1 file changed, 81 insertions(+) diff --git a/tests/test_benchmarks_client.py b/tests/test_benchmarks_client.py index 77c9108a657..8c141581266 100644 --- a/tests/test_benchmarks_client.py +++ b/tests/test_benchmarks_client.py @@ -33,6 +33,87 @@ def _run() -> None: loop.run_until_complete(run_client_benchmark()) +def test_one_hundred_get_requests_with_2048_payload( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 100 GET requests with a small payload of 2048 bytes.""" + message_count = 100 + payload = b"a" * 2048 + + async def handler(request: web.Request) -> web.Response: + return web.Response(body=payload) + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(message_count): + resp = await client.get("/") + await resp.read() + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) + + +def test_one_hundred_get_requests_with_32768_payload( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 100 GET requests with a payload of 32768 bytes.""" + message_count = 100 + payload = b"a" * 32768 + + async def handler(request: web.Request) -> web.Response: + return web.Response(body=payload) + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(message_count): + resp = await client.get("/") + await resp.read() + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) + + +def test_one_hundred_get_requests_with_1mib_payload( + loop: asyncio.AbstractEventLoop, + aiohttp_client: 
AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 100 GET requests with a payload of 1MiB bytes.""" + message_count = 100 + payload = b"a" * 1024**2 + + async def handler(request: web.Request) -> web.Response: + return web.Response(body=payload) + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(message_count): + resp = await client.get("/") + await resp.read() + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) + + def test_one_hundred_simple_post_requests( loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, From 6894d4e19badbac41684933ab25d65763c5d527c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 12 Nov 2024 04:41:27 +0000 Subject: [PATCH 0913/1511] [PR #9824/dc7eee65 backport][3.11] Add benchmark for GET requests with reading a payload (#9826) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- tests/test_benchmarks_client.py | 81 +++++++++++++++++++++++++++++++++ 1 file changed, 81 insertions(+) diff --git a/tests/test_benchmarks_client.py b/tests/test_benchmarks_client.py index 77c9108a657..8c141581266 100644 --- a/tests/test_benchmarks_client.py +++ b/tests/test_benchmarks_client.py @@ -33,6 +33,87 @@ def _run() -> None: loop.run_until_complete(run_client_benchmark()) +def test_one_hundred_get_requests_with_2048_payload( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 100 GET requests with a small payload of 2048 bytes.""" + message_count = 100 + payload = b"a" * 2048 + + async def handler(request: web.Request) -> web.Response: + return web.Response(body=payload) + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(message_count): + resp = await client.get("/") + await resp.read() + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) + + +def test_one_hundred_get_requests_with_32768_payload( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 100 GET requests with a payload of 32768 bytes.""" + message_count = 100 + payload = b"a" * 32768 + + async def handler(request: web.Request) -> web.Response: + return web.Response(body=payload) + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(message_count): + resp = await client.get("/") + await resp.read() + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) + + +def test_one_hundred_get_requests_with_1mib_payload( + loop: asyncio.AbstractEventLoop, + aiohttp_client: 
AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 100 GET requests with a payload of 1MiB bytes.""" + message_count = 100 + payload = b"a" * 1024**2 + + async def handler(request: web.Request) -> web.Response: + return web.Response(body=payload) + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(message_count): + resp = await client.get("/") + await resp.read() + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) + + def test_one_hundred_simple_post_requests( loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, From 9adc6c15055b3cdc854af55fb5b8ab4f9adc0b89 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 12 Nov 2024 15:23:51 +0000 Subject: [PATCH 0914/1511] [PR #9827/14fcfd4c backport][3.11] Adjust client GET read benchmarks to include chunked and content-length (#9830) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- tests/test_benchmarks_client.py | 104 +++++++++++++++++++++++++++++--- 1 file changed, 97 insertions(+), 7 deletions(-) diff --git a/tests/test_benchmarks_client.py b/tests/test_benchmarks_client.py index 8c141581266..7daddcf3db2 100644 --- a/tests/test_benchmarks_client.py +++ b/tests/test_benchmarks_client.py @@ -4,7 +4,7 @@ from pytest_codspeed import BenchmarkFixture -from aiohttp import web +from aiohttp import hdrs, web from aiohttp.pytest_plugin import AiohttpClient @@ -33,7 +33,7 @@ def _run() -> None: loop.run_until_complete(run_client_benchmark()) -def test_one_hundred_get_requests_with_2048_payload( +def test_one_hundred_get_requests_with_2048_chunked_payload( loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, benchmark: BenchmarkFixture, @@ -43,7 +43,9 @@ def test_one_hundred_get_requests_with_2048_payload( payload = b"a" * 2048 async def handler(request: web.Request) -> web.Response: - return web.Response(body=payload) + resp = web.Response(body=payload) + resp.enable_chunked_encoding() + return resp app = web.Application() app.router.add_route("GET", "/", handler) @@ -60,7 +62,7 @@ def _run() -> None: loop.run_until_complete(run_client_benchmark()) -def test_one_hundred_get_requests_with_32768_payload( +def test_one_hundred_get_requests_with_32768_chunked_payload( loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, benchmark: BenchmarkFixture, @@ -70,7 +72,9 @@ def test_one_hundred_get_requests_with_32768_payload( payload = b"a" * 32768 async def handler(request: web.Request) -> web.Response: - return web.Response(body=payload) + resp = web.Response(body=payload) + resp.enable_chunked_encoding() + return resp app = web.Application() app.router.add_route("GET", "/", handler) @@ -87,7 +91,7 @@ def _run() -> None: loop.run_until_complete(run_client_benchmark()) -def test_one_hundred_get_requests_with_1mib_payload( +def test_one_hundred_get_requests_with_1mib_chunked_payload( loop: 
asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, benchmark: BenchmarkFixture, @@ -97,7 +101,93 @@ def test_one_hundred_get_requests_with_1mib_payload( payload = b"a" * 1024**2 async def handler(request: web.Request) -> web.Response: - return web.Response(body=payload) + resp = web.Response(body=payload) + resp.enable_chunked_encoding() + return resp + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(message_count): + resp = await client.get("/") + await resp.read() + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) + + +def test_one_hundred_get_requests_with_2048_content_length_payload( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 100 GET requests with a small payload of 2048 bytes.""" + message_count = 100 + payload = b"a" * 2048 + headers = {hdrs.CONTENT_LENGTH: str(len(payload))} + + async def handler(request: web.Request) -> web.Response: + return web.Response(body=payload, headers=headers) + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(message_count): + resp = await client.get("/") + await resp.read() + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) + + +def test_one_hundred_get_requests_with_32768_content_length_payload( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 100 GET requests with a payload of 32768 bytes.""" + message_count = 100 + payload = b"a" * 32768 + headers = {hdrs.CONTENT_LENGTH: str(len(payload))} + + async def handler(request: web.Request) -> web.Response: + return web.Response(body=payload, headers=headers) + + app 
= web.Application() + app.router.add_route("GET", "/", handler) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(message_count): + resp = await client.get("/") + await resp.read() + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) + + +def test_one_hundred_get_requests_with_1mib_content_length_payload( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 100 GET requests with a payload of 1MiB bytes.""" + message_count = 100 + payload = b"a" * 1024**2 + headers = {hdrs.CONTENT_LENGTH: str(len(payload))} + + async def handler(request: web.Request) -> web.Response: + return web.Response(body=payload, headers=headers) app = web.Application() app.router.add_route("GET", "/", handler) From f2aab2e40336848d6a53ea03dc6d072a38c5e7f9 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 12 Nov 2024 09:36:12 -0600 Subject: [PATCH 0915/1511] [PR #9827/14fcfd4c backport][3.10] Adjust client GET read benchmarks to include chunked and content-length (#9829) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- tests/test_benchmarks_client.py | 104 +++++++++++++++++++++++++++++--- 1 file changed, 97 insertions(+), 7 deletions(-) diff --git a/tests/test_benchmarks_client.py b/tests/test_benchmarks_client.py index 8c141581266..7daddcf3db2 100644 --- a/tests/test_benchmarks_client.py +++ b/tests/test_benchmarks_client.py @@ -4,7 +4,7 @@ from pytest_codspeed import BenchmarkFixture -from aiohttp import web +from aiohttp import hdrs, web from aiohttp.pytest_plugin import AiohttpClient @@ -33,7 +33,7 @@ def _run() -> None: loop.run_until_complete(run_client_benchmark()) -def test_one_hundred_get_requests_with_2048_payload( +def test_one_hundred_get_requests_with_2048_chunked_payload( loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, benchmark: BenchmarkFixture, @@ -43,7 +43,9 @@ def test_one_hundred_get_requests_with_2048_payload( payload = b"a" * 2048 async def handler(request: web.Request) -> web.Response: - return web.Response(body=payload) + resp = web.Response(body=payload) + resp.enable_chunked_encoding() + return resp app = web.Application() app.router.add_route("GET", "/", handler) @@ -60,7 +62,7 @@ def _run() -> None: loop.run_until_complete(run_client_benchmark()) -def test_one_hundred_get_requests_with_32768_payload( +def test_one_hundred_get_requests_with_32768_chunked_payload( loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, benchmark: BenchmarkFixture, @@ -70,7 +72,9 @@ def test_one_hundred_get_requests_with_32768_payload( payload = b"a" * 32768 async def handler(request: web.Request) -> web.Response: - return web.Response(body=payload) + resp = web.Response(body=payload) + resp.enable_chunked_encoding() + return resp app = web.Application() app.router.add_route("GET", "/", handler) @@ -87,7 +91,7 @@ def _run() -> None: loop.run_until_complete(run_client_benchmark()) -def test_one_hundred_get_requests_with_1mib_payload( +def test_one_hundred_get_requests_with_1mib_chunked_payload( loop: 
asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, benchmark: BenchmarkFixture, @@ -97,7 +101,93 @@ def test_one_hundred_get_requests_with_1mib_payload( payload = b"a" * 1024**2 async def handler(request: web.Request) -> web.Response: - return web.Response(body=payload) + resp = web.Response(body=payload) + resp.enable_chunked_encoding() + return resp + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(message_count): + resp = await client.get("/") + await resp.read() + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) + + +def test_one_hundred_get_requests_with_2048_content_length_payload( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 100 GET requests with a small payload of 2048 bytes.""" + message_count = 100 + payload = b"a" * 2048 + headers = {hdrs.CONTENT_LENGTH: str(len(payload))} + + async def handler(request: web.Request) -> web.Response: + return web.Response(body=payload, headers=headers) + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(message_count): + resp = await client.get("/") + await resp.read() + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) + + +def test_one_hundred_get_requests_with_32768_content_length_payload( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 100 GET requests with a payload of 32768 bytes.""" + message_count = 100 + payload = b"a" * 32768 + headers = {hdrs.CONTENT_LENGTH: str(len(payload))} + + async def handler(request: web.Request) -> web.Response: + return web.Response(body=payload, headers=headers) + + app 
= web.Application() + app.router.add_route("GET", "/", handler) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(message_count): + resp = await client.get("/") + await resp.read() + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) + + +def test_one_hundred_get_requests_with_1mib_content_length_payload( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 100 GET requests with a payload of 1MiB bytes.""" + message_count = 100 + payload = b"a" * 1024**2 + headers = {hdrs.CONTENT_LENGTH: str(len(payload))} + + async def handler(request: web.Request) -> web.Response: + return web.Response(body=payload, headers=headers) app = web.Application() app.router.add_route("GET", "/", handler) From 7c9b7b9f54e1b649af9ae55c4bab1a0ce8dbc836 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 12 Nov 2024 09:55:58 -0600 Subject: [PATCH 0916/1511] [PR #9822/249df6e backport][3.11] Improve performance of the access logger when its disabled (#9828) --- CHANGES/9822.feature.rst | 1 + aiohttp/abc.py | 7 +++++++ aiohttp/web_log.py | 9 ++++++--- aiohttp/web_protocol.py | 2 +- docs/abc.rst | 9 +++++++++ docs/logging.rst | 10 ++++++++++ tests/test_web_log.py | 35 +++++++++++++++++++++++++++++++++-- 7 files changed, 67 insertions(+), 6 deletions(-) create mode 100644 CHANGES/9822.feature.rst diff --git a/CHANGES/9822.feature.rst b/CHANGES/9822.feature.rst new file mode 100644 index 00000000000..f361c586499 --- /dev/null +++ b/CHANGES/9822.feature.rst @@ -0,0 +1 @@ +Added an :attr:`~aiohttp.abc.AbstractAccessLogger.enabled` property to :class:`aiohttp.abc.AbstractAccessLogger` to dynamically check if logging is enabled -- by :user:`bdraco`. 
diff --git a/aiohttp/abc.py b/aiohttp/abc.py index e56f18b528a..868f0e94898 100644 --- a/aiohttp/abc.py +++ b/aiohttp/abc.py @@ -231,6 +231,8 @@ async def write_headers( class AbstractAccessLogger(ABC): """Abstract writer to access log.""" + __slots__ = ("logger", "log_format") + def __init__(self, logger: logging.Logger, log_format: str) -> None: self.logger = logger self.log_format = log_format @@ -238,3 +240,8 @@ def __init__(self, logger: logging.Logger, log_format: str) -> None: @abstractmethod def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None: """Emit log to logger.""" + + @property + def enabled(self) -> bool: + """Check if logger is enabled.""" + return True diff --git a/aiohttp/web_log.py b/aiohttp/web_log.py index 633e9e3ae6b..d5ea2beeb15 100644 --- a/aiohttp/web_log.py +++ b/aiohttp/web_log.py @@ -188,10 +188,13 @@ def _format_line( ) -> Iterable[Tuple[str, Callable[[BaseRequest, StreamResponse, float], str]]]: return [(key, method(request, response, time)) for key, method in self._methods] + @property + def enabled(self) -> bool: + """Check if logger is enabled.""" + # Avoid formatting the log line if it will not be emitted. + return self.logger.isEnabledFor(logging.INFO) + def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None: - if not self.logger.isEnabledFor(logging.INFO): - # Avoid formatting the log line if it will not be emitted. 
- return try: fmt_info = self._format_line(request, response, time) diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index f5e4a0c5b68..2201eef30ad 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -438,7 +438,7 @@ def force_close(self) -> None: def log_access( self, request: BaseRequest, response: StreamResponse, time: float ) -> None: - if self.access_logger is not None: + if self.access_logger is not None and self.access_logger.enabled: self.access_logger.log(request, response, self._loop.time() - time) def log_debug(self, *args: Any, **kw: Any) -> None: diff --git a/docs/abc.rst b/docs/abc.rst index 4eea6715991..6fe7d7a809e 100644 --- a/docs/abc.rst +++ b/docs/abc.rst @@ -182,6 +182,15 @@ Abstract Access Logger :param float time: Time taken to serve the request. + .. attribute:: enabled + + Return True if logger is enabled. + + Override this property if logging is disabled to avoid the + overhead of calculating details to feed the logger. + + This property may be omitted if logging is always enabled. + Abstract Resolver ------------------------------- diff --git a/docs/logging.rst b/docs/logging.rst index 916a7feff67..c415fa224ee 100644 --- a/docs/logging.rst +++ b/docs/logging.rst @@ -115,6 +115,16 @@ Example of a drop-in replacement for the default access logger:: f'"{request.method} {request.path} ' f'done in {time}s: {response.status}') + @property + def enabled(self): + """Return True if logger is enabled. + + Override this property if logging is disabled to avoid the + overhead of calculating details to feed the logger. + + This property may be omitted if logging is always enabled. + """ + return self.logger.isEnabledFor(logging.INFO) .. 
_gunicorn-accesslog: diff --git a/tests/test_web_log.py b/tests/test_web_log.py index 543ccb9e556..0896c41c9e1 100644 --- a/tests/test_web_log.py +++ b/tests/test_web_log.py @@ -9,6 +9,7 @@ import aiohttp from aiohttp import web from aiohttp.abc import AbstractAccessLogger +from aiohttp.pytest_plugin import AiohttpClient, AiohttpServer from aiohttp.typedefs import Handler from aiohttp.web_log import AccessLogger @@ -215,8 +216,8 @@ def log(self, request, response, time): assert msg == "contextvars: uuid" -def test_logger_does_nothing_when_disabled(caplog: pytest.LogCaptureFixture) -> None: - """Test that the logger does nothing when the log level is disabled.""" +def test_access_logger_feeds_logger(caplog: pytest.LogCaptureFixture) -> None: + """Test that the logger still works.""" mock_logger = logging.getLogger("test.aiohttp.log") mock_logger.setLevel(logging.INFO) access_logger = AccessLogger(mock_logger, "%b") @@ -224,3 +225,33 @@ def test_logger_does_nothing_when_disabled(caplog: pytest.LogCaptureFixture) -> mock.Mock(name="mock_request"), mock.Mock(name="mock_response"), 42 ) assert "mock_response" in caplog.text + + +async def test_logger_does_not_log_when_not_enabled( + aiohttp_server: AiohttpServer, + aiohttp_client: AiohttpClient, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test logger does nothing when not enabled.""" + + async def handler(request: web.Request) -> web.Response: + return web.Response() + + class Logger(AbstractAccessLogger): + + def log( + self, request: web.BaseRequest, response: web.StreamResponse, time: float + ) -> None: + self.logger.critical("This should not be logged") # pragma: no cover + + @property + def enabled(self) -> bool: + return False + + app = web.Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app, access_log_class=Logger) + client = await aiohttp_client(server) + resp = await client.get("/") + assert 200 == resp.status + assert "This should not be logged" not in caplog.text 
From 9ef790224814e8f6d20c3e00cb0d9688a68daf68 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 12 Nov 2024 16:12:02 +0000 Subject: [PATCH 0917/1511] [PR #9832/006f4070 backport][3.11] Increase allowed import time for Python 3.12/3.13 to 265 (#9834) Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_imports.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_imports.py b/tests/test_imports.py index 30d5a5d43f1..5a2bb76b03c 100644 --- a/tests/test_imports.py +++ b/tests/test_imports.py @@ -38,7 +38,7 @@ def test_web___all__(pytester: pytest.Pytester) -> None: # and even slower under pytest-xdist, especially in CI _XDIST_WORKER_COUNT * 100 * (1 if _IS_CI_ENV else 1.53) if _IS_XDIST_RUN - else 250 + else 265 ), } _TARGET_TIMINGS_BY_PYTHON_VERSION["3.13"] = _TARGET_TIMINGS_BY_PYTHON_VERSION["3.12"] From f3dd0f9fece79dc3cd9d00e2ffddd49c36598361 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 12 Nov 2024 16:49:03 +0000 Subject: [PATCH 0918/1511] [PR #9832/006f4070 backport][3.10] Increase allowed import time for Python 3.12/3.13 to 265 (#9833) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- tests/test_imports.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_imports.py b/tests/test_imports.py index f82eec41086..9d08b19a85d 100644 --- a/tests/test_imports.py +++ b/tests/test_imports.py @@ -38,7 +38,7 @@ def test_web___all__(pytester: pytest.Pytester) -> None: # and even slower under pytest-xdist, especially in CI _XDIST_WORKER_COUNT * 100 * (1 if _IS_CI_ENV else 1.53) if _IS_XDIST_RUN - else 250 + else 265 ), } _TARGET_TIMINGS_BY_PYTHON_VERSION["3.13"] = _TARGET_TIMINGS_BY_PYTHON_VERSION["3.12"] From 0dfa21d4244f8e1e23cfa1406c4178a6565e9957 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 12 Nov 2024 16:57:21 +0000 Subject: [PATCH 0919/1511] [PR #9835/32ccfc9a backport][3.11] Adjust client payload benchmarks to better represent real world cases (#9837) Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_benchmarks_client.py | 36 ++++++++++++++++----------------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/tests/test_benchmarks_client.py b/tests/test_benchmarks_client.py index 7daddcf3db2..7292e4d647f 100644 --- a/tests/test_benchmarks_client.py +++ b/tests/test_benchmarks_client.py @@ -33,14 +33,14 @@ def _run() -> None: loop.run_until_complete(run_client_benchmark()) -def test_one_hundred_get_requests_with_2048_chunked_payload( +def test_one_hundred_get_requests_with_1024_chunked_payload( loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, benchmark: BenchmarkFixture, ) -> None: - """Benchmark 100 GET requests with a small payload of 2048 bytes.""" + """Benchmark 100 GET requests with a small payload of 1024 bytes.""" message_count = 100 - payload = b"a" * 2048 + payload = b"a" * 1024 async def handler(request: web.Request) -> web.Response: resp = web.Response(body=payload) @@ -62,14 +62,14 @@ def _run() -> None: loop.run_until_complete(run_client_benchmark()) -def 
test_one_hundred_get_requests_with_32768_chunked_payload( +def test_one_hundred_get_requests_with_30000_chunked_payload( loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, benchmark: BenchmarkFixture, ) -> None: - """Benchmark 100 GET requests with a payload of 32768 bytes.""" + """Benchmark 100 GET requests with a payload of 30000 bytes.""" message_count = 100 - payload = b"a" * 32768 + payload = b"a" * 30000 async def handler(request: web.Request) -> web.Response: resp = web.Response(body=payload) @@ -91,14 +91,14 @@ def _run() -> None: loop.run_until_complete(run_client_benchmark()) -def test_one_hundred_get_requests_with_1mib_chunked_payload( +def test_one_hundred_get_requests_with_512kib_chunked_payload( loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, benchmark: BenchmarkFixture, ) -> None: - """Benchmark 100 GET requests with a payload of 1MiB bytes.""" + """Benchmark 100 GET requests with a payload of 512KiB.""" message_count = 100 - payload = b"a" * 1024**2 + payload = b"a" * (2**19) async def handler(request: web.Request) -> web.Response: resp = web.Response(body=payload) @@ -120,14 +120,14 @@ def _run() -> None: loop.run_until_complete(run_client_benchmark()) -def test_one_hundred_get_requests_with_2048_content_length_payload( +def test_one_hundred_get_requests_with_1024_content_length_payload( loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, benchmark: BenchmarkFixture, ) -> None: - """Benchmark 100 GET requests with a small payload of 2048 bytes.""" + """Benchmark 100 GET requests with a small payload of 1024 bytes.""" message_count = 100 - payload = b"a" * 2048 + payload = b"a" * 1024 headers = {hdrs.CONTENT_LENGTH: str(len(payload))} async def handler(request: web.Request) -> web.Response: @@ -148,14 +148,14 @@ def _run() -> None: loop.run_until_complete(run_client_benchmark()) -def test_one_hundred_get_requests_with_32768_content_length_payload( +def 
test_one_hundred_get_requests_with_30000_content_length_payload( loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, benchmark: BenchmarkFixture, ) -> None: - """Benchmark 100 GET requests with a payload of 32768 bytes.""" + """Benchmark 100 GET requests with a payload of 30000 bytes.""" message_count = 100 - payload = b"a" * 32768 + payload = b"a" * 30000 headers = {hdrs.CONTENT_LENGTH: str(len(payload))} async def handler(request: web.Request) -> web.Response: @@ -176,14 +176,14 @@ def _run() -> None: loop.run_until_complete(run_client_benchmark()) -def test_one_hundred_get_requests_with_1mib_content_length_payload( +def test_one_hundred_get_requests_with_512kib_content_length_payload( loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, benchmark: BenchmarkFixture, ) -> None: - """Benchmark 100 GET requests with a payload of 1MiB bytes.""" + """Benchmark 100 GET requests with a payload of 512KiB.""" message_count = 100 - payload = b"a" * 1024**2 + payload = b"a" * (2**19) headers = {hdrs.CONTENT_LENGTH: str(len(payload))} async def handler(request: web.Request) -> web.Response: From 4f4b90fef082fbb37395c394d68ee0ab3fcbc7e6 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 12 Nov 2024 17:50:30 +0000 Subject: [PATCH 0920/1511] [PR #9835/32ccfc9a backport][3.10] Adjust client payload benchmarks to better represent real world cases (#9836) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- tests/test_benchmarks_client.py | 36 ++++++++++++++++----------------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/tests/test_benchmarks_client.py b/tests/test_benchmarks_client.py index 7daddcf3db2..7292e4d647f 100644 --- a/tests/test_benchmarks_client.py +++ b/tests/test_benchmarks_client.py @@ -33,14 +33,14 @@ def _run() -> None: loop.run_until_complete(run_client_benchmark()) -def test_one_hundred_get_requests_with_2048_chunked_payload( +def test_one_hundred_get_requests_with_1024_chunked_payload( loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, benchmark: BenchmarkFixture, ) -> None: - """Benchmark 100 GET requests with a small payload of 2048 bytes.""" + """Benchmark 100 GET requests with a small payload of 1024 bytes.""" message_count = 100 - payload = b"a" * 2048 + payload = b"a" * 1024 async def handler(request: web.Request) -> web.Response: resp = web.Response(body=payload) @@ -62,14 +62,14 @@ def _run() -> None: loop.run_until_complete(run_client_benchmark()) -def test_one_hundred_get_requests_with_32768_chunked_payload( +def test_one_hundred_get_requests_with_30000_chunked_payload( loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, benchmark: BenchmarkFixture, ) -> None: - """Benchmark 100 GET requests with a payload of 32768 bytes.""" + """Benchmark 100 GET requests with a payload of 30000 bytes.""" message_count = 100 - payload = b"a" * 32768 + payload = b"a" * 30000 async def handler(request: web.Request) -> web.Response: resp = web.Response(body=payload) @@ -91,14 +91,14 @@ def _run() -> None: loop.run_until_complete(run_client_benchmark()) -def test_one_hundred_get_requests_with_1mib_chunked_payload( +def test_one_hundred_get_requests_with_512kib_chunked_payload( loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, benchmark: BenchmarkFixture, ) -> None: - """Benchmark 100 GET requests with a payload of 1MiB bytes.""" + """Benchmark 100 GET requests with a 
payload of 512KiB.""" message_count = 100 - payload = b"a" * 1024**2 + payload = b"a" * (2**19) async def handler(request: web.Request) -> web.Response: resp = web.Response(body=payload) @@ -120,14 +120,14 @@ def _run() -> None: loop.run_until_complete(run_client_benchmark()) -def test_one_hundred_get_requests_with_2048_content_length_payload( +def test_one_hundred_get_requests_with_1024_content_length_payload( loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, benchmark: BenchmarkFixture, ) -> None: - """Benchmark 100 GET requests with a small payload of 2048 bytes.""" + """Benchmark 100 GET requests with a small payload of 1024 bytes.""" message_count = 100 - payload = b"a" * 2048 + payload = b"a" * 1024 headers = {hdrs.CONTENT_LENGTH: str(len(payload))} async def handler(request: web.Request) -> web.Response: @@ -148,14 +148,14 @@ def _run() -> None: loop.run_until_complete(run_client_benchmark()) -def test_one_hundred_get_requests_with_32768_content_length_payload( +def test_one_hundred_get_requests_with_30000_content_length_payload( loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, benchmark: BenchmarkFixture, ) -> None: - """Benchmark 100 GET requests with a payload of 32768 bytes.""" + """Benchmark 100 GET requests with a payload of 30000 bytes.""" message_count = 100 - payload = b"a" * 32768 + payload = b"a" * 30000 headers = {hdrs.CONTENT_LENGTH: str(len(payload))} async def handler(request: web.Request) -> web.Response: @@ -176,14 +176,14 @@ def _run() -> None: loop.run_until_complete(run_client_benchmark()) -def test_one_hundred_get_requests_with_1mib_content_length_payload( +def test_one_hundred_get_requests_with_512kib_content_length_payload( loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, benchmark: BenchmarkFixture, ) -> None: - """Benchmark 100 GET requests with a payload of 1MiB bytes.""" + """Benchmark 100 GET requests with a payload of 512KiB.""" message_count = 100 - payload = b"a" * 1024**2 + payload = 
b"a" * (2**19) headers = {hdrs.CONTENT_LENGTH: str(len(payload))} async def handler(request: web.Request) -> web.Response: From 3b1c76c0977f96dfa8a734d431c9b6936d0346a9 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 12 Nov 2024 18:46:50 +0000 Subject: [PATCH 0921/1511] [PR #9840/cc5fa316 backport][3.11] Add benchmark for sending compressed payload with chunks (#9842) Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_benchmarks_client.py | 35 +++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/tests/test_benchmarks_client.py b/tests/test_benchmarks_client.py index 7292e4d647f..2712b52f046 100644 --- a/tests/test_benchmarks_client.py +++ b/tests/test_benchmarks_client.py @@ -120,6 +120,41 @@ def _run() -> None: loop.run_until_complete(run_client_benchmark()) +def test_five_get_requests_with_567296_compressed_chunked_payload( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 5 compressed GET requests with a payload of 567296.""" + message_count = 5 + # This payload compresses poorly to ~567296 bytes. 
+ payload = ( + bytes(range(0, 256)) + + bytes(range(255, 0, -1)) + + bytes(range(0, 128)) + + bytes(range(255, 0, -1)) + ) * 1024 + + async def handler(request: web.Request) -> web.Response: + resp = web.Response(body=payload) + resp.enable_compression() + return resp + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(message_count): + resp = await client.get("/") + await resp.read() + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) + + def test_one_hundred_get_requests_with_1024_content_length_payload( loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, From 68a1f42af90a5beae28c8617e0dfc15c3bd5153c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 12 Nov 2024 18:59:50 +0000 Subject: [PATCH 0922/1511] [PR #9840/cc5fa316 backport][3.10] Add benchmark for sending compressed payload with chunks (#9841) Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_benchmarks_client.py | 35 +++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/tests/test_benchmarks_client.py b/tests/test_benchmarks_client.py index 7292e4d647f..2712b52f046 100644 --- a/tests/test_benchmarks_client.py +++ b/tests/test_benchmarks_client.py @@ -120,6 +120,41 @@ def _run() -> None: loop.run_until_complete(run_client_benchmark()) +def test_five_get_requests_with_567296_compressed_chunked_payload( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 5 compressed GET requests with a payload of 567296.""" + message_count = 5 + # This payload compresses poorly to ~567296 bytes. 
+ payload = ( + bytes(range(0, 256)) + + bytes(range(255, 0, -1)) + + bytes(range(0, 128)) + + bytes(range(255, 0, -1)) + ) * 1024 + + async def handler(request: web.Request) -> web.Response: + resp = web.Response(body=payload) + resp.enable_compression() + return resp + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(message_count): + resp = await client.get("/") + await resp.read() + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) + + def test_one_hundred_get_requests_with_1024_content_length_payload( loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, From e5917cd3480b01e7527b6524f9bec954325e1d5f Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 12 Nov 2024 19:05:11 -0600 Subject: [PATCH 0923/1511] [PR #9844/fabf3884 backport][3.10] Fix compressed get request benchmark payload length (#9845) Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_benchmarks_client.py | 27 +++++++++++++-------------- 1 file changed, 13 insertions(+), 14 deletions(-) diff --git a/tests/test_benchmarks_client.py b/tests/test_benchmarks_client.py index 2712b52f046..75fc7f7ed4e 100644 --- a/tests/test_benchmarks_client.py +++ b/tests/test_benchmarks_client.py @@ -120,23 +120,23 @@ def _run() -> None: loop.run_until_complete(run_client_benchmark()) -def test_five_get_requests_with_567296_compressed_chunked_payload( +def test_get_request_with_251308_compressed_chunked_payload( loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, benchmark: BenchmarkFixture, ) -> None: - """Benchmark 5 compressed GET requests with a payload of 567296.""" - message_count = 5 - # This payload compresses poorly to ~567296 bytes. 
- payload = ( - bytes(range(0, 256)) - + bytes(range(255, 0, -1)) - + bytes(range(0, 128)) - + bytes(range(255, 0, -1)) - ) * 1024 + """Benchmark compressed GET requests with a payload of 251308.""" + # This payload compresses to 251308 bytes + payload = b"".join( + [ + bytes((*range(0, i), *range(i, 0, -1))) + for _ in range(255) + for i in range(255) + ] + ) async def handler(request: web.Request) -> web.Response: - resp = web.Response(body=payload) + resp = web.Response(body=payload, zlib_executor_size=16384) resp.enable_compression() return resp @@ -145,9 +145,8 @@ async def handler(request: web.Request) -> web.Response: async def run_client_benchmark() -> None: client = await aiohttp_client(app) - for _ in range(message_count): - resp = await client.get("/") - await resp.read() + resp = await client.get("/") + await resp.read() await client.close() @benchmark From c39032b852d7a5613de89333c8d3495b92932953 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 13 Nov 2024 01:09:55 +0000 Subject: [PATCH 0924/1511] [PR #9844/fabf3884 backport][3.11] Fix compressed get request benchmark payload length (#9846) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- tests/test_benchmarks_client.py | 27 +++++++++++++-------------- 1 file changed, 13 insertions(+), 14 deletions(-) diff --git a/tests/test_benchmarks_client.py b/tests/test_benchmarks_client.py index 2712b52f046..75fc7f7ed4e 100644 --- a/tests/test_benchmarks_client.py +++ b/tests/test_benchmarks_client.py @@ -120,23 +120,23 @@ def _run() -> None: loop.run_until_complete(run_client_benchmark()) -def test_five_get_requests_with_567296_compressed_chunked_payload( +def test_get_request_with_251308_compressed_chunked_payload( loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, benchmark: BenchmarkFixture, ) -> None: - """Benchmark 5 compressed GET requests with a payload of 567296.""" - message_count = 5 - # This payload compresses poorly to ~567296 bytes. - payload = ( - bytes(range(0, 256)) - + bytes(range(255, 0, -1)) - + bytes(range(0, 128)) - + bytes(range(255, 0, -1)) - ) * 1024 + """Benchmark compressed GET requests with a payload of 251308.""" + # This payload compresses to 251308 bytes + payload = b"".join( + [ + bytes((*range(0, i), *range(i, 0, -1))) + for _ in range(255) + for i in range(255) + ] + ) async def handler(request: web.Request) -> web.Response: - resp = web.Response(body=payload) + resp = web.Response(body=payload, zlib_executor_size=16384) resp.enable_compression() return resp @@ -145,9 +145,8 @@ async def handler(request: web.Request) -> web.Response: async def run_client_benchmark() -> None: client = await aiohttp_client(app) - for _ in range(message_count): - resp = await client.get("/") - await resp.read() + resp = await client.get("/") + await resp.read() await client.close() @benchmark From 158bf304bdd8047eec192540fa5bf7fe3862bffd Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Tue, 12 Nov 2024 19:30:45 -0600 Subject: [PATCH 0925/1511] Release 3.10.11rc0 (#9848) --- CHANGES.rst | 94 +++++++++++++++++++++++++++++++++++++++++++++ aiohttp/__init__.py | 2 +- 2 files changed, 95 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index 0245204fe5e..3a3d9880541 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,100 @@ .. towncrier release notes start +3.10.11rc0 (2024-11-12) +======================= + +Bug fixes +--------- + +- Authentication provided by a redirect now takes precedence over provided ``auth`` when making requests with the client -- by :user:`PLPeeters`. + + + *Related issues and pull requests on GitHub:* + :issue:`9436`. + + + +- Fixed :py:meth:`WebSocketResponse.close() <aiohttp.web.WebSocketResponse.close>` to discard non-close messages within its timeout window after sending close -- by :user:`lenard-mosys`. + + + *Related issues and pull requests on GitHub:* + :issue:`9506`. + + + +- Fixed a deadlock that could occur while attempting to get a new connection slot after a timeout -- by :user:`bdraco`. + + The connector was not cancellation-safe. + + + *Related issues and pull requests on GitHub:* + :issue:`9670`, :issue:`9671`. + + + +- Fixed the WebSocket flow control calculation undercounting with multi-byte data -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9686`. + + + + +Removals and backward incompatible breaking changes +--------------------------------------------------- + +- Improved performance of the connector when a connection can be reused -- by :user:`bdraco`. + + If ``BaseConnector.connect`` has been subclassed and replaced with custom logic, the ``ceil_timeout`` must be added. + + + *Related issues and pull requests on GitHub:* + :issue:`9600`. 
+ + + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of the client request lifecycle when there are no cookies -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9470`. + + + +- Improved performance of sending client requests when the writer can finish synchronously -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9485`. + + + +- Improved performance of serializing HTTP headers -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9603`. + + + +- Passing ``enable_cleanup_closed`` to :py:class:`aiohttp.TCPConnector` is now ignored on Python 3.12.7+ and 3.13.1+ since the underlying bug that caused asyncio to leak SSL connections has been fixed upstream -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9726`, :issue:`9736`. + + + + +---- + + 3.10.10 (2024-10-10) ==================== diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 3c08b041af9..d092b00cc7a 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.10.11.dev0" +__version__ = "3.10.11rc0" from typing import TYPE_CHECKING, Tuple From 354489d2d4d2665253bb0b387d08d02dd5d3ad4f Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Tue, 12 Nov 2024 19:35:47 -0600 Subject: [PATCH 0926/1511] [PR #9839/a9a0d84 backport][3.11] Implement zero copy writes in `StreamWriter` (#9847) --- CHANGES/9839.misc.rst | 1 + aiohttp/http_writer.py | 68 ++++++++++++----- tests/test_client_request.py | 13 ++-- tests/test_http_writer.py | 143 ++++++++++++++++++++++++++++++++--- 4 files changed, 191 insertions(+), 34 deletions(-) create mode 100644 CHANGES/9839.misc.rst diff --git a/CHANGES/9839.misc.rst b/CHANGES/9839.misc.rst new file mode 100644 index 00000000000..8bdd50268a7 --- /dev/null +++ b/CHANGES/9839.misc.rst @@ -0,0 +1 @@ +Implemented zero copy writes for ``StreamWriter`` -- by :user:`bdraco`. diff --git a/aiohttp/http_writer.py b/aiohttp/http_writer.py index a1a9860b48d..c6c80edc3c4 100644 --- a/aiohttp/http_writer.py +++ b/aiohttp/http_writer.py @@ -2,7 +2,16 @@ import asyncio import zlib -from typing import Any, Awaitable, Callable, NamedTuple, Optional, Union # noqa +from typing import ( # noqa + Any, + Awaitable, + Callable, + Iterable, + List, + NamedTuple, + Optional, + Union, +) from multidict import CIMultiDict @@ -76,6 +85,17 @@ def _write(self, chunk: bytes) -> None: raise ClientConnectionResetError("Cannot write to closing transport") transport.write(chunk) + def _writelines(self, chunks: Iterable[bytes]) -> None: + size = 0 + for chunk in chunks: + size += len(chunk) + self.buffer_size += size + self.output_size += size + transport = self._protocol.transport + if transport is None or transport.is_closing(): + raise ClientConnectionResetError("Cannot write to closing transport") + transport.writelines(chunks) + async def write( self, chunk: bytes, *, drain: bool = True, LIMIT: int = 0x10000 ) -> None: @@ -110,10 +130,11 @@ async def write( if chunk: if self.chunked: - chunk_len_pre = ("%x\r\n" % len(chunk)).encode("ascii") - chunk = chunk_len_pre + chunk + b"\r\n" - - self._write(chunk) + self._writelines( + (f"{len(chunk):x}\r\n".encode("ascii"), chunk, 
b"\r\n") + ) + else: + self._write(chunk) if self.buffer_size > LIMIT and drain: self.buffer_size = 0 @@ -142,22 +163,31 @@ async def write_eof(self, chunk: bytes = b"") -> None: await self._on_chunk_sent(chunk) if self._compress: - if chunk: - chunk = await self._compress.compress(chunk) + chunks: List[bytes] = [] + chunks_len = 0 + if chunk and (compressed_chunk := await self._compress.compress(chunk)): + chunks_len = len(compressed_chunk) + chunks.append(compressed_chunk) - chunk += self._compress.flush() - if chunk and self.chunked: - chunk_len = ("%x\r\n" % len(chunk)).encode("ascii") - chunk = chunk_len + chunk + b"\r\n0\r\n\r\n" - else: - if self.chunked: - if chunk: - chunk_len = ("%x\r\n" % len(chunk)).encode("ascii") - chunk = chunk_len + chunk + b"\r\n0\r\n\r\n" - else: - chunk = b"0\r\n\r\n" + flush_chunk = self._compress.flush() + chunks_len += len(flush_chunk) + chunks.append(flush_chunk) + assert chunks_len - if chunk: + if self.chunked: + chunk_len_pre = f"{chunks_len:x}\r\n".encode("ascii") + self._writelines((chunk_len_pre, *chunks, b"\r\n0\r\n\r\n")) + elif len(chunks) > 1: + self._writelines(chunks) + else: + self._write(chunks[0]) + elif self.chunked: + if chunk: + chunk_len_pre = f"{len(chunk):x}\r\n".encode("ascii") + self._writelines((chunk_len_pre, chunk, b"\r\n0\r\n\r\n")) + else: + self._write(b"0\r\n\r\n") + elif chunk: self._write(chunk) await self.drain() diff --git a/tests/test_client_request.py b/tests/test_client_request.py index 8947aa38944..870c9666f34 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -6,7 +6,7 @@ import urllib.parse import zlib from http.cookies import BaseCookie, Morsel, SimpleCookie -from typing import Any, Callable, Dict, Optional +from typing import Any, Callable, Dict, Iterable, Optional from unittest import mock import pytest @@ -67,17 +67,18 @@ def protocol(loop, transport): @pytest.fixture -def transport(buf): - transport = mock.Mock() +def transport(buf: bytearray) -> 
mock.Mock: + transport = mock.create_autospec(asyncio.Transport, spec_set=True, instance=True) def write(chunk): buf.extend(chunk) - async def write_eof(): - pass + def writelines(chunks: Iterable[bytes]) -> None: + for chunk in chunks: + buf.extend(chunk) transport.write.side_effect = write - transport.write_eof.side_effect = write_eof + transport.writelines.side_effect = writelines transport.is_closing.return_value = False return transport diff --git a/tests/test_http_writer.py b/tests/test_http_writer.py index d330da48df7..e43b448bc0f 100644 --- a/tests/test_http_writer.py +++ b/tests/test_http_writer.py @@ -1,6 +1,8 @@ # Tests for aiohttp/http_writer.py import array import asyncio +import zlib +from typing import Iterable from unittest import mock import pytest @@ -23,7 +25,12 @@ def transport(buf): def write(chunk): buf.extend(chunk) + def writelines(chunks: Iterable[bytes]) -> None: + for chunk in chunks: + buf.extend(chunk) + transport.write.side_effect = write + transport.writelines.side_effect = writelines transport.is_closing.return_value = False return transport @@ -85,21 +92,53 @@ async def test_write_payload_length(protocol, transport, loop) -> None: assert b"da" == content.split(b"\r\n\r\n", 1)[-1] -async def test_write_payload_chunked_filter(protocol, transport, loop) -> None: - write = transport.write = mock.Mock() +async def test_write_large_payload_deflate_compression_data_in_eof( + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + msg = http.StreamWriter(protocol, loop) + msg.enable_compression("deflate") + + await msg.write(b"data" * 4096) + assert transport.write.called # type: ignore[attr-defined] + chunks = [c[1][0] for c in list(transport.write.mock_calls)] # type: ignore[attr-defined] + transport.write.reset_mock() # type: ignore[attr-defined] + assert not transport.writelines.called # type: ignore[attr-defined] + # This payload compresses to 20447 bytes + payload = b"".join( + 
[bytes((*range(0, i), *range(i, 0, -1))) for i in range(255) for _ in range(64)] + ) + await msg.write_eof(payload) + assert not transport.write.called # type: ignore[attr-defined] + assert transport.writelines.called # type: ignore[attr-defined] + chunks.extend(transport.writelines.mock_calls[0][1][0]) # type: ignore[attr-defined] + content = b"".join(chunks) + assert zlib.decompress(content) == (b"data" * 4096) + payload + + +async def test_write_payload_chunked_filter( + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: msg = http.StreamWriter(protocol, loop) msg.enable_chunking() await msg.write(b"da") await msg.write(b"ta") await msg.write_eof() - content = b"".join([c[1][0] for c in list(write.mock_calls)]) + content = b"".join([b"".join(c[1][0]) for c in list(transport.writelines.mock_calls)]) # type: ignore[attr-defined] + content += b"".join([c[1][0] for c in list(transport.write.mock_calls)]) # type: ignore[attr-defined] assert content.endswith(b"2\r\nda\r\n2\r\nta\r\n0\r\n\r\n") -async def test_write_payload_chunked_filter_mutiple_chunks(protocol, transport, loop): - write = transport.write = mock.Mock() +async def test_write_payload_chunked_filter_multiple_chunks( + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: msg = http.StreamWriter(protocol, loop) msg.enable_chunking() await msg.write(b"da") @@ -108,14 +147,14 @@ async def test_write_payload_chunked_filter_mutiple_chunks(protocol, transport, await msg.write(b"at") await msg.write(b"a2") await msg.write_eof() - content = b"".join([c[1][0] for c in list(write.mock_calls)]) + content = b"".join([b"".join(c[1][0]) for c in list(transport.writelines.mock_calls)]) # type: ignore[attr-defined] + content += b"".join([c[1][0] for c in list(transport.write.mock_calls)]) # type: ignore[attr-defined] assert content.endswith( b"2\r\nda\r\n2\r\nta\r\n2\r\n1d\r\n2\r\nat\r\n2\r\na2\r\n0\r\n\r\n" ) async def 
test_write_payload_deflate_compression(protocol, transport, loop) -> None: - COMPRESSED = b"x\x9cKI,I\x04\x00\x04\x00\x01\x9b" write = transport.write = mock.Mock() msg = http.StreamWriter(protocol, loop) @@ -129,7 +168,30 @@ async def test_write_payload_deflate_compression(protocol, transport, loop) -> N assert COMPRESSED == content.split(b"\r\n\r\n", 1)[-1] -async def test_write_payload_deflate_and_chunked(buf, protocol, transport, loop): +async def test_write_payload_deflate_compression_chunked( + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + expected = b"2\r\nx\x9c\r\na\r\nKI,I\x04\x00\x04\x00\x01\x9b\r\n0\r\n\r\n" + msg = http.StreamWriter(protocol, loop) + msg.enable_compression("deflate") + msg.enable_chunking() + await msg.write(b"data") + await msg.write_eof() + + chunks = [b"".join(c[1][0]) for c in list(transport.writelines.mock_calls)] # type: ignore[attr-defined] + assert all(chunks) + content = b"".join(chunks) + assert content == expected + + +async def test_write_payload_deflate_and_chunked( + buf: bytearray, + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: msg = http.StreamWriter(protocol, loop) msg.enable_compression("deflate") msg.enable_chunking() @@ -142,8 +204,71 @@ async def test_write_payload_deflate_and_chunked(buf, protocol, transport, loop) assert thing == buf -async def test_write_payload_bytes_memoryview(buf, protocol, transport, loop): +async def test_write_payload_deflate_compression_chunked_data_in_eof( + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + expected = b"2\r\nx\x9c\r\nd\r\nKI,IL\xcdK\x01\x00\x0b@\x02\xd2\r\n0\r\n\r\n" + msg = http.StreamWriter(protocol, loop) + msg.enable_compression("deflate") + msg.enable_chunking() + await msg.write(b"data") + await msg.write_eof(b"end") + + chunks = [b"".join(c[1][0]) for c in list(transport.writelines.mock_calls)] # 
type: ignore[attr-defined] + assert all(chunks) + content = b"".join(chunks) + assert content == expected + + +async def test_write_large_payload_deflate_compression_chunked_data_in_eof( + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + msg = http.StreamWriter(protocol, loop) + msg.enable_compression("deflate") + msg.enable_chunking() + + await msg.write(b"data" * 4096) + # This payload compresses to 1111 bytes + payload = b"".join([bytes((*range(0, i), *range(i, 0, -1))) for i in range(255)]) + await msg.write_eof(payload) + assert not transport.write.called # type: ignore[attr-defined] + chunks = [] + for write_lines_call in transport.writelines.mock_calls: # type: ignore[attr-defined] + chunked_payload = list(write_lines_call[1][0])[1:] + chunked_payload.pop() + chunks.extend(chunked_payload) + + assert all(chunks) + content = b"".join(chunks) + assert zlib.decompress(content) == (b"data" * 4096) + payload + + +async def test_write_payload_deflate_compression_chunked_connection_lost( + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + msg = http.StreamWriter(protocol, loop) + msg.enable_compression("deflate") + msg.enable_chunking() + await msg.write(b"data") + with pytest.raises( + ClientConnectionResetError, match="Cannot write to closing transport" + ), mock.patch.object(transport, "is_closing", return_value=True): + await msg.write_eof(b"end") + + +async def test_write_payload_bytes_memoryview( + buf: bytearray, + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: msg = http.StreamWriter(protocol, loop) mv = memoryview(b"abcd") From fe0a6e6ee42c8647d95e646b252ef326848b23a9 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Tue, 12 Nov 2024 19:52:15 -0600 Subject: [PATCH 0927/1511] Release 3.11.0rc2 (#9849) --- CHANGES.rst | 26 +++++++++++++++++++++++++- aiohttp/__init__.py | 2 +- 2 files changed, 26 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 30996a47fba..1dd34019404 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,7 +10,7 @@ .. towncrier release notes start -3.11.0rc1 (2024-11-10) +3.11.0rc2 (2024-11-12) ====================== Bug fixes @@ -180,6 +180,14 @@ Features +- Added an :attr:`~aiohttp.abc.AbstractAccessLogger.enabled` property to :class:`aiohttp.abc.AbstractAccessLogger` to dynamically check if logging is enabled -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9822`. + + + Deprecations (removal in next major release) -------------------------------------------- @@ -417,6 +425,22 @@ Miscellaneous internal changes +- Improved performance of the ``WebsocketWriter`` when the protocol is not paused -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9796`. + + + +- Implemented zero copy writes for ``StreamWriter`` -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9839`. + + + ---- diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 3efa6ffe54b..62501256a2c 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.0rc1" +__version__ = "3.11.0rc2" from typing import TYPE_CHECKING, Tuple From d24c19e3b328517cba8a856c61f3d079614a5a5a Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Wed, 13 Nov 2024 08:39:22 -0600 Subject: [PATCH 0928/1511] [PR #9851/541d86d backport][3.11] Fix incorrect parsing of chunk extensions with the pure Python parser (#9854) --- CHANGES/9851.bugfix.rst | 1 + aiohttp/http_parser.py | 7 ++++++ tests/test_http_parser.py | 50 ++++++++++++++++++++++++++++++++++++++- 3 files changed, 57 insertions(+), 1 deletion(-) create mode 100644 CHANGES/9851.bugfix.rst diff --git a/CHANGES/9851.bugfix.rst b/CHANGES/9851.bugfix.rst new file mode 100644 index 00000000000..02541a92dd4 --- /dev/null +++ b/CHANGES/9851.bugfix.rst @@ -0,0 +1 @@ +Fixed incorrect parsing of chunk extensions with the pure Python parser -- by :user:`bdraco`. diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index c20806841e7..148a30b2ca1 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -845,6 +845,13 @@ def feed_data( i = chunk.find(CHUNK_EXT, 0, pos) if i >= 0: size_b = chunk[:i] # strip chunk-extensions + # Verify no LF in the chunk-extension + if b"\n" in (ext := chunk[i:pos]): + exc = BadHttpMessage( + f"Unexpected LF in chunk-extension: {ext!r}" + ) + set_exception(self.payload, exc) + raise exc else: size_b = chunk[:pos] diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index 2524bf34b3a..a8305da84f7 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -1478,7 +1478,55 @@ async def test_parse_chunked_payload_split_chunks(response: Any) -> None: assert await reader.read() == b"firstsecond" -def test_partial_url(parser: Any) -> None: +@pytest.mark.skipif(NO_EXTENSIONS, reason="Only tests C parser.") +async def test_parse_chunked_payload_with_lf_in_extensions_c_parser( + loop: asyncio.AbstractEventLoop, protocol: BaseProtocol +) -> None: + """Test the C-parser with a chunked payload that has a LF in the chunk extensions.""" + # The C parser will raise a BadHttpMessage from feed_data + parser = HttpRequestParserC( + protocol, + loop, + 2**16, + 
max_line_size=8190, + max_field_size=8190, + ) + payload = ( + b"GET / HTTP/1.1\r\nHost: localhost:5001\r\n" + b"Transfer-Encoding: chunked\r\n\r\n2;\nxx\r\n4c\r\n0\r\n\r\n" + b"GET /admin HTTP/1.1\r\nHost: localhost:5001\r\n" + b"Transfer-Encoding: chunked\r\n\r\n0\r\n\r\n" + ) + with pytest.raises(http_exceptions.BadHttpMessage, match="\\\\nxx"): + parser.feed_data(payload) + + +async def test_parse_chunked_payload_with_lf_in_extensions_py_parser( + loop: asyncio.AbstractEventLoop, protocol: BaseProtocol +) -> None: + """Test the py-parser with a chunked payload that has a LF in the chunk extensions.""" + # The py parser will not raise the BadHttpMessage directly, but instead + # it will set the exception on the StreamReader. + parser = HttpRequestParserPy( + protocol, + loop, + 2**16, + max_line_size=8190, + max_field_size=8190, + ) + payload = ( + b"GET / HTTP/1.1\r\nHost: localhost:5001\r\n" + b"Transfer-Encoding: chunked\r\n\r\n2;\nxx\r\n4c\r\n0\r\n\r\n" + b"GET /admin HTTP/1.1\r\nHost: localhost:5001\r\n" + b"Transfer-Encoding: chunked\r\n\r\n0\r\n\r\n" + ) + messages, _, _ = parser.feed_data(payload) + reader = messages[0][1] + assert isinstance(reader.exception(), http_exceptions.BadHttpMessage) + assert "\\nxx" in str(reader.exception()) + + +def test_partial_url(parser: HttpRequestParser) -> None: messages, upgrade, tail = parser.feed_data(b"GET /te") assert len(messages) == 0 messages, upgrade, tail = parser.feed_data(b"st HTTP/1.1\r\n\r\n") From bc15db61615079d1b6327ba42c682f758fa96936 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Wed, 13 Nov 2024 08:44:23 -0600 Subject: [PATCH 0929/1511] [PR #9852/249855a backport][3.10] Fix system routes polluting the middleware cache (#9855) --- CHANGES/9852.bugfix.rst | 1 + aiohttp/web_app.py | 14 +++++++++++--- tests/test_web_middleware.py | 29 +++++++++++++++++++++++++++-- 3 files changed, 39 insertions(+), 5 deletions(-) create mode 100644 CHANGES/9852.bugfix.rst diff --git a/CHANGES/9852.bugfix.rst b/CHANGES/9852.bugfix.rst new file mode 100644 index 00000000000..b459d08478b --- /dev/null +++ b/CHANGES/9852.bugfix.rst @@ -0,0 +1 @@ +Fixed system routes polluting the middleware cache -- by :user:`bdraco`. diff --git a/aiohttp/web_app.py b/aiohttp/web_app.py index 78b1a67bacc..81a84833532 100644 --- a/aiohttp/web_app.py +++ b/aiohttp/web_app.py @@ -54,6 +54,7 @@ MaskDomain, MatchedSubAppResource, PrefixedSubAppResource, + SystemRoute, UrlDispatcher, ) @@ -79,7 +80,6 @@ _Resource = TypeVar("_Resource", bound=AbstractResource) -@lru_cache(None) def _build_middlewares( handler: Handler, apps: Tuple["Application", ...] ) -> Callable[[Request], Awaitable[StreamResponse]]: @@ -90,6 +90,9 @@ def _build_middlewares( return handler +_cached_build_middleware = lru_cache(maxsize=1024)(_build_middlewares) + + class Application(MutableMapping[Union[str, AppKey[Any]], Any]): ATTRS = frozenset( [ @@ -544,8 +547,13 @@ async def _handle(self, request: Request) -> StreamResponse: handler = match_info.handler if self._run_middlewares: - if not self._has_legacy_middlewares: - handler = _build_middlewares(handler, match_info.apps) + # If its a SystemRoute, don't cache building the middlewares since + # they are constructed for every MatchInfoError as a new handler + # is made each time. 
+ if not self._has_legacy_middlewares and not isinstance( + match_info.route, SystemRoute + ): + handler = _cached_build_middleware(handler, match_info.apps) else: for app in match_info.apps[::-1]: for m, new_style in app._middlewares_handlers: # type: ignore[union-attr] diff --git a/tests/test_web_middleware.py b/tests/test_web_middleware.py index 9c4462be409..13acc589da9 100644 --- a/tests/test_web_middleware.py +++ b/tests/test_web_middleware.py @@ -1,10 +1,11 @@ import re -from typing import Any +from typing import Any, NoReturn import pytest from yarl import URL -from aiohttp import web +from aiohttp import web, web_app +from aiohttp.pytest_plugin import AiohttpClient from aiohttp.typedefs import Handler @@ -520,3 +521,27 @@ async def call(self, request, handler: Handler): assert 201 == resp.status txt = await resp.text() assert "OK[new style middleware]" == txt + + +async def test_middleware_does_not_leak(aiohttp_client: AiohttpClient) -> None: + async def any_handler(request: web.Request) -> NoReturn: + assert False + + class Middleware: + @web.middleware + async def call( + self, request: web.Request, handler: Handler + ) -> web.StreamResponse: + return await handler(request) + + app = web.Application() + app.router.add_route("POST", "/any", any_handler) + app.middlewares.append(Middleware().call) + + client = await aiohttp_client(app) + + web_app._cached_build_middleware.cache_clear() + for _ in range(10): + resp = await client.get("/any") + assert resp.status == 405 + assert web_app._cached_build_middleware.cache_info().currsize < 10 From 259edc369075de63e6f3a4eaade058c62af0df71 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Wed, 13 Nov 2024 08:50:36 -0600 Subject: [PATCH 0930/1511] [PR #9851/541d86d backport][3.10] Fix incorrect parsing of chunk extensions with the pure Python parser (#9853) --- CHANGES/9851.bugfix.rst | 1 + aiohttp/http_parser.py | 7 ++++++ tests/test_http_parser.py | 51 ++++++++++++++++++++++++++++++++++++++- 3 files changed, 58 insertions(+), 1 deletion(-) create mode 100644 CHANGES/9851.bugfix.rst diff --git a/CHANGES/9851.bugfix.rst b/CHANGES/9851.bugfix.rst new file mode 100644 index 00000000000..02541a92dd4 --- /dev/null +++ b/CHANGES/9851.bugfix.rst @@ -0,0 +1 @@ +Fixed incorrect parsing of chunk extensions with the pure Python parser -- by :user:`bdraco`. diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index 686a2d02e28..9fc7e8a2c8b 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -845,6 +845,13 @@ def feed_data( i = chunk.find(CHUNK_EXT, 0, pos) if i >= 0: size_b = chunk[:i] # strip chunk-extensions + # Verify no LF in the chunk-extension + if b"\n" in (ext := chunk[i:pos]): + exc = BadHttpMessage( + f"Unexpected LF in chunk-extension: {ext!r}" + ) + set_exception(self.payload, exc) + raise exc else: size_b = chunk[:pos] diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index 09f4f0746a5..c74c1697e65 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -13,6 +13,7 @@ import aiohttp from aiohttp import http_exceptions, streams +from aiohttp.base_protocol import BaseProtocol from aiohttp.http_parser import ( NO_EXTENSIONS, DeflateBuffer, @@ -1477,7 +1478,55 @@ async def test_parse_chunked_payload_split_chunks(response: Any) -> None: assert await reader.read() == b"firstsecond" -def test_partial_url(parser: Any) -> None: +@pytest.mark.skipif(NO_EXTENSIONS, reason="Only tests C parser.") +async def test_parse_chunked_payload_with_lf_in_extensions_c_parser( + loop: asyncio.AbstractEventLoop, protocol: BaseProtocol +) -> None: + """Test the C-parser with a 
chunked payload that has a LF in the chunk extensions.""" + # The C parser will raise a BadHttpMessage from feed_data + parser = HttpRequestParserC( + protocol, + loop, + 2**16, + max_line_size=8190, + max_field_size=8190, + ) + payload = ( + b"GET / HTTP/1.1\r\nHost: localhost:5001\r\n" + b"Transfer-Encoding: chunked\r\n\r\n2;\nxx\r\n4c\r\n0\r\n\r\n" + b"GET /admin HTTP/1.1\r\nHost: localhost:5001\r\n" + b"Transfer-Encoding: chunked\r\n\r\n0\r\n\r\n" + ) + with pytest.raises(http_exceptions.BadHttpMessage, match="\\\\nxx"): + parser.feed_data(payload) + + +async def test_parse_chunked_payload_with_lf_in_extensions_py_parser( + loop: asyncio.AbstractEventLoop, protocol: BaseProtocol +) -> None: + """Test the py-parser with a chunked payload that has a LF in the chunk extensions.""" + # The py parser will not raise the BadHttpMessage directly, but instead + # it will set the exception on the StreamReader. + parser = HttpRequestParserPy( + protocol, + loop, + 2**16, + max_line_size=8190, + max_field_size=8190, + ) + payload = ( + b"GET / HTTP/1.1\r\nHost: localhost:5001\r\n" + b"Transfer-Encoding: chunked\r\n\r\n2;\nxx\r\n4c\r\n0\r\n\r\n" + b"GET /admin HTTP/1.1\r\nHost: localhost:5001\r\n" + b"Transfer-Encoding: chunked\r\n\r\n0\r\n\r\n" + ) + messages, _, _ = parser.feed_data(payload) + reader = messages[0][1] + assert isinstance(reader.exception(), http_exceptions.BadHttpMessage) + assert "\\nxx" in str(reader.exception()) + + +def test_partial_url(parser: HttpRequestParser) -> None: messages, upgrade, tail = parser.feed_data(b"GET /te") assert len(messages) == 0 messages, upgrade, tail = parser.feed_data(b"st HTTP/1.1\r\n\r\n") From 2fa8bcd6b5438b0cb7912b35ff853493bdaebc1b Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Wed, 13 Nov 2024 08:56:19 -0600 Subject: [PATCH 0931/1511] [PR #9852/249855a backport][3.11] Fix system routes polluting the middleware cache (#9856) --- CHANGES/9852.bugfix.rst | 1 + aiohttp/web_app.py | 16 ++++++++++++---- tests/test_web_middleware.py | 29 +++++++++++++++++++++++++++-- 3 files changed, 40 insertions(+), 6 deletions(-) create mode 100644 CHANGES/9852.bugfix.rst diff --git a/CHANGES/9852.bugfix.rst b/CHANGES/9852.bugfix.rst new file mode 100644 index 00000000000..b459d08478b --- /dev/null +++ b/CHANGES/9852.bugfix.rst @@ -0,0 +1 @@ +Fixed system routes polluting the middleware cache -- by :user:`bdraco`. diff --git a/aiohttp/web_app.py b/aiohttp/web_app.py index c29f32df413..5d542ab9222 100644 --- a/aiohttp/web_app.py +++ b/aiohttp/web_app.py @@ -1,7 +1,7 @@ import asyncio import logging import warnings -from functools import cache, partial, update_wrapper +from functools import lru_cache, partial, update_wrapper from typing import ( TYPE_CHECKING, Any, @@ -54,6 +54,7 @@ MaskDomain, MatchedSubAppResource, PrefixedSubAppResource, + SystemRoute, UrlDispatcher, ) @@ -79,7 +80,6 @@ _Resource = TypeVar("_Resource", bound=AbstractResource) -@cache def _build_middlewares( handler: Handler, apps: Tuple["Application", ...] ) -> Callable[[Request], Awaitable[StreamResponse]]: @@ -90,6 +90,9 @@ def _build_middlewares( return handler +_cached_build_middleware = lru_cache(maxsize=1024)(_build_middlewares) + + class Application(MutableMapping[Union[str, AppKey[Any]], Any]): ATTRS = frozenset( [ @@ -544,8 +547,13 @@ async def _handle(self, request: Request) -> StreamResponse: handler = match_info.handler if self._run_middlewares: - if not self._has_legacy_middlewares: - handler = _build_middlewares(handler, match_info.apps) + # If its a SystemRoute, don't cache building the middlewares since + # they are constructed for every MatchInfoError as a new handler + # is made each time. 
+ if not self._has_legacy_middlewares and not isinstance( + match_info.route, SystemRoute + ): + handler = _cached_build_middleware(handler, match_info.apps) else: for app in match_info.apps[::-1]: for m, new_style in app._middlewares_handlers: # type: ignore[union-attr] diff --git a/tests/test_web_middleware.py b/tests/test_web_middleware.py index 9c4462be409..13acc589da9 100644 --- a/tests/test_web_middleware.py +++ b/tests/test_web_middleware.py @@ -1,10 +1,11 @@ import re -from typing import Any +from typing import Any, NoReturn import pytest from yarl import URL -from aiohttp import web +from aiohttp import web, web_app +from aiohttp.pytest_plugin import AiohttpClient from aiohttp.typedefs import Handler @@ -520,3 +521,27 @@ async def call(self, request, handler: Handler): assert 201 == resp.status txt = await resp.text() assert "OK[new style middleware]" == txt + + +async def test_middleware_does_not_leak(aiohttp_client: AiohttpClient) -> None: + async def any_handler(request: web.Request) -> NoReturn: + assert False + + class Middleware: + @web.middleware + async def call( + self, request: web.Request, handler: Handler + ) -> web.StreamResponse: + return await handler(request) + + app = web.Application() + app.router.add_route("POST", "/any", any_handler) + app.middlewares.append(Middleware().call) + + client = await aiohttp_client(app) + + web_app._cached_build_middleware.cache_clear() + for _ in range(10): + resp = await client.get("/any") + assert resp.status == 405 + assert web_app._cached_build_middleware.cache_info().currsize < 10 From beb7b740533b81d75706e6615f07d92fcbf1c325 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Wed, 13 Nov 2024 09:07:02 -0600 Subject: [PATCH 0932/1511] Release 3.10.11 (#9857) --- CHANGES.rst | 220 ++++++++++++++++++++++++++++++++++++++ CHANGES/9436.bugfix.rst | 1 - CHANGES/9470.misc.rst | 1 - CHANGES/9485.misc.rst | 1 - CHANGES/9506.bugfix.rst | 1 - CHANGES/9600.breaking.rst | 3 - CHANGES/9603.misc.rst | 1 - CHANGES/9670.bugfix.rst | 1 - CHANGES/9671.bugfix.rst | 3 - CHANGES/9686.bugfix.rst | 1 - CHANGES/9726.misc.rst | 1 - CHANGES/9736.misc.rst | 1 - CHANGES/9851.bugfix.rst | 1 - CHANGES/9852.bugfix.rst | 1 - aiohttp/__init__.py | 2 +- 15 files changed, 221 insertions(+), 18 deletions(-) delete mode 100644 CHANGES/9436.bugfix.rst delete mode 100644 CHANGES/9470.misc.rst delete mode 100644 CHANGES/9485.misc.rst delete mode 100644 CHANGES/9506.bugfix.rst delete mode 100644 CHANGES/9600.breaking.rst delete mode 100644 CHANGES/9603.misc.rst delete mode 120000 CHANGES/9670.bugfix.rst delete mode 100644 CHANGES/9671.bugfix.rst delete mode 100644 CHANGES/9686.bugfix.rst delete mode 100644 CHANGES/9726.misc.rst delete mode 120000 CHANGES/9736.misc.rst delete mode 100644 CHANGES/9851.bugfix.rst delete mode 100644 CHANGES/9852.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index 3a3d9880541..9fb01cac5fa 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,226 @@ .. towncrier release notes start +3.10.11 (2024-11-13) +==================== + +Bug fixes +--------- + +- Authentication provided by a redirect now takes precedence over provided ``auth`` when making requests with the client -- by :user:`PLPeeters`. + + + *Related issues and pull requests on GitHub:* + :issue:`9436`. + + + +- Fixed :py:meth:`WebSocketResponse.close() <aiohttp.web.WebSocketResponse.close>` to discard non-close messages within its timeout window after sending close -- by :user:`lenard-mosys`. + + + *Related issues and pull requests on GitHub:* + :issue:`9506`. 
+ + + +- Fixed a deadlock that could occur while attempting to get a new connection slot after a timeout -- by :user:`bdraco`. + + The connector was not cancellation-safe. + + + *Related issues and pull requests on GitHub:* + :issue:`9670`, :issue:`9671`. + + + +- Fixed the WebSocket flow control calculation undercounting with multi-byte data -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9686`. + + + +- Fixed incorrect parsing of chunk extensions with the pure Python parser -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9851`. + + + +- Fixed system routes polluting the middleware cache -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9852`. + + + + +Removals and backward incompatible breaking changes +--------------------------------------------------- + +- Improved performance of the connector when a connection can be reused -- by :user:`bdraco`. + + If ``BaseConnector.connect`` has been subclassed and replaced with custom logic, the ``ceil_timeout`` must be added. + + + *Related issues and pull requests on GitHub:* + :issue:`9600`. + + + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of the client request lifecycle when there are no cookies -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9470`. + + + +- Improved performance of sending client requests when the writer can finish synchronously -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9485`. + + + +- Improved performance of serializing HTTP headers -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9603`. + + + +- Passing ``enable_cleanup_closed`` to :py:class:`aiohttp.TCPConnector` is now ignored on Python 3.12.7+ and 3.13.1+ since the underlying bug that caused asyncio to leak SSL connections has been fixed upstream -- by :user:`bdraco`. 
+ + + *Related issues and pull requests on GitHub:* + :issue:`9726`, :issue:`9736`. + + + + +---- + + +3.10.11rc0 (2024-11-13) +======================= + +Bug fixes +--------- + +- Authentication provided by a redirect now takes precedence over provided ``auth`` when making requests with the client -- by :user:`PLPeeters`. + + + *Related issues and pull requests on GitHub:* + :issue:`9436`. + + + +- Fixed :py:meth:`WebSocketResponse.close() <aiohttp.web.WebSocketResponse.close>` to discard non-close messages within its timeout window after sending close -- by :user:`lenard-mosys`. + + + *Related issues and pull requests on GitHub:* + :issue:`9506`. + + + +- Fixed a deadlock that could occur while attempting to get a new connection slot after a timeout -- by :user:`bdraco`. + + The connector was not cancellation-safe. + + + *Related issues and pull requests on GitHub:* + :issue:`9670`, :issue:`9671`. + + + +- Fixed the WebSocket flow control calculation undercounting with multi-byte data -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9686`. + + + +- Fixed incorrect parsing of chunk extensions with the pure Python parser -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9851`. + + + +- Fixed system routes polluting the middleware cache -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9852`. + + + + +Removals and backward incompatible breaking changes +--------------------------------------------------- + +- Improved performance of the connector when a connection can be reused -- by :user:`bdraco`. + + If ``BaseConnector.connect`` has been subclassed and replaced with custom logic, the ``ceil_timeout`` must be added. + + + *Related issues and pull requests on GitHub:* + :issue:`9600`. + + + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of the client request lifecycle when there are no cookies -- by :user:`bdraco`. 
+ + + *Related issues and pull requests on GitHub:* + :issue:`9470`. + + + +- Improved performance of sending client requests when the writer can finish synchronously -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9485`. + + + +- Improved performance of serializing HTTP headers -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9603`. + + + +- Passing ``enable_cleanup_closed`` to :py:class:`aiohttp.TCPConnector` is now ignored on Python 3.12.7+ and 3.13.1+ since the underlying bug that caused asyncio to leak SSL connections has been fixed upstream -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9726`, :issue:`9736`. + + + + +---- + + 3.10.11rc0 (2024-11-12) ======================= diff --git a/CHANGES/9436.bugfix.rst b/CHANGES/9436.bugfix.rst deleted file mode 100644 index 7bd7fbcfe28..00000000000 --- a/CHANGES/9436.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Authentication provided by a redirect now takes precedence over provided ``auth`` when making requests with the client -- by :user:`PLPeeters`. diff --git a/CHANGES/9470.misc.rst b/CHANGES/9470.misc.rst deleted file mode 100644 index c363a0f8cfe..00000000000 --- a/CHANGES/9470.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performance of the client request lifecycle when there are no cookies -- by :user:`bdraco`. diff --git a/CHANGES/9485.misc.rst b/CHANGES/9485.misc.rst deleted file mode 100644 index bb0978abd46..00000000000 --- a/CHANGES/9485.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performance of sending client requests when the writer can finish synchronously -- by :user:`bdraco`. 
diff --git a/CHANGES/9506.bugfix.rst b/CHANGES/9506.bugfix.rst deleted file mode 100644 index 05ad6a59375..00000000000 --- a/CHANGES/9506.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed :py:meth:`WebSocketResponse.close() <aiohttp.web.WebSocketResponse.close>` to discard non-close messages within its timeout window after sending close -- by :user:`lenard-mosys`. diff --git a/CHANGES/9600.breaking.rst b/CHANGES/9600.breaking.rst deleted file mode 100644 index 5997344e4cd..00000000000 --- a/CHANGES/9600.breaking.rst +++ /dev/null @@ -1,3 +0,0 @@ -Improved performance of the connector when a connection can be reused -- by :user:`bdraco`. - -If ``BaseConnector.connect`` has been subclassed and replaced with custom logic, the ``ceil_timeout`` must be added. diff --git a/CHANGES/9603.misc.rst b/CHANGES/9603.misc.rst deleted file mode 100644 index 8a27657cdb9..00000000000 --- a/CHANGES/9603.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performance of serializing HTTP headers -- by :user:`bdraco`. diff --git a/CHANGES/9670.bugfix.rst b/CHANGES/9670.bugfix.rst deleted file mode 120000 index b0411a405a0..00000000000 --- a/CHANGES/9670.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -9671.bugfix.rst \ No newline at end of file diff --git a/CHANGES/9671.bugfix.rst b/CHANGES/9671.bugfix.rst deleted file mode 100644 index d2ca2e8ccb5..00000000000 --- a/CHANGES/9671.bugfix.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fixed a deadlock that could occur while attempting to get a new connection slot after a timeout -- by :user:`bdraco`. - -The connector was not cancellation-safe. diff --git a/CHANGES/9686.bugfix.rst b/CHANGES/9686.bugfix.rst deleted file mode 100644 index 397fb75ba77..00000000000 --- a/CHANGES/9686.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed the WebSocket flow control calculation undercounting with multi-byte data -- by :user:`bdraco`. 
diff --git a/CHANGES/9726.misc.rst b/CHANGES/9726.misc.rst deleted file mode 100644 index 460c48b7995..00000000000 --- a/CHANGES/9726.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Passing ``enable_cleanup_closed`` to :py:class:`aiohttp.TCPConnector` is now ignored on Python 3.12.7+ and 3.13.1+ since the underlying bug that caused asyncio to leak SSL connections has been fixed upstream -- by :user:`bdraco`. diff --git a/CHANGES/9736.misc.rst b/CHANGES/9736.misc.rst deleted file mode 120000 index 98c0ac8ac1d..00000000000 --- a/CHANGES/9736.misc.rst +++ /dev/null @@ -1 +0,0 @@ -9726.misc.rst \ No newline at end of file diff --git a/CHANGES/9851.bugfix.rst b/CHANGES/9851.bugfix.rst deleted file mode 100644 index 02541a92dd4..00000000000 --- a/CHANGES/9851.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed incorrect parsing of chunk extensions with the pure Python parser -- by :user:`bdraco`. diff --git a/CHANGES/9852.bugfix.rst b/CHANGES/9852.bugfix.rst deleted file mode 100644 index b459d08478b..00000000000 --- a/CHANGES/9852.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed system routes polluting the middleware cache -- by :user:`bdraco`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index d092b00cc7a..4fc7908843a 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.10.11rc0" +__version__ = "3.10.11" from typing import TYPE_CHECKING, Tuple From 3e09325e4839117df13fbac301f360edf8d3a0ee Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Wed, 13 Nov 2024 09:18:16 -0600 Subject: [PATCH 0933/1511] Remove 3.10.11rc0 from 3.10 changelog (#9858) --- CHANGES.rst | 204 ---------------------------------------------------- 1 file changed, 204 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 9fb01cac5fa..b1112737e98 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -116,213 +116,9 @@ Miscellaneous internal changes - ----- - - -3.10.11rc0 (2024-11-13) -======================= - -Bug fixes ---------- - -- Authentication provided by a redirect now takes precedence over provided ``auth`` when making requests with the client -- by :user:`PLPeeters`. - - - *Related issues and pull requests on GitHub:* - :issue:`9436`. - - - -- Fixed :py:meth:`WebSocketResponse.close() <aiohttp.web.WebSocketResponse.close>` to discard non-close messages within its timeout window after sending close -- by :user:`lenard-mosys`. - - - *Related issues and pull requests on GitHub:* - :issue:`9506`. - - - -- Fixed a deadlock that could occur while attempting to get a new connection slot after a timeout -- by :user:`bdraco`. - - The connector was not cancellation-safe. - - - *Related issues and pull requests on GitHub:* - :issue:`9670`, :issue:`9671`. - - - -- Fixed the WebSocket flow control calculation undercounting with multi-byte data -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`9686`. - - - -- Fixed incorrect parsing of chunk extensions with the pure Python parser -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`9851`. - - - -- Fixed system routes polluting the middleware cache -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`9852`. - - - - -Removals and backward incompatible breaking changes ---------------------------------------------------- - -- Improved performance of the connector when a connection can be reused -- by :user:`bdraco`. 
- - If ``BaseConnector.connect`` has been subclassed and replaced with custom logic, the ``ceil_timeout`` must be added. - - - *Related issues and pull requests on GitHub:* - :issue:`9600`. - - - - -Miscellaneous internal changes ------------------------------- - -- Improved performance of the client request lifecycle when there are no cookies -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`9470`. - - - -- Improved performance of sending client requests when the writer can finish synchronously -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`9485`. - - - -- Improved performance of serializing HTTP headers -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`9603`. - - - -- Passing ``enable_cleanup_closed`` to :py:class:`aiohttp.TCPConnector` is now ignored on Python 3.12.7+ and 3.13.1+ since the underlying bug that caused asyncio to leak SSL connections has been fixed upstream -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`9726`, :issue:`9736`. - - - - ---- -3.10.11rc0 (2024-11-12) -======================= - -Bug fixes ---------- - -- Authentication provided by a redirect now takes precedence over provided ``auth`` when making requests with the client -- by :user:`PLPeeters`. - - - *Related issues and pull requests on GitHub:* - :issue:`9436`. - - - -- Fixed :py:meth:`WebSocketResponse.close() <aiohttp.web.WebSocketResponse.close>` to discard non-close messages within its timeout window after sending close -- by :user:`lenard-mosys`. - - - *Related issues and pull requests on GitHub:* - :issue:`9506`. - - - -- Fixed a deadlock that could occur while attempting to get a new connection slot after a timeout -- by :user:`bdraco`. - - The connector was not cancellation-safe. - - - *Related issues and pull requests on GitHub:* - :issue:`9670`, :issue:`9671`. 
- - - -- Fixed the WebSocket flow control calculation undercounting with multi-byte data -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`9686`. - - - - -Removals and backward incompatible breaking changes ---------------------------------------------------- - -- Improved performance of the connector when a connection can be reused -- by :user:`bdraco`. - - If ``BaseConnector.connect`` has been subclassed and replaced with custom logic, the ``ceil_timeout`` must be added. - - - *Related issues and pull requests on GitHub:* - :issue:`9600`. - - - - -Miscellaneous internal changes ------------------------------- - -- Improved performance of the client request lifecycle when there are no cookies -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`9470`. - - - -- Improved performance of sending client requests when the writer can finish synchronously -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`9485`. - - - -- Improved performance of serializing HTTP headers -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`9603`. - - - -- Passing ``enable_cleanup_closed`` to :py:class:`aiohttp.TCPConnector` is now ignored on Python 3.12.7+ and 3.13.1+ since the underlying bug that caused asyncio to leak SSL connections has been fixed upstream -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`9726`, :issue:`9736`. - - - - ----- - 3.10.10 (2024-10-10) ==================== From c311912f853651fd70090d11e1dd425824378839 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Wed, 13 Nov 2024 10:03:28 -0600 Subject: [PATCH 0934/1511] Release 3.11.0 (#9860) --- CHANGES.rst | 359 +++++++++++++++++++++++++++++++++++ CHANGES/3945.deprecation.rst | 1 - CHANGES/6257.feature | 4 - CHANGES/6652.bugfix.rst | 1 - CHANGES/6800.bugfix.rst | 1 - CHANGES/7731.misc.rst | 1 - CHANGES/7941.feature | 1 - CHANGES/8612.feature.rst | 1 - CHANGES/8797.breaking.rst | 1 - CHANGES/8909.breaking.rst | 1 - CHANGES/8920.misc.rst | 1 - CHANGES/8956.feature.rst | 1 - CHANGES/8966.feature.rst | 1 - CHANGES/8977.bugfix.rst | 1 - CHANGES/9033.misc.rst | 1 - CHANGES/9079.breaking.rst | 1 - CHANGES/9141.misc.rst | 2 - CHANGES/9207.feature.rst | 1 - CHANGES/9285.misc.rst | 1 - CHANGES/9305.breaking.rst | 1 - CHANGES/9335.feature.rst | 1 - CHANGES/9344.breaking.rst | 1 - CHANGES/9348.feature.rst | 1 - CHANGES/9359.contrib.rst | 2 - CHANGES/9365.breaking.rst | 1 - CHANGES/9394.packaging.rst | 6 - CHANGES/9407.misc.rst | 1 - CHANGES/9466.feature.rst | 1 - CHANGES/9530.feature.rst | 2 - CHANGES/9542.packaging.rst | 1 - CHANGES/9543.feature.rst | 1 - CHANGES/9552.packaging.rst | 1 - CHANGES/9554.feature.rst | 1 - CHANGES/9556.feature.rst | 1 - CHANGES/9558.feature.rst | 1 - CHANGES/9572.feature.rst | 1 - CHANGES/9574.breaking.rst | 1 - CHANGES/9636.feature.rst | 1 - CHANGES/9649.feature.rst | 1 - CHANGES/9659.misc.rst | 1 - CHANGES/9672.bugfix.rst | 3 - CHANGES/9679.misc.rst | 1 - CHANGES/9685.breaking.rst | 1 - CHANGES/9692.breaking.rst | 1 - CHANGES/9722.misc.rst | 1 - CHANGES/9756.misc.rst | 1 - CHANGES/9757.misc.rst | 1 - CHANGES/9781.feature.rst | 1 - CHANGES/9796.misc.rst | 1 - CHANGES/9822.feature.rst | 1 - CHANGES/9839.misc.rst | 1 - aiohttp/__init__.py | 2 +- 52 files changed, 360 insertions(+), 64 deletions(-) delete mode 100644 CHANGES/3945.deprecation.rst delete mode 100644 CHANGES/6257.feature delete mode 100644 CHANGES/6652.bugfix.rst delete mode 100644 CHANGES/6800.bugfix.rst delete mode 100644 CHANGES/7731.misc.rst delete 
mode 100644 CHANGES/7941.feature delete mode 100644 CHANGES/8612.feature.rst delete mode 100644 CHANGES/8797.breaking.rst delete mode 120000 CHANGES/8909.breaking.rst delete mode 100644 CHANGES/8920.misc.rst delete mode 100644 CHANGES/8956.feature.rst delete mode 100644 CHANGES/8966.feature.rst delete mode 100644 CHANGES/8977.bugfix.rst delete mode 100644 CHANGES/9033.misc.rst delete mode 120000 CHANGES/9079.breaking.rst delete mode 100644 CHANGES/9141.misc.rst delete mode 100644 CHANGES/9207.feature.rst delete mode 100644 CHANGES/9285.misc.rst delete mode 120000 CHANGES/9305.breaking.rst delete mode 100644 CHANGES/9335.feature.rst delete mode 100644 CHANGES/9344.breaking.rst delete mode 100644 CHANGES/9348.feature.rst delete mode 100644 CHANGES/9359.contrib.rst delete mode 100644 CHANGES/9365.breaking.rst delete mode 100644 CHANGES/9394.packaging.rst delete mode 100644 CHANGES/9407.misc.rst delete mode 120000 CHANGES/9466.feature.rst delete mode 100644 CHANGES/9530.feature.rst delete mode 100644 CHANGES/9542.packaging.rst delete mode 100644 CHANGES/9543.feature.rst delete mode 120000 CHANGES/9552.packaging.rst delete mode 120000 CHANGES/9554.feature.rst delete mode 120000 CHANGES/9556.feature.rst delete mode 120000 CHANGES/9558.feature.rst delete mode 100644 CHANGES/9572.feature.rst delete mode 100644 CHANGES/9574.breaking.rst delete mode 120000 CHANGES/9636.feature.rst delete mode 120000 CHANGES/9649.feature.rst delete mode 100644 CHANGES/9659.misc.rst delete mode 100644 CHANGES/9672.bugfix.rst delete mode 100644 CHANGES/9679.misc.rst delete mode 100644 CHANGES/9685.breaking.rst delete mode 100644 CHANGES/9692.breaking.rst delete mode 100644 CHANGES/9722.misc.rst delete mode 100644 CHANGES/9756.misc.rst delete mode 100644 CHANGES/9757.misc.rst delete mode 120000 CHANGES/9781.feature.rst delete mode 100644 CHANGES/9796.misc.rst delete mode 100644 CHANGES/9822.feature.rst delete mode 100644 CHANGES/9839.misc.rst diff --git a/CHANGES.rst b/CHANGES.rst index 
5844b560409..dab7a1b40cf 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,365 @@ .. towncrier release notes start +3.11.0 (2024-11-13) +=================== + +Bug fixes +--------- + +- Raise :exc:`aiohttp.ServerFingerprintMismatch` exception on client-side if request through http proxy with mismatching server fingerprint digest: `aiohttp.ClientSession(headers=headers, connector=TCPConnector(ssl=aiohttp.Fingerprint(mismatch_digest), trust_env=True).request(...)` -- by :user:`gangj`. + + + *Related issues and pull requests on GitHub:* + :issue:`6652`. + + + +- Modified websocket :meth:`aiohttp.ClientWebSocketResponse.receive_str`, :py:meth:`aiohttp.ClientWebSocketResponse.receive_bytes`, :py:meth:`aiohttp.web.WebSocketResponse.receive_str` & :py:meth:`aiohttp.web.WebSocketResponse.receive_bytes` methods to raise new :py:exc:`aiohttp.WSMessageTypeError` exception, instead of generic :py:exc:`TypeError`, when websocket messages of incorrect types are received -- by :user:`ara-25`. + + + *Related issues and pull requests on GitHub:* + :issue:`6800`. + + + +- Made ``TestClient.app`` a ``Generic`` so type checkers will know the correct type (avoiding unneeded ``client.app is not None`` checks) -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8977`. + + + +- Fixed the keep-alive connection pool to be FIFO instead of LIFO -- by :user:`bdraco`. + + Keep-alive connections are more likely to be reused before they disconnect. + + + *Related issues and pull requests on GitHub:* + :issue:`9672`. + + + + +Features +-------- + +- Added ``strategy`` parameter to :meth:`aiohttp.web.StreamResponse.enable_compression` + The value of this parameter is passed to the :func:`zlib.compressobj` function, allowing people + to use a more sufficient compression algorithm for their data served by :mod:`aiohttp.web` + -- by :user:`shootkin` + + + *Related issues and pull requests on GitHub:* + :issue:`6257`. 
+ + + +- Added ``server_hostname`` parameter to ``ws_connect``. + + + *Related issues and pull requests on GitHub:* + :issue:`7941`. + + + +- Exported :py:class:`~aiohttp.ClientWSTimeout` to top-level namespace -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8612`. + + + +- Added ``secure``/``httponly``/``samesite`` parameters to ``.del_cookie()`` -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8956`. + + + +- Updated :py:class:`~aiohttp.ClientSession`'s auth logic to include default auth only if the request URL's origin matches _base_url; otherwise, the auth will not be included -- by :user:`MaximZemskov` + + + *Related issues and pull requests on GitHub:* + :issue:`8966`, :issue:`9466`. + + + +- Added ``proxy`` and ``proxy_auth`` parameters to :py:class:`~aiohttp.ClientSession` -- by :user:`meshya`. + + + *Related issues and pull requests on GitHub:* + :issue:`9207`. + + + +- Added ``default_to_multipart`` parameter to ``FormData``. + + + *Related issues and pull requests on GitHub:* + :issue:`9335`. + + + +- Added :py:meth:`~aiohttp.ClientWebSocketResponse.send_frame` and :py:meth:`~aiohttp.web.WebSocketResponse.send_frame` for WebSockets -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9348`. + + + +- Updated :py:class:`~aiohttp.ClientSession` to support paths in ``base_url`` parameter. + ``base_url`` paths must end with a ``/`` -- by :user:`Cycloctane`. + + + *Related issues and pull requests on GitHub:* + :issue:`9530`. + + + +- Improved performance of reading WebSocket messages with a Cython implementation -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9543`, :issue:`9554`, :issue:`9556`, :issue:`9558`, :issue:`9636`, :issue:`9649`, :issue:`9781`. 
+ + + +- Added ``writer_limit`` to the :py:class:`~aiohttp.web.WebSocketResponse` to be able to adjust the limit before the writer forces the buffer to be drained -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9572`. + + + +- Added an :attr:`~aiohttp.abc.AbstractAccessLogger.enabled` property to :class:`aiohttp.abc.AbstractAccessLogger` to dynamically check if logging is enabled -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9822`. + + + + +Deprecations (removal in next major release) +-------------------------------------------- + +- Deprecate obsolete `timeout: float` and `receive_timeout: Optional[float]` in :py:meth:`~aiohttp.ClientSession.ws_connect`. Change default websocket receive timeout from `None` to `10.0`. + + + *Related issues and pull requests on GitHub:* + :issue:`3945`. + + + + +Removals and backward incompatible breaking changes +--------------------------------------------------- + +- Dropped support for Python 3.8 -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8797`. + + + +- Increased minimum yarl version to 1.17.0 -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8909`, :issue:`9079`, :issue:`9305`, :issue:`9574`. + + + +- Removed the ``is_ipv6_address`` and ``is_ip4_address`` helpers are they are no longer used -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9344`. + + + +- Changed ``ClientRequest.connection_key`` to be a `NamedTuple` to improve client performance -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9365`. + + + +- ``FlowControlDataQueue`` has been replaced with the ``WebSocketDataQueue`` -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9685`. + + + +- Changed ``ClientRequest.request_info`` to be a `NamedTuple` to improve client performance -- by :user:`bdraco`. 
+ + + *Related issues and pull requests on GitHub:* + :issue:`9692`. + + + + +Packaging updates and notes for downstreams +------------------------------------------- + +- Switched to using the :mod:`propcache <propcache.api>` package for property caching + -- by :user:`bdraco`. + + The :mod:`propcache <propcache.api>` package is derived from the property caching + code in :mod:`yarl` and has been broken out to avoid maintaining it for multiple + projects. + + + *Related issues and pull requests on GitHub:* + :issue:`9394`. + + + +- Separated ``aiohttp.http_websocket`` into multiple files to make it easier to maintain -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9542`, :issue:`9552`. + + + + +Contributor-facing changes +-------------------------- + +- Changed diagram images generator from ``blockdiag`` to ``GraphViz``. + Generating documentation now requires the GraphViz executable to be included in $PATH or sphinx build configuration. + + + *Related issues and pull requests on GitHub:* + :issue:`9359`. + + + + +Miscellaneous internal changes +------------------------------ + +- Added flake8 settings to avoid some forms of implicit concatenation. -- by :user:`booniepepper`. + + + *Related issues and pull requests on GitHub:* + :issue:`7731`. + + + +- Enabled keep-alive support on proxies (which was originally disabled several years ago) -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8920`. + + + +- Changed web entry point to not listen on TCP when only a Unix path is passed -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`9033`. + + + +- Disabled automatic retries of failed requests in :class:`aiohttp.test_utils.TestClient`'s client session + (which could potentially hide errors in tests) -- by :user:`ShubhAgarwal-dev`. + + + *Related issues and pull requests on GitHub:* + :issue:`9141`. 
+ + + +- Changed web ``keepalive_timeout`` default to around an hour in order to reduce race conditions on reverse proxies -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`9285`. + + + +- Reduced memory required for stream objects created during the client request lifecycle -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9407`. + + + +- Improved performance of the internal ``DataQueue`` -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9659`. + + + +- Improved performance of calling ``receive`` for WebSockets for the most common message types -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9679`. + + + +- Replace internal helper methods ``method_must_be_empty_body`` and ``status_code_must_be_empty_body`` with simple `set` lookups -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9722`. + + + +- Improved performance of :py:class:`aiohttp.BaseConnector` when there is no ``limit_per_host`` -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9756`. + + + +- Improved performance of sending HTTP requests when there is no body -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9757`. + + + +- Improved performance of the ``WebsocketWriter`` when the protocol is not paused -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9796`. + + + +- Implemented zero copy writes for ``StreamWriter`` -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9839`. 
+ + + + +---- + + 3.10.11 (2024-11-13) ==================== diff --git a/CHANGES/3945.deprecation.rst b/CHANGES/3945.deprecation.rst deleted file mode 100644 index 91c510c6d32..00000000000 --- a/CHANGES/3945.deprecation.rst +++ /dev/null @@ -1 +0,0 @@ -Deprecate obsolete `timeout: float` and `receive_timeout: Optional[float]` in :py:meth:`~aiohttp.ClientSession.ws_connect`. Change default websocket receive timeout from `None` to `10.0`. diff --git a/CHANGES/6257.feature b/CHANGES/6257.feature deleted file mode 100644 index 51fc6bf9bb7..00000000000 --- a/CHANGES/6257.feature +++ /dev/null @@ -1,4 +0,0 @@ -Added ``strategy`` parameter to :meth:`aiohttp.web.StreamResponse.enable_compression` -The value of this parameter is passed to the :func:`zlib.compressobj` function, allowing people -to use a more sufficient compression algorithm for their data served by :mod:`aiohttp.web` --- by :user:`shootkin` diff --git a/CHANGES/6652.bugfix.rst b/CHANGES/6652.bugfix.rst deleted file mode 100644 index 972557b0d96..00000000000 --- a/CHANGES/6652.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Raise :exc:`aiohttp.ServerFingerprintMismatch` exception on client-side if request through http proxy with mismatching server fingerprint digest: `aiohttp.ClientSession(headers=headers, connector=TCPConnector(ssl=aiohttp.Fingerprint(mismatch_digest), trust_env=True).request(...)` -- by :user:`gangj`. diff --git a/CHANGES/6800.bugfix.rst b/CHANGES/6800.bugfix.rst deleted file mode 100644 index 880d70dc1fb..00000000000 --- a/CHANGES/6800.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Modified websocket :meth:`aiohttp.ClientWebSocketResponse.receive_str`, :py:meth:`aiohttp.ClientWebSocketResponse.receive_bytes`, :py:meth:`aiohttp.web.WebSocketResponse.receive_str` & :py:meth:`aiohttp.web.WebSocketResponse.receive_bytes` methods to raise new :py:exc:`aiohttp.WSMessageTypeError` exception, instead of generic :py:exc:`TypeError`, when websocket messages of incorrect types are received -- by :user:`ara-25`. 
diff --git a/CHANGES/7731.misc.rst b/CHANGES/7731.misc.rst deleted file mode 100644 index f46ffa5816b..00000000000 --- a/CHANGES/7731.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Added flake8 settings to avoid some forms of implicit concatenation. -- by :user:`booniepepper`. diff --git a/CHANGES/7941.feature b/CHANGES/7941.feature deleted file mode 100644 index 6f4530f103a..00000000000 --- a/CHANGES/7941.feature +++ /dev/null @@ -1 +0,0 @@ -Added ``server_hostname`` parameter to ``ws_connect``. diff --git a/CHANGES/8612.feature.rst b/CHANGES/8612.feature.rst deleted file mode 100644 index 51ede16ebe5..00000000000 --- a/CHANGES/8612.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Exported :py:class:`~aiohttp.ClientWSTimeout` to top-level namespace -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8797.breaking.rst b/CHANGES/8797.breaking.rst deleted file mode 100644 index c219ea3d264..00000000000 --- a/CHANGES/8797.breaking.rst +++ /dev/null @@ -1 +0,0 @@ -Dropped support for Python 3.8 -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8909.breaking.rst b/CHANGES/8909.breaking.rst deleted file mode 120000 index 09e6008b8cd..00000000000 --- a/CHANGES/8909.breaking.rst +++ /dev/null @@ -1 +0,0 @@ -9574.breaking.rst \ No newline at end of file diff --git a/CHANGES/8920.misc.rst b/CHANGES/8920.misc.rst deleted file mode 100644 index 2e8640593a4..00000000000 --- a/CHANGES/8920.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Enabled keep-alive support on proxies (which was originally disabled several years ago) -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8956.feature.rst b/CHANGES/8956.feature.rst deleted file mode 100644 index 245b481089a..00000000000 --- a/CHANGES/8956.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Added ``secure``/``httponly``/``samesite`` parameters to ``.del_cookie()`` -- by :user:`Dreamsorcerer`. 
diff --git a/CHANGES/8966.feature.rst b/CHANGES/8966.feature.rst deleted file mode 100644 index 68ec1323568..00000000000 --- a/CHANGES/8966.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Updated :py:class:`~aiohttp.ClientSession`'s auth logic to include default auth only if the request URL's origin matches _base_url; otherwise, the auth will not be included -- by :user:`MaximZemskov` diff --git a/CHANGES/8977.bugfix.rst b/CHANGES/8977.bugfix.rst deleted file mode 100644 index 7d21fe0c3fa..00000000000 --- a/CHANGES/8977.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Made ``TestClient.app`` a ``Generic`` so type checkers will know the correct type (avoiding unneeded ``client.app is not None`` checks) -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/9033.misc.rst b/CHANGES/9033.misc.rst deleted file mode 100644 index 07a017ffdda..00000000000 --- a/CHANGES/9033.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Changed web entry point to not listen on TCP when only a Unix path is passed -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/9079.breaking.rst b/CHANGES/9079.breaking.rst deleted file mode 120000 index 09e6008b8cd..00000000000 --- a/CHANGES/9079.breaking.rst +++ /dev/null @@ -1 +0,0 @@ -9574.breaking.rst \ No newline at end of file diff --git a/CHANGES/9141.misc.rst b/CHANGES/9141.misc.rst deleted file mode 100644 index d23439fa742..00000000000 --- a/CHANGES/9141.misc.rst +++ /dev/null @@ -1,2 +0,0 @@ -Disabled automatic retries of failed requests in :class:`aiohttp.test_utils.TestClient`'s client session -(which could potentially hide errors in tests) -- by :user:`ShubhAgarwal-dev`. diff --git a/CHANGES/9207.feature.rst b/CHANGES/9207.feature.rst deleted file mode 100644 index cb54a1dd1f9..00000000000 --- a/CHANGES/9207.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Added ``proxy`` and ``proxy_auth`` parameters to :py:class:`~aiohttp.ClientSession` -- by :user:`meshya`. 
diff --git a/CHANGES/9285.misc.rst b/CHANGES/9285.misc.rst deleted file mode 100644 index 78c8a773cf0..00000000000 --- a/CHANGES/9285.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Changed web ``keepalive_timeout`` default to around an hour in order to reduce race conditions on reverse proxies -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/9305.breaking.rst b/CHANGES/9305.breaking.rst deleted file mode 120000 index 09e6008b8cd..00000000000 --- a/CHANGES/9305.breaking.rst +++ /dev/null @@ -1 +0,0 @@ -9574.breaking.rst \ No newline at end of file diff --git a/CHANGES/9335.feature.rst b/CHANGES/9335.feature.rst deleted file mode 100644 index 9a2e0684b44..00000000000 --- a/CHANGES/9335.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Added ``default_to_multipart`` parameter to ``FormData``. diff --git a/CHANGES/9344.breaking.rst b/CHANGES/9344.breaking.rst deleted file mode 100644 index 5888b674a51..00000000000 --- a/CHANGES/9344.breaking.rst +++ /dev/null @@ -1 +0,0 @@ -Removed the ``is_ipv6_address`` and ``is_ip4_address`` helpers are they are no longer used -- by :user:`bdraco`. diff --git a/CHANGES/9348.feature.rst b/CHANGES/9348.feature.rst deleted file mode 100644 index 66fa5c1a06e..00000000000 --- a/CHANGES/9348.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Added :py:meth:`~aiohttp.ClientWebSocketResponse.send_frame` and :py:meth:`~aiohttp.web.WebSocketResponse.send_frame` for WebSockets -- by :user:`bdraco`. diff --git a/CHANGES/9359.contrib.rst b/CHANGES/9359.contrib.rst deleted file mode 100644 index cff763e8b09..00000000000 --- a/CHANGES/9359.contrib.rst +++ /dev/null @@ -1,2 +0,0 @@ -Changed diagram images generator from ``blockdiag`` to ``GraphViz``. -Generating documentation now requires the GraphViz executable to be included in $PATH or sphinx build configuration. 
diff --git a/CHANGES/9365.breaking.rst b/CHANGES/9365.breaking.rst deleted file mode 100644 index f0224170f07..00000000000 --- a/CHANGES/9365.breaking.rst +++ /dev/null @@ -1 +0,0 @@ -Changed ``ClientRequest.connection_key`` to be a `NamedTuple` to improve client performance -- by :user:`bdraco`. diff --git a/CHANGES/9394.packaging.rst b/CHANGES/9394.packaging.rst deleted file mode 100644 index 456ac0f52c8..00000000000 --- a/CHANGES/9394.packaging.rst +++ /dev/null @@ -1,6 +0,0 @@ -Switched to using the :mod:`propcache <propcache.api>` package for property caching --- by :user:`bdraco`. - -The :mod:`propcache <propcache.api>` package is derived from the property caching -code in :mod:`yarl` and has been broken out to avoid maintaining it for multiple -projects. diff --git a/CHANGES/9407.misc.rst b/CHANGES/9407.misc.rst deleted file mode 100644 index d2a4e1e3ae3..00000000000 --- a/CHANGES/9407.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Reduced memory required for stream objects created during the client request lifecycle -- by :user:`bdraco`. diff --git a/CHANGES/9466.feature.rst b/CHANGES/9466.feature.rst deleted file mode 120000 index a54874e90e0..00000000000 --- a/CHANGES/9466.feature.rst +++ /dev/null @@ -1 +0,0 @@ -8966.feature.rst \ No newline at end of file diff --git a/CHANGES/9530.feature.rst b/CHANGES/9530.feature.rst deleted file mode 100644 index cc4e75a13ca..00000000000 --- a/CHANGES/9530.feature.rst +++ /dev/null @@ -1,2 +0,0 @@ -Updated :py:class:`~aiohttp.ClientSession` to support paths in ``base_url`` parameter. -``base_url`` paths must end with a ``/`` -- by :user:`Cycloctane`. diff --git a/CHANGES/9542.packaging.rst b/CHANGES/9542.packaging.rst deleted file mode 100644 index c77b962994f..00000000000 --- a/CHANGES/9542.packaging.rst +++ /dev/null @@ -1 +0,0 @@ -Separated ``aiohttp.http_websocket`` into multiple files to make it easier to maintain -- by :user:`bdraco`. 
diff --git a/CHANGES/9543.feature.rst b/CHANGES/9543.feature.rst deleted file mode 100644 index ee624ddc48d..00000000000 --- a/CHANGES/9543.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performance of reading WebSocket messages with a Cython implementation -- by :user:`bdraco`. diff --git a/CHANGES/9552.packaging.rst b/CHANGES/9552.packaging.rst deleted file mode 120000 index c9dc8a14683..00000000000 --- a/CHANGES/9552.packaging.rst +++ /dev/null @@ -1 +0,0 @@ -9542.packaging.rst \ No newline at end of file diff --git a/CHANGES/9554.feature.rst b/CHANGES/9554.feature.rst deleted file mode 120000 index a93584bccd8..00000000000 --- a/CHANGES/9554.feature.rst +++ /dev/null @@ -1 +0,0 @@ -9543.feature.rst \ No newline at end of file diff --git a/CHANGES/9556.feature.rst b/CHANGES/9556.feature.rst deleted file mode 120000 index a93584bccd8..00000000000 --- a/CHANGES/9556.feature.rst +++ /dev/null @@ -1 +0,0 @@ -9543.feature.rst \ No newline at end of file diff --git a/CHANGES/9558.feature.rst b/CHANGES/9558.feature.rst deleted file mode 120000 index e3e7a75e700..00000000000 --- a/CHANGES/9558.feature.rst +++ /dev/null @@ -1 +0,0 @@ -9554.feature.rst \ No newline at end of file diff --git a/CHANGES/9572.feature.rst b/CHANGES/9572.feature.rst deleted file mode 100644 index 9e6778819da..00000000000 --- a/CHANGES/9572.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Added ``writer_limit`` to the :py:class:`~aiohttp.web.WebSocketResponse` to be able to adjust the limit before the writer forces the buffer to be drained -- by :user:`bdraco`. diff --git a/CHANGES/9574.breaking.rst b/CHANGES/9574.breaking.rst deleted file mode 100644 index 4175991dfcf..00000000000 --- a/CHANGES/9574.breaking.rst +++ /dev/null @@ -1 +0,0 @@ -Increased minimum yarl version to 1.17.0 -- by :user:`bdraco`. 
diff --git a/CHANGES/9636.feature.rst b/CHANGES/9636.feature.rst deleted file mode 120000 index a93584bccd8..00000000000 --- a/CHANGES/9636.feature.rst +++ /dev/null @@ -1 +0,0 @@ -9543.feature.rst \ No newline at end of file diff --git a/CHANGES/9649.feature.rst b/CHANGES/9649.feature.rst deleted file mode 120000 index a93584bccd8..00000000000 --- a/CHANGES/9649.feature.rst +++ /dev/null @@ -1 +0,0 @@ -9543.feature.rst \ No newline at end of file diff --git a/CHANGES/9659.misc.rst b/CHANGES/9659.misc.rst deleted file mode 100644 index b121e2f7485..00000000000 --- a/CHANGES/9659.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performance of the internal ``DataQueue`` -- by :user:`bdraco`. diff --git a/CHANGES/9672.bugfix.rst b/CHANGES/9672.bugfix.rst deleted file mode 100644 index 110e397e647..00000000000 --- a/CHANGES/9672.bugfix.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fixed the keep-alive connection pool to be FIFO instead of LIFO -- by :user:`bdraco`. - -Keep-alive connections are more likely to be reused before they disconnect. diff --git a/CHANGES/9679.misc.rst b/CHANGES/9679.misc.rst deleted file mode 100644 index 55969f6c662..00000000000 --- a/CHANGES/9679.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performance of calling ``receive`` for WebSockets for the most common message types -- by :user:`bdraco`. diff --git a/CHANGES/9685.breaking.rst b/CHANGES/9685.breaking.rst deleted file mode 100644 index 7ea7cd68e76..00000000000 --- a/CHANGES/9685.breaking.rst +++ /dev/null @@ -1 +0,0 @@ -``FlowControlDataQueue`` has been replaced with the ``WebSocketDataQueue`` -- by :user:`bdraco`. diff --git a/CHANGES/9692.breaking.rst b/CHANGES/9692.breaking.rst deleted file mode 100644 index e0fdae11416..00000000000 --- a/CHANGES/9692.breaking.rst +++ /dev/null @@ -1 +0,0 @@ -Changed ``ClientRequest.request_info`` to be a `NamedTuple` to improve client performance -- by :user:`bdraco`. 
diff --git a/CHANGES/9722.misc.rst b/CHANGES/9722.misc.rst deleted file mode 100644 index 998db377fa1..00000000000 --- a/CHANGES/9722.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Replace internal helper methods ``method_must_be_empty_body`` and ``status_code_must_be_empty_body`` with simple `set` lookups -- by :user:`bdraco`. diff --git a/CHANGES/9756.misc.rst b/CHANGES/9756.misc.rst deleted file mode 100644 index 54f232ac403..00000000000 --- a/CHANGES/9756.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performance of :py:class:`aiohttp.BaseConnector` when there is no ``limit_per_host`` -- by :user:`bdraco`. diff --git a/CHANGES/9757.misc.rst b/CHANGES/9757.misc.rst deleted file mode 100644 index 824b1d88447..00000000000 --- a/CHANGES/9757.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performance of sending HTTP requests when there is no body -- by :user:`bdraco`. diff --git a/CHANGES/9781.feature.rst b/CHANGES/9781.feature.rst deleted file mode 120000 index a93584bccd8..00000000000 --- a/CHANGES/9781.feature.rst +++ /dev/null @@ -1 +0,0 @@ -9543.feature.rst \ No newline at end of file diff --git a/CHANGES/9796.misc.rst b/CHANGES/9796.misc.rst deleted file mode 100644 index 8cf4d621fb8..00000000000 --- a/CHANGES/9796.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performance of the ``WebsocketWriter`` when the protocol is not paused -- by :user:`bdraco`. diff --git a/CHANGES/9822.feature.rst b/CHANGES/9822.feature.rst deleted file mode 100644 index f361c586499..00000000000 --- a/CHANGES/9822.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Added an :attr:`~aiohttp.abc.AbstractAccessLogger.enabled` property to :class:`aiohttp.abc.AbstractAccessLogger` to dynamically check if logging is enabled -- by :user:`bdraco`. diff --git a/CHANGES/9839.misc.rst b/CHANGES/9839.misc.rst deleted file mode 100644 index 8bdd50268a7..00000000000 --- a/CHANGES/9839.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Implemented zero copy writes for ``StreamWriter`` -- by :user:`bdraco`. 
diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 62501256a2c..83e7f53d868 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.0rc2" +__version__ = "3.11.0" from typing import TYPE_CHECKING, Tuple From 72661bd850bf0f7c237e5eea7bc3a4815c22d6ef Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Wed, 13 Nov 2024 10:10:02 -0600 Subject: [PATCH 0935/1511] Increment version to 3.11.1.dev0 (#9863) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 83e7f53d868..c0168c22522 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.0" +__version__ = "3.11.1.dev0" from typing import TYPE_CHECKING, Tuple From 51a531f6737169849d88defa1629ec3de2f26528 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Wed, 13 Nov 2024 10:12:56 -0600 Subject: [PATCH 0936/1511] Increment version to 3.12.0.dev0 (#9864) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 83e7f53d868..f05c8f04301 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.0" +__version__ = "3.12.0.dev0" from typing import TYPE_CHECKING, Tuple From b580b8651c0ab671dea4091a501f915bb082b1dd Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Wed, 13 Nov 2024 10:16:34 -0600 Subject: [PATCH 0937/1511] Update dependabot config for 3.12 (#9865) --- .github/dependabot.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 9cf1501e811..5d4bbe08db7 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -25,7 +25,7 @@ updates: directory: "/" labels: - dependencies - target-branch: "3.11" + target-branch: "3.12" schedule: interval: "daily" open-pull-requests-limit: 10 @@ -37,7 +37,7 @@ updates: - dependency-type: "all" labels: - dependencies - target-branch: "3.11" + target-branch: "3.12" schedule: interval: "daily" open-pull-requests-limit: 10 From daaea93917e9f7247ab991286858f8f94ca8d117 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 14 Nov 2024 14:54:46 +0000 Subject: [PATCH 0938/1511] [PR #9873/c9698c8e backport][3.11] Make creating `RequestInfo` backwards compatible with 3.10 (#9874) Co-authored-by: J. Nick Koston <nick@koston.org> fixes #9866 --- CHANGES/9873.bugfix.rst | 1 + aiohttp/client_reqrep.py | 25 +++++++++++++++++++-- tests/test_client_request.py | 43 ++++++++++++++++++++++++++++++++++++ 3 files changed, 67 insertions(+), 2 deletions(-) create mode 100644 CHANGES/9873.bugfix.rst diff --git a/CHANGES/9873.bugfix.rst b/CHANGES/9873.bugfix.rst new file mode 100644 index 00000000000..90f708fa879 --- /dev/null +++ b/CHANGES/9873.bugfix.rst @@ -0,0 +1 @@ +Added a backward compatibility layer to `~aiohttp.RequestInfo` to allow creating these objects without a `real_url` -- by :user:`bdraco`. 
diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index e33d3d4b1dd..267b509b0e6 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -42,6 +42,7 @@ from .compression_utils import HAS_BROTLI from .formdata import FormData from .helpers import ( + _SENTINEL, BaseTimerContext, BasicAuth, HeadersMixin, @@ -103,13 +104,31 @@ class ContentDisposition: filename: Optional[str] -class RequestInfo(NamedTuple): +class _RequestInfo(NamedTuple): url: URL method: str headers: "CIMultiDictProxy[str]" real_url: URL +class RequestInfo(_RequestInfo): + + def __new__( + cls, + url: URL, + method: str, + headers: "CIMultiDictProxy[str]", + real_url: URL = _SENTINEL, # type: ignore[assignment] + ) -> "RequestInfo": + """Create a new RequestInfo instance. + + For backwards compatibility, the real_url parameter is optional. + """ + return tuple.__new__( + cls, (url, method, headers, url if real_url is _SENTINEL else real_url) + ) + + class Fingerprint: HASHFUNC_BY_DIGESTLEN = { 16: md5, @@ -391,7 +410,9 @@ def port(self) -> Optional[int]: def request_info(self) -> RequestInfo: headers: CIMultiDictProxy[str] = CIMultiDictProxy(self.headers) # These are created on every request, so we use a NamedTuple - # for performance reasons. + # for performance reasons. We don't use the RequestInfo.__new__ + # method because it has a different signature which is provided + # for backwards compatibility only. 
return tuple.__new__( RequestInfo, (self.url, self.method, headers, self.original_url) ) diff --git a/tests/test_client_request.py b/tests/test_client_request.py index 870c9666f34..20ccf6c03d1 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -1492,3 +1492,46 @@ async def test_connection_key_without_proxy() -> None: ) assert req.connection_key.proxy_headers_hash is None await req.close() + + +def test_request_info_back_compat() -> None: + """Test RequestInfo can be created without real_url.""" + url = URL("http://example.com") + other_url = URL("http://example.org") + assert ( + aiohttp.RequestInfo( + url=url, method="GET", headers=CIMultiDictProxy(CIMultiDict()) + ).real_url + is url + ) + assert ( + aiohttp.RequestInfo(url, "GET", CIMultiDictProxy(CIMultiDict())).real_url is url + ) + assert ( + aiohttp.RequestInfo( + url, "GET", CIMultiDictProxy(CIMultiDict()), real_url=url + ).real_url + is url + ) + assert ( + aiohttp.RequestInfo( + url, "GET", CIMultiDictProxy(CIMultiDict()), real_url=other_url + ).real_url + is other_url + ) + + +def test_request_info_tuple_new() -> None: + """Test RequestInfo must be created with real_url using tuple.__new__.""" + url = URL("http://example.com") + with pytest.raises(IndexError): + tuple.__new__( + aiohttp.RequestInfo, (url, "GET", CIMultiDictProxy(CIMultiDict())) + ).real_url + + assert ( + tuple.__new__( + aiohttp.RequestInfo, (url, "GET", CIMultiDictProxy(CIMultiDict()), url) + ).real_url + is url + ) From fe1196c20c86d201990be45f4f0f4b2b167913ad Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Thu, 14 Nov 2024 09:31:10 -0600 Subject: [PATCH 0939/1511] Release 3.11.1 (#9875) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: 🇺🇦 Sviatoslav Sydorenko (Святослав Сидоренко) <sviat@redhat.com> --- CHANGES.rst | 18 ++++++++++++++++++ CHANGES/9873.bugfix.rst | 1 - aiohttp/__init__.py | 2 +- 3 files changed, 19 insertions(+), 2 deletions(-) delete mode 100644 CHANGES/9873.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index dab7a1b40cf..ba8bb03854f 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,24 @@ .. towncrier release notes start +3.11.1 (2024-11-14) +=================== + +Bug fixes +--------- + +- Added a backward compatibility layer to :class:`aiohttp.RequestInfo` to allow creating these objects without a ``real_url`` -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9873`. + + + + +---- + + 3.11.0 (2024-11-13) =================== diff --git a/CHANGES/9873.bugfix.rst b/CHANGES/9873.bugfix.rst deleted file mode 100644 index 90f708fa879..00000000000 --- a/CHANGES/9873.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Added a backward compatibility layer to `~aiohttp.RequestInfo` to allow creating these objects without a `real_url` -- by :user:`bdraco`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index c0168c22522..63966d73a61 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.1.dev0" +__version__ = "3.11.1" from typing import TYPE_CHECKING, Tuple From e998143ceb4a0d7d70c1a309688688229989b696 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Thu, 14 Nov 2024 10:53:58 -0600 Subject: [PATCH 0940/1511] Increment version to 3.11.2.dev0 (#9879) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 63966d73a61..2d18de694ca 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.1" +__version__ = "3.11.2.dev0" from typing import TYPE_CHECKING, Tuple From 581390c191dae50d4786d7129428ab4d5d1656ef Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Thu, 14 Nov 2024 14:49:56 -0600 Subject: [PATCH 0941/1511] [PR #9883/a118114 backport][3.11] Fix improperly closed WebSocket connections generating a backtrace (#9884) --- CHANGES/9883.bugfix.rst | 1 + aiohttp/_websocket/reader_py.py | 3 ++ tests/test_client_ws_functional.py | 70 +++++++++++++++++++++++++++++- 3 files changed, 73 insertions(+), 1 deletion(-) create mode 100644 CHANGES/9883.bugfix.rst diff --git a/CHANGES/9883.bugfix.rst b/CHANGES/9883.bugfix.rst new file mode 100644 index 00000000000..3ffb8361448 --- /dev/null +++ b/CHANGES/9883.bugfix.rst @@ -0,0 +1 @@ +Fixed improperly closed WebSocket connections generating an unhandled exception -- by :user:`bdraco`. 
diff --git a/aiohttp/_websocket/reader_py.py b/aiohttp/_websocket/reader_py.py index 2295a255148..94d20010890 100644 --- a/aiohttp/_websocket/reader_py.py +++ b/aiohttp/_websocket/reader_py.py @@ -66,6 +66,9 @@ def __init__( self._get_buffer = self._buffer.popleft self._put_buffer = self._buffer.append + def is_eof(self) -> bool: + return self._eof + def exception(self) -> Optional[BaseException]: return self._exception diff --git a/tests/test_client_ws_functional.py b/tests/test_client_ws_functional.py index e4b57bd199d..9ab5dc52b1c 100644 --- a/tests/test_client_ws_functional.py +++ b/tests/test_client_ws_functional.py @@ -1,6 +1,6 @@ import asyncio import sys -from typing import Any, NoReturn, Optional +from typing import Any, List, NoReturn, Optional from unittest import mock import pytest @@ -1203,3 +1203,71 @@ async def test_ws_connect_with_wrong_ssl_type(aiohttp_client: AiohttpClient) -> with pytest.raises(TypeError, match="ssl should be SSLContext, .*"): await session.ws_connect("/", ssl=42) + + +async def test_websocket_connection_not_closed_properly( + aiohttp_client: AiohttpClient, +) -> None: + """Test that closing the connection via __del__ does not raise an exception.""" + + async def handler(request: web.Request) -> NoReturn: + ws = web.WebSocketResponse() + await ws.prepare(request) + await ws.close() + assert False + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + resp = await client.ws_connect("/") + assert resp._conn is not None + # Simulate the connection not being closed properly + # https://github.com/aio-libs/aiohttp/issues/9880 + resp._conn.release() + + # Clean up so the test does not leak + await resp.close() + + +async def test_websocket_connection_cancellation( + aiohttp_client: AiohttpClient, loop: asyncio.AbstractEventLoop +) -> None: + """Test canceling the WebSocket connection task does not raise an exception in __del__.""" + + async def handler(request: web.Request) -> 
NoReturn: + ws = web.WebSocketResponse() + await ws.prepare(request) + await ws.close() + assert False + + app = web.Application() + app.router.add_route("GET", "/", handler) + + sync_future: "asyncio.Future[List[aiohttp.ClientWebSocketResponse]]" = ( + loop.create_future() + ) + client = await aiohttp_client(app) + + async def websocket_task() -> None: + resp = await client.ws_connect("/") + assert resp is not None # ensure we hold a reference to the response + # The test harness will cleanup the unclosed websocket + # for us, so we need to copy the websockets to ensure + # we can control the cleanup + sync_future.set_result(client._websockets.copy()) + client._websockets.clear() + await asyncio.sleep(0) + + task = loop.create_task(websocket_task()) + websockets = await sync_future + task.cancel() + with pytest.raises(asyncio.CancelledError): + await task + + websocket = websockets.pop() + # Call the `__del__` methods manually since when it gets gc'd it not reproducible + del websocket._response + + # Cleanup properly + websocket._response = mock.Mock() + await websocket.close() From 32c896d2903413b0d667d03a508a588dd304272c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 14 Nov 2024 21:12:57 +0000 Subject: [PATCH 0942/1511] [PR #9873/c9698c8e backport][3.12] Make creating `RequestInfo` backwards compatible with 3.10 (#9886) Co-authored-by: J. 
Nick Koston <nick@koston.org> fixes #9866 --- CHANGES/9873.bugfix.rst | 1 + aiohttp/client_reqrep.py | 25 +++++++++++++++++++-- tests/test_client_request.py | 43 ++++++++++++++++++++++++++++++++++++ 3 files changed, 67 insertions(+), 2 deletions(-) create mode 100644 CHANGES/9873.bugfix.rst diff --git a/CHANGES/9873.bugfix.rst b/CHANGES/9873.bugfix.rst new file mode 100644 index 00000000000..90f708fa879 --- /dev/null +++ b/CHANGES/9873.bugfix.rst @@ -0,0 +1 @@ +Added a backward compatibility layer to `~aiohttp.RequestInfo` to allow creating these objects without a `real_url` -- by :user:`bdraco`. diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index e33d3d4b1dd..267b509b0e6 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -42,6 +42,7 @@ from .compression_utils import HAS_BROTLI from .formdata import FormData from .helpers import ( + _SENTINEL, BaseTimerContext, BasicAuth, HeadersMixin, @@ -103,13 +104,31 @@ class ContentDisposition: filename: Optional[str] -class RequestInfo(NamedTuple): +class _RequestInfo(NamedTuple): url: URL method: str headers: "CIMultiDictProxy[str]" real_url: URL +class RequestInfo(_RequestInfo): + + def __new__( + cls, + url: URL, + method: str, + headers: "CIMultiDictProxy[str]", + real_url: URL = _SENTINEL, # type: ignore[assignment] + ) -> "RequestInfo": + """Create a new RequestInfo instance. + + For backwards compatibility, the real_url parameter is optional. + """ + return tuple.__new__( + cls, (url, method, headers, url if real_url is _SENTINEL else real_url) + ) + + class Fingerprint: HASHFUNC_BY_DIGESTLEN = { 16: md5, @@ -391,7 +410,9 @@ def port(self) -> Optional[int]: def request_info(self) -> RequestInfo: headers: CIMultiDictProxy[str] = CIMultiDictProxy(self.headers) # These are created on every request, so we use a NamedTuple - # for performance reasons. + # for performance reasons. 
We don't use the RequestInfo.__new__ + # method because it has a different signature which is provided + # for backwards compatibility only. return tuple.__new__( RequestInfo, (self.url, self.method, headers, self.original_url) ) diff --git a/tests/test_client_request.py b/tests/test_client_request.py index 870c9666f34..20ccf6c03d1 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -1492,3 +1492,46 @@ async def test_connection_key_without_proxy() -> None: ) assert req.connection_key.proxy_headers_hash is None await req.close() + + +def test_request_info_back_compat() -> None: + """Test RequestInfo can be created without real_url.""" + url = URL("http://example.com") + other_url = URL("http://example.org") + assert ( + aiohttp.RequestInfo( + url=url, method="GET", headers=CIMultiDictProxy(CIMultiDict()) + ).real_url + is url + ) + assert ( + aiohttp.RequestInfo(url, "GET", CIMultiDictProxy(CIMultiDict())).real_url is url + ) + assert ( + aiohttp.RequestInfo( + url, "GET", CIMultiDictProxy(CIMultiDict()), real_url=url + ).real_url + is url + ) + assert ( + aiohttp.RequestInfo( + url, "GET", CIMultiDictProxy(CIMultiDict()), real_url=other_url + ).real_url + is other_url + ) + + +def test_request_info_tuple_new() -> None: + """Test RequestInfo must be created with real_url using tuple.__new__.""" + url = URL("http://example.com") + with pytest.raises(IndexError): + tuple.__new__( + aiohttp.RequestInfo, (url, "GET", CIMultiDictProxy(CIMultiDict())) + ).real_url + + assert ( + tuple.__new__( + aiohttp.RequestInfo, (url, "GET", CIMultiDictProxy(CIMultiDict()), url) + ).real_url + is url + ) From 9af80791e8f94cf70a552d2940c1cd9dfeb3faad Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Thu, 14 Nov 2024 15:18:25 -0600 Subject: [PATCH 0943/1511] [PR #9883/a118114 backport][3.12] Fix improperly closed WebSocket connections generating a backtrace (#9887) --- CHANGES/9883.bugfix.rst | 1 + aiohttp/_websocket/reader_py.py | 3 ++ tests/test_client_ws_functional.py | 70 +++++++++++++++++++++++++++++- 3 files changed, 73 insertions(+), 1 deletion(-) create mode 100644 CHANGES/9883.bugfix.rst diff --git a/CHANGES/9883.bugfix.rst b/CHANGES/9883.bugfix.rst new file mode 100644 index 00000000000..3ffb8361448 --- /dev/null +++ b/CHANGES/9883.bugfix.rst @@ -0,0 +1 @@ +Fixed improperly closed WebSocket connections generating an unhandled exception -- by :user:`bdraco`. diff --git a/aiohttp/_websocket/reader_py.py b/aiohttp/_websocket/reader_py.py index 2295a255148..94d20010890 100644 --- a/aiohttp/_websocket/reader_py.py +++ b/aiohttp/_websocket/reader_py.py @@ -66,6 +66,9 @@ def __init__( self._get_buffer = self._buffer.popleft self._put_buffer = self._buffer.append + def is_eof(self) -> bool: + return self._eof + def exception(self) -> Optional[BaseException]: return self._exception diff --git a/tests/test_client_ws_functional.py b/tests/test_client_ws_functional.py index e4b57bd199d..9ab5dc52b1c 100644 --- a/tests/test_client_ws_functional.py +++ b/tests/test_client_ws_functional.py @@ -1,6 +1,6 @@ import asyncio import sys -from typing import Any, NoReturn, Optional +from typing import Any, List, NoReturn, Optional from unittest import mock import pytest @@ -1203,3 +1203,71 @@ async def test_ws_connect_with_wrong_ssl_type(aiohttp_client: AiohttpClient) -> with pytest.raises(TypeError, match="ssl should be SSLContext, .*"): await session.ws_connect("/", ssl=42) + + +async def test_websocket_connection_not_closed_properly( + aiohttp_client: AiohttpClient, +) -> None: + """Test that closing the connection via __del__ does not raise an exception.""" + + async def handler(request: web.Request) -> NoReturn: + ws = 
web.WebSocketResponse() + await ws.prepare(request) + await ws.close() + assert False + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + resp = await client.ws_connect("/") + assert resp._conn is not None + # Simulate the connection not being closed properly + # https://github.com/aio-libs/aiohttp/issues/9880 + resp._conn.release() + + # Clean up so the test does not leak + await resp.close() + + +async def test_websocket_connection_cancellation( + aiohttp_client: AiohttpClient, loop: asyncio.AbstractEventLoop +) -> None: + """Test canceling the WebSocket connection task does not raise an exception in __del__.""" + + async def handler(request: web.Request) -> NoReturn: + ws = web.WebSocketResponse() + await ws.prepare(request) + await ws.close() + assert False + + app = web.Application() + app.router.add_route("GET", "/", handler) + + sync_future: "asyncio.Future[List[aiohttp.ClientWebSocketResponse]]" = ( + loop.create_future() + ) + client = await aiohttp_client(app) + + async def websocket_task() -> None: + resp = await client.ws_connect("/") + assert resp is not None # ensure we hold a reference to the response + # The test harness will cleanup the unclosed websocket + # for us, so we need to copy the websockets to ensure + # we can control the cleanup + sync_future.set_result(client._websockets.copy()) + client._websockets.clear() + await asyncio.sleep(0) + + task = loop.create_task(websocket_task()) + websockets = await sync_future + task.cancel() + with pytest.raises(asyncio.CancelledError): + await task + + websocket = websockets.pop() + # Call the `__del__` methods manually since when it gets gc'd it not reproducible + del websocket._response + + # Cleanup properly + websocket._response = mock.Mock() + await websocket.close() From 5cc81372e224b8ade838503a108e1726d32f43ce Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 14 Nov 2024 
22:08:42 +0000 Subject: [PATCH 0944/1511] [PR #9885/76b0d734 backport][3.12] Add benchmarks for web middleware (#9890) Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_benchmarks_web_middleware.py | 44 +++++++++++++++++++++++++ 1 file changed, 44 insertions(+) create mode 100644 tests/test_benchmarks_web_middleware.py diff --git a/tests/test_benchmarks_web_middleware.py b/tests/test_benchmarks_web_middleware.py new file mode 100644 index 00000000000..14aa269e360 --- /dev/null +++ b/tests/test_benchmarks_web_middleware.py @@ -0,0 +1,44 @@ +"""codspeed benchmarks for web middlewares.""" + +import asyncio + +from pytest_codspeed import BenchmarkFixture + +from aiohttp import web +from aiohttp.pytest_plugin import AiohttpClient +from aiohttp.typedefs import Handler + + +def test_ten_web_middlewares( + benchmark: BenchmarkFixture, + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, +) -> None: + """Benchmark 100 requests with 10 middlewares.""" + message_count = 100 + + async def handler(request: web.Request) -> web.Response: + return web.Response() + + app = web.Application() + app.router.add_route("GET", "/", handler) + + class MiddlewareClass: + @web.middleware + async def call( + self, request: web.Request, handler: Handler + ) -> web.StreamResponse: + return await handler(request) + + for _ in range(10): + app.middlewares.append(MiddlewareClass().call) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(message_count): + await client.get("/") + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) From 00978de17b2a6890b57097c0b22b71a3f9edb6ba Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 14 Nov 2024 22:10:29 +0000 Subject: [PATCH 0945/1511] [PR #9885/76b0d734 backport][3.11] Add benchmarks for web middleware (#9889) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- tests/test_benchmarks_web_middleware.py | 44 +++++++++++++++++++++++++ 1 file changed, 44 insertions(+) create mode 100644 tests/test_benchmarks_web_middleware.py diff --git a/tests/test_benchmarks_web_middleware.py b/tests/test_benchmarks_web_middleware.py new file mode 100644 index 00000000000..14aa269e360 --- /dev/null +++ b/tests/test_benchmarks_web_middleware.py @@ -0,0 +1,44 @@ +"""codspeed benchmarks for web middlewares.""" + +import asyncio + +from pytest_codspeed import BenchmarkFixture + +from aiohttp import web +from aiohttp.pytest_plugin import AiohttpClient +from aiohttp.typedefs import Handler + + +def test_ten_web_middlewares( + benchmark: BenchmarkFixture, + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, +) -> None: + """Benchmark 100 requests with 10 middlewares.""" + message_count = 100 + + async def handler(request: web.Request) -> web.Response: + return web.Response() + + app = web.Application() + app.router.add_route("GET", "/", handler) + + class MiddlewareClass: + @web.middleware + async def call( + self, request: web.Request, handler: Handler + ) -> web.StreamResponse: + return await handler(request) + + for _ in range(10): + app.middlewares.append(MiddlewareClass().call) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(message_count): + await client.get("/") + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) From db37e91f3ef19ac1702ae0b0547b3825a022b525 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Thu, 14 Nov 2024 16:24:06 -0600 Subject: [PATCH 0946/1511] Release 3.11.2 (#9892) --- CHANGES.rst | 18 ++++++++++++++++++ CHANGES/9883.bugfix.rst | 1 - aiohttp/__init__.py | 2 +- 3 files changed, 19 insertions(+), 2 deletions(-) delete mode 100644 CHANGES/9883.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index ba8bb03854f..2391c9576c2 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,24 @@ .. towncrier release notes start +3.11.2 (2024-11-14) +=================== + +Bug fixes +--------- + +- Fixed improperly closed WebSocket connections generating an unhandled exception -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9883`. + + + + +---- + + 3.11.1 (2024-11-14) =================== diff --git a/CHANGES/9883.bugfix.rst b/CHANGES/9883.bugfix.rst deleted file mode 100644 index 3ffb8361448..00000000000 --- a/CHANGES/9883.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed improperly closed WebSocket connections generating an unhandled exception -- by :user:`bdraco`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 2d18de694ca..e2c155b02e8 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.2.dev0" +__version__ = "3.11.2" from typing import TYPE_CHECKING, Tuple From 7f6ea64ebfe7ea63760f984c6032533171883acc Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Thu, 14 Nov 2024 16:48:31 -0600 Subject: [PATCH 0947/1511] Increment version to 3.11.3.dev0 (#9894) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index e2c155b02e8..d88f7facb10 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.2" +__version__ = "3.11.3.dev0" from typing import TYPE_CHECKING, Tuple From 866499fc8c3d0deef53af52aea1aa807dd85ded5 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 15 Nov 2024 06:51:08 +0000 Subject: [PATCH 0948/1511] [PR #9900/7180ea8e backport][3.12] Add benchmark for a route that supports multiple methods (#9903) --- tests/test_benchmarks_client.py | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/tests/test_benchmarks_client.py b/tests/test_benchmarks_client.py index 75fc7f7ed4e..61439183334 100644 --- a/tests/test_benchmarks_client.py +++ b/tests/test_benchmarks_client.py @@ -33,6 +33,34 @@ def _run() -> None: loop.run_until_complete(run_client_benchmark()) +def test_one_hundred_simple_get_requests_multiple_methods_route( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 100 simple GET requests on a route with multiple methods.""" + message_count = 100 + + async def handler(request: web.Request) -> web.Response: + return web.Response() + + app = web.Application() + # GET intentionally registered last to ensure time complexity + # of the route lookup is benchmarked + for method in ("DELETE", "HEAD", "OPTIONS", "PATCH", "POST", "PUT", "GET"): + app.router.add_route(method, "/", handler) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(message_count): + await client.get("/") + await client.close() + + @benchmark + def _run() -> None: + 
loop.run_until_complete(run_client_benchmark()) + + def test_one_hundred_get_requests_with_1024_chunked_payload( loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, From e2a8a1185bfd373e9d80b6bc73e2bc99940614e5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 15 Nov 2024 10:39:04 +0000 Subject: [PATCH 0949/1511] Bump codecov/codecov-action from 4 to 5 (#9905) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [codecov/codecov-action](https://github.com/codecov/codecov-action) from 4 to 5. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/codecov/codecov-action/releases">codecov/codecov-action's releases</a>.</em></p> <blockquote> <h2>v5.0.0</h2> <h2>v5 Release</h2> <p><code>v5</code> of the Codecov GitHub Action will use the <a href="https://github.com/codecov/wrapper">Codecov Wrapper</a> to encapsulate the <a href="https://github.com/codecov/codecov-cli">CLI</a>. This will help ensure that the Action gets updates quicker.</p> <h3>Migration Guide</h3> <p>The <code>v5</code> release also coincides with the opt-out feature for tokens for public repositories. In the <code>Global Upload Token</code> section of the settings page of an organization in codecov.io, you can set the ability for Codecov to receive a coverage reports from any source. This will allow contributors or other members of a repository to upload without needing access to the Codecov token. 
For more details see <a href="https://docs.codecov.com/docs/codecov-tokens#uploading-without-a-token">how to upload without a token</a>.</p> <blockquote> <p>[!WARNING]<br /> <strong>The following arguments have been changed</strong></p> <ul> <li><code>file</code> (this has been deprecated in favor of <code>files</code>)</li> <li><code>plugin</code> (this has been deprecated in favor of <code>plugins</code>)</li> </ul> </blockquote> <p>The following arguments have been added:</p> <ul> <li><code>binary</code></li> <li><code>gcov_args</code></li> <li><code>gcov_executable</code></li> <li><code>gcov_ignore</code></li> <li><code>gcov_include</code></li> <li><code>report_type</code></li> <li><code>skip_validation</code></li> <li><code>swift_project</code></li> </ul> <p>You can see their usage in the <code>action.yml</code> <a href="https://github.com/codecov/codecov-action/blob/main/action.yml">file</a>.</p> <h2>What's Changed</h2> <ul> <li>chore(deps): bump to eslint9+ and remove eslint-config-google by <a href="https://github.com/thomasrockhu-codecov"><code>@​thomasrockhu-codecov</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1591">codecov/codecov-action#1591</a></li> <li>build(deps-dev): bump <code>@​octokit/webhooks-types</code> from 7.5.1 to 7.6.1 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1595">codecov/codecov-action#1595</a></li> <li>build(deps-dev): bump typescript from 5.6.2 to 5.6.3 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1604">codecov/codecov-action#1604</a></li> <li>build(deps-dev): bump <code>@​typescript-eslint/parser</code> from 8.8.0 to 8.8.1 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1601">codecov/codecov-action#1601</a></li> 
<li>build(deps): bump <code>@​actions/core</code> from 1.11.0 to 1.11.1 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1597">codecov/codecov-action#1597</a></li> <li>build(deps): bump github/codeql-action from 3.26.9 to 3.26.11 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1596">codecov/codecov-action#1596</a></li> <li>build(deps-dev): bump <code>@​typescript-eslint/eslint-plugin</code> from 8.8.0 to 8.8.1 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1600">codecov/codecov-action#1600</a></li> <li>build(deps-dev): bump eslint from 9.11.1 to 9.12.0 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1598">codecov/codecov-action#1598</a></li> <li>build(deps): bump github/codeql-action from 3.26.11 to 3.26.12 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1609">codecov/codecov-action#1609</a></li> <li>build(deps): bump actions/checkout from 4.2.0 to 4.2.1 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1608">codecov/codecov-action#1608</a></li> <li>build(deps): bump actions/upload-artifact from 4.4.0 to 4.4.3 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1607">codecov/codecov-action#1607</a></li> <li>build(deps-dev): bump <code>@​typescript-eslint/parser</code> from 8.8.1 to 8.9.0 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a 
href="https://redirect.github.com/codecov/codecov-action/pull/1612">codecov/codecov-action#1612</a></li> <li>build(deps-dev): bump <code>@​typescript-eslint/eslint-plugin</code> from 8.8.1 to 8.9.0 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1611">codecov/codecov-action#1611</a></li> <li>build(deps-dev): bump <code>@​typescript-eslint/eslint-plugin</code> from 8.9.0 to 8.10.0 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1615">codecov/codecov-action#1615</a></li> <li>build(deps-dev): bump eslint from 9.12.0 to 9.13.0 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1618">codecov/codecov-action#1618</a></li> <li>build(deps): bump github/codeql-action from 3.26.12 to 3.26.13 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1617">codecov/codecov-action#1617</a></li> <li>build(deps-dev): bump <code>@​typescript-eslint/parser</code> from 8.9.0 to 8.10.0 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1614">codecov/codecov-action#1614</a></li> <li>build(deps-dev): bump <code>@​typescript-eslint/eslint-plugin</code> from 8.10.0 to 8.11.0 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1620">codecov/codecov-action#1620</a></li> <li>build(deps-dev): bump <code>@​typescript-eslint/parser</code> from 8.10.0 to 8.11.0 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1619">codecov/codecov-action#1619</a></li> <li>build(deps-dev): bump 
<code>@​types/jest</code> from 29.5.13 to 29.5.14 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1622">codecov/codecov-action#1622</a></li> <li>build(deps): bump actions/checkout from 4.2.1 to 4.2.2 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1625">codecov/codecov-action#1625</a></li> <li>build(deps): bump github/codeql-action from 3.26.13 to 3.27.0 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1624">codecov/codecov-action#1624</a></li> <li>build(deps-dev): bump <code>@​typescript-eslint/eslint-plugin</code> from 8.11.0 to 8.12.1 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1626">codecov/codecov-action#1626</a></li> <li>build(deps-dev): bump <code>@​typescript-eslint/eslint-plugin</code> from 8.12.1 to 8.12.2 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/codecov/codecov-action/pull/1629">codecov/codecov-action#1629</a></li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/codecov/codecov-action/blob/main/CHANGELOG.md">codecov/codecov-action's changelog</a>.</em></p> <blockquote> <h2>4.0.0-beta.2</h2> <h3>Fixes</h3> <ul> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/1085">#1085</a> not adding -n if empty to do-upload command</li> </ul> <h2>4.0.0-beta.1</h2> <p><code>v4</code> represents a move from the <a href="https://github.com/codecov/uploader">universal uploader</a> to the <a href="https://github.com/codecov/codecov-cli">Codecov CLI</a>. 
Although this will unlock new features for our users, the CLI is not yet at feature parity with the universal uploader.</p> <h3>Breaking Changes</h3> <ul> <li>No current support for <code>aarch64</code> and <code>alpine</code> architectures.</li> <li>Tokenless uploading is unsuported</li> <li>Various arguments to the Action have been removed</li> </ul> <h2>3.1.4</h2> <h3>Fixes</h3> <ul> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/967">#967</a> Fix typo in README.md</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/971">#971</a> fix: add back in working dir</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/969">#969</a> fix: CLI option names for uploader</li> </ul> <h3>Dependencies</h3> <ul> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/970">#970</a> build(deps-dev): bump <code>@​types/node</code> from 18.15.12 to 18.16.3</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/979">#979</a> build(deps-dev): bump <code>@​types/node</code> from 20.1.0 to 20.1.2</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/981">#981</a> build(deps-dev): bump <code>@​types/node</code> from 20.1.2 to 20.1.4</li> </ul> <h2>3.1.3</h2> <h3>Fixes</h3> <ul> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/960">#960</a> fix: allow for aarch64 build</li> </ul> <h3>Dependencies</h3> <ul> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/957">#957</a> build(deps-dev): bump jest-junit from 15.0.0 to 16.0.0</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/958">#958</a> build(deps): bump openpgp from 5.7.0 to 5.8.0</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/959">#959</a> build(deps-dev): bump <code>@​types/node</code> from 18.15.10 to 18.15.12</li> </ul> <h2>3.1.2</h2> <h3>Fixes</h3> <ul> <li><a 
href="https://redirect.github.com/codecov/codecov-action/issues/718">#718</a> Update README.md</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/851">#851</a> Remove unsupported path_to_write_report argument</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/898">#898</a> codeql-analysis.yml</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/901">#901</a> Update README to contain correct information - inputs and negate feature</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/955">#955</a> fix: add in all the extra arguments for uploader</li> </ul> <h3>Dependencies</h3> <ul> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/819">#819</a> build(deps): bump openpgp from 5.4.0 to 5.5.0</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/835">#835</a> build(deps): bump node-fetch from 3.2.4 to 3.2.10</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/840">#840</a> build(deps): bump ossf/scorecard-action from 1.1.1 to 2.0.4</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/841">#841</a> build(deps): bump <code>@​actions/core</code> from 1.9.1 to 1.10.0</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/843">#843</a> build(deps): bump <code>@​actions/github</code> from 5.0.3 to 5.1.1</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/869">#869</a> build(deps): bump node-fetch from 3.2.10 to 3.3.0</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/872">#872</a> build(deps-dev): bump jest-junit from 13.2.0 to 15.0.0</li> <li><a href="https://redirect.github.com/codecov/codecov-action/issues/879">#879</a> build(deps): bump decode-uri-component from 0.2.0 to 0.2.2</li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/codecov/codecov-action/commit/968872560f81e7bdde9272853e65f2507c0eca7c"><code>9688725</code></a> Update README.md</li> <li><a href="https://github.com/codecov/codecov-action/commit/2112eaec1bedbdabc7e93d5312449d0d62b07c60"><code>2112eae</code></a> chore(deps): bump wrapper to 0.0.23 (<a href="https://redirect.github.com/codecov/codecov-action/issues/1644">#1644</a>)</li> <li><a href="https://github.com/codecov/codecov-action/commit/193421c5b3d1aca4209c9754f224ca0d85729414"><code>193421c</code></a> fixL use the correct source (<a href="https://redirect.github.com/codecov/codecov-action/issues/1642">#1642</a>)</li> <li><a href="https://github.com/codecov/codecov-action/commit/6018df70b05b191502ce08196e76e30ea3578615"><code>6018df7</code></a> fix: update container builds (<a href="https://redirect.github.com/codecov/codecov-action/issues/1640">#1640</a>)</li> <li><a href="https://github.com/codecov/codecov-action/commit/eff1a643d6887ee5935d4ca343e9076dc377d416"><code>eff1a64</code></a> fix: add missing vars (<a href="https://redirect.github.com/codecov/codecov-action/issues/1638">#1638</a>)</li> <li><a href="https://github.com/codecov/codecov-action/commit/4582d54fd3d27d9130327cdb51361c32016fa400"><code>4582d54</code></a> Update README.md (<a href="https://redirect.github.com/codecov/codecov-action/issues/1639">#1639</a>)</li> <li><a href="https://github.com/codecov/codecov-action/commit/bb7467c2bce05781760a0964d48e35e96ee59505"><code>bb7467c</code></a> feat: use wrapper (<a href="https://redirect.github.com/codecov/codecov-action/issues/1621">#1621</a>)</li> <li><a href="https://github.com/codecov/codecov-action/commit/1d6059880cab9176d33e31e0f1ab076b20495f5e"><code>1d60598</code></a> build(deps-dev): bump <code>@​typescript-eslint/eslint-plugin</code> from 8.12.2 to 8.13.0 ...</li> <li><a 
href="https://github.com/codecov/codecov-action/commit/e587ce276eb45f1fcd960de3c01c83119213efca"><code>e587ce2</code></a> build(deps-dev): bump <code>@​typescript-eslint/parser</code> from 8.12.2 to 8.13.0 (<a href="https://redirect.github.com/codecov/codecov-action/issues/1635">#1635</a>)</li> <li><a href="https://github.com/codecov/codecov-action/commit/e43f28e103e52bb26d252b5a97fcdfa06175321e"><code>e43f28e</code></a> build(deps-dev): bump <code>@​typescript-eslint/parser</code> from 8.11.0 to 8.12.2 (<a href="https://redirect.github.com/codecov/codecov-action/issues/1628">#1628</a>)</li> <li>Additional commits viewable in <a href="https://github.com/codecov/codecov-action/compare/v4...v5">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=codecov/codecov-action&package-manager=github_actions&previous-version=4&new-version=5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index ef59b56234e..24107f89e10 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -221,7 +221,7 @@ jobs: run: | python -m coverage xml - name: Upload coverage - uses: codecov/codecov-action@v4 + uses: codecov/codecov-action@v5 with: file: ./coverage.xml flags: >- From 85f3531e5909dfe4a10ba8aa66f614e42f84e0d3 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 15 Nov 2024 12:11:14 +0000 Subject: [PATCH 0950/1511] [PR #9900/7180ea8e backport][3.11] Add benchmark for a route that supports multiple methods (#9902) --- tests/test_benchmarks_client.py | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/tests/test_benchmarks_client.py b/tests/test_benchmarks_client.py index 75fc7f7ed4e..61439183334 100644 --- a/tests/test_benchmarks_client.py +++ b/tests/test_benchmarks_client.py @@ -33,6 +33,34 @@ def _run() -> None: loop.run_until_complete(run_client_benchmark()) +def 
test_one_hundred_simple_get_requests_multiple_methods_route( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 100 simple GET requests on a route with multiple methods.""" + message_count = 100 + + async def handler(request: web.Request) -> web.Response: + return web.Response() + + app = web.Application() + # GET intentionally registered last to ensure time complexity + # of the route lookup is benchmarked + for method in ("DELETE", "HEAD", "OPTIONS", "PATCH", "POST", "PUT", "GET"): + app.router.add_route(method, "/", handler) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(message_count): + await client.get("/") + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) + + def test_one_hundred_get_requests_with_1024_chunked_payload( loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, From 36d2bef9953363d36962e5a86f1d443ef0394802 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Fri, 15 Nov 2024 09:07:11 -0600 Subject: [PATCH 0951/1511] [PR #9899/2249f2d backport][3.11] Refactor Resource.resolve to avoid linear search of methods (#9908) --- CHANGES/9899.misc.rst | 1 + aiohttp/web_urldispatcher.py | 45 ++++++++++++++++-------------------- 2 files changed, 21 insertions(+), 25 deletions(-) create mode 100644 CHANGES/9899.misc.rst diff --git a/CHANGES/9899.misc.rst b/CHANGES/9899.misc.rst new file mode 100644 index 00000000000..53243495d36 --- /dev/null +++ b/CHANGES/9899.misc.rst @@ -0,0 +1 @@ +Improved performance of resolving resources when multiple methods are registered for the same route -- by :user:`bdraco`. 
diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 5153e5b73b0..03d7d7b15b0 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -350,7 +350,9 @@ async def _default_expect_handler(request: Request) -> None: class Resource(AbstractResource): def __init__(self, *, name: Optional[str] = None) -> None: super().__init__(name=name) - self._routes: List[ResourceRoute] = [] + self._routes: Dict[str, ResourceRoute] = {} + self._any_route: Optional[ResourceRoute] = None + self._allowed_methods: Set[str] = set() def add_route( self, @@ -359,14 +361,12 @@ def add_route( *, expect_handler: Optional[_ExpectHandler] = None, ) -> "ResourceRoute": - - for route_obj in self._routes: - if route_obj.method == method or route_obj.method == hdrs.METH_ANY: - raise RuntimeError( - "Added route will never be executed, " - "method {route.method} is already " - "registered".format(route=route_obj) - ) + if route := self._routes.get(method, self._any_route): + raise RuntimeError( + "Added route will never be executed, " + f"method {route.method} is already " + "registered" + ) route_obj = ResourceRoute(method, handler, self, expect_handler=expect_handler) self.register_route(route_obj) @@ -376,23 +376,18 @@ def register_route(self, route: "ResourceRoute") -> None: assert isinstance( route, ResourceRoute ), f"Instance of Route class is required, got {route!r}" - self._routes.append(route) + if route.method == hdrs.METH_ANY: + self._any_route = route + else: + self._allowed_methods.add(route.method) + self._routes[route.method] = route async def resolve(self, request: Request) -> _Resolve: - allowed_methods: Set[str] = set() - - match_dict = self._match(request.rel_url.path_safe) - if match_dict is None: - return None, allowed_methods - - for route_obj in self._routes: - route_method = route_obj.method - allowed_methods.add(route_method) - - if route_method == request.method or route_method == hdrs.METH_ANY: - return 
(UrlMappingMatchInfo(match_dict, route_obj), allowed_methods) - else: - return None, allowed_methods + if (match_dict := self._match(request.rel_url.path_safe)) is None: + return None, set() + if route := self._routes.get(request.method, self._any_route): + return UrlMappingMatchInfo(match_dict, route), self._allowed_methods + return None, self._allowed_methods @abc.abstractmethod def _match(self, path: str) -> Optional[Dict[str, str]]: @@ -402,7 +397,7 @@ def __len__(self) -> int: return len(self._routes) def __iter__(self) -> Iterator["ResourceRoute"]: - return iter(self._routes) + return iter(self._routes.values()) # TODO: implement all abstract methods From 4838f4077feb4b7385aefd97e6c5a9ca2df2ce7c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Fri, 15 Nov 2024 09:10:16 -0600 Subject: [PATCH 0952/1511] [PR #9899/2249f2d backport][3.12] Refactor Resource.resolve to avoid linear search of methods (#9909) --- CHANGES/9899.misc.rst | 1 + aiohttp/web_urldispatcher.py | 45 ++++++++++++++++-------------------- 2 files changed, 21 insertions(+), 25 deletions(-) create mode 100644 CHANGES/9899.misc.rst diff --git a/CHANGES/9899.misc.rst b/CHANGES/9899.misc.rst new file mode 100644 index 00000000000..53243495d36 --- /dev/null +++ b/CHANGES/9899.misc.rst @@ -0,0 +1 @@ +Improved performance of resolving resources when multiple methods are registered for the same route -- by :user:`bdraco`. 
diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 5153e5b73b0..03d7d7b15b0 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -350,7 +350,9 @@ async def _default_expect_handler(request: Request) -> None: class Resource(AbstractResource): def __init__(self, *, name: Optional[str] = None) -> None: super().__init__(name=name) - self._routes: List[ResourceRoute] = [] + self._routes: Dict[str, ResourceRoute] = {} + self._any_route: Optional[ResourceRoute] = None + self._allowed_methods: Set[str] = set() def add_route( self, @@ -359,14 +361,12 @@ def add_route( *, expect_handler: Optional[_ExpectHandler] = None, ) -> "ResourceRoute": - - for route_obj in self._routes: - if route_obj.method == method or route_obj.method == hdrs.METH_ANY: - raise RuntimeError( - "Added route will never be executed, " - "method {route.method} is already " - "registered".format(route=route_obj) - ) + if route := self._routes.get(method, self._any_route): + raise RuntimeError( + "Added route will never be executed, " + f"method {route.method} is already " + "registered" + ) route_obj = ResourceRoute(method, handler, self, expect_handler=expect_handler) self.register_route(route_obj) @@ -376,23 +376,18 @@ def register_route(self, route: "ResourceRoute") -> None: assert isinstance( route, ResourceRoute ), f"Instance of Route class is required, got {route!r}" - self._routes.append(route) + if route.method == hdrs.METH_ANY: + self._any_route = route + else: + self._allowed_methods.add(route.method) + self._routes[route.method] = route async def resolve(self, request: Request) -> _Resolve: - allowed_methods: Set[str] = set() - - match_dict = self._match(request.rel_url.path_safe) - if match_dict is None: - return None, allowed_methods - - for route_obj in self._routes: - route_method = route_obj.method - allowed_methods.add(route_method) - - if route_method == request.method or route_method == hdrs.METH_ANY: - return 
(UrlMappingMatchInfo(match_dict, route_obj), allowed_methods) - else: - return None, allowed_methods + if (match_dict := self._match(request.rel_url.path_safe)) is None: + return None, set() + if route := self._routes.get(request.method, self._any_route): + return UrlMappingMatchInfo(match_dict, route), self._allowed_methods + return None, self._allowed_methods @abc.abstractmethod def _match(self, path: str) -> Optional[Dict[str, str]]: @@ -402,7 +397,7 @@ def __len__(self) -> int: return len(self._routes) def __iter__(self) -> Iterator["ResourceRoute"]: - return iter(self._routes) + return iter(self._routes.values()) # TODO: implement all abstract methods From a1720c5efcdff7d304d8fb6a83dd47de23d059ef Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 15 Nov 2024 16:13:27 +0000 Subject: [PATCH 0953/1511] [PR #9911/4441d3c8 backport][3.11] Only construct the `allowed_methods` `set` once for a `StaticResource` (#9912) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/web_urldispatcher.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 03d7d7b15b0..e4d43514ebe 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -580,6 +580,7 @@ def __init__( "HEAD", self._handle, self, expect_handler=expect_handler ), } + self._allowed_methods = set(self._routes) def url_for( # type: ignore[override] self, @@ -646,10 +647,10 @@ def set_options_route(self, handler: Handler) -> None: async def resolve(self, request: Request) -> _Resolve: path = request.rel_url.path_safe method = request.method - allowed_methods = set(self._routes) if not path.startswith(self._prefix2) and path != self._prefix: return None, set() + allowed_methods = self._allowed_methods if method not in allowed_methods: return None, allowed_methods From dcc79e8d728ceee61ffb25624c81fdc3babff0d3 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 15 Nov 2024 16:18:05 +0000 Subject: [PATCH 0954/1511] [PR #9911/4441d3c8 backport][3.12] Only construct the `allowed_methods` `set` once for a `StaticResource` (#9913) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/web_urldispatcher.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 03d7d7b15b0..e4d43514ebe 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -580,6 +580,7 @@ def __init__( "HEAD", self._handle, self, expect_handler=expect_handler ), } + self._allowed_methods = set(self._routes) def url_for( # type: ignore[override] self, @@ -646,10 +647,10 @@ def set_options_route(self, handler: Handler) -> None: async def resolve(self, request: Request) -> _Resolve: path = request.rel_url.path_safe method = request.method - allowed_methods = set(self._routes) if not path.startswith(self._prefix2) and path != self._prefix: return None, set() + allowed_methods = self._allowed_methods if method not in allowed_methods: return None, allowed_methods From 30a6fbcf31353c9ac633d021bee1e30552f65518 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 15 Nov 2024 11:05:46 -0600 Subject: [PATCH 0955/1511] [PR #9910/4a9bbf92 backport][3.11] Add benchmarks for the URL dispatcher (#9916) --- tests/test_benchmarks_web_urldispatcher.py | 187 +++++++++++++++++++++ 1 file changed, 187 insertions(+) create mode 100644 tests/test_benchmarks_web_urldispatcher.py diff --git a/tests/test_benchmarks_web_urldispatcher.py b/tests/test_benchmarks_web_urldispatcher.py new file mode 100644 index 00000000000..2ffb53ee0f7 --- /dev/null +++ b/tests/test_benchmarks_web_urldispatcher.py @@ -0,0 +1,187 @@ +"""codspeed benchmarks for the URL dispatcher.""" + +import asyncio +import pathlib +from typing import NoReturn +from unittest import mock + +from multidict import CIMultiDict, CIMultiDictProxy +from pytest_codspeed import BenchmarkFixture +from yarl import URL + +import aiohttp +from aiohttp import web +from aiohttp.http import HttpVersion, RawRequestMessage + + +def _mock_request(method: str, 
path: str) -> web.Request: + message = RawRequestMessage( + method, + path, + HttpVersion(1, 1), + CIMultiDictProxy(CIMultiDict()), + (), + False, + None, + False, + False, + URL(path), + ) + + return web.Request( + message, mock.Mock(), mock.Mock(), mock.Mock(), mock.Mock(), mock.Mock() + ) + + +def test_resolve_root_route( + loop: asyncio.AbstractEventLoop, + benchmark: BenchmarkFixture, +) -> None: + """Resolve top level PlainResources route 100 times.""" + resolve_count = 100 + + async def handler(request: web.Request) -> NoReturn: + assert False + + app = web.Application() + app.router.add_route("GET", "/", handler) + request = _mock_request(method="GET", path="/") + + async def run_url_dispatcher_benchmark() -> None: + for _ in range(resolve_count): + await app._router.resolve(request) + + @benchmark + def _run() -> None: + loop.run_until_complete(run_url_dispatcher_benchmark()) + + +def test_resolve_static_root_route( + loop: asyncio.AbstractEventLoop, + benchmark: BenchmarkFixture, +) -> None: + """Resolve top level StaticResource route 100 times.""" + resolve_count = 100 + + app = web.Application() + app.router.add_static("/", pathlib.Path(aiohttp.__file__).parent) + request = _mock_request(method="GET", path="/") + + async def run_url_dispatcher_benchmark() -> None: + for _ in range(resolve_count): + await app._router.resolve(request) + + @benchmark + def _run() -> None: + loop.run_until_complete(run_url_dispatcher_benchmark()) + + +def test_resolve_single_fixed_url_with_many_routes( + loop: asyncio.AbstractEventLoop, + benchmark: BenchmarkFixture, +) -> None: + """Resolve PlainResources route 100 times.""" + resolve_count = 100 + + async def handler(request: web.Request) -> NoReturn: + assert False + + app = web.Application() + for count in range(250): + app.router.add_route("GET", f"/api/server/dispatch/{count}/update", handler) + request = _mock_request(method="GET", path="/api/server/dispatch/1/update") + + async def run_url_dispatcher_benchmark() -> 
None: + for _ in range(resolve_count): + await app._router.resolve(request) + + @benchmark + def _run() -> None: + loop.run_until_complete(run_url_dispatcher_benchmark()) + + +def test_resolve_multiple_fixed_url_with_many_routes( + loop: asyncio.AbstractEventLoop, + benchmark: BenchmarkFixture, +) -> None: + """Resolve 250 different PlainResources routes.""" + + async def handler(request: web.Request) -> NoReturn: + assert False + + app = web.Application() + for count in range(250): + app.router.add_route("GET", f"/api/server/dispatch/{count}/update", handler) + + requests = [ + _mock_request(method="GET", path=f"/api/server/dispatch/{count}/update") + for count in range(250) + ] + + async def run_url_dispatcher_benchmark() -> None: + for request in requests: + await app._router.resolve(request) + + @benchmark + def _run() -> None: + loop.run_until_complete(run_url_dispatcher_benchmark()) + + +def test_resolve_multiple_level_fixed_url_with_many_routes( + loop: asyncio.AbstractEventLoop, + benchmark: BenchmarkFixture, +) -> None: + """Resolve 1024 different PlainResources routes.""" + + async def handler(request: web.Request) -> NoReturn: + assert False + + app = web.Application() + urls = [ + f"/api/{a}/{b}/{c}/{d}/{e}/update" + for a in ("a", "b", "c", "d") + for b in ("e", "f", "g", "h") + for c in ("i", "j", "k", "l") + for d in ("m", "n", "o", "p") + for e in ("n", "o", "p", "q") + ] + for url in urls: + app.router.add_route("GET", url, handler) + + requests = [_mock_request(method="GET", path=url) for url in urls] + + async def run_url_dispatcher_benchmark() -> None: + for request in requests: + await app._router.resolve(request) + + @benchmark + def _run() -> None: + loop.run_until_complete(run_url_dispatcher_benchmark()) + + +def test_resolve_dynamic_resource_url_with_many_routes( + loop: asyncio.AbstractEventLoop, + benchmark: BenchmarkFixture, +) -> None: + """Resolve different a DynamicResource when there are 250 PlainResources registered.""" + + async 
def handler(request: web.Request) -> NoReturn: + assert False + + app = web.Application() + for count in range(250): + app.router.add_route("GET", f"/api/server/other/{count}/update", handler) + app.router.add_route("GET", "/api/server/dispatch/{customer}/update", handler) + + requests = [ + _mock_request(method="GET", path=f"/api/server/dispatch/{customer}/update") + for customer in range(250) + ] + + async def run_url_dispatcher_benchmark() -> None: + for request in requests: + await app._router.resolve(request) + + @benchmark + def _run() -> None: + loop.run_until_complete(run_url_dispatcher_benchmark()) From 48a79d8a22ef0bb70ee1fcf9d47a723f6da4296a Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 15 Nov 2024 17:14:42 +0000 Subject: [PATCH 0956/1511] [PR #9910/4a9bbf92 backport][3.12] Add benchmarks for the URL dispatcher (#9917) --- tests/test_benchmarks_web_urldispatcher.py | 187 +++++++++++++++++++++ 1 file changed, 187 insertions(+) create mode 100644 tests/test_benchmarks_web_urldispatcher.py diff --git a/tests/test_benchmarks_web_urldispatcher.py b/tests/test_benchmarks_web_urldispatcher.py new file mode 100644 index 00000000000..2ffb53ee0f7 --- /dev/null +++ b/tests/test_benchmarks_web_urldispatcher.py @@ -0,0 +1,187 @@ +"""codspeed benchmarks for the URL dispatcher.""" + +import asyncio +import pathlib +from typing import NoReturn +from unittest import mock + +from multidict import CIMultiDict, CIMultiDictProxy +from pytest_codspeed import BenchmarkFixture +from yarl import URL + +import aiohttp +from aiohttp import web +from aiohttp.http import HttpVersion, RawRequestMessage + + +def _mock_request(method: str, path: str) -> web.Request: + message = RawRequestMessage( + method, + path, + HttpVersion(1, 1), + CIMultiDictProxy(CIMultiDict()), + (), + False, + None, + False, + False, + URL(path), + ) + + return web.Request( + message, mock.Mock(), mock.Mock(), mock.Mock(), mock.Mock(), mock.Mock() 
+ ) + + +def test_resolve_root_route( + loop: asyncio.AbstractEventLoop, + benchmark: BenchmarkFixture, +) -> None: + """Resolve top level PlainResources route 100 times.""" + resolve_count = 100 + + async def handler(request: web.Request) -> NoReturn: + assert False + + app = web.Application() + app.router.add_route("GET", "/", handler) + request = _mock_request(method="GET", path="/") + + async def run_url_dispatcher_benchmark() -> None: + for _ in range(resolve_count): + await app._router.resolve(request) + + @benchmark + def _run() -> None: + loop.run_until_complete(run_url_dispatcher_benchmark()) + + +def test_resolve_static_root_route( + loop: asyncio.AbstractEventLoop, + benchmark: BenchmarkFixture, +) -> None: + """Resolve top level StaticResource route 100 times.""" + resolve_count = 100 + + app = web.Application() + app.router.add_static("/", pathlib.Path(aiohttp.__file__).parent) + request = _mock_request(method="GET", path="/") + + async def run_url_dispatcher_benchmark() -> None: + for _ in range(resolve_count): + await app._router.resolve(request) + + @benchmark + def _run() -> None: + loop.run_until_complete(run_url_dispatcher_benchmark()) + + +def test_resolve_single_fixed_url_with_many_routes( + loop: asyncio.AbstractEventLoop, + benchmark: BenchmarkFixture, +) -> None: + """Resolve PlainResources route 100 times.""" + resolve_count = 100 + + async def handler(request: web.Request) -> NoReturn: + assert False + + app = web.Application() + for count in range(250): + app.router.add_route("GET", f"/api/server/dispatch/{count}/update", handler) + request = _mock_request(method="GET", path="/api/server/dispatch/1/update") + + async def run_url_dispatcher_benchmark() -> None: + for _ in range(resolve_count): + await app._router.resolve(request) + + @benchmark + def _run() -> None: + loop.run_until_complete(run_url_dispatcher_benchmark()) + + +def test_resolve_multiple_fixed_url_with_many_routes( + loop: asyncio.AbstractEventLoop, + benchmark: 
BenchmarkFixture, +) -> None: + """Resolve 250 different PlainResources routes.""" + + async def handler(request: web.Request) -> NoReturn: + assert False + + app = web.Application() + for count in range(250): + app.router.add_route("GET", f"/api/server/dispatch/{count}/update", handler) + + requests = [ + _mock_request(method="GET", path=f"/api/server/dispatch/{count}/update") + for count in range(250) + ] + + async def run_url_dispatcher_benchmark() -> None: + for request in requests: + await app._router.resolve(request) + + @benchmark + def _run() -> None: + loop.run_until_complete(run_url_dispatcher_benchmark()) + + +def test_resolve_multiple_level_fixed_url_with_many_routes( + loop: asyncio.AbstractEventLoop, + benchmark: BenchmarkFixture, +) -> None: + """Resolve 1024 different PlainResources routes.""" + + async def handler(request: web.Request) -> NoReturn: + assert False + + app = web.Application() + urls = [ + f"/api/{a}/{b}/{c}/{d}/{e}/update" + for a in ("a", "b", "c", "d") + for b in ("e", "f", "g", "h") + for c in ("i", "j", "k", "l") + for d in ("m", "n", "o", "p") + for e in ("n", "o", "p", "q") + ] + for url in urls: + app.router.add_route("GET", url, handler) + + requests = [_mock_request(method="GET", path=url) for url in urls] + + async def run_url_dispatcher_benchmark() -> None: + for request in requests: + await app._router.resolve(request) + + @benchmark + def _run() -> None: + loop.run_until_complete(run_url_dispatcher_benchmark()) + + +def test_resolve_dynamic_resource_url_with_many_routes( + loop: asyncio.AbstractEventLoop, + benchmark: BenchmarkFixture, +) -> None: + """Resolve different a DynamicResource when there are 250 PlainResources registered.""" + + async def handler(request: web.Request) -> NoReturn: + assert False + + app = web.Application() + for count in range(250): + app.router.add_route("GET", f"/api/server/other/{count}/update", handler) + app.router.add_route("GET", "/api/server/dispatch/{customer}/update", handler) + + 
requests = [ + _mock_request(method="GET", path=f"/api/server/dispatch/{customer}/update") + for customer in range(250) + ] + + async def run_url_dispatcher_benchmark() -> None: + for request in requests: + await app._router.resolve(request) + + @benchmark + def _run() -> None: + loop.run_until_complete(run_url_dispatcher_benchmark()) From 1510dae08f7a4e492d26d15352c8e7093851054c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 16 Nov 2024 13:44:39 +0100 Subject: [PATCH 0957/1511] [PR #9921/e9637a92 backport][3.11] Freeze app for url dispatcher benchmarks (#9923) **This is a backport of PR #9921 as merged into master (e9637a92b19e46116ab2faf1af968dbd978e2033).** It better reflects the real usage scenario, runners do app freezing before the start. Co-authored-by: Andrew Svetlov <andrew.svetlov@gmail.com> --- tests/test_benchmarks_web_urldispatcher.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/tests/test_benchmarks_web_urldispatcher.py b/tests/test_benchmarks_web_urldispatcher.py index 2ffb53ee0f7..cca3be0c826 100644 --- a/tests/test_benchmarks_web_urldispatcher.py +++ b/tests/test_benchmarks_web_urldispatcher.py @@ -45,6 +45,7 @@ async def handler(request: web.Request) -> NoReturn: app = web.Application() app.router.add_route("GET", "/", handler) + app.freeze() request = _mock_request(method="GET", path="/") async def run_url_dispatcher_benchmark() -> None: @@ -65,6 +66,7 @@ def test_resolve_static_root_route( app = web.Application() app.router.add_static("/", pathlib.Path(aiohttp.__file__).parent) + app.freeze() request = _mock_request(method="GET", path="/") async def run_url_dispatcher_benchmark() -> None: @@ -89,6 +91,7 @@ async def handler(request: web.Request) -> NoReturn: app = web.Application() for count in range(250): app.router.add_route("GET", f"/api/server/dispatch/{count}/update", handler) + app.freeze() request = _mock_request(method="GET", path="/api/server/dispatch/1/update") 
async def run_url_dispatcher_benchmark() -> None: @@ -112,6 +115,7 @@ async def handler(request: web.Request) -> NoReturn: app = web.Application() for count in range(250): app.router.add_route("GET", f"/api/server/dispatch/{count}/update", handler) + app.freeze() requests = [ _mock_request(method="GET", path=f"/api/server/dispatch/{count}/update") @@ -147,6 +151,7 @@ async def handler(request: web.Request) -> NoReturn: ] for url in urls: app.router.add_route("GET", url, handler) + app.freeze() requests = [_mock_request(method="GET", path=url) for url in urls] @@ -172,6 +177,7 @@ async def handler(request: web.Request) -> NoReturn: for count in range(250): app.router.add_route("GET", f"/api/server/other/{count}/update", handler) app.router.add_route("GET", "/api/server/dispatch/{customer}/update", handler) + app.freeze() requests = [ _mock_request(method="GET", path=f"/api/server/dispatch/{customer}/update") From 8916bc000c6577e4c5f6842cdcce9b9594ef290e Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 16 Nov 2024 13:44:56 +0100 Subject: [PATCH 0958/1511] [PR #9921/e9637a92 backport][3.12] Freeze app for url dispatcher benchmarks (#9924) **This is a backport of PR #9921 as merged into master (e9637a92b19e46116ab2faf1af968dbd978e2033).** It better reflects the real usage scenario, runners do app freezing before the start. 
Co-authored-by: Andrew Svetlov <andrew.svetlov@gmail.com> --- tests/test_benchmarks_web_urldispatcher.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/tests/test_benchmarks_web_urldispatcher.py b/tests/test_benchmarks_web_urldispatcher.py index 2ffb53ee0f7..cca3be0c826 100644 --- a/tests/test_benchmarks_web_urldispatcher.py +++ b/tests/test_benchmarks_web_urldispatcher.py @@ -45,6 +45,7 @@ async def handler(request: web.Request) -> NoReturn: app = web.Application() app.router.add_route("GET", "/", handler) + app.freeze() request = _mock_request(method="GET", path="/") async def run_url_dispatcher_benchmark() -> None: @@ -65,6 +66,7 @@ def test_resolve_static_root_route( app = web.Application() app.router.add_static("/", pathlib.Path(aiohttp.__file__).parent) + app.freeze() request = _mock_request(method="GET", path="/") async def run_url_dispatcher_benchmark() -> None: @@ -89,6 +91,7 @@ async def handler(request: web.Request) -> NoReturn: app = web.Application() for count in range(250): app.router.add_route("GET", f"/api/server/dispatch/{count}/update", handler) + app.freeze() request = _mock_request(method="GET", path="/api/server/dispatch/1/update") async def run_url_dispatcher_benchmark() -> None: @@ -112,6 +115,7 @@ async def handler(request: web.Request) -> NoReturn: app = web.Application() for count in range(250): app.router.add_route("GET", f"/api/server/dispatch/{count}/update", handler) + app.freeze() requests = [ _mock_request(method="GET", path=f"/api/server/dispatch/{count}/update") @@ -147,6 +151,7 @@ async def handler(request: web.Request) -> NoReturn: ] for url in urls: app.router.add_route("GET", url, handler) + app.freeze() requests = [_mock_request(method="GET", path=url) for url in urls] @@ -172,6 +177,7 @@ async def handler(request: web.Request) -> NoReturn: for count in range(250): app.router.add_route("GET", f"/api/server/other/{count}/update", handler) app.router.add_route("GET", "/api/server/dispatch/{customer}/update", handler) + 
app.freeze() requests = [ _mock_request(method="GET", path=f"/api/server/dispatch/{customer}/update") From 06e5c8fce20ea9e4407512d659e6abf00ba9b630 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 16 Nov 2024 13:12:31 +0000 Subject: [PATCH 0959/1511] [PR #9918/6ada2737 backport][3.11] Remove __author__ from __dir__ (#9919) **This is a backport of PR #9918 as merged into master (6ada2737485af65c7df8089b1c66c56e38da2bcd).** Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/9918.bugfix.rst | 1 + aiohttp/__init__.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 CHANGES/9918.bugfix.rst diff --git a/CHANGES/9918.bugfix.rst b/CHANGES/9918.bugfix.rst new file mode 100644 index 00000000000..bdf69abe848 --- /dev/null +++ b/CHANGES/9918.bugfix.rst @@ -0,0 +1 @@ +Removed non-existing ``__author__`` from ``dir(aiohttp)`` -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index d88f7facb10..4bac155c9d6 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -242,7 +242,7 @@ def __dir__() -> Tuple[str, ...]: - return __all__ + ("__author__", "__doc__") + return __all__ + ("__doc__",) def __getattr__(name: str) -> object: From b766eab6d8be22b205aa15552c5d789d2c6c23b5 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 16 Nov 2024 13:12:42 +0000 Subject: [PATCH 0960/1511] [PR #9918/6ada2737 backport][3.12] Remove __author__ from __dir__ (#9920) **This is a backport of PR #9918 as merged into master (6ada2737485af65c7df8089b1c66c56e38da2bcd).** Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/9918.bugfix.rst | 1 + aiohttp/__init__.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 CHANGES/9918.bugfix.rst diff --git a/CHANGES/9918.bugfix.rst b/CHANGES/9918.bugfix.rst new file mode 100644 index 00000000000..bdf69abe848 --- /dev/null +++ b/CHANGES/9918.bugfix.rst @@ 
-0,0 +1 @@ +Removed non-existing ``__author__`` from ``dir(aiohttp)`` -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index f05c8f04301..1338dae487c 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -242,7 +242,7 @@ def __dir__() -> Tuple[str, ...]: - return __all__ + ("__author__", "__doc__") + return __all__ + ("__doc__",) def __getattr__(name: str) -> object: From 163405ff1cc1dc0f4a865769518e13b23f1e90dd Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 16 Nov 2024 16:08:27 +0100 Subject: [PATCH 0961/1511] [PR #9925/69756b49 backport][3.11] Add benchmarks for github url resolving emulation (#9927) **This is a backport of PR #9925 as merged into master (69756b495d8fe0e1bf47056582d63cb6b0cb2913).** I think github API is a representative example for possible route table. Co-authored-by: Andrew Svetlov <andrew.svetlov@gmail.com> --- tests/github-urls.json | 653 +++++++++++++++++++++ tests/test_benchmarks_web_urldispatcher.py | 136 ++++- 2 files changed, 783 insertions(+), 6 deletions(-) create mode 100644 tests/github-urls.json diff --git a/tests/github-urls.json b/tests/github-urls.json new file mode 100644 index 00000000000..02e37554615 --- /dev/null +++ b/tests/github-urls.json @@ -0,0 +1,653 @@ +[ + "/", + "/advisories", + "/advisories/{ghsa_id}", + "/app", + "/app-manifests/{code}/conversions", + "/app/hook/config", + "/app/hook/deliveries", + "/app/hook/deliveries/{delivery_id}", + "/app/hook/deliveries/{delivery_id}/attempts", + "/app/installation-requests", + "/app/installations", + "/app/installations/{installation_id}", + "/app/installations/{installation_id}/access_tokens", + "/app/installations/{installation_id}/suspended", + "/applications/{client_id}/grant", + "/applications/{client_id}/token", + "/applications/{client_id}/token/scoped", + "/apps/{app_slug}", + "/assignments/{assignment_id}", + "/assignments/{assignment_id}/accepted_assignments", 
+ "/assignments/{assignment_id}/grades", + "/classrooms", + "/classrooms/{classroom_id}", + "/classrooms/{classroom_id}/assignments", + "/codes_of_conduct", + "/codes_of_conduct/{key}", + "/emojis", + "/enterprises/{enterprise}/copilot/billing/seats", + "/enterprises/{enterprise}/copilot/metrics", + "/enterprises/{enterprise}/copilot/usage", + "/enterprises/{enterprise}/dependabot/alerts", + "/enterprises/{enterprise}/secret-scanning/alerts", + "/enterprises/{enterprise}/team/{team_slug}/copilot/metrics", + "/enterprises/{enterprise}/team/{team_slug}/copilot/usage", + "/events", + "/feeds", + "/gists", + "/gists/public", + "/gists/starred", + "/gists/{gist_id}", + "/gists/{gist_id}/comments", + "/gists/{gist_id}/comments/{comment_id}", + "/gists/{gist_id}/commits", + "/gists/{gist_id}/forks", + "/gists/{gist_id}/star", + "/gists/{gist_id}/{sha}", + "/gitignore/templates", + "/gitignore/templates/{name}", + "/installation/repositories", + "/installation/token", + "/issues", + "/licenses", + "/licenses/{license}", + "/markdown", + "/markdown/raw", + "/marketplace_listing/accounts/{account_id}", + "/marketplace_listing/plans", + "/marketplace_listing/plans/{plan_id}/accounts", + "/marketplace_listing/stubbed/accounts/{account_id}", + "/marketplace_listing/stubbed/plans", + "/marketplace_listing/stubbed/plans/{plan_id}/accounts", + "/meta", + "/networks/{owner}/{repo}/events", + "/notifications", + "/notifications/threads/{thread_id}", + "/notifications/threads/{thread_id}/subscription", + "/octocat", + "/organizations", + "/orgs/{org}", + "/orgs/{org}/actions/cache/usage", + "/orgs/{org}/actions/cache/usage-by-repository", + "/orgs/{org}/actions/oidc/customization/sub", + "/orgs/{org}/actions/permissions", + "/orgs/{org}/actions/permissions/repositories", + "/orgs/{org}/actions/permissions/repositories/{repository_id}", + "/orgs/{org}/actions/permissions/selected-actions", + "/orgs/{org}/actions/permissions/workflow", + "/orgs/{org}/actions/runner-groups", + 
"/orgs/{org}/actions/runner-groups/{runner_group_id}", + "/orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", + "/orgs/{org}/actions/runner-groups/{runner_group_id}/repositories/{repository_id}", + "/orgs/{org}/actions/runner-groups/{runner_group_id}/runners", + "/orgs/{org}/actions/runner-groups/{runner_group_id}/runners/{runner_id}", + "/orgs/{org}/actions/runners", + "/orgs/{org}/actions/runners/downloads", + "/orgs/{org}/actions/runners/generate-jitconfig", + "/orgs/{org}/actions/runners/registration-token", + "/orgs/{org}/actions/runners/remove-token", + "/orgs/{org}/actions/runners/{runner_id}", + "/orgs/{org}/actions/runners/{runner_id}/labels", + "/orgs/{org}/actions/runners/{runner_id}/labels/{name}", + "/orgs/{org}/actions/secrets", + "/orgs/{org}/actions/secrets/public-key", + "/orgs/{org}/actions/secrets/{secret_name}", + "/orgs/{org}/actions/secrets/{secret_name}/repositories", + "/orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}", + "/orgs/{org}/actions/variables", + "/orgs/{org}/actions/variables/{name}", + "/orgs/{org}/actions/variables/{name}/repositories", + "/orgs/{org}/actions/variables/{name}/repositories/{repository_id}", + "/orgs/{org}/attestations/{subject_digest}", + "/orgs/{org}/blocks", + "/orgs/{org}/blocks/{username}", + "/orgs/{org}/code-scanning/alerts", + "/orgs/{org}/code-security/configurations", + "/orgs/{org}/code-security/configurations/defaults", + "/orgs/{org}/code-security/configurations/detach", + "/orgs/{org}/code-security/configurations/{configuration_id}", + "/orgs/{org}/code-security/configurations/{configuration_id}/attach", + "/orgs/{org}/code-security/configurations/{configuration_id}/defaults", + "/orgs/{org}/code-security/configurations/{configuration_id}/repositories", + "/orgs/{org}/codespaces", + "/orgs/{org}/codespaces/access", + "/orgs/{org}/codespaces/access/selected_users", + "/orgs/{org}/codespaces/secrets", + "/orgs/{org}/codespaces/secrets/public-key", + 
"/orgs/{org}/codespaces/secrets/{secret_name}", + "/orgs/{org}/codespaces/secrets/{secret_name}/repositories", + "/orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}", + "/orgs/{org}/copilot/billing", + "/orgs/{org}/copilot/billing/seats", + "/orgs/{org}/copilot/billing/selected_teams", + "/orgs/{org}/copilot/billing/selected_users", + "/orgs/{org}/copilot/metrics", + "/orgs/{org}/copilot/usage", + "/orgs/{org}/dependabot/alerts", + "/orgs/{org}/dependabot/secrets", + "/orgs/{org}/dependabot/secrets/public-key", + "/orgs/{org}/dependabot/secrets/{secret_name}", + "/orgs/{org}/dependabot/secrets/{secret_name}/repositories", + "/orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}", + "/orgs/{org}/docker/conflicts", + "/orgs/{org}/events", + "/orgs/{org}/failed_invitations", + "/orgs/{org}/hooks", + "/orgs/{org}/hooks/{hook_id}", + "/orgs/{org}/hooks/{hook_id}/config", + "/orgs/{org}/hooks/{hook_id}/deliveries", + "/orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}", + "/orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts", + "/orgs/{org}/hooks/{hook_id}/pings", + "/orgs/{org}/insights/api/route-stats/{actor_type}/{actor_id}", + "/orgs/{org}/insights/api/subject-stats", + "/orgs/{org}/insights/api/summary-stats", + "/orgs/{org}/insights/api/summary-stats/users/{user_id}", + "/orgs/{org}/insights/api/summary-stats/{actor_type}/{actor_id}", + "/orgs/{org}/insights/api/time-stats", + "/orgs/{org}/insights/api/time-stats/users/{user_id}", + "/orgs/{org}/insights/api/time-stats/{actor_type}/{actor_id}", + "/orgs/{org}/insights/api/user-stats/{user_id}", + "/orgs/{org}/installation", + "/orgs/{org}/installations", + "/orgs/{org}/interaction-limits", + "/orgs/{org}/invitations", + "/orgs/{org}/invitations/{invitation_id}", + "/orgs/{org}/invitations/{invitation_id}/teams", + "/orgs/{org}/issues", + "/orgs/{org}/members", + "/orgs/{org}/members/{username}", + "/orgs/{org}/members/{username}/codespaces", + 
"/orgs/{org}/members/{username}/codespaces/{codespace_name}", + "/orgs/{org}/members/{username}/codespaces/{codespace_name}/stop", + "/orgs/{org}/members/{username}/copilot", + "/orgs/{org}/memberships/{username}", + "/orgs/{org}/migrations", + "/orgs/{org}/migrations/{migration_id}", + "/orgs/{org}/migrations/{migration_id}/archive", + "/orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock", + "/orgs/{org}/migrations/{migration_id}/repositories", + "/orgs/{org}/organization-roles", + "/orgs/{org}/organization-roles/teams/{team_slug}", + "/orgs/{org}/organization-roles/teams/{team_slug}/{role_id}", + "/orgs/{org}/organization-roles/users/{username}", + "/orgs/{org}/organization-roles/users/{username}/{role_id}", + "/orgs/{org}/organization-roles/{role_id}", + "/orgs/{org}/organization-roles/{role_id}/teams", + "/orgs/{org}/organization-roles/{role_id}/users", + "/orgs/{org}/outside_collaborators", + "/orgs/{org}/outside_collaborators/{username}", + "/orgs/{org}/packages", + "/orgs/{org}/packages/{package_type}/{package_name}", + "/orgs/{org}/packages/{package_type}/{package_name}/restore", + "/orgs/{org}/packages/{package_type}/{package_name}/versions", + "/orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}", + "/orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore", + "/orgs/{org}/personal-access-token-requests", + "/orgs/{org}/personal-access-token-requests/{pat_request_id}", + "/orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories", + "/orgs/{org}/personal-access-tokens", + "/orgs/{org}/personal-access-tokens/{pat_id}", + "/orgs/{org}/personal-access-tokens/{pat_id}/repositories", + "/orgs/{org}/projects", + "/orgs/{org}/properties/schema", + "/orgs/{org}/properties/schema/{custom_property_name}", + "/orgs/{org}/properties/values", + "/orgs/{org}/public_members", + "/orgs/{org}/public_members/{username}", + "/orgs/{org}/repos", + "/orgs/{org}/rulesets", + 
"/orgs/{org}/rulesets/rule-suites", + "/orgs/{org}/rulesets/rule-suites/{rule_suite_id}", + "/orgs/{org}/rulesets/{ruleset_id}", + "/orgs/{org}/secret-scanning/alerts", + "/orgs/{org}/security-advisories", + "/orgs/{org}/security-managers", + "/orgs/{org}/security-managers/teams/{team_slug}", + "/orgs/{org}/settings/billing/actions", + "/orgs/{org}/settings/billing/packages", + "/orgs/{org}/settings/billing/shared-storage", + "/orgs/{org}/team/{team_slug}/copilot/metrics", + "/orgs/{org}/team/{team_slug}/copilot/usage", + "/orgs/{org}/teams", + "/orgs/{org}/teams/{team_slug}", + "/orgs/{org}/teams/{team_slug}/discussions", + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}", + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}", + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}", + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}", + "/orgs/{org}/teams/{team_slug}/invitations", + "/orgs/{org}/teams/{team_slug}/members", + "/orgs/{org}/teams/{team_slug}/memberships/{username}", + "/orgs/{org}/teams/{team_slug}/projects", + "/orgs/{org}/teams/{team_slug}/projects/{project_id}", + "/orgs/{org}/teams/{team_slug}/repos", + "/orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", + "/orgs/{org}/teams/{team_slug}/teams", + "/orgs/{org}/{security_product}/{enablement}", + "/projects/columns/cards/{card_id}", + "/projects/columns/cards/{card_id}/moves", + "/projects/columns/{column_id}", + "/projects/columns/{column_id}/cards", + "/projects/columns/{column_id}/moves", + "/projects/{project_id}", + "/projects/{project_id}/collaborators", + 
"/projects/{project_id}/collaborators/{username}", + "/projects/{project_id}/collaborators/{username}/permission", + "/projects/{project_id}/columns", + "/rate_limit", + "/repos/{owner}/{repo}", + "/repos/{owner}/{repo}/actions/artifacts", + "/repos/{owner}/{repo}/actions/artifacts/{artifact_id}", + "/repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}", + "/repos/{owner}/{repo}/actions/cache/usage", + "/repos/{owner}/{repo}/actions/caches", + "/repos/{owner}/{repo}/actions/caches/{cache_id}", + "/repos/{owner}/{repo}/actions/jobs/{job_id}", + "/repos/{owner}/{repo}/actions/jobs/{job_id}/logs", + "/repos/{owner}/{repo}/actions/jobs/{job_id}/rerun", + "/repos/{owner}/{repo}/actions/oidc/customization/sub", + "/repos/{owner}/{repo}/actions/organization-secrets", + "/repos/{owner}/{repo}/actions/organization-variables", + "/repos/{owner}/{repo}/actions/permissions", + "/repos/{owner}/{repo}/actions/permissions/access", + "/repos/{owner}/{repo}/actions/permissions/selected-actions", + "/repos/{owner}/{repo}/actions/permissions/workflow", + "/repos/{owner}/{repo}/actions/runners", + "/repos/{owner}/{repo}/actions/runners/downloads", + "/repos/{owner}/{repo}/actions/runners/generate-jitconfig", + "/repos/{owner}/{repo}/actions/runners/registration-token", + "/repos/{owner}/{repo}/actions/runners/remove-token", + "/repos/{owner}/{repo}/actions/runners/{runner_id}", + "/repos/{owner}/{repo}/actions/runners/{runner_id}/labels", + "/repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}", + "/repos/{owner}/{repo}/actions/runs", + "/repos/{owner}/{repo}/actions/runs/{run_id}", + "/repos/{owner}/{repo}/actions/runs/{run_id}/approvals", + "/repos/{owner}/{repo}/actions/runs/{run_id}/approve", + "/repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", + "/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}", + "/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", + 
"/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs", + "/repos/{owner}/{repo}/actions/runs/{run_id}/cancel", + "/repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule", + "/repos/{owner}/{repo}/actions/runs/{run_id}/force-cancel", + "/repos/{owner}/{repo}/actions/runs/{run_id}/jobs", + "/repos/{owner}/{repo}/actions/runs/{run_id}/logs", + "/repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments", + "/repos/{owner}/{repo}/actions/runs/{run_id}/rerun", + "/repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs", + "/repos/{owner}/{repo}/actions/runs/{run_id}/timing", + "/repos/{owner}/{repo}/actions/secrets", + "/repos/{owner}/{repo}/actions/secrets/public-key", + "/repos/{owner}/{repo}/actions/secrets/{secret_name}", + "/repos/{owner}/{repo}/actions/variables", + "/repos/{owner}/{repo}/actions/variables/{name}", + "/repos/{owner}/{repo}/actions/workflows", + "/repos/{owner}/{repo}/actions/workflows/{workflow_id}", + "/repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable", + "/repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches", + "/repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable", + "/repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", + "/repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing", + "/repos/{owner}/{repo}/activity", + "/repos/{owner}/{repo}/assignees", + "/repos/{owner}/{repo}/assignees/{assignee}", + "/repos/{owner}/{repo}/attestations", + "/repos/{owner}/{repo}/attestations/{subject_digest}", + "/repos/{owner}/{repo}/autolinks", + "/repos/{owner}/{repo}/autolinks/{autolink_id}", + "/repos/{owner}/{repo}/automated-security-fixes", + "/repos/{owner}/{repo}/branches", + "/repos/{owner}/{repo}/branches/{branch}", + "/repos/{owner}/{repo}/branches/{branch}/protection", + "/repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", + "/repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", + 
"/repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", + "/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", + "/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + "/repos/{owner}/{repo}/branches/{branch}/protection/restrictions", + "/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + "/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + "/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + "/repos/{owner}/{repo}/branches/{branch}/rename", + "/repos/{owner}/{repo}/check-runs", + "/repos/{owner}/{repo}/check-runs/{check_run_id}", + "/repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", + "/repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest", + "/repos/{owner}/{repo}/check-suites", + "/repos/{owner}/{repo}/check-suites/preferences", + "/repos/{owner}/{repo}/check-suites/{check_suite_id}", + "/repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", + "/repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest", + "/repos/{owner}/{repo}/code-scanning/alerts", + "/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", + "/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + "/repos/{owner}/{repo}/code-scanning/analyses", + "/repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}", + "/repos/{owner}/{repo}/code-scanning/codeql/databases", + "/repos/{owner}/{repo}/code-scanning/codeql/databases/{language}", + "/repos/{owner}/{repo}/code-scanning/codeql/variant-analyses", + "/repos/{owner}/{repo}/code-scanning/codeql/variant-analyses/{codeql_variant_analysis_id}", + "/repos/{owner}/{repo}/code-scanning/codeql/variant-analyses/{codeql_variant_analysis_id}/repos/{repo_owner}/{repo_name}", + "/repos/{owner}/{repo}/code-scanning/default-setup", + "/repos/{owner}/{repo}/code-scanning/sarifs", + "/repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}", + 
"/repos/{owner}/{repo}/code-security-configuration", + "/repos/{owner}/{repo}/codeowners/errors", + "/repos/{owner}/{repo}/codespaces", + "/repos/{owner}/{repo}/codespaces/devcontainers", + "/repos/{owner}/{repo}/codespaces/machines", + "/repos/{owner}/{repo}/codespaces/new", + "/repos/{owner}/{repo}/codespaces/permissions_check", + "/repos/{owner}/{repo}/codespaces/secrets", + "/repos/{owner}/{repo}/codespaces/secrets/public-key", + "/repos/{owner}/{repo}/codespaces/secrets/{secret_name}", + "/repos/{owner}/{repo}/collaborators", + "/repos/{owner}/{repo}/collaborators/{username}", + "/repos/{owner}/{repo}/collaborators/{username}/permission", + "/repos/{owner}/{repo}/comments", + "/repos/{owner}/{repo}/comments/{comment_id}", + "/repos/{owner}/{repo}/comments/{comment_id}/reactions", + "/repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}", + "/repos/{owner}/{repo}/commits", + "/repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", + "/repos/{owner}/{repo}/commits/{commit_sha}/comments", + "/repos/{owner}/{repo}/commits/{commit_sha}/pulls", + "/repos/{owner}/{repo}/commits/{ref}", + "/repos/{owner}/{repo}/commits/{ref}/check-runs", + "/repos/{owner}/{repo}/commits/{ref}/check-suites", + "/repos/{owner}/{repo}/commits/{ref}/status", + "/repos/{owner}/{repo}/commits/{ref}/statuses", + "/repos/{owner}/{repo}/community/profile", + "/repos/{owner}/{repo}/compare/{basehead}", + "/repos/{owner}/{repo}/contents/{path}", + "/repos/{owner}/{repo}/contributors", + "/repos/{owner}/{repo}/dependabot/alerts", + "/repos/{owner}/{repo}/dependabot/alerts/{alert_number}", + "/repos/{owner}/{repo}/dependabot/secrets", + "/repos/{owner}/{repo}/dependabot/secrets/public-key", + "/repos/{owner}/{repo}/dependabot/secrets/{secret_name}", + "/repos/{owner}/{repo}/dependency-graph/compare/{basehead}", + "/repos/{owner}/{repo}/dependency-graph/sbom", + "/repos/{owner}/{repo}/dependency-graph/snapshots", + "/repos/{owner}/{repo}/deployments", + 
"/repos/{owner}/{repo}/deployments/{deployment_id}", + "/repos/{owner}/{repo}/deployments/{deployment_id}/statuses", + "/repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}", + "/repos/{owner}/{repo}/dispatches", + "/repos/{owner}/{repo}/environments", + "/repos/{owner}/{repo}/environments/{environment_name}", + "/repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies", + "/repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}", + "/repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules", + "/repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps", + "/repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}", + "/repos/{owner}/{repo}/environments/{environment_name}/secrets", + "/repos/{owner}/{repo}/environments/{environment_name}/secrets/public-key", + "/repos/{owner}/{repo}/environments/{environment_name}/secrets/{secret_name}", + "/repos/{owner}/{repo}/environments/{environment_name}/variables", + "/repos/{owner}/{repo}/environments/{environment_name}/variables/{name}", + "/repos/{owner}/{repo}/events", + "/repos/{owner}/{repo}/forks", + "/repos/{owner}/{repo}/git/blobs", + "/repos/{owner}/{repo}/git/blobs/{file_sha}", + "/repos/{owner}/{repo}/git/commits", + "/repos/{owner}/{repo}/git/commits/{commit_sha}", + "/repos/{owner}/{repo}/git/matching-refs/{ref}", + "/repos/{owner}/{repo}/git/ref/{ref}", + "/repos/{owner}/{repo}/git/refs", + "/repos/{owner}/{repo}/git/refs/{ref}", + "/repos/{owner}/{repo}/git/tags", + "/repos/{owner}/{repo}/git/tags/{tag_sha}", + "/repos/{owner}/{repo}/git/trees", + "/repos/{owner}/{repo}/git/trees/{tree_sha}", + "/repos/{owner}/{repo}/hooks", + "/repos/{owner}/{repo}/hooks/{hook_id}", + "/repos/{owner}/{repo}/hooks/{hook_id}/config", + "/repos/{owner}/{repo}/hooks/{hook_id}/deliveries", + 
"/repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}", + "/repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts", + "/repos/{owner}/{repo}/hooks/{hook_id}/pings", + "/repos/{owner}/{repo}/hooks/{hook_id}/tests", + "/repos/{owner}/{repo}/import", + "/repos/{owner}/{repo}/import/authors", + "/repos/{owner}/{repo}/import/authors/{author_id}", + "/repos/{owner}/{repo}/import/large_files", + "/repos/{owner}/{repo}/import/lfs", + "/repos/{owner}/{repo}/installation", + "/repos/{owner}/{repo}/interaction-limits", + "/repos/{owner}/{repo}/invitations", + "/repos/{owner}/{repo}/invitations/{invitation_id}", + "/repos/{owner}/{repo}/issues", + "/repos/{owner}/{repo}/issues/comments", + "/repos/{owner}/{repo}/issues/comments/{comment_id}", + "/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", + "/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}", + "/repos/{owner}/{repo}/issues/events", + "/repos/{owner}/{repo}/issues/events/{event_id}", + "/repos/{owner}/{repo}/issues/{issue_number}", + "/repos/{owner}/{repo}/issues/{issue_number}/assignees", + "/repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}", + "/repos/{owner}/{repo}/issues/{issue_number}/comments", + "/repos/{owner}/{repo}/issues/{issue_number}/events", + "/repos/{owner}/{repo}/issues/{issue_number}/labels", + "/repos/{owner}/{repo}/issues/{issue_number}/labels/{name}", + "/repos/{owner}/{repo}/issues/{issue_number}/lock", + "/repos/{owner}/{repo}/issues/{issue_number}/reactions", + "/repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}", + "/repos/{owner}/{repo}/issues/{issue_number}/timeline", + "/repos/{owner}/{repo}/keys", + "/repos/{owner}/{repo}/keys/{key_id}", + "/repos/{owner}/{repo}/labels", + "/repos/{owner}/{repo}/labels/{name}", + "/repos/{owner}/{repo}/languages", + "/repos/{owner}/{repo}/license", + "/repos/{owner}/{repo}/merge-upstream", + "/repos/{owner}/{repo}/merges", + "/repos/{owner}/{repo}/milestones", + 
"/repos/{owner}/{repo}/milestones/{milestone_number}", + "/repos/{owner}/{repo}/milestones/{milestone_number}/labels", + "/repos/{owner}/{repo}/notifications", + "/repos/{owner}/{repo}/pages", + "/repos/{owner}/{repo}/pages/builds", + "/repos/{owner}/{repo}/pages/builds/latest", + "/repos/{owner}/{repo}/pages/builds/{build_id}", + "/repos/{owner}/{repo}/pages/deployments", + "/repos/{owner}/{repo}/pages/deployments/{pages_deployment_id}", + "/repos/{owner}/{repo}/pages/deployments/{pages_deployment_id}/cancel", + "/repos/{owner}/{repo}/pages/health", + "/repos/{owner}/{repo}/private-vulnerability-reporting", + "/repos/{owner}/{repo}/projects", + "/repos/{owner}/{repo}/properties/values", + "/repos/{owner}/{repo}/pulls", + "/repos/{owner}/{repo}/pulls/comments", + "/repos/{owner}/{repo}/pulls/comments/{comment_id}", + "/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", + "/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}", + "/repos/{owner}/{repo}/pulls/{pull_number}", + "/repos/{owner}/{repo}/pulls/{pull_number}/codespaces", + "/repos/{owner}/{repo}/pulls/{pull_number}/comments", + "/repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies", + "/repos/{owner}/{repo}/pulls/{pull_number}/commits", + "/repos/{owner}/{repo}/pulls/{pull_number}/files", + "/repos/{owner}/{repo}/pulls/{pull_number}/merge", + "/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", + "/repos/{owner}/{repo}/pulls/{pull_number}/reviews", + "/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}", + "/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", + "/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals", + "/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events", + "/repos/{owner}/{repo}/pulls/{pull_number}/update-branch", + "/repos/{owner}/{repo}/readme", + "/repos/{owner}/{repo}/readme/{dir}", + "/repos/{owner}/{repo}/releases", + 
"/repos/{owner}/{repo}/releases/assets/{asset_id}", + "/repos/{owner}/{repo}/releases/generate-notes", + "/repos/{owner}/{repo}/releases/latest", + "/repos/{owner}/{repo}/releases/tags/{tag}", + "/repos/{owner}/{repo}/releases/{release_id}", + "/repos/{owner}/{repo}/releases/{release_id}/assets", + "/repos/{owner}/{repo}/releases/{release_id}/reactions", + "/repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}", + "/repos/{owner}/{repo}/rules/branches/{branch}", + "/repos/{owner}/{repo}/rulesets", + "/repos/{owner}/{repo}/rulesets/rule-suites", + "/repos/{owner}/{repo}/rulesets/rule-suites/{rule_suite_id}", + "/repos/{owner}/{repo}/rulesets/{ruleset_id}", + "/repos/{owner}/{repo}/secret-scanning/alerts", + "/repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}", + "/repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", + "/repos/{owner}/{repo}/secret-scanning/push-protection-bypasses", + "/repos/{owner}/{repo}/security-advisories", + "/repos/{owner}/{repo}/security-advisories/reports", + "/repos/{owner}/{repo}/security-advisories/{ghsa_id}", + "/repos/{owner}/{repo}/security-advisories/{ghsa_id}/cve", + "/repos/{owner}/{repo}/security-advisories/{ghsa_id}/forks", + "/repos/{owner}/{repo}/stargazers", + "/repos/{owner}/{repo}/stats/code_frequency", + "/repos/{owner}/{repo}/stats/commit_activity", + "/repos/{owner}/{repo}/stats/contributors", + "/repos/{owner}/{repo}/stats/participation", + "/repos/{owner}/{repo}/stats/punch_card", + "/repos/{owner}/{repo}/statuses/{sha}", + "/repos/{owner}/{repo}/subscribers", + "/repos/{owner}/{repo}/subscription", + "/repos/{owner}/{repo}/tags", + "/repos/{owner}/{repo}/tags/protection", + "/repos/{owner}/{repo}/tags/protection/{tag_protection_id}", + "/repos/{owner}/{repo}/tarball/{ref}", + "/repos/{owner}/{repo}/teams", + "/repos/{owner}/{repo}/topics", + "/repos/{owner}/{repo}/traffic/clones", + "/repos/{owner}/{repo}/traffic/popular/paths", + 
"/repos/{owner}/{repo}/traffic/popular/referrers", + "/repos/{owner}/{repo}/traffic/views", + "/repos/{owner}/{repo}/transfer", + "/repos/{owner}/{repo}/vulnerability-alerts", + "/repos/{owner}/{repo}/zipball/{ref}", + "/repos/{template_owner}/{template_repo}/generate", + "/repositories", + "/search/code", + "/search/commits", + "/search/issues", + "/search/labels", + "/search/repositories", + "/search/topics", + "/search/users", + "/teams/{team_id}", + "/teams/{team_id}/discussions", + "/teams/{team_id}/discussions/{discussion_number}", + "/teams/{team_id}/discussions/{discussion_number}/comments", + "/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}", + "/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "/teams/{team_id}/discussions/{discussion_number}/reactions", + "/teams/{team_id}/invitations", + "/teams/{team_id}/members", + "/teams/{team_id}/members/{username}", + "/teams/{team_id}/memberships/{username}", + "/teams/{team_id}/projects", + "/teams/{team_id}/projects/{project_id}", + "/teams/{team_id}/repos", + "/teams/{team_id}/repos/{owner}/{repo}", + "/teams/{team_id}/teams", + "/user", + "/user/blocks", + "/user/blocks/{username}", + "/user/codespaces", + "/user/codespaces/secrets", + "/user/codespaces/secrets/public-key", + "/user/codespaces/secrets/{secret_name}", + "/user/codespaces/secrets/{secret_name}/repositories", + "/user/codespaces/secrets/{secret_name}/repositories/{repository_id}", + "/user/codespaces/{codespace_name}", + "/user/codespaces/{codespace_name}/exports", + "/user/codespaces/{codespace_name}/exports/{export_id}", + "/user/codespaces/{codespace_name}/machines", + "/user/codespaces/{codespace_name}/publish", + "/user/codespaces/{codespace_name}/start", + "/user/codespaces/{codespace_name}/stop", + "/user/docker/conflicts", + "/user/email/visibility", + "/user/emails", + "/user/followers", + "/user/following", + "/user/following/{username}", + "/user/gpg_keys", + 
"/user/gpg_keys/{gpg_key_id}", + "/user/installations", + "/user/installations/{installation_id}/repositories", + "/user/installations/{installation_id}/repositories/{repository_id}", + "/user/interaction-limits", + "/user/issues", + "/user/keys", + "/user/keys/{key_id}", + "/user/marketplace_purchases", + "/user/marketplace_purchases/stubbed", + "/user/memberships/orgs", + "/user/memberships/orgs/{org}", + "/user/migrations", + "/user/migrations/{migration_id}", + "/user/migrations/{migration_id}/archive", + "/user/migrations/{migration_id}/repos/{repo_name}/lock", + "/user/migrations/{migration_id}/repositories", + "/user/orgs", + "/user/packages", + "/user/packages/{package_type}/{package_name}", + "/user/packages/{package_type}/{package_name}/restore", + "/user/packages/{package_type}/{package_name}/versions", + "/user/packages/{package_type}/{package_name}/versions/{package_version_id}", + "/user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore", + "/user/projects", + "/user/public_emails", + "/user/repos", + "/user/repository_invitations", + "/user/repository_invitations/{invitation_id}", + "/user/social_accounts", + "/user/ssh_signing_keys", + "/user/ssh_signing_keys/{ssh_signing_key_id}", + "/user/starred", + "/user/starred/{owner}/{repo}", + "/user/subscriptions", + "/user/teams", + "/user/{account_id}", + "/users", + "/users/{username}", + "/users/{username}/attestations/{subject_digest}", + "/users/{username}/docker/conflicts", + "/users/{username}/events", + "/users/{username}/events/orgs/{org}", + "/users/{username}/events/public", + "/users/{username}/followers", + "/users/{username}/following", + "/users/{username}/following/{target_user}", + "/users/{username}/gists", + "/users/{username}/gpg_keys", + "/users/{username}/hovercard", + "/users/{username}/installation", + "/users/{username}/keys", + "/users/{username}/orgs", + "/users/{username}/packages", + "/users/{username}/packages/{package_type}/{package_name}", + 
"/users/{username}/packages/{package_type}/{package_name}/restore", + "/users/{username}/packages/{package_type}/{package_name}/versions", + "/users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}", + "/users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore", + "/users/{username}/projects", + "/users/{username}/received_events", + "/users/{username}/received_events/public", + "/users/{username}/repos", + "/users/{username}/settings/billing/actions", + "/users/{username}/settings/billing/packages", + "/users/{username}/settings/billing/shared-storage", + "/users/{username}/social_accounts", + "/users/{username}/ssh_signing_keys", + "/users/{username}/starred", + "/users/{username}/subscriptions", + "/versions", + "/zen" +] diff --git a/tests/test_benchmarks_web_urldispatcher.py b/tests/test_benchmarks_web_urldispatcher.py index cca3be0c826..452f4afa7b3 100644 --- a/tests/test_benchmarks_web_urldispatcher.py +++ b/tests/test_benchmarks_web_urldispatcher.py @@ -1,7 +1,11 @@ """codspeed benchmarks for the URL dispatcher.""" import asyncio +import json import pathlib +import random +import string +from pathlib import Path from typing import NoReturn from unittest import mock @@ -46,11 +50,12 @@ async def handler(request: web.Request) -> NoReturn: app = web.Application() app.router.add_route("GET", "/", handler) app.freeze() + router = app.router request = _mock_request(method="GET", path="/") async def run_url_dispatcher_benchmark() -> None: for _ in range(resolve_count): - await app._router.resolve(request) + await router.resolve(request) @benchmark def _run() -> None: @@ -67,11 +72,12 @@ def test_resolve_static_root_route( app = web.Application() app.router.add_static("/", pathlib.Path(aiohttp.__file__).parent) app.freeze() + router = app.router request = _mock_request(method="GET", path="/") async def run_url_dispatcher_benchmark() -> None: for _ in range(resolve_count): - await 
app._router.resolve(request) + await router.resolve(request) @benchmark def _run() -> None: @@ -92,11 +98,12 @@ async def handler(request: web.Request) -> NoReturn: for count in range(250): app.router.add_route("GET", f"/api/server/dispatch/{count}/update", handler) app.freeze() + router = app.router request = _mock_request(method="GET", path="/api/server/dispatch/1/update") async def run_url_dispatcher_benchmark() -> None: for _ in range(resolve_count): - await app._router.resolve(request) + await router.resolve(request) @benchmark def _run() -> None: @@ -116,6 +123,7 @@ async def handler(request: web.Request) -> NoReturn: for count in range(250): app.router.add_route("GET", f"/api/server/dispatch/{count}/update", handler) app.freeze() + router = app.router requests = [ _mock_request(method="GET", path=f"/api/server/dispatch/{count}/update") @@ -124,7 +132,7 @@ async def handler(request: web.Request) -> NoReturn: async def run_url_dispatcher_benchmark() -> None: for request in requests: - await app._router.resolve(request) + await router.resolve(request) @benchmark def _run() -> None: @@ -152,12 +160,13 @@ async def handler(request: web.Request) -> NoReturn: for url in urls: app.router.add_route("GET", url, handler) app.freeze() + router = app.router requests = [_mock_request(method="GET", path=url) for url in urls] async def run_url_dispatcher_benchmark() -> None: for request in requests: - await app._router.resolve(request) + await router.resolve(request) @benchmark def _run() -> None: @@ -178,6 +187,7 @@ async def handler(request: web.Request) -> NoReturn: app.router.add_route("GET", f"/api/server/other/{count}/update", handler) app.router.add_route("GET", "/api/server/dispatch/{customer}/update", handler) app.freeze() + router = app.router requests = [ _mock_request(method="GET", path=f"/api/server/dispatch/{customer}/update") @@ -186,7 +196,121 @@ async def handler(request: web.Request) -> NoReturn: async def run_url_dispatcher_benchmark() -> None: for 
request in requests: - await app._router.resolve(request) + await router.resolve(request) + + @benchmark + def _run() -> None: + loop.run_until_complete(run_url_dispatcher_benchmark()) + + +def test_resolve_gitapi( + loop: asyncio.AbstractEventLoop, + benchmark: BenchmarkFixture, +) -> None: + """Resolve DynamicResource for simulated github API. + + The benchmark uses OpenAPI generated info for github. + To update the local data file please run the following command: + $ curl https://raw.githubusercontent.com/github/rest-api-description/refs/heads/main/descriptions/api.github.com/api.github.com.json | jq ".paths | keys" > github-urls.json + """ + + async def handler(request: web.Request) -> NoReturn: + assert False + + here = Path(__file__).parent + with (here / "github-urls.json").open() as f: + urls = json.load(f) + + app = web.Application() + for url in urls: + app.router.add_get(url, handler) + app.freeze() + router = app.router + + # PR reviews API was selected absolutely voluntary. + # It is not any special but sits somewhere in the middle of the urls list. + # If anybody has better idea please suggest. + + alnums = string.ascii_letters + string.digits + + requests = [] + for i in range(250): + owner = "".join(random.sample(alnums, 10)) + repo = "".join(random.sample(alnums, 10)) + pull_number = random.randint(0, 250) + requests.append( + _mock_request( + method="GET", path=f"/repos/{owner}/{repo}/pulls/{pull_number}/reviews" + ) + ) + + async def run_url_dispatcher_benchmark() -> None: + for request in requests: + await router.resolve(request) + + @benchmark + def _run() -> None: + loop.run_until_complete(run_url_dispatcher_benchmark()) + + +def test_resolve_gitapi_subapps( + loop: asyncio.AbstractEventLoop, + benchmark: BenchmarkFixture, +) -> None: + """Resolve DynamicResource for simulated github API, grouped in subapps. + + The benchmark uses OpenAPI generated info for github. 
+ To update the local data file please run the following command: + $ curl https://raw.githubusercontent.com/github/rest-api-description/refs/heads/main/descriptions/api.github.com/api.github.com.json | jq ".paths | keys" > github-urls.json + """ + + async def handler(request: web.Request) -> NoReturn: + assert False + + here = Path(__file__).parent + with (here / "github-urls.json").open() as f: + urls = json.load(f) + + subapps = { + "gists": web.Application(), + "orgs": web.Application(), + "projects": web.Application(), + "repos": web.Application(), + "teams": web.Application(), + "user": web.Application(), + "users": web.Application(), + } + + app = web.Application() + for url in urls: + parts = url.split("/") + subapp = subapps.get(parts[1], app) + subapp.router.add_get(url, handler) + for key, subapp in subapps.items(): + app.add_subapp("/" + key, subapp) + app.freeze() + router = app.router + + # PR reviews API was selected absolutely voluntary. + # It is not any special but sits somewhere in the middle of the urls list. + # If anybody has better idea please suggest. 
+ + alnums = string.ascii_letters + string.digits + + requests = [] + for i in range(250): + owner = "".join(random.sample(alnums, 10)) + repo = "".join(random.sample(alnums, 10)) + pull_number = random.randint(0, 250) + requests.append( + _mock_request( + method="GET", path=f"/repos/{owner}/{repo}/pulls/{pull_number}/reviews" + ) + ) + + async def run_url_dispatcher_benchmark() -> None: + for request in requests: + await router.resolve(request) @benchmark def _run() -> None: From 2e1cb660f8530ec69c132f6b0d8c050522eae007 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 16 Nov 2024 16:09:19 +0100 Subject: [PATCH 0962/1511] [PR #9925/69756b49 backport][3.12] Add benchmarks for github url resolving emulation (#9928) **This is a backport of PR #9925 as merged into master (69756b495d8fe0e1bf47056582d63cb6b0cb2913).** I think github API is a representative example for possible route table. Co-authored-by: Andrew Svetlov <andrew.svetlov@gmail.com> --- tests/github-urls.json | 653 +++++++++++++++++++++ tests/test_benchmarks_web_urldispatcher.py | 136 ++++- 2 files changed, 783 insertions(+), 6 deletions(-) create mode 100644 tests/github-urls.json diff --git a/tests/github-urls.json b/tests/github-urls.json new file mode 100644 index 00000000000..02e37554615 --- /dev/null +++ b/tests/github-urls.json @@ -0,0 +1,653 @@ +[ + "/", + "/advisories", + "/advisories/{ghsa_id}", + "/app", + "/app-manifests/{code}/conversions", + "/app/hook/config", + "/app/hook/deliveries", + "/app/hook/deliveries/{delivery_id}", + "/app/hook/deliveries/{delivery_id}/attempts", + "/app/installation-requests", + "/app/installations", + "/app/installations/{installation_id}", + "/app/installations/{installation_id}/access_tokens", + "/app/installations/{installation_id}/suspended", + "/applications/{client_id}/grant", + "/applications/{client_id}/token", + "/applications/{client_id}/token/scoped", + "/apps/{app_slug}", + 
"/assignments/{assignment_id}", + "/assignments/{assignment_id}/accepted_assignments", + "/assignments/{assignment_id}/grades", + "/classrooms", + "/classrooms/{classroom_id}", + "/classrooms/{classroom_id}/assignments", + "/codes_of_conduct", + "/codes_of_conduct/{key}", + "/emojis", + "/enterprises/{enterprise}/copilot/billing/seats", + "/enterprises/{enterprise}/copilot/metrics", + "/enterprises/{enterprise}/copilot/usage", + "/enterprises/{enterprise}/dependabot/alerts", + "/enterprises/{enterprise}/secret-scanning/alerts", + "/enterprises/{enterprise}/team/{team_slug}/copilot/metrics", + "/enterprises/{enterprise}/team/{team_slug}/copilot/usage", + "/events", + "/feeds", + "/gists", + "/gists/public", + "/gists/starred", + "/gists/{gist_id}", + "/gists/{gist_id}/comments", + "/gists/{gist_id}/comments/{comment_id}", + "/gists/{gist_id}/commits", + "/gists/{gist_id}/forks", + "/gists/{gist_id}/star", + "/gists/{gist_id}/{sha}", + "/gitignore/templates", + "/gitignore/templates/{name}", + "/installation/repositories", + "/installation/token", + "/issues", + "/licenses", + "/licenses/{license}", + "/markdown", + "/markdown/raw", + "/marketplace_listing/accounts/{account_id}", + "/marketplace_listing/plans", + "/marketplace_listing/plans/{plan_id}/accounts", + "/marketplace_listing/stubbed/accounts/{account_id}", + "/marketplace_listing/stubbed/plans", + "/marketplace_listing/stubbed/plans/{plan_id}/accounts", + "/meta", + "/networks/{owner}/{repo}/events", + "/notifications", + "/notifications/threads/{thread_id}", + "/notifications/threads/{thread_id}/subscription", + "/octocat", + "/organizations", + "/orgs/{org}", + "/orgs/{org}/actions/cache/usage", + "/orgs/{org}/actions/cache/usage-by-repository", + "/orgs/{org}/actions/oidc/customization/sub", + "/orgs/{org}/actions/permissions", + "/orgs/{org}/actions/permissions/repositories", + "/orgs/{org}/actions/permissions/repositories/{repository_id}", + "/orgs/{org}/actions/permissions/selected-actions", + 
"/orgs/{org}/actions/permissions/workflow", + "/orgs/{org}/actions/runner-groups", + "/orgs/{org}/actions/runner-groups/{runner_group_id}", + "/orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", + "/orgs/{org}/actions/runner-groups/{runner_group_id}/repositories/{repository_id}", + "/orgs/{org}/actions/runner-groups/{runner_group_id}/runners", + "/orgs/{org}/actions/runner-groups/{runner_group_id}/runners/{runner_id}", + "/orgs/{org}/actions/runners", + "/orgs/{org}/actions/runners/downloads", + "/orgs/{org}/actions/runners/generate-jitconfig", + "/orgs/{org}/actions/runners/registration-token", + "/orgs/{org}/actions/runners/remove-token", + "/orgs/{org}/actions/runners/{runner_id}", + "/orgs/{org}/actions/runners/{runner_id}/labels", + "/orgs/{org}/actions/runners/{runner_id}/labels/{name}", + "/orgs/{org}/actions/secrets", + "/orgs/{org}/actions/secrets/public-key", + "/orgs/{org}/actions/secrets/{secret_name}", + "/orgs/{org}/actions/secrets/{secret_name}/repositories", + "/orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}", + "/orgs/{org}/actions/variables", + "/orgs/{org}/actions/variables/{name}", + "/orgs/{org}/actions/variables/{name}/repositories", + "/orgs/{org}/actions/variables/{name}/repositories/{repository_id}", + "/orgs/{org}/attestations/{subject_digest}", + "/orgs/{org}/blocks", + "/orgs/{org}/blocks/{username}", + "/orgs/{org}/code-scanning/alerts", + "/orgs/{org}/code-security/configurations", + "/orgs/{org}/code-security/configurations/defaults", + "/orgs/{org}/code-security/configurations/detach", + "/orgs/{org}/code-security/configurations/{configuration_id}", + "/orgs/{org}/code-security/configurations/{configuration_id}/attach", + "/orgs/{org}/code-security/configurations/{configuration_id}/defaults", + "/orgs/{org}/code-security/configurations/{configuration_id}/repositories", + "/orgs/{org}/codespaces", + "/orgs/{org}/codespaces/access", + "/orgs/{org}/codespaces/access/selected_users", + 
"/orgs/{org}/codespaces/secrets", + "/orgs/{org}/codespaces/secrets/public-key", + "/orgs/{org}/codespaces/secrets/{secret_name}", + "/orgs/{org}/codespaces/secrets/{secret_name}/repositories", + "/orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}", + "/orgs/{org}/copilot/billing", + "/orgs/{org}/copilot/billing/seats", + "/orgs/{org}/copilot/billing/selected_teams", + "/orgs/{org}/copilot/billing/selected_users", + "/orgs/{org}/copilot/metrics", + "/orgs/{org}/copilot/usage", + "/orgs/{org}/dependabot/alerts", + "/orgs/{org}/dependabot/secrets", + "/orgs/{org}/dependabot/secrets/public-key", + "/orgs/{org}/dependabot/secrets/{secret_name}", + "/orgs/{org}/dependabot/secrets/{secret_name}/repositories", + "/orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}", + "/orgs/{org}/docker/conflicts", + "/orgs/{org}/events", + "/orgs/{org}/failed_invitations", + "/orgs/{org}/hooks", + "/orgs/{org}/hooks/{hook_id}", + "/orgs/{org}/hooks/{hook_id}/config", + "/orgs/{org}/hooks/{hook_id}/deliveries", + "/orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}", + "/orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts", + "/orgs/{org}/hooks/{hook_id}/pings", + "/orgs/{org}/insights/api/route-stats/{actor_type}/{actor_id}", + "/orgs/{org}/insights/api/subject-stats", + "/orgs/{org}/insights/api/summary-stats", + "/orgs/{org}/insights/api/summary-stats/users/{user_id}", + "/orgs/{org}/insights/api/summary-stats/{actor_type}/{actor_id}", + "/orgs/{org}/insights/api/time-stats", + "/orgs/{org}/insights/api/time-stats/users/{user_id}", + "/orgs/{org}/insights/api/time-stats/{actor_type}/{actor_id}", + "/orgs/{org}/insights/api/user-stats/{user_id}", + "/orgs/{org}/installation", + "/orgs/{org}/installations", + "/orgs/{org}/interaction-limits", + "/orgs/{org}/invitations", + "/orgs/{org}/invitations/{invitation_id}", + "/orgs/{org}/invitations/{invitation_id}/teams", + "/orgs/{org}/issues", + "/orgs/{org}/members", + 
"/orgs/{org}/members/{username}", + "/orgs/{org}/members/{username}/codespaces", + "/orgs/{org}/members/{username}/codespaces/{codespace_name}", + "/orgs/{org}/members/{username}/codespaces/{codespace_name}/stop", + "/orgs/{org}/members/{username}/copilot", + "/orgs/{org}/memberships/{username}", + "/orgs/{org}/migrations", + "/orgs/{org}/migrations/{migration_id}", + "/orgs/{org}/migrations/{migration_id}/archive", + "/orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock", + "/orgs/{org}/migrations/{migration_id}/repositories", + "/orgs/{org}/organization-roles", + "/orgs/{org}/organization-roles/teams/{team_slug}", + "/orgs/{org}/organization-roles/teams/{team_slug}/{role_id}", + "/orgs/{org}/organization-roles/users/{username}", + "/orgs/{org}/organization-roles/users/{username}/{role_id}", + "/orgs/{org}/organization-roles/{role_id}", + "/orgs/{org}/organization-roles/{role_id}/teams", + "/orgs/{org}/organization-roles/{role_id}/users", + "/orgs/{org}/outside_collaborators", + "/orgs/{org}/outside_collaborators/{username}", + "/orgs/{org}/packages", + "/orgs/{org}/packages/{package_type}/{package_name}", + "/orgs/{org}/packages/{package_type}/{package_name}/restore", + "/orgs/{org}/packages/{package_type}/{package_name}/versions", + "/orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}", + "/orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore", + "/orgs/{org}/personal-access-token-requests", + "/orgs/{org}/personal-access-token-requests/{pat_request_id}", + "/orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories", + "/orgs/{org}/personal-access-tokens", + "/orgs/{org}/personal-access-tokens/{pat_id}", + "/orgs/{org}/personal-access-tokens/{pat_id}/repositories", + "/orgs/{org}/projects", + "/orgs/{org}/properties/schema", + "/orgs/{org}/properties/schema/{custom_property_name}", + "/orgs/{org}/properties/values", + "/orgs/{org}/public_members", + 
"/orgs/{org}/public_members/{username}", + "/orgs/{org}/repos", + "/orgs/{org}/rulesets", + "/orgs/{org}/rulesets/rule-suites", + "/orgs/{org}/rulesets/rule-suites/{rule_suite_id}", + "/orgs/{org}/rulesets/{ruleset_id}", + "/orgs/{org}/secret-scanning/alerts", + "/orgs/{org}/security-advisories", + "/orgs/{org}/security-managers", + "/orgs/{org}/security-managers/teams/{team_slug}", + "/orgs/{org}/settings/billing/actions", + "/orgs/{org}/settings/billing/packages", + "/orgs/{org}/settings/billing/shared-storage", + "/orgs/{org}/team/{team_slug}/copilot/metrics", + "/orgs/{org}/team/{team_slug}/copilot/usage", + "/orgs/{org}/teams", + "/orgs/{org}/teams/{team_slug}", + "/orgs/{org}/teams/{team_slug}/discussions", + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}", + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}", + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}", + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}", + "/orgs/{org}/teams/{team_slug}/invitations", + "/orgs/{org}/teams/{team_slug}/members", + "/orgs/{org}/teams/{team_slug}/memberships/{username}", + "/orgs/{org}/teams/{team_slug}/projects", + "/orgs/{org}/teams/{team_slug}/projects/{project_id}", + "/orgs/{org}/teams/{team_slug}/repos", + "/orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", + "/orgs/{org}/teams/{team_slug}/teams", + "/orgs/{org}/{security_product}/{enablement}", + "/projects/columns/cards/{card_id}", + "/projects/columns/cards/{card_id}/moves", + "/projects/columns/{column_id}", + "/projects/columns/{column_id}/cards", + "/projects/columns/{column_id}/moves", + 
"/projects/{project_id}", + "/projects/{project_id}/collaborators", + "/projects/{project_id}/collaborators/{username}", + "/projects/{project_id}/collaborators/{username}/permission", + "/projects/{project_id}/columns", + "/rate_limit", + "/repos/{owner}/{repo}", + "/repos/{owner}/{repo}/actions/artifacts", + "/repos/{owner}/{repo}/actions/artifacts/{artifact_id}", + "/repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}", + "/repos/{owner}/{repo}/actions/cache/usage", + "/repos/{owner}/{repo}/actions/caches", + "/repos/{owner}/{repo}/actions/caches/{cache_id}", + "/repos/{owner}/{repo}/actions/jobs/{job_id}", + "/repos/{owner}/{repo}/actions/jobs/{job_id}/logs", + "/repos/{owner}/{repo}/actions/jobs/{job_id}/rerun", + "/repos/{owner}/{repo}/actions/oidc/customization/sub", + "/repos/{owner}/{repo}/actions/organization-secrets", + "/repos/{owner}/{repo}/actions/organization-variables", + "/repos/{owner}/{repo}/actions/permissions", + "/repos/{owner}/{repo}/actions/permissions/access", + "/repos/{owner}/{repo}/actions/permissions/selected-actions", + "/repos/{owner}/{repo}/actions/permissions/workflow", + "/repos/{owner}/{repo}/actions/runners", + "/repos/{owner}/{repo}/actions/runners/downloads", + "/repos/{owner}/{repo}/actions/runners/generate-jitconfig", + "/repos/{owner}/{repo}/actions/runners/registration-token", + "/repos/{owner}/{repo}/actions/runners/remove-token", + "/repos/{owner}/{repo}/actions/runners/{runner_id}", + "/repos/{owner}/{repo}/actions/runners/{runner_id}/labels", + "/repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}", + "/repos/{owner}/{repo}/actions/runs", + "/repos/{owner}/{repo}/actions/runs/{run_id}", + "/repos/{owner}/{repo}/actions/runs/{run_id}/approvals", + "/repos/{owner}/{repo}/actions/runs/{run_id}/approve", + "/repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", + "/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}", + 
"/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", + "/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs", + "/repos/{owner}/{repo}/actions/runs/{run_id}/cancel", + "/repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule", + "/repos/{owner}/{repo}/actions/runs/{run_id}/force-cancel", + "/repos/{owner}/{repo}/actions/runs/{run_id}/jobs", + "/repos/{owner}/{repo}/actions/runs/{run_id}/logs", + "/repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments", + "/repos/{owner}/{repo}/actions/runs/{run_id}/rerun", + "/repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs", + "/repos/{owner}/{repo}/actions/runs/{run_id}/timing", + "/repos/{owner}/{repo}/actions/secrets", + "/repos/{owner}/{repo}/actions/secrets/public-key", + "/repos/{owner}/{repo}/actions/secrets/{secret_name}", + "/repos/{owner}/{repo}/actions/variables", + "/repos/{owner}/{repo}/actions/variables/{name}", + "/repos/{owner}/{repo}/actions/workflows", + "/repos/{owner}/{repo}/actions/workflows/{workflow_id}", + "/repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable", + "/repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches", + "/repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable", + "/repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", + "/repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing", + "/repos/{owner}/{repo}/activity", + "/repos/{owner}/{repo}/assignees", + "/repos/{owner}/{repo}/assignees/{assignee}", + "/repos/{owner}/{repo}/attestations", + "/repos/{owner}/{repo}/attestations/{subject_digest}", + "/repos/{owner}/{repo}/autolinks", + "/repos/{owner}/{repo}/autolinks/{autolink_id}", + "/repos/{owner}/{repo}/automated-security-fixes", + "/repos/{owner}/{repo}/branches", + "/repos/{owner}/{repo}/branches/{branch}", + "/repos/{owner}/{repo}/branches/{branch}/protection", + "/repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", + 
"/repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", + "/repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", + "/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", + "/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + "/repos/{owner}/{repo}/branches/{branch}/protection/restrictions", + "/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + "/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + "/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + "/repos/{owner}/{repo}/branches/{branch}/rename", + "/repos/{owner}/{repo}/check-runs", + "/repos/{owner}/{repo}/check-runs/{check_run_id}", + "/repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", + "/repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest", + "/repos/{owner}/{repo}/check-suites", + "/repos/{owner}/{repo}/check-suites/preferences", + "/repos/{owner}/{repo}/check-suites/{check_suite_id}", + "/repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", + "/repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest", + "/repos/{owner}/{repo}/code-scanning/alerts", + "/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", + "/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + "/repos/{owner}/{repo}/code-scanning/analyses", + "/repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}", + "/repos/{owner}/{repo}/code-scanning/codeql/databases", + "/repos/{owner}/{repo}/code-scanning/codeql/databases/{language}", + "/repos/{owner}/{repo}/code-scanning/codeql/variant-analyses", + "/repos/{owner}/{repo}/code-scanning/codeql/variant-analyses/{codeql_variant_analysis_id}", + "/repos/{owner}/{repo}/code-scanning/codeql/variant-analyses/{codeql_variant_analysis_id}/repos/{repo_owner}/{repo_name}", + "/repos/{owner}/{repo}/code-scanning/default-setup", + "/repos/{owner}/{repo}/code-scanning/sarifs", + 
"/repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}", + "/repos/{owner}/{repo}/code-security-configuration", + "/repos/{owner}/{repo}/codeowners/errors", + "/repos/{owner}/{repo}/codespaces", + "/repos/{owner}/{repo}/codespaces/devcontainers", + "/repos/{owner}/{repo}/codespaces/machines", + "/repos/{owner}/{repo}/codespaces/new", + "/repos/{owner}/{repo}/codespaces/permissions_check", + "/repos/{owner}/{repo}/codespaces/secrets", + "/repos/{owner}/{repo}/codespaces/secrets/public-key", + "/repos/{owner}/{repo}/codespaces/secrets/{secret_name}", + "/repos/{owner}/{repo}/collaborators", + "/repos/{owner}/{repo}/collaborators/{username}", + "/repos/{owner}/{repo}/collaborators/{username}/permission", + "/repos/{owner}/{repo}/comments", + "/repos/{owner}/{repo}/comments/{comment_id}", + "/repos/{owner}/{repo}/comments/{comment_id}/reactions", + "/repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}", + "/repos/{owner}/{repo}/commits", + "/repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", + "/repos/{owner}/{repo}/commits/{commit_sha}/comments", + "/repos/{owner}/{repo}/commits/{commit_sha}/pulls", + "/repos/{owner}/{repo}/commits/{ref}", + "/repos/{owner}/{repo}/commits/{ref}/check-runs", + "/repos/{owner}/{repo}/commits/{ref}/check-suites", + "/repos/{owner}/{repo}/commits/{ref}/status", + "/repos/{owner}/{repo}/commits/{ref}/statuses", + "/repos/{owner}/{repo}/community/profile", + "/repos/{owner}/{repo}/compare/{basehead}", + "/repos/{owner}/{repo}/contents/{path}", + "/repos/{owner}/{repo}/contributors", + "/repos/{owner}/{repo}/dependabot/alerts", + "/repos/{owner}/{repo}/dependabot/alerts/{alert_number}", + "/repos/{owner}/{repo}/dependabot/secrets", + "/repos/{owner}/{repo}/dependabot/secrets/public-key", + "/repos/{owner}/{repo}/dependabot/secrets/{secret_name}", + "/repos/{owner}/{repo}/dependency-graph/compare/{basehead}", + "/repos/{owner}/{repo}/dependency-graph/sbom", + "/repos/{owner}/{repo}/dependency-graph/snapshots", + 
"/repos/{owner}/{repo}/deployments", + "/repos/{owner}/{repo}/deployments/{deployment_id}", + "/repos/{owner}/{repo}/deployments/{deployment_id}/statuses", + "/repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}", + "/repos/{owner}/{repo}/dispatches", + "/repos/{owner}/{repo}/environments", + "/repos/{owner}/{repo}/environments/{environment_name}", + "/repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies", + "/repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}", + "/repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules", + "/repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps", + "/repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}", + "/repos/{owner}/{repo}/environments/{environment_name}/secrets", + "/repos/{owner}/{repo}/environments/{environment_name}/secrets/public-key", + "/repos/{owner}/{repo}/environments/{environment_name}/secrets/{secret_name}", + "/repos/{owner}/{repo}/environments/{environment_name}/variables", + "/repos/{owner}/{repo}/environments/{environment_name}/variables/{name}", + "/repos/{owner}/{repo}/events", + "/repos/{owner}/{repo}/forks", + "/repos/{owner}/{repo}/git/blobs", + "/repos/{owner}/{repo}/git/blobs/{file_sha}", + "/repos/{owner}/{repo}/git/commits", + "/repos/{owner}/{repo}/git/commits/{commit_sha}", + "/repos/{owner}/{repo}/git/matching-refs/{ref}", + "/repos/{owner}/{repo}/git/ref/{ref}", + "/repos/{owner}/{repo}/git/refs", + "/repos/{owner}/{repo}/git/refs/{ref}", + "/repos/{owner}/{repo}/git/tags", + "/repos/{owner}/{repo}/git/tags/{tag_sha}", + "/repos/{owner}/{repo}/git/trees", + "/repos/{owner}/{repo}/git/trees/{tree_sha}", + "/repos/{owner}/{repo}/hooks", + "/repos/{owner}/{repo}/hooks/{hook_id}", + "/repos/{owner}/{repo}/hooks/{hook_id}/config", + "/repos/{owner}/{repo}/hooks/{hook_id}/deliveries", + 
"/repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}", + "/repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts", + "/repos/{owner}/{repo}/hooks/{hook_id}/pings", + "/repos/{owner}/{repo}/hooks/{hook_id}/tests", + "/repos/{owner}/{repo}/import", + "/repos/{owner}/{repo}/import/authors", + "/repos/{owner}/{repo}/import/authors/{author_id}", + "/repos/{owner}/{repo}/import/large_files", + "/repos/{owner}/{repo}/import/lfs", + "/repos/{owner}/{repo}/installation", + "/repos/{owner}/{repo}/interaction-limits", + "/repos/{owner}/{repo}/invitations", + "/repos/{owner}/{repo}/invitations/{invitation_id}", + "/repos/{owner}/{repo}/issues", + "/repos/{owner}/{repo}/issues/comments", + "/repos/{owner}/{repo}/issues/comments/{comment_id}", + "/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", + "/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}", + "/repos/{owner}/{repo}/issues/events", + "/repos/{owner}/{repo}/issues/events/{event_id}", + "/repos/{owner}/{repo}/issues/{issue_number}", + "/repos/{owner}/{repo}/issues/{issue_number}/assignees", + "/repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}", + "/repos/{owner}/{repo}/issues/{issue_number}/comments", + "/repos/{owner}/{repo}/issues/{issue_number}/events", + "/repos/{owner}/{repo}/issues/{issue_number}/labels", + "/repos/{owner}/{repo}/issues/{issue_number}/labels/{name}", + "/repos/{owner}/{repo}/issues/{issue_number}/lock", + "/repos/{owner}/{repo}/issues/{issue_number}/reactions", + "/repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}", + "/repos/{owner}/{repo}/issues/{issue_number}/timeline", + "/repos/{owner}/{repo}/keys", + "/repos/{owner}/{repo}/keys/{key_id}", + "/repos/{owner}/{repo}/labels", + "/repos/{owner}/{repo}/labels/{name}", + "/repos/{owner}/{repo}/languages", + "/repos/{owner}/{repo}/license", + "/repos/{owner}/{repo}/merge-upstream", + "/repos/{owner}/{repo}/merges", + "/repos/{owner}/{repo}/milestones", + 
"/repos/{owner}/{repo}/milestones/{milestone_number}", + "/repos/{owner}/{repo}/milestones/{milestone_number}/labels", + "/repos/{owner}/{repo}/notifications", + "/repos/{owner}/{repo}/pages", + "/repos/{owner}/{repo}/pages/builds", + "/repos/{owner}/{repo}/pages/builds/latest", + "/repos/{owner}/{repo}/pages/builds/{build_id}", + "/repos/{owner}/{repo}/pages/deployments", + "/repos/{owner}/{repo}/pages/deployments/{pages_deployment_id}", + "/repos/{owner}/{repo}/pages/deployments/{pages_deployment_id}/cancel", + "/repos/{owner}/{repo}/pages/health", + "/repos/{owner}/{repo}/private-vulnerability-reporting", + "/repos/{owner}/{repo}/projects", + "/repos/{owner}/{repo}/properties/values", + "/repos/{owner}/{repo}/pulls", + "/repos/{owner}/{repo}/pulls/comments", + "/repos/{owner}/{repo}/pulls/comments/{comment_id}", + "/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", + "/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}", + "/repos/{owner}/{repo}/pulls/{pull_number}", + "/repos/{owner}/{repo}/pulls/{pull_number}/codespaces", + "/repos/{owner}/{repo}/pulls/{pull_number}/comments", + "/repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies", + "/repos/{owner}/{repo}/pulls/{pull_number}/commits", + "/repos/{owner}/{repo}/pulls/{pull_number}/files", + "/repos/{owner}/{repo}/pulls/{pull_number}/merge", + "/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", + "/repos/{owner}/{repo}/pulls/{pull_number}/reviews", + "/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}", + "/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", + "/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals", + "/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events", + "/repos/{owner}/{repo}/pulls/{pull_number}/update-branch", + "/repos/{owner}/{repo}/readme", + "/repos/{owner}/{repo}/readme/{dir}", + "/repos/{owner}/{repo}/releases", + 
"/repos/{owner}/{repo}/releases/assets/{asset_id}", + "/repos/{owner}/{repo}/releases/generate-notes", + "/repos/{owner}/{repo}/releases/latest", + "/repos/{owner}/{repo}/releases/tags/{tag}", + "/repos/{owner}/{repo}/releases/{release_id}", + "/repos/{owner}/{repo}/releases/{release_id}/assets", + "/repos/{owner}/{repo}/releases/{release_id}/reactions", + "/repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}", + "/repos/{owner}/{repo}/rules/branches/{branch}", + "/repos/{owner}/{repo}/rulesets", + "/repos/{owner}/{repo}/rulesets/rule-suites", + "/repos/{owner}/{repo}/rulesets/rule-suites/{rule_suite_id}", + "/repos/{owner}/{repo}/rulesets/{ruleset_id}", + "/repos/{owner}/{repo}/secret-scanning/alerts", + "/repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}", + "/repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", + "/repos/{owner}/{repo}/secret-scanning/push-protection-bypasses", + "/repos/{owner}/{repo}/security-advisories", + "/repos/{owner}/{repo}/security-advisories/reports", + "/repos/{owner}/{repo}/security-advisories/{ghsa_id}", + "/repos/{owner}/{repo}/security-advisories/{ghsa_id}/cve", + "/repos/{owner}/{repo}/security-advisories/{ghsa_id}/forks", + "/repos/{owner}/{repo}/stargazers", + "/repos/{owner}/{repo}/stats/code_frequency", + "/repos/{owner}/{repo}/stats/commit_activity", + "/repos/{owner}/{repo}/stats/contributors", + "/repos/{owner}/{repo}/stats/participation", + "/repos/{owner}/{repo}/stats/punch_card", + "/repos/{owner}/{repo}/statuses/{sha}", + "/repos/{owner}/{repo}/subscribers", + "/repos/{owner}/{repo}/subscription", + "/repos/{owner}/{repo}/tags", + "/repos/{owner}/{repo}/tags/protection", + "/repos/{owner}/{repo}/tags/protection/{tag_protection_id}", + "/repos/{owner}/{repo}/tarball/{ref}", + "/repos/{owner}/{repo}/teams", + "/repos/{owner}/{repo}/topics", + "/repos/{owner}/{repo}/traffic/clones", + "/repos/{owner}/{repo}/traffic/popular/paths", + 
"/repos/{owner}/{repo}/traffic/popular/referrers", + "/repos/{owner}/{repo}/traffic/views", + "/repos/{owner}/{repo}/transfer", + "/repos/{owner}/{repo}/vulnerability-alerts", + "/repos/{owner}/{repo}/zipball/{ref}", + "/repos/{template_owner}/{template_repo}/generate", + "/repositories", + "/search/code", + "/search/commits", + "/search/issues", + "/search/labels", + "/search/repositories", + "/search/topics", + "/search/users", + "/teams/{team_id}", + "/teams/{team_id}/discussions", + "/teams/{team_id}/discussions/{discussion_number}", + "/teams/{team_id}/discussions/{discussion_number}/comments", + "/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}", + "/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "/teams/{team_id}/discussions/{discussion_number}/reactions", + "/teams/{team_id}/invitations", + "/teams/{team_id}/members", + "/teams/{team_id}/members/{username}", + "/teams/{team_id}/memberships/{username}", + "/teams/{team_id}/projects", + "/teams/{team_id}/projects/{project_id}", + "/teams/{team_id}/repos", + "/teams/{team_id}/repos/{owner}/{repo}", + "/teams/{team_id}/teams", + "/user", + "/user/blocks", + "/user/blocks/{username}", + "/user/codespaces", + "/user/codespaces/secrets", + "/user/codespaces/secrets/public-key", + "/user/codespaces/secrets/{secret_name}", + "/user/codespaces/secrets/{secret_name}/repositories", + "/user/codespaces/secrets/{secret_name}/repositories/{repository_id}", + "/user/codespaces/{codespace_name}", + "/user/codespaces/{codespace_name}/exports", + "/user/codespaces/{codespace_name}/exports/{export_id}", + "/user/codespaces/{codespace_name}/machines", + "/user/codespaces/{codespace_name}/publish", + "/user/codespaces/{codespace_name}/start", + "/user/codespaces/{codespace_name}/stop", + "/user/docker/conflicts", + "/user/email/visibility", + "/user/emails", + "/user/followers", + "/user/following", + "/user/following/{username}", + "/user/gpg_keys", + 
"/user/gpg_keys/{gpg_key_id}", + "/user/installations", + "/user/installations/{installation_id}/repositories", + "/user/installations/{installation_id}/repositories/{repository_id}", + "/user/interaction-limits", + "/user/issues", + "/user/keys", + "/user/keys/{key_id}", + "/user/marketplace_purchases", + "/user/marketplace_purchases/stubbed", + "/user/memberships/orgs", + "/user/memberships/orgs/{org}", + "/user/migrations", + "/user/migrations/{migration_id}", + "/user/migrations/{migration_id}/archive", + "/user/migrations/{migration_id}/repos/{repo_name}/lock", + "/user/migrations/{migration_id}/repositories", + "/user/orgs", + "/user/packages", + "/user/packages/{package_type}/{package_name}", + "/user/packages/{package_type}/{package_name}/restore", + "/user/packages/{package_type}/{package_name}/versions", + "/user/packages/{package_type}/{package_name}/versions/{package_version_id}", + "/user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore", + "/user/projects", + "/user/public_emails", + "/user/repos", + "/user/repository_invitations", + "/user/repository_invitations/{invitation_id}", + "/user/social_accounts", + "/user/ssh_signing_keys", + "/user/ssh_signing_keys/{ssh_signing_key_id}", + "/user/starred", + "/user/starred/{owner}/{repo}", + "/user/subscriptions", + "/user/teams", + "/user/{account_id}", + "/users", + "/users/{username}", + "/users/{username}/attestations/{subject_digest}", + "/users/{username}/docker/conflicts", + "/users/{username}/events", + "/users/{username}/events/orgs/{org}", + "/users/{username}/events/public", + "/users/{username}/followers", + "/users/{username}/following", + "/users/{username}/following/{target_user}", + "/users/{username}/gists", + "/users/{username}/gpg_keys", + "/users/{username}/hovercard", + "/users/{username}/installation", + "/users/{username}/keys", + "/users/{username}/orgs", + "/users/{username}/packages", + "/users/{username}/packages/{package_type}/{package_name}", + 
"/users/{username}/packages/{package_type}/{package_name}/restore", + "/users/{username}/packages/{package_type}/{package_name}/versions", + "/users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}", + "/users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore", + "/users/{username}/projects", + "/users/{username}/received_events", + "/users/{username}/received_events/public", + "/users/{username}/repos", + "/users/{username}/settings/billing/actions", + "/users/{username}/settings/billing/packages", + "/users/{username}/settings/billing/shared-storage", + "/users/{username}/social_accounts", + "/users/{username}/ssh_signing_keys", + "/users/{username}/starred", + "/users/{username}/subscriptions", + "/versions", + "/zen" +] diff --git a/tests/test_benchmarks_web_urldispatcher.py b/tests/test_benchmarks_web_urldispatcher.py index cca3be0c826..452f4afa7b3 100644 --- a/tests/test_benchmarks_web_urldispatcher.py +++ b/tests/test_benchmarks_web_urldispatcher.py @@ -1,7 +1,11 @@ """codspeed benchmarks for the URL dispatcher.""" import asyncio +import json import pathlib +import random +import string +from pathlib import Path from typing import NoReturn from unittest import mock @@ -46,11 +50,12 @@ async def handler(request: web.Request) -> NoReturn: app = web.Application() app.router.add_route("GET", "/", handler) app.freeze() + router = app.router request = _mock_request(method="GET", path="/") async def run_url_dispatcher_benchmark() -> None: for _ in range(resolve_count): - await app._router.resolve(request) + await router.resolve(request) @benchmark def _run() -> None: @@ -67,11 +72,12 @@ def test_resolve_static_root_route( app = web.Application() app.router.add_static("/", pathlib.Path(aiohttp.__file__).parent) app.freeze() + router = app.router request = _mock_request(method="GET", path="/") async def run_url_dispatcher_benchmark() -> None: for _ in range(resolve_count): - await 
app._router.resolve(request) + await router.resolve(request) @benchmark def _run() -> None: @@ -92,11 +98,12 @@ async def handler(request: web.Request) -> NoReturn: for count in range(250): app.router.add_route("GET", f"/api/server/dispatch/{count}/update", handler) app.freeze() + router = app.router request = _mock_request(method="GET", path="/api/server/dispatch/1/update") async def run_url_dispatcher_benchmark() -> None: for _ in range(resolve_count): - await app._router.resolve(request) + await router.resolve(request) @benchmark def _run() -> None: @@ -116,6 +123,7 @@ async def handler(request: web.Request) -> NoReturn: for count in range(250): app.router.add_route("GET", f"/api/server/dispatch/{count}/update", handler) app.freeze() + router = app.router requests = [ _mock_request(method="GET", path=f"/api/server/dispatch/{count}/update") @@ -124,7 +132,7 @@ async def handler(request: web.Request) -> NoReturn: async def run_url_dispatcher_benchmark() -> None: for request in requests: - await app._router.resolve(request) + await router.resolve(request) @benchmark def _run() -> None: @@ -152,12 +160,13 @@ async def handler(request: web.Request) -> NoReturn: for url in urls: app.router.add_route("GET", url, handler) app.freeze() + router = app.router requests = [_mock_request(method="GET", path=url) for url in urls] async def run_url_dispatcher_benchmark() -> None: for request in requests: - await app._router.resolve(request) + await router.resolve(request) @benchmark def _run() -> None: @@ -178,6 +187,7 @@ async def handler(request: web.Request) -> NoReturn: app.router.add_route("GET", f"/api/server/other/{count}/update", handler) app.router.add_route("GET", "/api/server/dispatch/{customer}/update", handler) app.freeze() + router = app.router requests = [ _mock_request(method="GET", path=f"/api/server/dispatch/{customer}/update") @@ -186,7 +196,121 @@ async def handler(request: web.Request) -> NoReturn: async def run_url_dispatcher_benchmark() -> None: for 
request in requests: - await app._router.resolve(request) + await router.resolve(request) + + @benchmark + def _run() -> None: + loop.run_until_complete(run_url_dispatcher_benchmark()) + + +def test_resolve_gitapi( + loop: asyncio.AbstractEventLoop, + benchmark: BenchmarkFixture, +) -> None: + """Resolve DynamicResource for simulated github API. + + The benchmark uses OpenAPI generated info for github. + To update the local data file please run the following command: + $ curl https://raw.githubusercontent.com/github/rest-api-description/refs/heads/main/descriptions/api.github.com/api.github.com.json | jq ".paths | keys" > github-urls.json + """ + + async def handler(request: web.Request) -> NoReturn: + assert False + + here = Path(__file__).parent + with (here / "github-urls.json").open() as f: + urls = json.load(f) + + app = web.Application() + for url in urls: + app.router.add_get(url, handler) + app.freeze() + router = app.router + + # PR reviews API was selected absolutely voluntary. + # It is not any special but sits somewhere in the middle of the urls list. + # If anybody has better idea please suggest. + + alnums = string.ascii_letters + string.digits + + requests = [] + for i in range(250): + owner = "".join(random.sample(alnums, 10)) + repo = "".join(random.sample(alnums, 10)) + pull_number = random.randint(0, 250) + requests.append( + _mock_request( + method="GET", path=f"/repos/{owner}/{repo}/pulls/{pull_number}/reviews" + ) + ) + + async def run_url_dispatcher_benchmark() -> None: + for request in requests: + await router.resolve(request) + + @benchmark + def _run() -> None: + loop.run_until_complete(run_url_dispatcher_benchmark()) + + +def test_resolve_gitapi_subapps( + loop: asyncio.AbstractEventLoop, + benchmark: BenchmarkFixture, +) -> None: + """Resolve DynamicResource for simulated github API, grouped in subapps. + + The benchmark uses OpenAPI generated info for github. 
+ To update the local data file please run the following command: + $ curl https://raw.githubusercontent.com/github/rest-api-description/refs/heads/main/descriptions/api.github.com/api.github.com.json | jq ".paths | keys" > github-urls.json + """ + + async def handler(request: web.Request) -> NoReturn: + assert False + + here = Path(__file__).parent + with (here / "github-urls.json").open() as f: + urls = json.load(f) + + subapps = { + "gists": web.Application(), + "orgs": web.Application(), + "projects": web.Application(), + "repos": web.Application(), + "teams": web.Application(), + "user": web.Application(), + "users": web.Application(), + } + + app = web.Application() + for url in urls: + parts = url.split("/") + subapp = subapps.get(parts[1], app) + subapp.router.add_get(url, handler) + for key, subapp in subapps.items(): + app.add_subapp("/" + key, subapp) + app.freeze() + router = app.router + + # PR reviews API was selected absolutely voluntary. + # It is not any special but sits somewhere in the middle of the urls list. + # If anybody has better idea please suggest. 
+ + alnums = string.ascii_letters + string.digits + + requests = [] + for i in range(250): + owner = "".join(random.sample(alnums, 10)) + repo = "".join(random.sample(alnums, 10)) + pull_number = random.randint(0, 250) + requests.append( + _mock_request( + method="GET", path=f"/repos/{owner}/{repo}/pulls/{pull_number}/reviews" + ) + ) + + async def run_url_dispatcher_benchmark() -> None: + for request in requests: + await router.resolve(request) @benchmark def _run() -> None: From 4a80add2e56acab95ac7a18ae8535ea830f8216a Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 16 Nov 2024 19:10:35 -0600 Subject: [PATCH 0963/1511] [PR #9929/73691e49 backport][3.12] Add couple benchmarks for dynamic routes (#9932) Co-authored-by: Andrew Svetlov <andrew.svetlov@gmail.com> --- tests/test_benchmarks_web_urldispatcher.py | 64 +++++++++++++++++++++- 1 file changed, 63 insertions(+), 1 deletion(-) diff --git a/tests/test_benchmarks_web_urldispatcher.py b/tests/test_benchmarks_web_urldispatcher.py index 452f4afa7b3..16a59516b35 100644 --- a/tests/test_benchmarks_web_urldispatcher.py +++ b/tests/test_benchmarks_web_urldispatcher.py @@ -173,7 +173,7 @@ def _run() -> None: loop.run_until_complete(run_url_dispatcher_benchmark()) -def test_resolve_dynamic_resource_url_with_many_routes( +def test_resolve_dynamic_resource_url_with_many_static_routes( loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture, ) -> None: @@ -203,6 +203,68 @@ def _run() -> None: loop.run_until_complete(run_url_dispatcher_benchmark()) +def test_resolve_dynamic_resource_url_with_many_dynamic_routes( + loop: asyncio.AbstractEventLoop, + benchmark: BenchmarkFixture, +) -> None: + """Resolve different a DynamicResource when there are 250 DynamicResources registered.""" + + async def handler(request: web.Request) -> NoReturn: + assert False + + app = web.Application() + for count in range(250): + app.router.add_route( + "GET", 
f"/api/server/other/{{customer}}/update{count}", handler + ) + app.router.add_route("GET", "/api/server/dispatch/{customer}/update", handler) + app.freeze() + router = app.router + + requests = [ + _mock_request(method="GET", path=f"/api/server/dispatch/{customer}/update") + for customer in range(250) + ] + + async def run_url_dispatcher_benchmark() -> None: + for request in requests: + await router.resolve(request) + + @benchmark + def _run() -> None: + loop.run_until_complete(run_url_dispatcher_benchmark()) + + +def test_resolve_dynamic_resource_url_with_many_dynamic_routes_with_common_prefix( + loop: asyncio.AbstractEventLoop, + benchmark: BenchmarkFixture, +) -> None: + """Resolve different a DynamicResource when there are 250 DynamicResources registered with the same common prefix.""" + + async def handler(request: web.Request) -> NoReturn: + assert False + + app = web.Application() + for count in range(250): + app.router.add_route("GET", f"/api/{{customer}}/show_{count}", handler) + app.router.add_route("GET", "/api/{customer}/update", handler) + app.freeze() + router = app.router + + requests = [ + _mock_request(method="GET", path=f"/api/{customer}/update") + for customer in range(250) + ] + + async def run_url_dispatcher_benchmark() -> None: + for request in requests: + await router.resolve(request) + + @benchmark + def _run() -> None: + loop.run_until_complete(run_url_dispatcher_benchmark()) + + def test_resolve_gitapi( loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture, From 07bf925901f79dc2a13089712e08dc98fbd2a536 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 16 Nov 2024 19:11:00 -0600 Subject: [PATCH 0964/1511] [PR #9929/73691e49 backport][3.11] Add couple benchmarks for dynamic routes (#9931) Co-authored-by: Andrew Svetlov <andrew.svetlov@gmail.com> --- tests/test_benchmarks_web_urldispatcher.py | 64 +++++++++++++++++++++- 1 file changed, 63 insertions(+), 1 deletion(-) diff 
--git a/tests/test_benchmarks_web_urldispatcher.py b/tests/test_benchmarks_web_urldispatcher.py index 452f4afa7b3..16a59516b35 100644 --- a/tests/test_benchmarks_web_urldispatcher.py +++ b/tests/test_benchmarks_web_urldispatcher.py @@ -173,7 +173,7 @@ def _run() -> None: loop.run_until_complete(run_url_dispatcher_benchmark()) -def test_resolve_dynamic_resource_url_with_many_routes( +def test_resolve_dynamic_resource_url_with_many_static_routes( loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture, ) -> None: @@ -203,6 +203,68 @@ def _run() -> None: loop.run_until_complete(run_url_dispatcher_benchmark()) +def test_resolve_dynamic_resource_url_with_many_dynamic_routes( + loop: asyncio.AbstractEventLoop, + benchmark: BenchmarkFixture, +) -> None: + """Resolve different a DynamicResource when there are 250 DynamicResources registered.""" + + async def handler(request: web.Request) -> NoReturn: + assert False + + app = web.Application() + for count in range(250): + app.router.add_route( + "GET", f"/api/server/other/{{customer}}/update{count}", handler + ) + app.router.add_route("GET", "/api/server/dispatch/{customer}/update", handler) + app.freeze() + router = app.router + + requests = [ + _mock_request(method="GET", path=f"/api/server/dispatch/{customer}/update") + for customer in range(250) + ] + + async def run_url_dispatcher_benchmark() -> None: + for request in requests: + await router.resolve(request) + + @benchmark + def _run() -> None: + loop.run_until_complete(run_url_dispatcher_benchmark()) + + +def test_resolve_dynamic_resource_url_with_many_dynamic_routes_with_common_prefix( + loop: asyncio.AbstractEventLoop, + benchmark: BenchmarkFixture, +) -> None: + """Resolve different a DynamicResource when there are 250 DynamicResources registered with the same common prefix.""" + + async def handler(request: web.Request) -> NoReturn: + assert False + + app = web.Application() + for count in range(250): + app.router.add_route("GET", 
f"/api/{{customer}}/show_{count}", handler) + app.router.add_route("GET", "/api/{customer}/update", handler) + app.freeze() + router = app.router + + requests = [ + _mock_request(method="GET", path=f"/api/{customer}/update") + for customer in range(250) + ] + + async def run_url_dispatcher_benchmark() -> None: + for request in requests: + await router.resolve(request) + + @benchmark + def _run() -> None: + loop.run_until_complete(run_url_dispatcher_benchmark()) + + def test_resolve_gitapi( loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture, From 9c8166743e3680cc2a50428daffe12d8b817bfff Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 17 Nov 2024 08:05:41 +0000 Subject: [PATCH 0965/1511] [PR #9935/0c312496 backport][3.11] Add benchmark for routing sub_applications (#9937) **This is a backport of PR #9935 as merged into master (0c312496c6490f934cec360f21ae802e2c8d8679).** Router should be optimized for handling 2 kind of prefix resources (sub_apps and static files are handled equally now): 1. sub_apps are placed under non-overlapped top-level single-segment prefixed. `test_resolve_gitapi_subapps` benchmark test this case already. 2. sub_apps shares the same prefix segments, e.g. `/api/manager/plugin/{name}`. This PR covers the second case. 
Co-authored-by: Andrew Svetlov <andrew.svetlov@gmail.com> --- tests/test_benchmarks_web_urldispatcher.py | 34 ++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/tests/test_benchmarks_web_urldispatcher.py b/tests/test_benchmarks_web_urldispatcher.py index 16a59516b35..662a600c3b3 100644 --- a/tests/test_benchmarks_web_urldispatcher.py +++ b/tests/test_benchmarks_web_urldispatcher.py @@ -377,3 +377,37 @@ async def run_url_dispatcher_benchmark() -> None: @benchmark def _run() -> None: loop.run_until_complete(run_url_dispatcher_benchmark()) + + +def test_resolve_prefix_resources_many_prefix_many_plain( + loop: asyncio.AbstractEventLoop, + benchmark: BenchmarkFixture, +) -> None: + """Resolve prefix resource (sub_app) whene 250 PlainResources registered and there are 250 subapps that shares the same sub_app path prefix.""" + + async def handler(request: web.Request) -> NoReturn: + assert False + + app = web.Application() + for count in range(250): + app.router.add_get(f"/api/server/other/{count}/update", handler) + for count in range(250): + subapp = web.Application() + # sub_apps exists for handling deep enough nested route trees + subapp.router.add_get("/deep/enough/sub/path", handler) + app.add_subapp(f"/api/path/to/plugin/{count}", subapp) + app.freeze() + router = app.router + + requests = [ + _mock_request(method="GET", path="/api/path/to/plugin/249/deep/enough/sub/path") + for customer in range(250) + ] + + async def run_url_dispatcher_benchmark() -> None: + for request in requests: + await router.resolve(request) + + @benchmark + def _run() -> None: + loop.run_until_complete(run_url_dispatcher_benchmark()) From 5ca7604b677747358f86a114fd56a6b1ee2088f3 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 17 Nov 2024 08:16:23 +0000 Subject: [PATCH 0966/1511] [PR #9935/0c312496 backport][3.12] Add benchmark for routing sub_applications (#9938) **This is a backport of PR #9935 as merged into 
master (0c312496c6490f934cec360f21ae802e2c8d8679).** Router should be optimized for handling 2 kind of prefix resources (sub_apps and static files are handled equally now): 1. sub_apps are placed under non-overlapped top-level single-segment prefixed. `test_resolve_gitapi_subapps` benchmark test this case already. 2. sub_apps shares the same prefix segments, e.g. `/api/manager/plugin/{name}`. This PR covers the second case. Co-authored-by: Andrew Svetlov <andrew.svetlov@gmail.com> --- tests/test_benchmarks_web_urldispatcher.py | 34 ++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/tests/test_benchmarks_web_urldispatcher.py b/tests/test_benchmarks_web_urldispatcher.py index 16a59516b35..662a600c3b3 100644 --- a/tests/test_benchmarks_web_urldispatcher.py +++ b/tests/test_benchmarks_web_urldispatcher.py @@ -377,3 +377,37 @@ async def run_url_dispatcher_benchmark() -> None: @benchmark def _run() -> None: loop.run_until_complete(run_url_dispatcher_benchmark()) + + +def test_resolve_prefix_resources_many_prefix_many_plain( + loop: asyncio.AbstractEventLoop, + benchmark: BenchmarkFixture, +) -> None: + """Resolve prefix resource (sub_app) whene 250 PlainResources registered and there are 250 subapps that shares the same sub_app path prefix.""" + + async def handler(request: web.Request) -> NoReturn: + assert False + + app = web.Application() + for count in range(250): + app.router.add_get(f"/api/server/other/{count}/update", handler) + for count in range(250): + subapp = web.Application() + # sub_apps exists for handling deep enough nested route trees + subapp.router.add_get("/deep/enough/sub/path", handler) + app.add_subapp(f"/api/path/to/plugin/{count}", subapp) + app.freeze() + router = app.router + + requests = [ + _mock_request(method="GET", path="/api/path/to/plugin/249/deep/enough/sub/path") + for customer in range(250) + ] + + async def run_url_dispatcher_benchmark() -> None: + for request in requests: + await router.resolve(request) + + 
@benchmark + def _run() -> None: + loop.run_until_complete(run_url_dispatcher_benchmark()) From 6cf4497c8d97671846abe3f7b3149468e2fb91e2 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 17 Nov 2024 19:24:12 +0000 Subject: [PATCH 0967/1511] [PR #9940/9ca1a581 backport][3.11] Add benchmarks for creating web responses (#9942) Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_benchmarks_web_response.py | 62 +++++++++++++++++++++++++++ 1 file changed, 62 insertions(+) create mode 100644 tests/test_benchmarks_web_response.py diff --git a/tests/test_benchmarks_web_response.py b/tests/test_benchmarks_web_response.py new file mode 100644 index 00000000000..fbf1fadf1e1 --- /dev/null +++ b/tests/test_benchmarks_web_response.py @@ -0,0 +1,62 @@ +"""codspeed benchmarks for the web responses.""" + +from pytest_codspeed import BenchmarkFixture + +from aiohttp import web + + +def test_simple_web_response(benchmark: BenchmarkFixture) -> None: + """Benchmark creating 100 simple web.Response.""" + response_count = 100 + + @benchmark + def _run() -> None: + for _ in range(response_count): + web.Response() + + +def test_web_response_with_headers(benchmark: BenchmarkFixture) -> None: + """Benchmark creating 100 web.Response with headers.""" + response_count = 100 + headers = { + "Content-Type": "text/plain", + "Server": "aiohttp", + "Date": "Sun, 01 Aug 2021 12:00:00 GMT", + } + + @benchmark + def _run() -> None: + for _ in range(response_count): + web.Response(headers=headers) + + +def test_web_response_with_bytes_body( + benchmark: BenchmarkFixture, +) -> None: + """Benchmark creating 100 web.Response with bytes.""" + response_count = 100 + + @benchmark + def _run() -> None: + for _ in range(response_count): + web.Response(body=b"Hello, World!") + + +def test_web_response_with_text_body(benchmark: BenchmarkFixture) -> None: + """Benchmark creating 100 web.Response with text.""" + response_count = 100 + + 
@benchmark + def _run() -> None: + for _ in range(response_count): + web.Response(text="Hello, World!") + + +def test_simple_web_stream_response(benchmark: BenchmarkFixture) -> None: + """Benchmark creating 100 simple web.StreamResponse.""" + response_count = 100 + + @benchmark + def _run() -> None: + for _ in range(response_count): + web.StreamResponse() From d7d4a3af0152c051d624f3a92ee57d65b24217ab Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 17 Nov 2024 19:24:21 +0000 Subject: [PATCH 0968/1511] [PR #9940/9ca1a581 backport][3.12] Add benchmarks for creating web responses (#9943) Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_benchmarks_web_response.py | 62 +++++++++++++++++++++++++++ 1 file changed, 62 insertions(+) create mode 100644 tests/test_benchmarks_web_response.py diff --git a/tests/test_benchmarks_web_response.py b/tests/test_benchmarks_web_response.py new file mode 100644 index 00000000000..fbf1fadf1e1 --- /dev/null +++ b/tests/test_benchmarks_web_response.py @@ -0,0 +1,62 @@ +"""codspeed benchmarks for the web responses.""" + +from pytest_codspeed import BenchmarkFixture + +from aiohttp import web + + +def test_simple_web_response(benchmark: BenchmarkFixture) -> None: + """Benchmark creating 100 simple web.Response.""" + response_count = 100 + + @benchmark + def _run() -> None: + for _ in range(response_count): + web.Response() + + +def test_web_response_with_headers(benchmark: BenchmarkFixture) -> None: + """Benchmark creating 100 web.Response with headers.""" + response_count = 100 + headers = { + "Content-Type": "text/plain", + "Server": "aiohttp", + "Date": "Sun, 01 Aug 2021 12:00:00 GMT", + } + + @benchmark + def _run() -> None: + for _ in range(response_count): + web.Response(headers=headers) + + +def test_web_response_with_bytes_body( + benchmark: BenchmarkFixture, +) -> None: + """Benchmark creating 100 web.Response with bytes.""" + response_count = 100 + + 
@benchmark + def _run() -> None: + for _ in range(response_count): + web.Response(body=b"Hello, World!") + + +def test_web_response_with_text_body(benchmark: BenchmarkFixture) -> None: + """Benchmark creating 100 web.Response with text.""" + response_count = 100 + + @benchmark + def _run() -> None: + for _ in range(response_count): + web.Response(text="Hello, World!") + + +def test_simple_web_stream_response(benchmark: BenchmarkFixture) -> None: + """Benchmark creating 100 simple web.StreamResponse.""" + response_count = 100 + + @benchmark + def _run() -> None: + for _ in range(response_count): + web.StreamResponse() From b8367c9a335b5396cccc248d95385a02946b77a3 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 17 Nov 2024 20:23:31 +0000 Subject: [PATCH 0969/1511] [PR #9944/69ca7df6 backport][3.11] Use SPDX license expression (#9945) Co-authored-by: Marc Mueller <30130371+cdce8p@users.noreply.github.com> --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 2e0421f6c5d..a2b9e3b29e1 100644 --- a/setup.cfg +++ b/setup.cfg @@ -16,7 +16,7 @@ long_description = file: README.rst long_description_content_type = text/x-rst maintainer = aiohttp team <team@aiohttp.org> maintainer_email = team@aiohttp.org -license = Apache 2 +license = Apache-2.0 license_files = LICENSE.txt classifiers = Development Status :: 5 - Production/Stable From be6a021ddcff6153f172ce636a8ed2fb1c4eeebf Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 17 Nov 2024 20:29:32 +0000 Subject: [PATCH 0970/1511] [PR #9944/69ca7df6 backport][3.12] Use SPDX license expression (#9946) Co-authored-by: Marc Mueller <30130371+cdce8p@users.noreply.github.com> --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 2e0421f6c5d..a2b9e3b29e1 100644 --- a/setup.cfg +++ b/setup.cfg @@ -16,7 +16,7 @@ 
long_description = file: README.rst long_description_content_type = text/x-rst maintainer = aiohttp team <team@aiohttp.org> maintainer_email = team@aiohttp.org -license = Apache 2 +license = Apache-2.0 license_files = LICENSE.txt classifiers = Development Status :: 5 - Production/Stable From ce5225c996861e6bb78d656cc74ae099a95f104a Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 17 Nov 2024 22:08:39 +0000 Subject: [PATCH 0971/1511] [PR #9939/509fddfd backport][3.11] Fix test_resolve_gitapi_subapps benchmark (#9949) Co-authored-by: Andrew Svetlov <andrew.svetlov@gmail.com> --- tests/test_benchmarks_web_urldispatcher.py | 148 +++++++++++++++++---- 1 file changed, 120 insertions(+), 28 deletions(-) diff --git a/tests/test_benchmarks_web_urldispatcher.py b/tests/test_benchmarks_web_urldispatcher.py index 662a600c3b3..033e2543a73 100644 --- a/tests/test_benchmarks_web_urldispatcher.py +++ b/tests/test_benchmarks_web_urldispatcher.py @@ -6,7 +6,7 @@ import random import string from pathlib import Path -from typing import NoReturn +from typing import NoReturn, Optional from unittest import mock from multidict import CIMultiDict, CIMultiDictProxy @@ -53,9 +53,16 @@ async def handler(request: web.Request) -> NoReturn: router = app.router request = _mock_request(method="GET", path="/") - async def run_url_dispatcher_benchmark() -> None: + async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + ret = None for _ in range(resolve_count): - await router.resolve(request) + ret = await router.resolve(request) + + return ret + + ret = loop.run_until_complete(run_url_dispatcher_benchmark()) + assert ret is not None + assert ret.get_info()["path"] == "/", ret.get_info() @benchmark def _run() -> None: @@ -70,14 +77,22 @@ def test_resolve_static_root_route( resolve_count = 100 app = web.Application() - app.router.add_static("/", pathlib.Path(aiohttp.__file__).parent) + here = 
pathlib.Path(aiohttp.__file__).parent + app.router.add_static("/", here) app.freeze() router = app.router request = _mock_request(method="GET", path="/") - async def run_url_dispatcher_benchmark() -> None: + async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + ret = None for _ in range(resolve_count): - await router.resolve(request) + ret = await router.resolve(request) + + return ret + + ret = loop.run_until_complete(run_url_dispatcher_benchmark()) + assert ret is not None + assert ret.get_info()["directory"] == here, ret.get_info() @benchmark def _run() -> None: @@ -101,9 +116,16 @@ async def handler(request: web.Request) -> NoReturn: router = app.router request = _mock_request(method="GET", path="/api/server/dispatch/1/update") - async def run_url_dispatcher_benchmark() -> None: + async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + ret = None for _ in range(resolve_count): - await router.resolve(request) + ret = await router.resolve(request) + + return ret + + ret = loop.run_until_complete(run_url_dispatcher_benchmark()) + assert ret is not None + assert ret.get_info()["path"] == "/api/server/dispatch/1/update", ret.get_info() @benchmark def _run() -> None: @@ -130,9 +152,15 @@ async def handler(request: web.Request) -> NoReturn: for count in range(250) ] - async def run_url_dispatcher_benchmark() -> None: + async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + ret = None for request in requests: - await router.resolve(request) + ret = await router.resolve(request) + return ret + + ret = loop.run_until_complete(run_url_dispatcher_benchmark()) + assert ret is not None + assert ret.get_info()["path"] == "/api/server/dispatch/249/update", ret.get_info() @benchmark def _run() -> None: @@ -162,11 +190,18 @@ async def handler(request: web.Request) -> NoReturn: app.freeze() router = app.router - requests = [_mock_request(method="GET", path=url) for url in urls] + requests = 
[(_mock_request(method="GET", path=url), url) for url in urls] - async def run_url_dispatcher_benchmark() -> None: - for request in requests: - await router.resolve(request) + async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + ret = None + for request, path in requests: + ret = await router.resolve(request) + + return ret + + ret = loop.run_until_complete(run_url_dispatcher_benchmark()) + assert ret is not None + assert ret.get_info()["path"] == url, ret.get_info() @benchmark def _run() -> None: @@ -194,9 +229,18 @@ async def handler(request: web.Request) -> NoReturn: for customer in range(250) ] - async def run_url_dispatcher_benchmark() -> None: + async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + ret = None for request in requests: - await router.resolve(request) + ret = await router.resolve(request) + + return ret + + ret = loop.run_until_complete(run_url_dispatcher_benchmark()) + assert ret is not None + assert ( + ret.get_info()["formatter"] == "/api/server/dispatch/{customer}/update" + ), ret.get_info() @benchmark def _run() -> None: @@ -226,9 +270,18 @@ async def handler(request: web.Request) -> NoReturn: for customer in range(250) ] - async def run_url_dispatcher_benchmark() -> None: + async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + ret = None for request in requests: - await router.resolve(request) + ret = await router.resolve(request) + + return ret + + ret = loop.run_until_complete(run_url_dispatcher_benchmark()) + assert ret is not None + assert ( + ret.get_info()["formatter"] == "/api/server/dispatch/{customer}/update" + ), ret.get_info() @benchmark def _run() -> None: @@ -256,9 +309,16 @@ async def handler(request: web.Request) -> NoReturn: for customer in range(250) ] - async def run_url_dispatcher_benchmark() -> None: + async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + ret = None for request in requests: - await 
router.resolve(request) + ret = await router.resolve(request) + + return ret + + ret = loop.run_until_complete(run_url_dispatcher_benchmark()) + assert ret is not None + assert ret.get_info()["formatter"] == "/api/{customer}/update", ret.get_info() @benchmark def _run() -> None: @@ -306,9 +366,18 @@ async def handler(request: web.Request) -> NoReturn: ) ) - async def run_url_dispatcher_benchmark() -> None: + async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + ret = None for request in requests: - await router.resolve(request) + ret = await router.resolve(request) + return ret + + ret = loop.run_until_complete(run_url_dispatcher_benchmark()) + assert ret is not None + assert ( + ret.get_info()["formatter"] + == "/repos/{owner}/{repo}/pulls/{pull_number}/reviews" + ), ret.get_info() @benchmark def _run() -> None: @@ -346,8 +415,14 @@ async def handler(request: web.Request) -> NoReturn: app = web.Application() for url in urls: parts = url.split("/") - subapp = subapps.get(parts[1], app) - subapp.router.add_get(url, handler) + subapp = subapps.get(parts[1]) + if subapp is not None: + sub_url = "/".join([""] + parts[2:]) + if not sub_url: + sub_url = "/" + subapp.router.add_get(sub_url, handler) + else: + app.router.add_get(url, handler) for key, subapp in subapps.items(): app.add_subapp("/" + key, subapp) app.freeze() @@ -370,9 +445,18 @@ async def handler(request: web.Request) -> NoReturn: ) ) - async def run_url_dispatcher_benchmark() -> None: + async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + ret = None for request in requests: - await router.resolve(request) + ret = await router.resolve(request) + return ret + + ret = loop.run_until_complete(run_url_dispatcher_benchmark()) + assert ret is not None + assert ( + ret.get_info()["formatter"] + == "/repos/{owner}/{repo}/pulls/{pull_number}/reviews" + ), ret.get_info() @benchmark def _run() -> None: @@ -404,9 +488,17 @@ async def handler(request: web.Request) 
-> NoReturn: for customer in range(250) ] - async def run_url_dispatcher_benchmark() -> None: + async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + ret = None for request in requests: - await router.resolve(request) + ret = await router.resolve(request) + return ret + + ret = loop.run_until_complete(run_url_dispatcher_benchmark()) + assert ret is not None + assert ( + ret.get_info()["path"] == "/api/path/to/plugin/249/deep/enough/sub/path" + ), ret.get_info() @benchmark def _run() -> None: From 4c328c0d28895b4bb63e16843dfb0542231ee954 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 17 Nov 2024 22:12:56 +0000 Subject: [PATCH 0972/1511] [PR #9939/509fddfd backport][3.12] Fix test_resolve_gitapi_subapps benchmark (#9950) Co-authored-by: Andrew Svetlov <andrew.svetlov@gmail.com> --- tests/test_benchmarks_web_urldispatcher.py | 148 +++++++++++++++++---- 1 file changed, 120 insertions(+), 28 deletions(-) diff --git a/tests/test_benchmarks_web_urldispatcher.py b/tests/test_benchmarks_web_urldispatcher.py index 662a600c3b3..033e2543a73 100644 --- a/tests/test_benchmarks_web_urldispatcher.py +++ b/tests/test_benchmarks_web_urldispatcher.py @@ -6,7 +6,7 @@ import random import string from pathlib import Path -from typing import NoReturn +from typing import NoReturn, Optional from unittest import mock from multidict import CIMultiDict, CIMultiDictProxy @@ -53,9 +53,16 @@ async def handler(request: web.Request) -> NoReturn: router = app.router request = _mock_request(method="GET", path="/") - async def run_url_dispatcher_benchmark() -> None: + async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + ret = None for _ in range(resolve_count): - await router.resolve(request) + ret = await router.resolve(request) + + return ret + + ret = loop.run_until_complete(run_url_dispatcher_benchmark()) + assert ret is not None + assert ret.get_info()["path"] == "/", 
ret.get_info() @benchmark def _run() -> None: @@ -70,14 +77,22 @@ def test_resolve_static_root_route( resolve_count = 100 app = web.Application() - app.router.add_static("/", pathlib.Path(aiohttp.__file__).parent) + here = pathlib.Path(aiohttp.__file__).parent + app.router.add_static("/", here) app.freeze() router = app.router request = _mock_request(method="GET", path="/") - async def run_url_dispatcher_benchmark() -> None: + async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + ret = None for _ in range(resolve_count): - await router.resolve(request) + ret = await router.resolve(request) + + return ret + + ret = loop.run_until_complete(run_url_dispatcher_benchmark()) + assert ret is not None + assert ret.get_info()["directory"] == here, ret.get_info() @benchmark def _run() -> None: @@ -101,9 +116,16 @@ async def handler(request: web.Request) -> NoReturn: router = app.router request = _mock_request(method="GET", path="/api/server/dispatch/1/update") - async def run_url_dispatcher_benchmark() -> None: + async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + ret = None for _ in range(resolve_count): - await router.resolve(request) + ret = await router.resolve(request) + + return ret + + ret = loop.run_until_complete(run_url_dispatcher_benchmark()) + assert ret is not None + assert ret.get_info()["path"] == "/api/server/dispatch/1/update", ret.get_info() @benchmark def _run() -> None: @@ -130,9 +152,15 @@ async def handler(request: web.Request) -> NoReturn: for count in range(250) ] - async def run_url_dispatcher_benchmark() -> None: + async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + ret = None for request in requests: - await router.resolve(request) + ret = await router.resolve(request) + return ret + + ret = loop.run_until_complete(run_url_dispatcher_benchmark()) + assert ret is not None + assert ret.get_info()["path"] == "/api/server/dispatch/249/update", ret.get_info() @benchmark 
def _run() -> None: @@ -162,11 +190,18 @@ async def handler(request: web.Request) -> NoReturn: app.freeze() router = app.router - requests = [_mock_request(method="GET", path=url) for url in urls] + requests = [(_mock_request(method="GET", path=url), url) for url in urls] - async def run_url_dispatcher_benchmark() -> None: - for request in requests: - await router.resolve(request) + async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + ret = None + for request, path in requests: + ret = await router.resolve(request) + + return ret + + ret = loop.run_until_complete(run_url_dispatcher_benchmark()) + assert ret is not None + assert ret.get_info()["path"] == url, ret.get_info() @benchmark def _run() -> None: @@ -194,9 +229,18 @@ async def handler(request: web.Request) -> NoReturn: for customer in range(250) ] - async def run_url_dispatcher_benchmark() -> None: + async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + ret = None for request in requests: - await router.resolve(request) + ret = await router.resolve(request) + + return ret + + ret = loop.run_until_complete(run_url_dispatcher_benchmark()) + assert ret is not None + assert ( + ret.get_info()["formatter"] == "/api/server/dispatch/{customer}/update" + ), ret.get_info() @benchmark def _run() -> None: @@ -226,9 +270,18 @@ async def handler(request: web.Request) -> NoReturn: for customer in range(250) ] - async def run_url_dispatcher_benchmark() -> None: + async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + ret = None for request in requests: - await router.resolve(request) + ret = await router.resolve(request) + + return ret + + ret = loop.run_until_complete(run_url_dispatcher_benchmark()) + assert ret is not None + assert ( + ret.get_info()["formatter"] == "/api/server/dispatch/{customer}/update" + ), ret.get_info() @benchmark def _run() -> None: @@ -256,9 +309,16 @@ async def handler(request: web.Request) -> NoReturn: for customer 
in range(250) ] - async def run_url_dispatcher_benchmark() -> None: + async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + ret = None for request in requests: - await router.resolve(request) + ret = await router.resolve(request) + + return ret + + ret = loop.run_until_complete(run_url_dispatcher_benchmark()) + assert ret is not None + assert ret.get_info()["formatter"] == "/api/{customer}/update", ret.get_info() @benchmark def _run() -> None: @@ -306,9 +366,18 @@ async def handler(request: web.Request) -> NoReturn: ) ) - async def run_url_dispatcher_benchmark() -> None: + async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + ret = None for request in requests: - await router.resolve(request) + ret = await router.resolve(request) + return ret + + ret = loop.run_until_complete(run_url_dispatcher_benchmark()) + assert ret is not None + assert ( + ret.get_info()["formatter"] + == "/repos/{owner}/{repo}/pulls/{pull_number}/reviews" + ), ret.get_info() @benchmark def _run() -> None: @@ -346,8 +415,14 @@ async def handler(request: web.Request) -> NoReturn: app = web.Application() for url in urls: parts = url.split("/") - subapp = subapps.get(parts[1], app) - subapp.router.add_get(url, handler) + subapp = subapps.get(parts[1]) + if subapp is not None: + sub_url = "/".join([""] + parts[2:]) + if not sub_url: + sub_url = "/" + subapp.router.add_get(sub_url, handler) + else: + app.router.add_get(url, handler) for key, subapp in subapps.items(): app.add_subapp("/" + key, subapp) app.freeze() @@ -370,9 +445,18 @@ async def handler(request: web.Request) -> NoReturn: ) ) - async def run_url_dispatcher_benchmark() -> None: + async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + ret = None for request in requests: - await router.resolve(request) + ret = await router.resolve(request) + return ret + + ret = loop.run_until_complete(run_url_dispatcher_benchmark()) + assert ret is not None + assert ( + 
ret.get_info()["formatter"] + == "/repos/{owner}/{repo}/pulls/{pull_number}/reviews" + ), ret.get_info() @benchmark def _run() -> None: @@ -404,9 +488,17 @@ async def handler(request: web.Request) -> NoReturn: for customer in range(250) ] - async def run_url_dispatcher_benchmark() -> None: + async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + ret = None for request in requests: - await router.resolve(request) + ret = await router.resolve(request) + return ret + + ret = loop.run_until_complete(run_url_dispatcher_benchmark()) + assert ret is not None + assert ( + ret.get_info()["path"] == "/api/path/to/plugin/249/deep/enough/sub/path" + ), ret.get_info() @benchmark def _run() -> None: From e573793f8777b776f2efe7eb4d04bad9e5b7efaa Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 18 Nov 2024 21:59:26 +0100 Subject: [PATCH 0973/1511] [PR #9953/27e23fb0 backport][3.12] Add .codspeed folder to .gitignore (#9957) **This is a backport of PR #9953 as merged into master (27e23fb0cff3b603563c70b3e3cb3a87d53d97d5).** The folder contains https://codspeed.io local run results Co-authored-by: Andrew Svetlov <andrew.svetlov@gmail.com> --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 33546191693..0081b62ae7f 100644 --- a/.gitignore +++ b/.gitignore @@ -13,6 +13,7 @@ .DS_Store .Python .cache +.codspeed .coverage .coverage.* .develop From 20b7a7b0c2463a3ae02073e13704ce4c7b478f30 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 18 Nov 2024 22:09:30 +0100 Subject: [PATCH 0974/1511] [PR #9954/be31bed2 backport][3.11] Add url dispatcher benchmark for resolving root route when exists many plain routes in the subtree (#9959) **This is a backport of PR #9954 as merged into master (be31bed20210b6d7380aed8702a17994ea6a2d59).** Yet another benchmark. 
There is a tree: ``` / /api /api/server /api/server/dispatch /api/server/dispatch/123 /api/server/dispatch/123/update ``` The benchmark resolves the root route `/`. If the search starts from checking deep-most resource it cold be suboptimal. Co-authored-by: Andrew Svetlov <andrew.svetlov@gmail.com> --- tests/test_benchmarks_web_urldispatcher.py | 38 ++++++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/tests/test_benchmarks_web_urldispatcher.py b/tests/test_benchmarks_web_urldispatcher.py index 033e2543a73..5d151d984af 100644 --- a/tests/test_benchmarks_web_urldispatcher.py +++ b/tests/test_benchmarks_web_urldispatcher.py @@ -69,6 +69,44 @@ def _run() -> None: loop.run_until_complete(run_url_dispatcher_benchmark()) +def test_resolve_root_route_with_many_fixed_routes( + loop: asyncio.AbstractEventLoop, + benchmark: BenchmarkFixture, +) -> None: + """Resolve top level PlainResources route 100 times.""" + resolve_count = 100 + + async def handler(request: web.Request) -> NoReturn: + assert False + + app = web.Application() + app.router.add_route("GET", "/", handler) + for count in range(250): + app.router.add_route("GET", f"/api/server/dispatch/{count}/update", handler) + app.router.add_route("GET", f"/api/server/dispatch/{count}", handler) + app.router.add_route("GET", "/api/server/dispatch", handler) + app.router.add_route("GET", "/api/server", handler) + app.router.add_route("GET", "/api", handler) + app.freeze() + router = app.router + request = _mock_request(method="GET", path="/") + + async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + ret = None + for _ in range(resolve_count): + ret = await router.resolve(request) + + return ret + + ret = loop.run_until_complete(run_url_dispatcher_benchmark()) + assert ret is not None + assert ret.get_info()["path"] == "/", ret.get_info() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_url_dispatcher_benchmark()) + + def test_resolve_static_root_route( loop: 
asyncio.AbstractEventLoop, benchmark: BenchmarkFixture, From 97be030db63b5ccddd6b6e7e44a780f0f7464683 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 18 Nov 2024 22:09:54 +0100 Subject: [PATCH 0975/1511] [PR #9953/27e23fb0 backport][3.11] Add .codspeed folder to .gitignore (#9956) **This is a backport of PR #9953 as merged into master (27e23fb0cff3b603563c70b3e3cb3a87d53d97d5).** The folder contains https://codspeed.io local run results Co-authored-by: Andrew Svetlov <andrew.svetlov@gmail.com> --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 33546191693..0081b62ae7f 100644 --- a/.gitignore +++ b/.gitignore @@ -13,6 +13,7 @@ .DS_Store .Python .cache +.codspeed .coverage .coverage.* .develop From 07eb9605db82d21b5958b46781d8fbe47c3abb18 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 18 Nov 2024 22:10:29 +0100 Subject: [PATCH 0976/1511] [PR #9954/be31bed2 backport][3.12] Add url dispatcher benchmark for resolving root route when exists many plain routes in the subtree (#9960) **This is a backport of PR #9954 as merged into master (be31bed20210b6d7380aed8702a17994ea6a2d59).** Yet another benchmark. There is a tree: ``` / /api /api/server /api/server/dispatch /api/server/dispatch/123 /api/server/dispatch/123/update ``` The benchmark resolves the root route `/`. If the search starts from checking deep-most resource it cold be suboptimal. 
Co-authored-by: Andrew Svetlov <andrew.svetlov@gmail.com> --- tests/test_benchmarks_web_urldispatcher.py | 38 ++++++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/tests/test_benchmarks_web_urldispatcher.py b/tests/test_benchmarks_web_urldispatcher.py index 033e2543a73..5d151d984af 100644 --- a/tests/test_benchmarks_web_urldispatcher.py +++ b/tests/test_benchmarks_web_urldispatcher.py @@ -69,6 +69,44 @@ def _run() -> None: loop.run_until_complete(run_url_dispatcher_benchmark()) +def test_resolve_root_route_with_many_fixed_routes( + loop: asyncio.AbstractEventLoop, + benchmark: BenchmarkFixture, +) -> None: + """Resolve top level PlainResources route 100 times.""" + resolve_count = 100 + + async def handler(request: web.Request) -> NoReturn: + assert False + + app = web.Application() + app.router.add_route("GET", "/", handler) + for count in range(250): + app.router.add_route("GET", f"/api/server/dispatch/{count}/update", handler) + app.router.add_route("GET", f"/api/server/dispatch/{count}", handler) + app.router.add_route("GET", "/api/server/dispatch", handler) + app.router.add_route("GET", "/api/server", handler) + app.router.add_route("GET", "/api", handler) + app.freeze() + router = app.router + request = _mock_request(method="GET", path="/") + + async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + ret = None + for _ in range(resolve_count): + ret = await router.resolve(request) + + return ret + + ret = loop.run_until_complete(run_url_dispatcher_benchmark()) + assert ret is not None + assert ret.get_info()["path"] == "/", ret.get_info() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_url_dispatcher_benchmark()) + + def test_resolve_static_root_route( loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture, From 902e7b244c04ba112067362a9f10515830326fa2 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Mon, 18 Nov 2024 17:54:28 -0600 Subject: [PATCH 0977/1511] [3.11] Restore `FlowControlDataQueue` class (#9963) fixes https://github.com/aio-libs/aiodocker/issues/918 --- CHANGES/9963.bugfix.rst | 3 ++ aiohttp/__init__.py | 2 + aiohttp/streams.py | 43 +++++++++++++++++ tests/test_flowcontrol_streams.py | 77 +++++++++++++++++++++++++++++++ 4 files changed, 125 insertions(+) create mode 100644 CHANGES/9963.bugfix.rst diff --git a/CHANGES/9963.bugfix.rst b/CHANGES/9963.bugfix.rst new file mode 100644 index 00000000000..0c05ebab24f --- /dev/null +++ b/CHANGES/9963.bugfix.rst @@ -0,0 +1,3 @@ +Restored the ``FlowControlDataQueue`` class -- by :user:`bdraco`. + +This class is no longer used internally, and will be permanently removed in the next major version. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 4bac155c9d6..741b93a9b37 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -93,6 +93,7 @@ EMPTY_PAYLOAD as EMPTY_PAYLOAD, DataQueue as DataQueue, EofStream as EofStream, + FlowControlDataQueue as FlowControlDataQueue, StreamReader as StreamReader, ) from .tracing import ( @@ -148,6 +149,7 @@ "ConnectionTimeoutError", "ContentTypeError", "Fingerprint", + "FlowControlDataQueue", "InvalidURL", "InvalidUrlClientError", "InvalidUrlRedirectClientError", diff --git a/aiohttp/streams.py b/aiohttp/streams.py index 59aa1dd0c3b..b97846171b1 100644 --- a/aiohttp/streams.py +++ b/aiohttp/streams.py @@ -677,3 +677,46 @@ async def read(self) -> _T: def __aiter__(self) -> AsyncStreamIterator[_T]: return AsyncStreamIterator(self.read) + + +class FlowControlDataQueue(DataQueue[_T]): + """FlowControlDataQueue resumes and pauses an underlying stream. + + It is a destination for parsed data. + + This class is deprecated and will be removed in version 4.0. 
+ """ + + def __init__( + self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop + ) -> None: + super().__init__(loop=loop) + self._size = 0 + self._protocol = protocol + self._limit = limit * 2 + + def feed_data(self, data: _T, size: int = 0) -> None: + super().feed_data(data, size) + self._size += size + + if self._size > self._limit and not self._protocol._reading_paused: + self._protocol.pause_reading() + + async def read(self) -> _T: + if not self._buffer and not self._eof: + assert not self._waiter + self._waiter = self._loop.create_future() + try: + await self._waiter + except (asyncio.CancelledError, asyncio.TimeoutError): + self._waiter = None + raise + if self._buffer: + data, size = self._buffer.popleft() + self._size -= size + if self._size < self._limit and self._protocol._reading_paused: + self._protocol.resume_reading() + return data + if self._exception is not None: + raise self._exception + raise EofStream diff --git a/tests/test_flowcontrol_streams.py b/tests/test_flowcontrol_streams.py index 08f6be21a2c..68e623b6dd7 100644 --- a/tests/test_flowcontrol_streams.py +++ b/tests/test_flowcontrol_streams.py @@ -1,3 +1,4 @@ +import asyncio from unittest import mock import pytest @@ -15,6 +16,13 @@ def stream(loop, protocol): return streams.StreamReader(protocol, limit=1, loop=loop) +@pytest.fixture +def buffer(loop, protocol: mock.Mock) -> streams.FlowControlDataQueue: + out = streams.FlowControlDataQueue(protocol, limit=1, loop=loop) + out._allow_pause = True + return out + + class TestFlowControlStreamReader: async def test_read(self, stream) -> None: stream.feed_data(b"da", 2) @@ -103,3 +111,72 @@ async def test_read_nowait(self, stream) -> None: res = stream.read_nowait(5) assert res == b"" assert stream._protocol.resume_reading.call_count == 1 # type: ignore[attr-defined] + + +async def test_flow_control_data_queue_waiter_cancelled( + buffer: streams.FlowControlDataQueue, +) -> None: + """Test that the waiter is cancelled it 
is cleared.""" + task = asyncio.create_task(buffer.read()) + await asyncio.sleep(0) + assert buffer._waiter is not None + buffer._waiter.cancel() + + with pytest.raises(asyncio.CancelledError): + await task + assert buffer._waiter is None + + +async def test_flow_control_data_queue_has_buffer( + buffer: streams.FlowControlDataQueue, +) -> None: + """Test reading from the buffer.""" + data = object() + buffer.feed_data(data, 100) + assert buffer._size == 100 + read_data = await buffer.read() + assert read_data is data + assert buffer._size == 0 + + +async def test_flow_control_data_queue_read_with_exception( + buffer: streams.FlowControlDataQueue, +) -> None: + """Test reading when the buffer is empty and an exception is set.""" + buffer.set_exception(ValueError("unique_string")) + with pytest.raises(ValueError, match="unique_string"): + await buffer.read() + + +def test_flow_control_data_queue_feed_pause( + buffer: streams.FlowControlDataQueue, +) -> None: + """Test feeding data and pausing the reader.""" + buffer._protocol._reading_paused = False + buffer.feed_data(object(), 100) + assert buffer._protocol.pause_reading.called + + buffer._protocol._reading_paused = True + buffer._protocol.pause_reading.reset_mock() + buffer.feed_data(object(), 100) + assert not buffer._protocol.pause_reading.called + + +async def test_flow_control_data_queue_resume_on_read( + buffer: streams.FlowControlDataQueue, +) -> None: + """Test that the reader is resumed when reading.""" + buffer.feed_data(object(), 100) + + buffer._protocol._reading_paused = True + await buffer.read() + assert buffer._protocol.resume_reading.called + + +async def test_flow_control_data_queue_read_eof( + buffer: streams.FlowControlDataQueue, +) -> None: + """Test that reading after eof raises EofStream.""" + buffer.feed_eof() + with pytest.raises(streams.EofStream): + await buffer.read() From 90558c3af9e5f11f0b278ba8f59a2f386ad27b8c Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Mon, 18 Nov 2024 18:10:20 -0600 Subject: [PATCH 0978/1511] Release 3.11.3 (#9964) --- CHANGES.rst | 40 ++++++++++++++++++++++++++++++++++++++++ CHANGES/9899.misc.rst | 1 - CHANGES/9918.bugfix.rst | 1 - CHANGES/9963.bugfix.rst | 3 --- aiohttp/__init__.py | 2 +- 5 files changed, 41 insertions(+), 6 deletions(-) delete mode 100644 CHANGES/9899.misc.rst delete mode 100644 CHANGES/9918.bugfix.rst delete mode 100644 CHANGES/9963.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index 2391c9576c2..896713180eb 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,46 @@ .. towncrier release notes start +3.11.3 (2024-11-18) +=================== + +Bug fixes +--------- + +- Removed non-existing ``__author__`` from ``dir(aiohttp)`` -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`9918`. + + + +- Restored the ``FlowControlDataQueue`` class -- by :user:`bdraco`. + + This class is no longer used internally, and will be permanently removed in the next major version. + + + *Related issues and pull requests on GitHub:* + :issue:`9963`. + + + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of resolving resources when multiple methods are registered for the same route -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9899`. + + + + +---- + + 3.11.2 (2024-11-14) =================== diff --git a/CHANGES/9899.misc.rst b/CHANGES/9899.misc.rst deleted file mode 100644 index 53243495d36..00000000000 --- a/CHANGES/9899.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performance of resolving resources when multiple methods are registered for the same route -- by :user:`bdraco`. diff --git a/CHANGES/9918.bugfix.rst b/CHANGES/9918.bugfix.rst deleted file mode 100644 index bdf69abe848..00000000000 --- a/CHANGES/9918.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Removed non-existing ``__author__`` from ``dir(aiohttp)`` -- by :user:`Dreamsorcerer`. 
diff --git a/CHANGES/9963.bugfix.rst b/CHANGES/9963.bugfix.rst deleted file mode 100644 index 0c05ebab24f..00000000000 --- a/CHANGES/9963.bugfix.rst +++ /dev/null @@ -1,3 +0,0 @@ -Restored the ``FlowControlDataQueue`` class -- by :user:`bdraco`. - -This class is no longer used internally, and will be permanently removed in the next major version. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 741b93a9b37..5509a376d15 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.3.dev0" +__version__ = "3.11.3" from typing import TYPE_CHECKING, Tuple From cc4cbc7019d635eea770e4ed99608519405e3fd1 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 19 Nov 2024 00:45:34 +0000 Subject: [PATCH 0979/1511] [PR #9963/902e7b24 forwardport][3.12] Restore `FlowControlDataQueue` class (#9965) Co-authored-by: J. Nick Koston <nick@koston.org> fixes https://github.com/aio-libs/aiodocker/issues/918 --- CHANGES/9963.bugfix.rst | 3 ++ aiohttp/__init__.py | 2 + aiohttp/streams.py | 43 +++++++++++++++++ tests/test_flowcontrol_streams.py | 77 +++++++++++++++++++++++++++++++ 4 files changed, 125 insertions(+) create mode 100644 CHANGES/9963.bugfix.rst diff --git a/CHANGES/9963.bugfix.rst b/CHANGES/9963.bugfix.rst new file mode 100644 index 00000000000..0c05ebab24f --- /dev/null +++ b/CHANGES/9963.bugfix.rst @@ -0,0 +1,3 @@ +Restored the ``FlowControlDataQueue`` class -- by :user:`bdraco`. + +This class is no longer used internally, and will be permanently removed in the next major version. 
diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 1338dae487c..49eaf4541de 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -93,6 +93,7 @@ EMPTY_PAYLOAD as EMPTY_PAYLOAD, DataQueue as DataQueue, EofStream as EofStream, + FlowControlDataQueue as FlowControlDataQueue, StreamReader as StreamReader, ) from .tracing import ( @@ -148,6 +149,7 @@ "ConnectionTimeoutError", "ContentTypeError", "Fingerprint", + "FlowControlDataQueue", "InvalidURL", "InvalidUrlClientError", "InvalidUrlRedirectClientError", diff --git a/aiohttp/streams.py b/aiohttp/streams.py index 59aa1dd0c3b..b97846171b1 100644 --- a/aiohttp/streams.py +++ b/aiohttp/streams.py @@ -677,3 +677,46 @@ async def read(self) -> _T: def __aiter__(self) -> AsyncStreamIterator[_T]: return AsyncStreamIterator(self.read) + + +class FlowControlDataQueue(DataQueue[_T]): + """FlowControlDataQueue resumes and pauses an underlying stream. + + It is a destination for parsed data. + + This class is deprecated and will be removed in version 4.0. 
+ """ + + def __init__( + self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop + ) -> None: + super().__init__(loop=loop) + self._size = 0 + self._protocol = protocol + self._limit = limit * 2 + + def feed_data(self, data: _T, size: int = 0) -> None: + super().feed_data(data, size) + self._size += size + + if self._size > self._limit and not self._protocol._reading_paused: + self._protocol.pause_reading() + + async def read(self) -> _T: + if not self._buffer and not self._eof: + assert not self._waiter + self._waiter = self._loop.create_future() + try: + await self._waiter + except (asyncio.CancelledError, asyncio.TimeoutError): + self._waiter = None + raise + if self._buffer: + data, size = self._buffer.popleft() + self._size -= size + if self._size < self._limit and self._protocol._reading_paused: + self._protocol.resume_reading() + return data + if self._exception is not None: + raise self._exception + raise EofStream diff --git a/tests/test_flowcontrol_streams.py b/tests/test_flowcontrol_streams.py index 08f6be21a2c..68e623b6dd7 100644 --- a/tests/test_flowcontrol_streams.py +++ b/tests/test_flowcontrol_streams.py @@ -1,3 +1,4 @@ +import asyncio from unittest import mock import pytest @@ -15,6 +16,13 @@ def stream(loop, protocol): return streams.StreamReader(protocol, limit=1, loop=loop) +@pytest.fixture +def buffer(loop, protocol: mock.Mock) -> streams.FlowControlDataQueue: + out = streams.FlowControlDataQueue(protocol, limit=1, loop=loop) + out._allow_pause = True + return out + + class TestFlowControlStreamReader: async def test_read(self, stream) -> None: stream.feed_data(b"da", 2) @@ -103,3 +111,72 @@ async def test_read_nowait(self, stream) -> None: res = stream.read_nowait(5) assert res == b"" assert stream._protocol.resume_reading.call_count == 1 # type: ignore[attr-defined] + + +async def test_flow_control_data_queue_waiter_cancelled( + buffer: streams.FlowControlDataQueue, +) -> None: + """Test that the waiter is cancelled it 
is cleared.""" + task = asyncio.create_task(buffer.read()) + await asyncio.sleep(0) + assert buffer._waiter is not None + buffer._waiter.cancel() + + with pytest.raises(asyncio.CancelledError): + await task + assert buffer._waiter is None + + +async def test_flow_control_data_queue_has_buffer( + buffer: streams.FlowControlDataQueue, +) -> None: + """Test reading from the buffer.""" + data = object() + buffer.feed_data(data, 100) + assert buffer._size == 100 + read_data = await buffer.read() + assert read_data is data + assert buffer._size == 0 + + +async def test_flow_control_data_queue_read_with_exception( + buffer: streams.FlowControlDataQueue, +) -> None: + """Test reading when the buffer is empty and an exception is set.""" + buffer.set_exception(ValueError("unique_string")) + with pytest.raises(ValueError, match="unique_string"): + await buffer.read() + + +def test_flow_control_data_queue_feed_pause( + buffer: streams.FlowControlDataQueue, +) -> None: + """Test feeding data and pausing the reader.""" + buffer._protocol._reading_paused = False + buffer.feed_data(object(), 100) + assert buffer._protocol.pause_reading.called + + buffer._protocol._reading_paused = True + buffer._protocol.pause_reading.reset_mock() + buffer.feed_data(object(), 100) + assert not buffer._protocol.pause_reading.called + + +async def test_flow_control_data_queue_resume_on_read( + buffer: streams.FlowControlDataQueue, +) -> None: + """Test that the reader is resumed when reading.""" + buffer.feed_data(object(), 100) + + buffer._protocol._reading_paused = True + await buffer.read() + assert buffer._protocol.resume_reading.called + + +async def test_flow_control_data_queue_read_eof( + buffer: streams.FlowControlDataQueue, +) -> None: + """Test that reading after eof raises EofStream.""" + buffer.feed_eof() + with pytest.raises(streams.EofStream): + await buffer.read() From 85ef646d124b4e5c63783d97511afdf15881ddac Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Mon, 18 Nov 2024 21:13:03 -0600 Subject: [PATCH 0980/1511] Increment version to 3.11.4.dev0 (#9968) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 5509a376d15..88f1eb0fd91 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.3" +__version__ = "3.11.4.dev0" from typing import TYPE_CHECKING, Tuple From c451b2421f5c770c0231d96d54be6242b4d7ca91 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 18 Nov 2024 22:04:08 -0600 Subject: [PATCH 0981/1511] [PR #9895/d8ec1b4 backport][3.12] Defer creation of SimpleCookie objects in the web server until needed (#9970) --- CHANGES/9895.misc.rst | 1 + aiohttp/web_response.py | 13 +++++++------ tests/test_web_response.py | 6 ++++++ 3 files changed, 14 insertions(+), 6 deletions(-) create mode 100644 CHANGES/9895.misc.rst diff --git a/CHANGES/9895.misc.rst b/CHANGES/9895.misc.rst new file mode 100644 index 00000000000..1049d5d3f4b --- /dev/null +++ b/CHANGES/9895.misc.rst @@ -0,0 +1 @@ +Improved performance of creating web responses when there are no cookies -- by :user:`bdraco`. 
diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 73063890f9a..59c9b54784a 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -93,7 +93,7 @@ def __init__( self._compression = False self._compression_strategy: int = zlib.Z_DEFAULT_STRATEGY self._compression_force: Optional[ContentCoding] = None - self._cookies = SimpleCookie() + self._cookies: Optional[SimpleCookie] = None self._req: Optional[BaseRequest] = None self._payload_writer: Optional[AbstractStreamWriter] = None @@ -209,6 +209,8 @@ def headers(self) -> "CIMultiDict[str]": @property def cookies(self) -> SimpleCookie: + if self._cookies is None: + self._cookies = SimpleCookie() return self._cookies def set_cookie( @@ -230,10 +232,8 @@ def set_cookie( Sets new cookie or updates existent with new value. Also updates only those params which are not None. """ - old = self._cookies.get(name) - if old is not None and old.coded_value == "": - # deleted cookie - self._cookies.pop(name, None) + if self._cookies is None: + self._cookies = SimpleCookie() self._cookies[name] = value c = self._cookies[name] @@ -277,7 +277,8 @@ def del_cookie( Creates new empty expired cookie. """ # TODO: do we need domain/path here? 
- self._cookies.pop(name, None) + if self._cookies is not None: + self._cookies.pop(name, None) self.set_cookie( name, "", diff --git a/tests/test_web_response.py b/tests/test_web_response.py index 330b4b33939..dc9908fd149 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -862,6 +862,10 @@ def test_response_cookies() -> None: assert resp.cookies == {} assert str(resp.cookies) == "" + resp.set_cookie("name", "value") + assert str(resp.cookies) == "Set-Cookie: name=value; Path=/" + resp.set_cookie("name", "") + assert str(resp.cookies) == 'Set-Cookie: name=""; Path=/' resp.set_cookie("name", "value") assert str(resp.cookies) == "Set-Cookie: name=value; Path=/" resp.set_cookie("name", "other_value") @@ -879,6 +883,8 @@ def test_response_cookies() -> None: "expires=Thu, 01 Jan 1970 00:00:00 GMT; Max-Age=0; Path=/" ) assert Matches(expected) == str(resp.cookies) + resp.del_cookie("name") + assert str(resp.cookies) == Matches(expected) resp.set_cookie("name", "value", domain="local.host") expected = "Set-Cookie: name=value; Domain=local.host; Path=/" From 454f914b3b7f552f575e1ab8964453362eb4ce1b Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 19 Nov 2024 04:44:56 +0000 Subject: [PATCH 0982/1511] [PR #9895/d8ec1b4 backport][3.11] Defer creation of SimpleCookie objects in the web server until needed (#9971) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/9895.misc.rst | 1 + aiohttp/web_response.py | 13 +++++++------ tests/test_web_response.py | 6 ++++++ 3 files changed, 14 insertions(+), 6 deletions(-) create mode 100644 CHANGES/9895.misc.rst diff --git a/CHANGES/9895.misc.rst b/CHANGES/9895.misc.rst new file mode 100644 index 00000000000..1049d5d3f4b --- /dev/null +++ b/CHANGES/9895.misc.rst @@ -0,0 +1 @@ +Improved performance of creating web responses when there are no cookies -- by :user:`bdraco`. 
diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 73063890f9a..59c9b54784a 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -93,7 +93,7 @@ def __init__( self._compression = False self._compression_strategy: int = zlib.Z_DEFAULT_STRATEGY self._compression_force: Optional[ContentCoding] = None - self._cookies = SimpleCookie() + self._cookies: Optional[SimpleCookie] = None self._req: Optional[BaseRequest] = None self._payload_writer: Optional[AbstractStreamWriter] = None @@ -209,6 +209,8 @@ def headers(self) -> "CIMultiDict[str]": @property def cookies(self) -> SimpleCookie: + if self._cookies is None: + self._cookies = SimpleCookie() return self._cookies def set_cookie( @@ -230,10 +232,8 @@ def set_cookie( Sets new cookie or updates existent with new value. Also updates only those params which are not None. """ - old = self._cookies.get(name) - if old is not None and old.coded_value == "": - # deleted cookie - self._cookies.pop(name, None) + if self._cookies is None: + self._cookies = SimpleCookie() self._cookies[name] = value c = self._cookies[name] @@ -277,7 +277,8 @@ def del_cookie( Creates new empty expired cookie. """ # TODO: do we need domain/path here? 
- self._cookies.pop(name, None) + if self._cookies is not None: + self._cookies.pop(name, None) self.set_cookie( name, "", diff --git a/tests/test_web_response.py b/tests/test_web_response.py index 330b4b33939..dc9908fd149 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -862,6 +862,10 @@ def test_response_cookies() -> None: assert resp.cookies == {} assert str(resp.cookies) == "" + resp.set_cookie("name", "value") + assert str(resp.cookies) == "Set-Cookie: name=value; Path=/" + resp.set_cookie("name", "") + assert str(resp.cookies) == 'Set-Cookie: name=""; Path=/' resp.set_cookie("name", "value") assert str(resp.cookies) == "Set-Cookie: name=value; Path=/" resp.set_cookie("name", "other_value") @@ -879,6 +883,8 @@ def test_response_cookies() -> None: "expires=Thu, 01 Jan 1970 00:00:00 GMT; Max-Age=0; Path=/" ) assert Matches(expected) == str(resp.cookies) + resp.del_cookie("name") + assert str(resp.cookies) == Matches(expected) resp.set_cookie("name", "value", domain="local.host") expected = "Set-Cookie: name=value; Domain=local.host; Path=/" From 39f6832c910b7244f3cdadccb2bb99e418f7d243 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 19 Nov 2024 04:54:51 +0000 Subject: [PATCH 0983/1511] [PR #9972/9916d329 backport][3.11] Revert "Only construct the `allowed_methods` `set` once for a `StaticResource`" (#9973) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/9972.bugfix.rst | 3 +++ aiohttp/web_urldispatcher.py | 3 +-- 2 files changed, 4 insertions(+), 2 deletions(-) create mode 100644 CHANGES/9972.bugfix.rst diff --git a/CHANGES/9972.bugfix.rst b/CHANGES/9972.bugfix.rst new file mode 100644 index 00000000000..d8d69d09044 --- /dev/null +++ b/CHANGES/9972.bugfix.rst @@ -0,0 +1,3 @@ +Reverted an optimization to avoid rebuilding the allowed methods for ``StaticResource`` on every request -- by :user:`bdraco`. 
+ +``aiohttp-cors`` needs to be able to modify the allowed methods at run time via this internal. diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index e4d43514ebe..025962bc594 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -580,7 +580,6 @@ def __init__( "HEAD", self._handle, self, expect_handler=expect_handler ), } - self._allowed_methods = set(self._routes) def url_for( # type: ignore[override] self, @@ -650,7 +649,7 @@ async def resolve(self, request: Request) -> _Resolve: if not path.startswith(self._prefix2) and path != self._prefix: return None, set() - allowed_methods = self._allowed_methods + allowed_methods = set(self._routes) if method not in allowed_methods: return None, allowed_methods From 007d9b16cad02c112fcaa8ec20a83fe1aaf33141 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 19 Nov 2024 04:58:53 +0000 Subject: [PATCH 0984/1511] [PR #9972/9916d329 backport][3.12] Revert "Only construct the `allowed_methods` `set` once for a `StaticResource`" (#9974) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/9972.bugfix.rst | 3 +++ aiohttp/web_urldispatcher.py | 3 +-- 2 files changed, 4 insertions(+), 2 deletions(-) create mode 100644 CHANGES/9972.bugfix.rst diff --git a/CHANGES/9972.bugfix.rst b/CHANGES/9972.bugfix.rst new file mode 100644 index 00000000000..d8d69d09044 --- /dev/null +++ b/CHANGES/9972.bugfix.rst @@ -0,0 +1,3 @@ +Reverted an optimization to avoid rebuilding the allowed methods for ``StaticResource`` on every request -- by :user:`bdraco`. + +``aiohttp-cors`` needs to be able to modify the allowed methods at run time via this internal. 
diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index e4d43514ebe..025962bc594 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -580,7 +580,6 @@ def __init__( "HEAD", self._handle, self, expect_handler=expect_handler ), } - self._allowed_methods = set(self._routes) def url_for( # type: ignore[override] self, @@ -650,7 +649,7 @@ async def resolve(self, request: Request) -> _Resolve: if not path.startswith(self._prefix2) and path != self._prefix: return None, set() - allowed_methods = self._allowed_methods + allowed_methods = set(self._routes) if method not in allowed_methods: return None, allowed_methods From 72935db03ddf3290fd37bbb8dd7d6a6d7eef5c2b Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 18 Nov 2024 23:26:46 -0600 Subject: [PATCH 0985/1511] [PR #9975/8704bc7 backport][3.11] Add test coverage for accessing StaticResource._routes (#9977) --- CHANGES/9975.bugfix.rst | 1 + tests/test_urldispatch.py | 38 +++++++++++++++++++++++++++++++++++++- 2 files changed, 38 insertions(+), 1 deletion(-) create mode 120000 CHANGES/9975.bugfix.rst diff --git a/CHANGES/9975.bugfix.rst b/CHANGES/9975.bugfix.rst new file mode 120000 index 00000000000..18bf311de3b --- /dev/null +++ b/CHANGES/9975.bugfix.rst @@ -0,0 +1 @@ +9972.bugfix.rst \ No newline at end of file diff --git a/tests/test_urldispatch.py b/tests/test_urldispatch.py index 8c3eaed13b7..72555adfe76 100644 --- a/tests/test_urldispatch.py +++ b/tests/test_urldispatch.py @@ -1,6 +1,7 @@ import pathlib import re from collections.abc import Container, Iterable, Mapping, MutableMapping, Sized +from typing import NoReturn from urllib.parse import quote, unquote import pytest @@ -486,7 +487,42 @@ async def test_static_not_match(router) -> None: assert (None, set()) == ret -def test_dynamic_with_trailing_slash(router) -> None: +async def test_add_static_access_resources(router: web.UrlDispatcher) -> None: + """Test accessing resource._routes 
externally. + + aiohttp-cors accesses the resource._routes, this test ensures that this + continues to work. + """ + # https://github.com/aio-libs/aiohttp-cors/blob/38c6c17bffc805e46baccd7be1b4fd8c69d95dc3/aiohttp_cors/urldispatcher_router_adapter.py#L187 + resource = router.add_static( + "/st", pathlib.Path(aiohttp.__file__).parent, name="static" + ) + resource._routes[hdrs.METH_OPTIONS] = resource._routes[hdrs.METH_GET] + mapping, allowed_methods = await resource.resolve( + make_mocked_request("OPTIONS", "/st/path") + ) + assert mapping is not None + assert allowed_methods == {hdrs.METH_GET, hdrs.METH_OPTIONS, hdrs.METH_HEAD} + + +async def test_add_static_set_options_route(router: web.UrlDispatcher) -> None: + """Ensure set_options_route works as expected.""" + resource = router.add_static( + "/st", pathlib.Path(aiohttp.__file__).parent, name="static" + ) + + async def handler(request: web.Request) -> NoReturn: + assert False + + resource.set_options_route(handler) + mapping, allowed_methods = await resource.resolve( + make_mocked_request("OPTIONS", "/st/path") + ) + assert mapping is not None + assert allowed_methods == {hdrs.METH_GET, hdrs.METH_OPTIONS, hdrs.METH_HEAD} + + +def test_dynamic_with_trailing_slash(router: web.UrlDispatcher) -> None: handler = make_handler() router.add_route("GET", "/get/{name}/", handler, name="name") route = router["name"] From 97d25b14354a560bb0d80bdf00e51a14a94b3333 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Mon, 18 Nov 2024 23:29:01 -0600 Subject: [PATCH 0986/1511] [PR #9975/8704bc7 backport][3.12] Add test coverage for accessing StaticResource._routes (#9978) --- CHANGES/9975.bugfix.rst | 1 + tests/test_urldispatch.py | 38 +++++++++++++++++++++++++++++++++++++- 2 files changed, 38 insertions(+), 1 deletion(-) create mode 120000 CHANGES/9975.bugfix.rst diff --git a/CHANGES/9975.bugfix.rst b/CHANGES/9975.bugfix.rst new file mode 120000 index 00000000000..18bf311de3b --- /dev/null +++ b/CHANGES/9975.bugfix.rst @@ -0,0 +1 @@ +9972.bugfix.rst \ No newline at end of file diff --git a/tests/test_urldispatch.py b/tests/test_urldispatch.py index 8c3eaed13b7..72555adfe76 100644 --- a/tests/test_urldispatch.py +++ b/tests/test_urldispatch.py @@ -1,6 +1,7 @@ import pathlib import re from collections.abc import Container, Iterable, Mapping, MutableMapping, Sized +from typing import NoReturn from urllib.parse import quote, unquote import pytest @@ -486,7 +487,42 @@ async def test_static_not_match(router) -> None: assert (None, set()) == ret -def test_dynamic_with_trailing_slash(router) -> None: +async def test_add_static_access_resources(router: web.UrlDispatcher) -> None: + """Test accessing resource._routes externally. + + aiohttp-cors accesses the resource._routes, this test ensures that this + continues to work. 
+ """ + # https://github.com/aio-libs/aiohttp-cors/blob/38c6c17bffc805e46baccd7be1b4fd8c69d95dc3/aiohttp_cors/urldispatcher_router_adapter.py#L187 + resource = router.add_static( + "/st", pathlib.Path(aiohttp.__file__).parent, name="static" + ) + resource._routes[hdrs.METH_OPTIONS] = resource._routes[hdrs.METH_GET] + mapping, allowed_methods = await resource.resolve( + make_mocked_request("OPTIONS", "/st/path") + ) + assert mapping is not None + assert allowed_methods == {hdrs.METH_GET, hdrs.METH_OPTIONS, hdrs.METH_HEAD} + + +async def test_add_static_set_options_route(router: web.UrlDispatcher) -> None: + """Ensure set_options_route works as expected.""" + resource = router.add_static( + "/st", pathlib.Path(aiohttp.__file__).parent, name="static" + ) + + async def handler(request: web.Request) -> NoReturn: + assert False + + resource.set_options_route(handler) + mapping, allowed_methods = await resource.resolve( + make_mocked_request("OPTIONS", "/st/path") + ) + assert mapping is not None + assert allowed_methods == {hdrs.METH_GET, hdrs.METH_OPTIONS, hdrs.METH_HEAD} + + +def test_dynamic_with_trailing_slash(router: web.UrlDispatcher) -> None: handler = make_handler() router.add_route("GET", "/get/{name}/", handler, name="name") route = router["name"] From ee0657dfc26dc9f2aecf87f651b9dae0d14d9740 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 19 Nov 2024 05:43:09 +0000 Subject: [PATCH 0987/1511] [PR #9976/2b40f6b7 backport][3.11] Restore only construct the allowed_methods set once for a `StaticResource` (#9979) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/9972.bugfix.rst | 4 +--- CHANGES/9976.bugfix.rst | 1 + aiohttp/web_urldispatcher.py | 4 +++- tests/test_urldispatch.py | 1 + 4 files changed, 6 insertions(+), 4 deletions(-) create mode 120000 CHANGES/9976.bugfix.rst diff --git a/CHANGES/9972.bugfix.rst b/CHANGES/9972.bugfix.rst index d8d69d09044..c3ea8ef19d7 100644 --- a/CHANGES/9972.bugfix.rst +++ b/CHANGES/9972.bugfix.rst @@ -1,3 +1 @@ -Reverted an optimization to avoid rebuilding the allowed methods for ``StaticResource`` on every request -- by :user:`bdraco`. - -``aiohttp-cors`` needs to be able to modify the allowed methods at run time via this internal. +Fixed ``StaticResource`` not allowing the ``OPTIONS`` method after calling ``set_options_route`` -- by :user:`bdraco`. diff --git a/CHANGES/9976.bugfix.rst b/CHANGES/9976.bugfix.rst new file mode 120000 index 00000000000..18bf311de3b --- /dev/null +++ b/CHANGES/9976.bugfix.rst @@ -0,0 +1 @@ +9972.bugfix.rst \ No newline at end of file diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 025962bc594..5e3b71ff2e6 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -580,6 +580,7 @@ def __init__( "HEAD", self._handle, self, expect_handler=expect_handler ), } + self._allowed_methods = set(self._routes) def url_for( # type: ignore[override] self, @@ -642,6 +643,7 @@ def set_options_route(self, handler: Handler) -> None: self._routes["OPTIONS"] = ResourceRoute( "OPTIONS", handler, self, expect_handler=self._expect_handler ) + self._allowed_methods.add("OPTIONS") async def resolve(self, request: Request) -> _Resolve: path = request.rel_url.path_safe @@ -649,7 +651,7 @@ async def resolve(self, request: Request) -> _Resolve: if not path.startswith(self._prefix2) and path != self._prefix: return None, set() - allowed_methods = set(self._routes) + allowed_methods = self._allowed_methods if method not in allowed_methods: return None, allowed_methods diff --git 
a/tests/test_urldispatch.py b/tests/test_urldispatch.py index 72555adfe76..79eda49e196 100644 --- a/tests/test_urldispatch.py +++ b/tests/test_urldispatch.py @@ -498,6 +498,7 @@ async def test_add_static_access_resources(router: web.UrlDispatcher) -> None: "/st", pathlib.Path(aiohttp.__file__).parent, name="static" ) resource._routes[hdrs.METH_OPTIONS] = resource._routes[hdrs.METH_GET] + resource._allowed_methods.add(hdrs.METH_OPTIONS) mapping, allowed_methods = await resource.resolve( make_mocked_request("OPTIONS", "/st/path") ) From 3cc00d3cdbab8ec6cf511700b15e33858a608d40 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 19 Nov 2024 05:51:53 +0000 Subject: [PATCH 0988/1511] [PR #9976/2b40f6b7 backport][3.12] Restore only construct the allowed_methods set once for a `StaticResource` (#9980) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/9972.bugfix.rst | 4 +--- CHANGES/9976.bugfix.rst | 1 + aiohttp/web_urldispatcher.py | 4 +++- tests/test_urldispatch.py | 1 + 4 files changed, 6 insertions(+), 4 deletions(-) create mode 120000 CHANGES/9976.bugfix.rst diff --git a/CHANGES/9972.bugfix.rst b/CHANGES/9972.bugfix.rst index d8d69d09044..c3ea8ef19d7 100644 --- a/CHANGES/9972.bugfix.rst +++ b/CHANGES/9972.bugfix.rst @@ -1,3 +1 @@ -Reverted an optimization to avoid rebuilding the allowed methods for ``StaticResource`` on every request -- by :user:`bdraco`. - -``aiohttp-cors`` needs to be able to modify the allowed methods at run time via this internal. +Fixed ``StaticResource`` not allowing the ``OPTIONS`` method after calling ``set_options_route`` -- by :user:`bdraco`. 
diff --git a/CHANGES/9976.bugfix.rst b/CHANGES/9976.bugfix.rst new file mode 120000 index 00000000000..18bf311de3b --- /dev/null +++ b/CHANGES/9976.bugfix.rst @@ -0,0 +1 @@ +9972.bugfix.rst \ No newline at end of file diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 025962bc594..5e3b71ff2e6 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -580,6 +580,7 @@ def __init__( "HEAD", self._handle, self, expect_handler=expect_handler ), } + self._allowed_methods = set(self._routes) def url_for( # type: ignore[override] self, @@ -642,6 +643,7 @@ def set_options_route(self, handler: Handler) -> None: self._routes["OPTIONS"] = ResourceRoute( "OPTIONS", handler, self, expect_handler=self._expect_handler ) + self._allowed_methods.add("OPTIONS") async def resolve(self, request: Request) -> _Resolve: path = request.rel_url.path_safe @@ -649,7 +651,7 @@ async def resolve(self, request: Request) -> _Resolve: if not path.startswith(self._prefix2) and path != self._prefix: return None, set() - allowed_methods = set(self._routes) + allowed_methods = self._allowed_methods if method not in allowed_methods: return None, allowed_methods diff --git a/tests/test_urldispatch.py b/tests/test_urldispatch.py index 72555adfe76..79eda49e196 100644 --- a/tests/test_urldispatch.py +++ b/tests/test_urldispatch.py @@ -498,6 +498,7 @@ async def test_add_static_access_resources(router: web.UrlDispatcher) -> None: "/st", pathlib.Path(aiohttp.__file__).parent, name="static" ) resource._routes[hdrs.METH_OPTIONS] = resource._routes[hdrs.METH_GET] + resource._allowed_methods.add(hdrs.METH_OPTIONS) mapping, allowed_methods = await resource.resolve( make_mocked_request("OPTIONS", "/st/path") ) From f3d27ac4990abb86b0625c587bdd7d1e1af123b5 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Mon, 18 Nov 2024 23:56:54 -0600 Subject: [PATCH 0989/1511] Release 3.11.4 (#9981) --- CHANGES.rst | 30 ++++++++++++++++++++++++++++++ CHANGES/9895.misc.rst | 1 - CHANGES/9972.bugfix.rst | 1 - CHANGES/9975.bugfix.rst | 1 - CHANGES/9976.bugfix.rst | 1 - aiohttp/__init__.py | 2 +- 6 files changed, 31 insertions(+), 5 deletions(-) delete mode 100644 CHANGES/9895.misc.rst delete mode 100644 CHANGES/9972.bugfix.rst delete mode 120000 CHANGES/9975.bugfix.rst delete mode 120000 CHANGES/9976.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index 896713180eb..121ef0b2db7 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,36 @@ .. towncrier release notes start +3.11.4 (2024-11-18) +=================== + +Bug fixes +--------- + +- Fixed ``StaticResource`` not allowing the ``OPTIONS`` method after calling ``set_options_route`` -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9972`, :issue:`9975`, :issue:`9976`. + + + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of creating web responses when there are no cookies -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9895`. + + + + +---- + + 3.11.3 (2024-11-18) =================== diff --git a/CHANGES/9895.misc.rst b/CHANGES/9895.misc.rst deleted file mode 100644 index 1049d5d3f4b..00000000000 --- a/CHANGES/9895.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performance of creating web responses when there are no cookies -- by :user:`bdraco`. diff --git a/CHANGES/9972.bugfix.rst b/CHANGES/9972.bugfix.rst deleted file mode 100644 index c3ea8ef19d7..00000000000 --- a/CHANGES/9972.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed ``StaticResource`` not allowing the ``OPTIONS`` method after calling ``set_options_route`` -- by :user:`bdraco`. 
diff --git a/CHANGES/9975.bugfix.rst b/CHANGES/9975.bugfix.rst deleted file mode 120000 index 18bf311de3b..00000000000 --- a/CHANGES/9975.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -9972.bugfix.rst \ No newline at end of file diff --git a/CHANGES/9976.bugfix.rst b/CHANGES/9976.bugfix.rst deleted file mode 120000 index 18bf311de3b..00000000000 --- a/CHANGES/9976.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -9972.bugfix.rst \ No newline at end of file diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 88f1eb0fd91..f9a695716be 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.4.dev0" +__version__ = "3.11.4" from typing import TYPE_CHECKING, Tuple From 7af8bbfabb864fb947027c2a35ebbabcf8cc7ed4 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 19 Nov 2024 00:28:02 -0600 Subject: [PATCH 0990/1511] Increment version to 3.11.5.dev0 (#9983) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index f9a695716be..98b2b436b5d 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.4" +__version__ = "3.11.5.dev0" from typing import TYPE_CHECKING, Tuple From 17b005a1df0b02aa31fd710858a231f98290308f Mon Sep 17 00:00:00 2001 From: Andrew Svetlov <andrew.svetlov@gmail.com> Date: Tue, 19 Nov 2024 08:21:50 +0100 Subject: [PATCH 0991/1511] Add target-version to black config (#9962) (#9986) Add `target-version` option to black config in `pyproject.toml` and reformat code. https://black.readthedocs.io/en/stable/usage_and_configuration/the_basics.html#t-target-version (cherry picked from commit 00fd4eb62582caccfc721cb0c36e61f0350c2009) <!-- Thank you for your contribution! --> ## What do these changes do? <!-- Please give a short brief about these changes. --> ## Are there changes in behavior for the user? <!-- Outline any notable behaviour for the end users. 
--> ## Is it a substantial burden for the maintainers to support this? <!-- Stop right there! Pause. Just for a minute... Can you think of anything obvious that would complicate the ongoing development of this project? Try to consider if you'd be able to maintain it throughout the next 5 years. Does it seem viable? Tell us your thoughts! We'd very much love to hear what the consequences of merging this patch might be... This will help us assess if your change is something we'd want to entertain early in the review process. Thank you in advance! --> ## Related issue number <!-- Are there any issues opened that will be resolved by merging this change? --> <!-- Remember to prefix with 'Fixes' if it should close the issue (e.g. 'Fixes #123'). --> ## Checklist - [ ] I think the code is well written - [ ] Unit tests for the changes exist - [ ] Documentation reflects the changes - [ ] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. - [ ] Add a new news fragment into the `CHANGES/` folder * name it `<issue_or_pr_num>.<type>.rst` (e.g. `588.bugfix.rst`) * if you don't have an issue number, change it to the pull request number after creating the PR * `.bugfix`: A bug fix for something the maintainers deemed an improper undesired behavior that got corrected to match pre-agreed expectations. * `.feature`: A new behavior, public APIs. That sort of stuff. * `.deprecation`: A declaration of future API removals and breaking changes in behavior. * `.breaking`: When something public is removed in a breaking way. Could be deprecated in an earlier release. * `.doc`: Notable updates to the documentation structure or build process. * `.packaging`: Notes for downstreams about unobvious side effects and tooling. Changes in the test invocation considerations and runtime assumptions. * `.contrib`: Stuff that affects the contributor experience. e.g. 
Running tests, building the docs, setting up the development environment. * `.misc`: Changes that are hard to assign to any of the above categories. * Make sure to use full sentences with correct case and punctuation, for example: ```rst Fixed issue with non-ascii contents in doctest text files -- by :user:`contributor-gh-handle`. ``` Use the past tense or the present tense a non-imperative mood, referring to what's changed compared to the last released version of this project. Co-authored-by: Marc Mueller <30130371+cdce8p@users.noreply.github.com> --- aiohttp/pytest_plugin.py | 10 +- aiohttp/resolver.py | 2 +- aiohttp/web_server.py | 2 +- pyproject.toml | 5 + tests/test_client_functional.py | 7 +- tests/test_client_session.py | 22 ++-- tests/test_client_ws.py | 29 +++-- tests/test_client_ws_functional.py | 13 ++- tests/test_connector.py | 155 ++++++++++++++++--------- tests/test_http_writer.py | 9 +- tests/test_proxy.py | 80 +++++++------ tests/test_proxy_functional.py | 17 +-- tests/test_web_response.py | 7 +- tests/test_web_websocket_functional.py | 13 ++- 14 files changed, 227 insertions(+), 144 deletions(-) diff --git a/aiohttp/pytest_plugin.py b/aiohttp/pytest_plugin.py index 6da4852ab46..7ce60faa4a4 100644 --- a/aiohttp/pytest_plugin.py +++ b/aiohttp/pytest_plugin.py @@ -43,7 +43,7 @@ async def __call__( __param: Application, *, server_kwargs: Optional[Dict[str, Any]] = None, - **kwargs: Any + **kwargs: Any, ) -> TestClient[Request, Application]: ... @overload async def __call__( @@ -51,7 +51,7 @@ async def __call__( __param: BaseTestServer, *, server_kwargs: Optional[Dict[str, Any]] = None, - **kwargs: Any + **kwargs: Any, ) -> TestClient[BaseRequest, None]: ... @@ -379,7 +379,7 @@ async def go( __param: Application, *, server_kwargs: Optional[Dict[str, Any]] = None, - **kwargs: Any + **kwargs: Any, ) -> TestClient[Request, Application]: ... 
@overload @@ -387,14 +387,14 @@ async def go( __param: BaseTestServer, *, server_kwargs: Optional[Dict[str, Any]] = None, - **kwargs: Any + **kwargs: Any, ) -> TestClient[BaseRequest, None]: ... async def go( __param: Union[Application, BaseTestServer], *args: Any, server_kwargs: Optional[Dict[str, Any]] = None, - **kwargs: Any + **kwargs: Any, ) -> TestClient[Any, Any]: if isinstance(__param, Callable) and not isinstance( # type: ignore[arg-type] __param, (Application, BaseTestServer) diff --git a/aiohttp/resolver.py b/aiohttp/resolver.py index a988b0bf47f..9c744514fae 100644 --- a/aiohttp/resolver.py +++ b/aiohttp/resolver.py @@ -85,7 +85,7 @@ def __init__( self, loop: Optional[asyncio.AbstractEventLoop] = None, *args: Any, - **kwargs: Any + **kwargs: Any, ) -> None: if aiodns is None: raise RuntimeError("Resolver requires aiodns library") diff --git a/aiohttp/web_server.py b/aiohttp/web_server.py index ffc198d5780..b6ac25ac1a5 100644 --- a/aiohttp/web_server.py +++ b/aiohttp/web_server.py @@ -20,7 +20,7 @@ def __init__( request_factory: Optional[_RequestFactory] = None, handler_cancellation: bool = False, loop: Optional[asyncio.AbstractEventLoop] = None, - **kwargs: Any + **kwargs: Any, ) -> None: self._loop = loop or asyncio.get_running_loop() self._connections: Dict[RequestHandler, asyncio.Transport] = {} diff --git a/pyproject.toml b/pyproject.toml index 33962686919..69f8a6b58b6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -87,3 +87,8 @@ ignore-words-list = 'te' # TODO(3.13): Remove aiohttp.helpers once https://github.com/python/cpython/pull/106771 # is available in all supported cpython versions exclude-modules = "(^aiohttp\\.helpers)" + +[tool.black] +# TODO: Remove when project metadata is moved here. +# Black can read the value from [project.requires-python]. 
+target-version = ["py39", "py310", "py311", "py312"] diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 73e5929ee3b..b34ccdb600d 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -2955,9 +2955,10 @@ async def close(self) -> None: connector = aiohttp.TCPConnector(resolver=FakeResolver(), ssl=False) - async with aiohttp.ClientSession(connector=connector) as client, client.get( - url_from, auth=aiohttp.BasicAuth("user", "pass") - ) as resp: + async with ( + aiohttp.ClientSession(connector=connector) as client, + client.get(url_from, auth=aiohttp.BasicAuth("user", "pass")) as resp, + ): assert len(resp.history) == 1 assert str(resp.url) == "http://example.com" assert resp.status == 200 diff --git a/tests/test_client_session.py b/tests/test_client_session.py index d5752d0fd97..65f80b6abe9 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -515,11 +515,12 @@ async def create_connection(req, traces, timeout): return create_mocked_conn() connector = session._connector - with mock.patch.object(connector, "connect", connect), mock.patch.object( - connector, "_create_connection", create_connection - ), mock.patch.object(connector, "_release"), mock.patch( - "aiohttp.client.os" - ) as m_os: + with ( + mock.patch.object(connector, "connect", connect), + mock.patch.object(connector, "_create_connection", create_connection), + mock.patch.object(connector, "_release"), + mock.patch("aiohttp.client.os") as m_os, + ): m_os.urandom.return_value = key_data await session.ws_connect(f"{protocol}://example") @@ -576,11 +577,12 @@ async def create_connection( return create_mocked_conn() connector = session._connector - with mock.patch.object(connector, "connect", connect), mock.patch.object( - connector, "_create_connection", create_connection - ), mock.patch.object(connector, "_release"), mock.patch( - "aiohttp.client.os" - ) as m_os: + with ( + mock.patch.object(connector, "connect", 
connect), + mock.patch.object(connector, "_create_connection", create_connection), + mock.patch.object(connector, "_release"), + mock.patch("aiohttp.client.os") as m_os, + ): m_os.urandom.return_value = key_data await session.ws_connect(f"{protocol}://example") diff --git a/tests/test_client_ws.py b/tests/test_client_ws.py index eedf65f86a1..92b5d117db7 100644 --- a/tests/test_client_ws.py +++ b/tests/test_client_ws.py @@ -51,9 +51,10 @@ async def test_ws_connect_read_timeout_is_reset_to_inf( hdrs.SEC_WEBSOCKET_PROTOCOL: "chat", } resp.connection.protocol.read_timeout = 0.5 - with mock.patch("aiohttp.client.os") as m_os, mock.patch( - "aiohttp.client.ClientSession.request" - ) as m_req: + with ( + mock.patch("aiohttp.client.os") as m_os, + mock.patch("aiohttp.client.ClientSession.request") as m_req, + ): m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() m_req.return_value.set_result(resp) @@ -80,9 +81,10 @@ async def test_ws_connect_read_timeout_stays_inf( hdrs.SEC_WEBSOCKET_PROTOCOL: "chat", } resp.connection.protocol.read_timeout = None - with mock.patch("aiohttp.client.os") as m_os, mock.patch( - "aiohttp.client.ClientSession.request" - ) as m_req: + with ( + mock.patch("aiohttp.client.os") as m_os, + mock.patch("aiohttp.client.ClientSession.request") as m_req, + ): m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() m_req.return_value.set_result(resp) @@ -111,9 +113,10 @@ async def test_ws_connect_read_timeout_reset_to_max( hdrs.SEC_WEBSOCKET_PROTOCOL: "chat", } resp.connection.protocol.read_timeout = 0.5 - with mock.patch("aiohttp.client.os") as m_os, mock.patch( - "aiohttp.client.ClientSession.request" - ) as m_req: + with ( + mock.patch("aiohttp.client.os") as m_os, + mock.patch("aiohttp.client.ClientSession.request") as m_req, + ): m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() m_req.return_value.set_result(resp) @@ -416,9 +419,11 @@ async def test_close_connection_lost( 
hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, } mresp.connection.protocol.read_timeout = None - with mock.patch("aiohttp.client.WebSocketWriter"), mock.patch( - "aiohttp.client.os" - ) as m_os, mock.patch("aiohttp.client.ClientSession.request") as m_req: + with ( + mock.patch("aiohttp.client.WebSocketWriter"), + mock.patch("aiohttp.client.os") as m_os, + mock.patch("aiohttp.client.ClientSession.request") as m_req, + ): m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() m_req.return_value.set_result(mresp) diff --git a/tests/test_client_ws_functional.py b/tests/test_client_ws_functional.py index 9ab5dc52b1c..7ede7432adf 100644 --- a/tests/test_client_ws_functional.py +++ b/tests/test_client_ws_functional.py @@ -756,11 +756,14 @@ async def handler(request: web.Request) -> NoReturn: # since if we closed the connection normally, the client would # would cancel the heartbeat task and we wouldn't get a ping assert resp._conn is not None - with mock.patch.object( - resp._conn.transport, "write", side_effect=ClientConnectionResetError - ), mock.patch.object( - resp._writer, "send_frame", wraps=resp._writer.send_frame - ) as send_frame: + with ( + mock.patch.object( + resp._conn.transport, "write", side_effect=ClientConnectionResetError + ), + mock.patch.object( + resp._writer, "send_frame", wraps=resp._writer.send_frame + ) as send_frame, + ): await resp.receive() ping_count = send_frame.call_args_list.count(mock.call(b"", WSMsgType.PING)) # Connection should be closed roughly after 1.5x heartbeat. 
diff --git a/tests/test_connector.py b/tests/test_connector.py index 32184130dda..483759a4180 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -783,16 +783,24 @@ def get_extra_info(param): assert False - with mock.patch.object( - conn, "_resolve_host", autospec=True, spec_set=True, side_effect=_resolve_host - ), mock.patch.object( - conn._loop, - "create_connection", - autospec=True, - spec_set=True, - side_effect=create_connection, - ), mock.patch( - "aiohttp.connector.aiohappyeyeballs.start_connection", start_connection + with ( + mock.patch.object( + conn, + "_resolve_host", + autospec=True, + spec_set=True, + side_effect=_resolve_host, + ), + mock.patch.object( + conn._loop, + "create_connection", + autospec=True, + spec_set=True, + side_effect=create_connection, + ), + mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", start_connection + ), ): established_connection = await conn.connect(req, [], ClientTimeout()) @@ -945,16 +953,24 @@ async def create_connection(*args, **kwargs): pr = create_mocked_conn(loop) return tr, pr - with mock.patch.object( - conn, "_resolve_host", autospec=True, spec_set=True, side_effect=_resolve_host - ), mock.patch.object( - conn._loop, - "create_connection", - autospec=True, - spec_set=True, - side_effect=create_connection, - ), mock.patch( - "aiohttp.connector.aiohappyeyeballs.start_connection", start_connection + with ( + mock.patch.object( + conn, + "_resolve_host", + autospec=True, + spec_set=True, + side_effect=_resolve_host, + ), + mock.patch.object( + conn._loop, + "create_connection", + autospec=True, + spec_set=True, + side_effect=create_connection, + ), + mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", start_connection + ), ): established_connection = await conn.connect(req, [], ClientTimeout()) @@ -1113,16 +1129,24 @@ async def create_connection( pr = create_mocked_conn(loop) return tr, pr - with mock.patch.object( - conn, "_resolve_host", autospec=True, 
spec_set=True, side_effect=_resolve_host - ), mock.patch.object( - conn._loop, - "create_connection", - autospec=True, - spec_set=True, - side_effect=create_connection, - ), mock.patch( - "aiohttp.connector.aiohappyeyeballs.start_connection", start_connection + with ( + mock.patch.object( + conn, + "_resolve_host", + autospec=True, + spec_set=True, + side_effect=_resolve_host, + ), + mock.patch.object( + conn._loop, + "create_connection", + autospec=True, + spec_set=True, + side_effect=create_connection, + ), + mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", start_connection + ), ): established_connection = await conn.connect(req, [], ClientTimeout()) @@ -1594,8 +1618,11 @@ async def test_exception_during_connetion_create_tracing( assert not conn._acquired assert key not in conn._acquired_per_host - with pytest.raises(asyncio.CancelledError), mock.patch.object( - conn, "_create_connection", autospec=True, spec_set=True, return_value=proto + with ( + pytest.raises(asyncio.CancelledError), + mock.patch.object( + conn, "_create_connection", autospec=True, spec_set=True, return_value=proto + ), ): await conn.connect(req, traces, ClientTimeout()) @@ -1625,8 +1652,11 @@ async def test_exception_during_connection_queued_tracing( assert not conn._acquired assert key not in conn._acquired_per_host - with pytest.raises(asyncio.CancelledError), mock.patch.object( - conn, "_create_connection", autospec=True, spec_set=True, return_value=proto + with ( + pytest.raises(asyncio.CancelledError), + mock.patch.object( + conn, "_create_connection", autospec=True, spec_set=True, return_value=proto + ), ): resp1 = await conn.connect(req, traces, ClientTimeout()) assert resp1 @@ -1663,8 +1693,11 @@ async def test_exception_during_connection_reuse_tracing( assert not conn._acquired assert key not in conn._acquired_per_host - with pytest.raises(asyncio.CancelledError), mock.patch.object( - conn, "_create_connection", autospec=True, spec_set=True, return_value=proto + 
with ( + pytest.raises(asyncio.CancelledError), + mock.patch.object( + conn, "_create_connection", autospec=True, spec_set=True, return_value=proto + ), ): resp = await conn.connect(req, traces, ClientTimeout()) with mock.patch.object(resp.protocol, "should_close", False): @@ -1888,8 +1921,9 @@ async def test_cleanup_closed( async def test_cleanup_closed_is_noop_on_fixed_cpython() -> None: """Ensure that enable_cleanup_closed is a noop on fixed Python versions.""" - with mock.patch("aiohttp.connector.NEEDS_CLEANUP_CLOSED", False), pytest.warns( - DeprecationWarning, match="cleanup_closed ignored" + with ( + mock.patch("aiohttp.connector.NEEDS_CLEANUP_CLOSED", False), + pytest.warns(DeprecationWarning, match="cleanup_closed ignored"), ): conn = aiohttp.BaseConnector(enable_cleanup_closed=True) assert conn._cleanup_closed_disabled is True @@ -2143,9 +2177,12 @@ async def delay_resolve(*args: object, **kwargs: object) -> List[ResolveResult]: req = ClientRequest( "GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock() ) - with mock.patch.object(conn._resolver, "resolve", delay_resolve), mock.patch( - "aiohttp.connector.aiohappyeyeballs.start_connection", - side_effect=OSError(1, "Forced connection to fail"), + with ( + mock.patch.object(conn._resolver, "resolve", delay_resolve), + mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + side_effect=OSError(1, "Forced connection to fail"), + ), ): task1 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) @@ -2193,9 +2230,12 @@ async def delay_resolve(*args: object, **kwargs: object) -> List[ResolveResult]: req = ClientRequest( "GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock() ) - with mock.patch.object(conn._resolver, "resolve", delay_resolve), mock.patch( - "aiohttp.connector.aiohappyeyeballs.start_connection", - side_effect=OSError(1, "Forced connection to fail"), + with ( + mock.patch.object(conn._resolver, "resolve", delay_resolve), + mock.patch( + 
"aiohttp.connector.aiohappyeyeballs.start_connection", + side_effect=OSError(1, "Forced connection to fail"), + ), ): task1 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) @@ -2243,9 +2283,12 @@ async def delay_resolve(*args: object, **kwargs: object) -> List[ResolveResult]: req = ClientRequest( "GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock() ) - with mock.patch.object(conn._resolver, "resolve", delay_resolve), mock.patch( - "aiohttp.connector.aiohappyeyeballs.start_connection", - side_effect=OSError(1, "Forced connection to fail"), + with ( + mock.patch.object(conn._resolver, "resolve", delay_resolve), + mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + side_effect=OSError(1, "Forced connection to fail"), + ), ): task1 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) @@ -2301,9 +2344,12 @@ async def delay_resolve(*args: object, **kwargs: object) -> List[ResolveResult]: req = ClientRequest( "GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock() ) - with mock.patch.object(conn._resolver, "resolve", delay_resolve), mock.patch( - "aiohttp.connector.aiohappyeyeballs.start_connection", - side_effect=OSError(1, "Forced connection to fail"), + with ( + mock.patch.object(conn._resolver, "resolve", delay_resolve), + mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + side_effect=OSError(1, "Forced connection to fail"), + ), ): task1 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) @@ -2366,9 +2412,12 @@ async def delay_resolve(*args: object, **kwargs: object) -> List[ResolveResult]: req = ClientRequest( "GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock() ) - with mock.patch.object(conn._resolver, "resolve", delay_resolve), mock.patch( - "aiohttp.connector.aiohappyeyeballs.start_connection", - side_effect=OSError(1, "Forced connection to fail"), + with ( + mock.patch.object(conn._resolver, "resolve", delay_resolve), + mock.patch( + 
"aiohttp.connector.aiohappyeyeballs.start_connection", + side_effect=OSError(1, "Forced connection to fail"), + ), ): task1 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) diff --git a/tests/test_http_writer.py b/tests/test_http_writer.py index e43b448bc0f..0ed0e615700 100644 --- a/tests/test_http_writer.py +++ b/tests/test_http_writer.py @@ -257,9 +257,12 @@ async def test_write_payload_deflate_compression_chunked_connection_lost( msg.enable_compression("deflate") msg.enable_chunking() await msg.write(b"data") - with pytest.raises( - ClientConnectionResetError, match="Cannot write to closing transport" - ), mock.patch.object(transport, "is_closing", return_value=True): + with ( + pytest.raises( + ClientConnectionResetError, match="Cannot write to closing transport" + ), + mock.patch.object(transport, "is_closing", return_value=True), + ): await msg.write_eof(b"end") diff --git a/tests/test_proxy.py b/tests/test_proxy.py index 2fedafb4595..1679b68909f 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -417,18 +417,21 @@ def close(self) -> None: fingerprint_mock.check.side_effect = aiohttp.ServerFingerprintMismatch( b"exp", b"got", "example.com", 8080 ) - with mock.patch.object( - proxy_req, - "send", - autospec=True, - spec_set=True, - return_value=proxy_resp, - ), mock.patch.object( - proxy_resp, - "start", - autospec=True, - spec_set=True, - return_value=mock.Mock(status=200), + with ( + mock.patch.object( + proxy_req, + "send", + autospec=True, + spec_set=True, + return_value=proxy_resp, + ), + mock.patch.object( + proxy_resp, + "start", + autospec=True, + spec_set=True, + return_value=mock.Mock(status=200), + ), ): connector = self.loop.run_until_complete(make_conn()) host = [ @@ -441,30 +444,35 @@ def close(self) -> None: "flags": 0, } ] - with mock.patch.object( - connector, - "_resolve_host", - autospec=True, - spec_set=True, - return_value=host, - ), mock.patch.object( - connector, - "_get_fingerprint", - autospec=True, - 
spec_set=True, - return_value=fingerprint_mock, - ), mock.patch.object( # Called on connection to http://proxy.example.com - self.loop, - "create_connection", - autospec=True, - spec_set=True, - return_value=(mock.Mock(), mock.Mock()), - ), mock.patch.object( # Called on connection to https://www.python.org - self.loop, - "start_tls", - autospec=True, - spec_set=True, - return_value=TransportMock(), + with ( + mock.patch.object( + connector, + "_resolve_host", + autospec=True, + spec_set=True, + return_value=host, + ), + mock.patch.object( + connector, + "_get_fingerprint", + autospec=True, + spec_set=True, + return_value=fingerprint_mock, + ), + mock.patch.object( # Called on connection to http://proxy.example.com + self.loop, + "create_connection", + autospec=True, + spec_set=True, + return_value=(mock.Mock(), mock.Mock()), + ), + mock.patch.object( # Called on connection to https://www.python.org + self.loop, + "start_tls", + autospec=True, + spec_set=True, + return_value=TransportMock(), + ), ): req = ClientRequest( "GET", diff --git a/tests/test_proxy_functional.py b/tests/test_proxy_functional.py index 5283b375834..0921d5487bb 100644 --- a/tests/test_proxy_functional.py +++ b/tests/test_proxy_functional.py @@ -180,13 +180,16 @@ async def test_https_proxy_unsupported_tls_in_tls( r"$" ) - with pytest.warns( - RuntimeWarning, - match=expected_warning_text, - ), pytest.raises( - ClientConnectionError, - match=expected_exception_reason, - ) as conn_err: + with ( + pytest.warns( + RuntimeWarning, + match=expected_warning_text, + ), + pytest.raises( + ClientConnectionError, + match=expected_exception_reason, + ) as conn_err, + ): async with sess.get(url, proxy=secure_proxy_url, ssl=client_ssl_ctx): pass diff --git a/tests/test_web_response.py b/tests/test_web_response.py index dc9908fd149..f4acf23f61b 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -491,9 +491,10 @@ async def test_force_compression_deflate_large_payload() -> None: 
resp.enable_compression(ContentCoding.deflate) assert resp.compression - with pytest.warns( - Warning, match="Synchronous compression of large response bodies" - ), mock.patch("aiohttp.web_response.LARGE_BODY_SIZE", 2): + with ( + pytest.warns(Warning, match="Synchronous compression of large response bodies"), + mock.patch("aiohttp.web_response.LARGE_BODY_SIZE", 2), + ): msg = await resp.prepare(req) assert msg is not None assert "deflate" == resp.headers.get(hdrs.CONTENT_ENCODING) diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index ebd94607f24..b7494d9265f 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -738,11 +738,14 @@ async def handler(request: web.Request) -> NoReturn: # We patch write here to simulate a connection reset error # since if we closed the connection normally, the server would # would cancel the heartbeat task and we wouldn't get a ping - with mock.patch.object( - ws_server._req.transport, "write", side_effect=ConnectionResetError - ), mock.patch.object( - ws_server._writer, "send_frame", wraps=ws_server._writer.send_frame - ) as send_frame: + with ( + mock.patch.object( + ws_server._req.transport, "write", side_effect=ConnectionResetError + ), + mock.patch.object( + ws_server._writer, "send_frame", wraps=ws_server._writer.send_frame + ) as send_frame, + ): try: await ws_server.receive() finally: From 79f2a9453b90f1a0ad838542e37ce95e40df0f97 Mon Sep 17 00:00:00 2001 From: Andrew Svetlov <andrew.svetlov@gmail.com> Date: Tue, 19 Nov 2024 08:22:32 +0100 Subject: [PATCH 0992/1511] Add target-version to black config (#9962) (#9985) Add `target-version` option to black config in `pyproject.toml` and reformat code. https://black.readthedocs.io/en/stable/usage_and_configuration/the_basics.html#t-target-version (cherry picked from commit 00fd4eb62582caccfc721cb0c36e61f0350c2009) <!-- Thank you for your contribution! --> ## What do these changes do? 
<!-- Please give a short brief about these changes. --> ## Are there changes in behavior for the user? <!-- Outline any notable behaviour for the end users. --> ## Is it a substantial burden for the maintainers to support this? <!-- Stop right there! Pause. Just for a minute... Can you think of anything obvious that would complicate the ongoing development of this project? Try to consider if you'd be able to maintain it throughout the next 5 years. Does it seem viable? Tell us your thoughts! We'd very much love to hear what the consequences of merging this patch might be... This will help us assess if your change is something we'd want to entertain early in the review process. Thank you in advance! --> ## Related issue number <!-- Are there any issues opened that will be resolved by merging this change? --> <!-- Remember to prefix with 'Fixes' if it should close the issue (e.g. 'Fixes #123'). --> ## Checklist - [ ] I think the code is well written - [ ] Unit tests for the changes exist - [ ] Documentation reflects the changes - [ ] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. - [ ] Add a new news fragment into the `CHANGES/` folder * name it `<issue_or_pr_num>.<type>.rst` (e.g. `588.bugfix.rst`) * if you don't have an issue number, change it to the pull request number after creating the PR * `.bugfix`: A bug fix for something the maintainers deemed an improper undesired behavior that got corrected to match pre-agreed expectations. * `.feature`: A new behavior, public APIs. That sort of stuff. * `.deprecation`: A declaration of future API removals and breaking changes in behavior. * `.breaking`: When something public is removed in a breaking way. Could be deprecated in an earlier release. * `.doc`: Notable updates to the documentation structure or build process. * `.packaging`: Notes for downstreams about unobvious side effects and tooling. 
Changes in the test invocation considerations and runtime assumptions. * `.contrib`: Stuff that affects the contributor experience. e.g. Running tests, building the docs, setting up the development environment. * `.misc`: Changes that are hard to assign to any of the above categories. * Make sure to use full sentences with correct case and punctuation, for example: ```rst Fixed issue with non-ascii contents in doctest text files -- by :user:`contributor-gh-handle`. ``` Use the past tense or the present tense a non-imperative mood, referring to what's changed compared to the last released version of this project. Co-authored-by: Marc Mueller <30130371+cdce8p@users.noreply.github.com> --- aiohttp/pytest_plugin.py | 10 +- aiohttp/resolver.py | 2 +- aiohttp/web_server.py | 2 +- pyproject.toml | 5 + tests/test_client_functional.py | 7 +- tests/test_client_session.py | 22 ++-- tests/test_client_ws.py | 29 +++-- tests/test_client_ws_functional.py | 13 ++- tests/test_connector.py | 155 ++++++++++++++++--------- tests/test_http_writer.py | 9 +- tests/test_proxy.py | 80 +++++++------ tests/test_proxy_functional.py | 17 +-- tests/test_web_response.py | 7 +- tests/test_web_websocket_functional.py | 13 ++- 14 files changed, 227 insertions(+), 144 deletions(-) diff --git a/aiohttp/pytest_plugin.py b/aiohttp/pytest_plugin.py index 6da4852ab46..7ce60faa4a4 100644 --- a/aiohttp/pytest_plugin.py +++ b/aiohttp/pytest_plugin.py @@ -43,7 +43,7 @@ async def __call__( __param: Application, *, server_kwargs: Optional[Dict[str, Any]] = None, - **kwargs: Any + **kwargs: Any, ) -> TestClient[Request, Application]: ... @overload async def __call__( @@ -51,7 +51,7 @@ async def __call__( __param: BaseTestServer, *, server_kwargs: Optional[Dict[str, Any]] = None, - **kwargs: Any + **kwargs: Any, ) -> TestClient[BaseRequest, None]: ... 
@@ -379,7 +379,7 @@ async def go( __param: Application, *, server_kwargs: Optional[Dict[str, Any]] = None, - **kwargs: Any + **kwargs: Any, ) -> TestClient[Request, Application]: ... @overload @@ -387,14 +387,14 @@ async def go( __param: BaseTestServer, *, server_kwargs: Optional[Dict[str, Any]] = None, - **kwargs: Any + **kwargs: Any, ) -> TestClient[BaseRequest, None]: ... async def go( __param: Union[Application, BaseTestServer], *args: Any, server_kwargs: Optional[Dict[str, Any]] = None, - **kwargs: Any + **kwargs: Any, ) -> TestClient[Any, Any]: if isinstance(__param, Callable) and not isinstance( # type: ignore[arg-type] __param, (Application, BaseTestServer) diff --git a/aiohttp/resolver.py b/aiohttp/resolver.py index a988b0bf47f..9c744514fae 100644 --- a/aiohttp/resolver.py +++ b/aiohttp/resolver.py @@ -85,7 +85,7 @@ def __init__( self, loop: Optional[asyncio.AbstractEventLoop] = None, *args: Any, - **kwargs: Any + **kwargs: Any, ) -> None: if aiodns is None: raise RuntimeError("Resolver requires aiodns library") diff --git a/aiohttp/web_server.py b/aiohttp/web_server.py index ffc198d5780..b6ac25ac1a5 100644 --- a/aiohttp/web_server.py +++ b/aiohttp/web_server.py @@ -20,7 +20,7 @@ def __init__( request_factory: Optional[_RequestFactory] = None, handler_cancellation: bool = False, loop: Optional[asyncio.AbstractEventLoop] = None, - **kwargs: Any + **kwargs: Any, ) -> None: self._loop = loop or asyncio.get_running_loop() self._connections: Dict[RequestHandler, asyncio.Transport] = {} diff --git a/pyproject.toml b/pyproject.toml index 33962686919..69f8a6b58b6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -87,3 +87,8 @@ ignore-words-list = 'te' # TODO(3.13): Remove aiohttp.helpers once https://github.com/python/cpython/pull/106771 # is available in all supported cpython versions exclude-modules = "(^aiohttp\\.helpers)" + +[tool.black] +# TODO: Remove when project metadata is moved here. +# Black can read the value from [project.requires-python]. 
+target-version = ["py39", "py310", "py311", "py312"] diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 73e5929ee3b..b34ccdb600d 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -2955,9 +2955,10 @@ async def close(self) -> None: connector = aiohttp.TCPConnector(resolver=FakeResolver(), ssl=False) - async with aiohttp.ClientSession(connector=connector) as client, client.get( - url_from, auth=aiohttp.BasicAuth("user", "pass") - ) as resp: + async with ( + aiohttp.ClientSession(connector=connector) as client, + client.get(url_from, auth=aiohttp.BasicAuth("user", "pass")) as resp, + ): assert len(resp.history) == 1 assert str(resp.url) == "http://example.com" assert resp.status == 200 diff --git a/tests/test_client_session.py b/tests/test_client_session.py index d5752d0fd97..65f80b6abe9 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -515,11 +515,12 @@ async def create_connection(req, traces, timeout): return create_mocked_conn() connector = session._connector - with mock.patch.object(connector, "connect", connect), mock.patch.object( - connector, "_create_connection", create_connection - ), mock.patch.object(connector, "_release"), mock.patch( - "aiohttp.client.os" - ) as m_os: + with ( + mock.patch.object(connector, "connect", connect), + mock.patch.object(connector, "_create_connection", create_connection), + mock.patch.object(connector, "_release"), + mock.patch("aiohttp.client.os") as m_os, + ): m_os.urandom.return_value = key_data await session.ws_connect(f"{protocol}://example") @@ -576,11 +577,12 @@ async def create_connection( return create_mocked_conn() connector = session._connector - with mock.patch.object(connector, "connect", connect), mock.patch.object( - connector, "_create_connection", create_connection - ), mock.patch.object(connector, "_release"), mock.patch( - "aiohttp.client.os" - ) as m_os: + with ( + mock.patch.object(connector, "connect", 
connect), + mock.patch.object(connector, "_create_connection", create_connection), + mock.patch.object(connector, "_release"), + mock.patch("aiohttp.client.os") as m_os, + ): m_os.urandom.return_value = key_data await session.ws_connect(f"{protocol}://example") diff --git a/tests/test_client_ws.py b/tests/test_client_ws.py index eedf65f86a1..92b5d117db7 100644 --- a/tests/test_client_ws.py +++ b/tests/test_client_ws.py @@ -51,9 +51,10 @@ async def test_ws_connect_read_timeout_is_reset_to_inf( hdrs.SEC_WEBSOCKET_PROTOCOL: "chat", } resp.connection.protocol.read_timeout = 0.5 - with mock.patch("aiohttp.client.os") as m_os, mock.patch( - "aiohttp.client.ClientSession.request" - ) as m_req: + with ( + mock.patch("aiohttp.client.os") as m_os, + mock.patch("aiohttp.client.ClientSession.request") as m_req, + ): m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() m_req.return_value.set_result(resp) @@ -80,9 +81,10 @@ async def test_ws_connect_read_timeout_stays_inf( hdrs.SEC_WEBSOCKET_PROTOCOL: "chat", } resp.connection.protocol.read_timeout = None - with mock.patch("aiohttp.client.os") as m_os, mock.patch( - "aiohttp.client.ClientSession.request" - ) as m_req: + with ( + mock.patch("aiohttp.client.os") as m_os, + mock.patch("aiohttp.client.ClientSession.request") as m_req, + ): m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() m_req.return_value.set_result(resp) @@ -111,9 +113,10 @@ async def test_ws_connect_read_timeout_reset_to_max( hdrs.SEC_WEBSOCKET_PROTOCOL: "chat", } resp.connection.protocol.read_timeout = 0.5 - with mock.patch("aiohttp.client.os") as m_os, mock.patch( - "aiohttp.client.ClientSession.request" - ) as m_req: + with ( + mock.patch("aiohttp.client.os") as m_os, + mock.patch("aiohttp.client.ClientSession.request") as m_req, + ): m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() m_req.return_value.set_result(resp) @@ -416,9 +419,11 @@ async def test_close_connection_lost( 
hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, } mresp.connection.protocol.read_timeout = None - with mock.patch("aiohttp.client.WebSocketWriter"), mock.patch( - "aiohttp.client.os" - ) as m_os, mock.patch("aiohttp.client.ClientSession.request") as m_req: + with ( + mock.patch("aiohttp.client.WebSocketWriter"), + mock.patch("aiohttp.client.os") as m_os, + mock.patch("aiohttp.client.ClientSession.request") as m_req, + ): m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() m_req.return_value.set_result(mresp) diff --git a/tests/test_client_ws_functional.py b/tests/test_client_ws_functional.py index 9ab5dc52b1c..7ede7432adf 100644 --- a/tests/test_client_ws_functional.py +++ b/tests/test_client_ws_functional.py @@ -756,11 +756,14 @@ async def handler(request: web.Request) -> NoReturn: # since if we closed the connection normally, the client would # would cancel the heartbeat task and we wouldn't get a ping assert resp._conn is not None - with mock.patch.object( - resp._conn.transport, "write", side_effect=ClientConnectionResetError - ), mock.patch.object( - resp._writer, "send_frame", wraps=resp._writer.send_frame - ) as send_frame: + with ( + mock.patch.object( + resp._conn.transport, "write", side_effect=ClientConnectionResetError + ), + mock.patch.object( + resp._writer, "send_frame", wraps=resp._writer.send_frame + ) as send_frame, + ): await resp.receive() ping_count = send_frame.call_args_list.count(mock.call(b"", WSMsgType.PING)) # Connection should be closed roughly after 1.5x heartbeat. 
diff --git a/tests/test_connector.py b/tests/test_connector.py index 32184130dda..483759a4180 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -783,16 +783,24 @@ def get_extra_info(param): assert False - with mock.patch.object( - conn, "_resolve_host", autospec=True, spec_set=True, side_effect=_resolve_host - ), mock.patch.object( - conn._loop, - "create_connection", - autospec=True, - spec_set=True, - side_effect=create_connection, - ), mock.patch( - "aiohttp.connector.aiohappyeyeballs.start_connection", start_connection + with ( + mock.patch.object( + conn, + "_resolve_host", + autospec=True, + spec_set=True, + side_effect=_resolve_host, + ), + mock.patch.object( + conn._loop, + "create_connection", + autospec=True, + spec_set=True, + side_effect=create_connection, + ), + mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", start_connection + ), ): established_connection = await conn.connect(req, [], ClientTimeout()) @@ -945,16 +953,24 @@ async def create_connection(*args, **kwargs): pr = create_mocked_conn(loop) return tr, pr - with mock.patch.object( - conn, "_resolve_host", autospec=True, spec_set=True, side_effect=_resolve_host - ), mock.patch.object( - conn._loop, - "create_connection", - autospec=True, - spec_set=True, - side_effect=create_connection, - ), mock.patch( - "aiohttp.connector.aiohappyeyeballs.start_connection", start_connection + with ( + mock.patch.object( + conn, + "_resolve_host", + autospec=True, + spec_set=True, + side_effect=_resolve_host, + ), + mock.patch.object( + conn._loop, + "create_connection", + autospec=True, + spec_set=True, + side_effect=create_connection, + ), + mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", start_connection + ), ): established_connection = await conn.connect(req, [], ClientTimeout()) @@ -1113,16 +1129,24 @@ async def create_connection( pr = create_mocked_conn(loop) return tr, pr - with mock.patch.object( - conn, "_resolve_host", autospec=True, 
spec_set=True, side_effect=_resolve_host - ), mock.patch.object( - conn._loop, - "create_connection", - autospec=True, - spec_set=True, - side_effect=create_connection, - ), mock.patch( - "aiohttp.connector.aiohappyeyeballs.start_connection", start_connection + with ( + mock.patch.object( + conn, + "_resolve_host", + autospec=True, + spec_set=True, + side_effect=_resolve_host, + ), + mock.patch.object( + conn._loop, + "create_connection", + autospec=True, + spec_set=True, + side_effect=create_connection, + ), + mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", start_connection + ), ): established_connection = await conn.connect(req, [], ClientTimeout()) @@ -1594,8 +1618,11 @@ async def test_exception_during_connetion_create_tracing( assert not conn._acquired assert key not in conn._acquired_per_host - with pytest.raises(asyncio.CancelledError), mock.patch.object( - conn, "_create_connection", autospec=True, spec_set=True, return_value=proto + with ( + pytest.raises(asyncio.CancelledError), + mock.patch.object( + conn, "_create_connection", autospec=True, spec_set=True, return_value=proto + ), ): await conn.connect(req, traces, ClientTimeout()) @@ -1625,8 +1652,11 @@ async def test_exception_during_connection_queued_tracing( assert not conn._acquired assert key not in conn._acquired_per_host - with pytest.raises(asyncio.CancelledError), mock.patch.object( - conn, "_create_connection", autospec=True, spec_set=True, return_value=proto + with ( + pytest.raises(asyncio.CancelledError), + mock.patch.object( + conn, "_create_connection", autospec=True, spec_set=True, return_value=proto + ), ): resp1 = await conn.connect(req, traces, ClientTimeout()) assert resp1 @@ -1663,8 +1693,11 @@ async def test_exception_during_connection_reuse_tracing( assert not conn._acquired assert key not in conn._acquired_per_host - with pytest.raises(asyncio.CancelledError), mock.patch.object( - conn, "_create_connection", autospec=True, spec_set=True, return_value=proto + 
with ( + pytest.raises(asyncio.CancelledError), + mock.patch.object( + conn, "_create_connection", autospec=True, spec_set=True, return_value=proto + ), ): resp = await conn.connect(req, traces, ClientTimeout()) with mock.patch.object(resp.protocol, "should_close", False): @@ -1888,8 +1921,9 @@ async def test_cleanup_closed( async def test_cleanup_closed_is_noop_on_fixed_cpython() -> None: """Ensure that enable_cleanup_closed is a noop on fixed Python versions.""" - with mock.patch("aiohttp.connector.NEEDS_CLEANUP_CLOSED", False), pytest.warns( - DeprecationWarning, match="cleanup_closed ignored" + with ( + mock.patch("aiohttp.connector.NEEDS_CLEANUP_CLOSED", False), + pytest.warns(DeprecationWarning, match="cleanup_closed ignored"), ): conn = aiohttp.BaseConnector(enable_cleanup_closed=True) assert conn._cleanup_closed_disabled is True @@ -2143,9 +2177,12 @@ async def delay_resolve(*args: object, **kwargs: object) -> List[ResolveResult]: req = ClientRequest( "GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock() ) - with mock.patch.object(conn._resolver, "resolve", delay_resolve), mock.patch( - "aiohttp.connector.aiohappyeyeballs.start_connection", - side_effect=OSError(1, "Forced connection to fail"), + with ( + mock.patch.object(conn._resolver, "resolve", delay_resolve), + mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + side_effect=OSError(1, "Forced connection to fail"), + ), ): task1 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) @@ -2193,9 +2230,12 @@ async def delay_resolve(*args: object, **kwargs: object) -> List[ResolveResult]: req = ClientRequest( "GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock() ) - with mock.patch.object(conn._resolver, "resolve", delay_resolve), mock.patch( - "aiohttp.connector.aiohappyeyeballs.start_connection", - side_effect=OSError(1, "Forced connection to fail"), + with ( + mock.patch.object(conn._resolver, "resolve", delay_resolve), + mock.patch( + 
"aiohttp.connector.aiohappyeyeballs.start_connection", + side_effect=OSError(1, "Forced connection to fail"), + ), ): task1 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) @@ -2243,9 +2283,12 @@ async def delay_resolve(*args: object, **kwargs: object) -> List[ResolveResult]: req = ClientRequest( "GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock() ) - with mock.patch.object(conn._resolver, "resolve", delay_resolve), mock.patch( - "aiohttp.connector.aiohappyeyeballs.start_connection", - side_effect=OSError(1, "Forced connection to fail"), + with ( + mock.patch.object(conn._resolver, "resolve", delay_resolve), + mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + side_effect=OSError(1, "Forced connection to fail"), + ), ): task1 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) @@ -2301,9 +2344,12 @@ async def delay_resolve(*args: object, **kwargs: object) -> List[ResolveResult]: req = ClientRequest( "GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock() ) - with mock.patch.object(conn._resolver, "resolve", delay_resolve), mock.patch( - "aiohttp.connector.aiohappyeyeballs.start_connection", - side_effect=OSError(1, "Forced connection to fail"), + with ( + mock.patch.object(conn._resolver, "resolve", delay_resolve), + mock.patch( + "aiohttp.connector.aiohappyeyeballs.start_connection", + side_effect=OSError(1, "Forced connection to fail"), + ), ): task1 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) @@ -2366,9 +2412,12 @@ async def delay_resolve(*args: object, **kwargs: object) -> List[ResolveResult]: req = ClientRequest( "GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock() ) - with mock.patch.object(conn._resolver, "resolve", delay_resolve), mock.patch( - "aiohttp.connector.aiohappyeyeballs.start_connection", - side_effect=OSError(1, "Forced connection to fail"), + with ( + mock.patch.object(conn._resolver, "resolve", delay_resolve), + mock.patch( + 
"aiohttp.connector.aiohappyeyeballs.start_connection", + side_effect=OSError(1, "Forced connection to fail"), + ), ): task1 = asyncio.create_task(conn.connect(req, [], ClientTimeout())) diff --git a/tests/test_http_writer.py b/tests/test_http_writer.py index e43b448bc0f..0ed0e615700 100644 --- a/tests/test_http_writer.py +++ b/tests/test_http_writer.py @@ -257,9 +257,12 @@ async def test_write_payload_deflate_compression_chunked_connection_lost( msg.enable_compression("deflate") msg.enable_chunking() await msg.write(b"data") - with pytest.raises( - ClientConnectionResetError, match="Cannot write to closing transport" - ), mock.patch.object(transport, "is_closing", return_value=True): + with ( + pytest.raises( + ClientConnectionResetError, match="Cannot write to closing transport" + ), + mock.patch.object(transport, "is_closing", return_value=True), + ): await msg.write_eof(b"end") diff --git a/tests/test_proxy.py b/tests/test_proxy.py index 2fedafb4595..1679b68909f 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -417,18 +417,21 @@ def close(self) -> None: fingerprint_mock.check.side_effect = aiohttp.ServerFingerprintMismatch( b"exp", b"got", "example.com", 8080 ) - with mock.patch.object( - proxy_req, - "send", - autospec=True, - spec_set=True, - return_value=proxy_resp, - ), mock.patch.object( - proxy_resp, - "start", - autospec=True, - spec_set=True, - return_value=mock.Mock(status=200), + with ( + mock.patch.object( + proxy_req, + "send", + autospec=True, + spec_set=True, + return_value=proxy_resp, + ), + mock.patch.object( + proxy_resp, + "start", + autospec=True, + spec_set=True, + return_value=mock.Mock(status=200), + ), ): connector = self.loop.run_until_complete(make_conn()) host = [ @@ -441,30 +444,35 @@ def close(self) -> None: "flags": 0, } ] - with mock.patch.object( - connector, - "_resolve_host", - autospec=True, - spec_set=True, - return_value=host, - ), mock.patch.object( - connector, - "_get_fingerprint", - autospec=True, - 
spec_set=True, - return_value=fingerprint_mock, - ), mock.patch.object( # Called on connection to http://proxy.example.com - self.loop, - "create_connection", - autospec=True, - spec_set=True, - return_value=(mock.Mock(), mock.Mock()), - ), mock.patch.object( # Called on connection to https://www.python.org - self.loop, - "start_tls", - autospec=True, - spec_set=True, - return_value=TransportMock(), + with ( + mock.patch.object( + connector, + "_resolve_host", + autospec=True, + spec_set=True, + return_value=host, + ), + mock.patch.object( + connector, + "_get_fingerprint", + autospec=True, + spec_set=True, + return_value=fingerprint_mock, + ), + mock.patch.object( # Called on connection to http://proxy.example.com + self.loop, + "create_connection", + autospec=True, + spec_set=True, + return_value=(mock.Mock(), mock.Mock()), + ), + mock.patch.object( # Called on connection to https://www.python.org + self.loop, + "start_tls", + autospec=True, + spec_set=True, + return_value=TransportMock(), + ), ): req = ClientRequest( "GET", diff --git a/tests/test_proxy_functional.py b/tests/test_proxy_functional.py index 5283b375834..0921d5487bb 100644 --- a/tests/test_proxy_functional.py +++ b/tests/test_proxy_functional.py @@ -180,13 +180,16 @@ async def test_https_proxy_unsupported_tls_in_tls( r"$" ) - with pytest.warns( - RuntimeWarning, - match=expected_warning_text, - ), pytest.raises( - ClientConnectionError, - match=expected_exception_reason, - ) as conn_err: + with ( + pytest.warns( + RuntimeWarning, + match=expected_warning_text, + ), + pytest.raises( + ClientConnectionError, + match=expected_exception_reason, + ) as conn_err, + ): async with sess.get(url, proxy=secure_proxy_url, ssl=client_ssl_ctx): pass diff --git a/tests/test_web_response.py b/tests/test_web_response.py index dc9908fd149..f4acf23f61b 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -491,9 +491,10 @@ async def test_force_compression_deflate_large_payload() -> None: 
resp.enable_compression(ContentCoding.deflate) assert resp.compression - with pytest.warns( - Warning, match="Synchronous compression of large response bodies" - ), mock.patch("aiohttp.web_response.LARGE_BODY_SIZE", 2): + with ( + pytest.warns(Warning, match="Synchronous compression of large response bodies"), + mock.patch("aiohttp.web_response.LARGE_BODY_SIZE", 2), + ): msg = await resp.prepare(req) assert msg is not None assert "deflate" == resp.headers.get(hdrs.CONTENT_ENCODING) diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index ebd94607f24..b7494d9265f 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -738,11 +738,14 @@ async def handler(request: web.Request) -> NoReturn: # We patch write here to simulate a connection reset error # since if we closed the connection normally, the server would # would cancel the heartbeat task and we wouldn't get a ping - with mock.patch.object( - ws_server._req.transport, "write", side_effect=ConnectionResetError - ), mock.patch.object( - ws_server._writer, "send_frame", wraps=ws_server._writer.send_frame - ) as send_frame: + with ( + mock.patch.object( + ws_server._req.transport, "write", side_effect=ConnectionResetError + ), + mock.patch.object( + ws_server._writer, "send_frame", wraps=ws_server._writer.send_frame + ) as send_frame, + ): try: await ws_server.receive() finally: From b235e7656f8a4b07e1ad5ec74f28f6c05933620b Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Tue, 19 Nov 2024 08:29:57 -0600 Subject: [PATCH 0993/1511] [PR #9987/7199e09 backport][3.11] Fix ``ANY`` method not appearing in the ``UrlDispatcher`` routes (#9988) --- CHANGES/9899.bugfix.rst | 1 + CHANGES/9987.bugfix.rst | 1 + aiohttp/web_urldispatcher.py | 5 ++--- tests/test_urldispatch.py | 8 +++++++- 4 files changed, 11 insertions(+), 4 deletions(-) create mode 120000 CHANGES/9899.bugfix.rst create mode 100644 CHANGES/9987.bugfix.rst diff --git a/CHANGES/9899.bugfix.rst b/CHANGES/9899.bugfix.rst new file mode 120000 index 00000000000..e27324697dc --- /dev/null +++ b/CHANGES/9899.bugfix.rst @@ -0,0 +1 @@ +9987.bugfix.rst \ No newline at end of file diff --git a/CHANGES/9987.bugfix.rst b/CHANGES/9987.bugfix.rst new file mode 100644 index 00000000000..c2f172ac612 --- /dev/null +++ b/CHANGES/9987.bugfix.rst @@ -0,0 +1 @@ +Fixed the ``ANY`` method not appearing in :meth:`~aiohttp.web.UrlDispatcher.routes` -- by :user:`bdraco`. diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 5e3b71ff2e6..6443c500a33 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -378,9 +378,8 @@ def register_route(self, route: "ResourceRoute") -> None: ), f"Instance of Route class is required, got {route!r}" if route.method == hdrs.METH_ANY: self._any_route = route - else: - self._allowed_methods.add(route.method) - self._routes[route.method] = route + self._allowed_methods.add(route.method) + self._routes[route.method] = route async def resolve(self, request: Request) -> _Resolve: if (match_dict := self._match(request.rel_url.path_safe)) is None: diff --git a/tests/test_urldispatch.py b/tests/test_urldispatch.py index 79eda49e196..8ee3df33202 100644 --- a/tests/test_urldispatch.py +++ b/tests/test_urldispatch.py @@ -257,7 +257,13 @@ async def test_any_method(router) -> None: assert info1.route is info2.route -async def test_match_second_result_in_table(router) -> None: +async def 
test_any_method_appears_in_routes(router: web.UrlDispatcher) -> None: + handler = make_handler() + route = router.add_route(hdrs.METH_ANY, "/", handler) + assert route in router.routes() + + +async def test_match_second_result_in_table(router: web.UrlDispatcher) -> None: handler1 = make_handler() handler2 = make_handler() router.add_route("GET", "/h1", handler1) From 6d192e795a104730da6cd96d5d9ba2fbaa22eee6 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 19 Nov 2024 08:39:34 -0600 Subject: [PATCH 0994/1511] [PR #9987/7199e09 backport][3.12] Fix ``ANY`` method not appearing in the ``UrlDispatcher`` routes (#9989) --- CHANGES/9899.bugfix.rst | 1 + CHANGES/9987.bugfix.rst | 1 + aiohttp/web_urldispatcher.py | 5 ++--- tests/test_urldispatch.py | 8 +++++++- 4 files changed, 11 insertions(+), 4 deletions(-) create mode 120000 CHANGES/9899.bugfix.rst create mode 100644 CHANGES/9987.bugfix.rst diff --git a/CHANGES/9899.bugfix.rst b/CHANGES/9899.bugfix.rst new file mode 120000 index 00000000000..e27324697dc --- /dev/null +++ b/CHANGES/9899.bugfix.rst @@ -0,0 +1 @@ +9987.bugfix.rst \ No newline at end of file diff --git a/CHANGES/9987.bugfix.rst b/CHANGES/9987.bugfix.rst new file mode 100644 index 00000000000..c2f172ac612 --- /dev/null +++ b/CHANGES/9987.bugfix.rst @@ -0,0 +1 @@ +Fixed the ``ANY`` method not appearing in :meth:`~aiohttp.web.UrlDispatcher.routes` -- by :user:`bdraco`. 
diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 5e3b71ff2e6..6443c500a33 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -378,9 +378,8 @@ def register_route(self, route: "ResourceRoute") -> None: ), f"Instance of Route class is required, got {route!r}" if route.method == hdrs.METH_ANY: self._any_route = route - else: - self._allowed_methods.add(route.method) - self._routes[route.method] = route + self._allowed_methods.add(route.method) + self._routes[route.method] = route async def resolve(self, request: Request) -> _Resolve: if (match_dict := self._match(request.rel_url.path_safe)) is None: diff --git a/tests/test_urldispatch.py b/tests/test_urldispatch.py index 79eda49e196..8ee3df33202 100644 --- a/tests/test_urldispatch.py +++ b/tests/test_urldispatch.py @@ -257,7 +257,13 @@ async def test_any_method(router) -> None: assert info1.route is info2.route -async def test_match_second_result_in_table(router) -> None: +async def test_any_method_appears_in_routes(router: web.UrlDispatcher) -> None: + handler = make_handler() + route = router.add_route(hdrs.METH_ANY, "/", handler) + assert route in router.routes() + + +async def test_match_second_result_in_table(router: web.UrlDispatcher) -> None: handler1 = make_handler() handler2 = make_handler() router.add_route("GET", "/h1", handler1) From e836d3aa3d324af42c0e4b29b1631bfd14394dd7 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 19 Nov 2024 08:47:50 -0600 Subject: [PATCH 0995/1511] Release 3.11.5 (#9990) --- CHANGES.rst | 18 ++++++++++++++++++ CHANGES/9899.bugfix.rst | 1 - CHANGES/9987.bugfix.rst | 1 - aiohttp/__init__.py | 2 +- 4 files changed, 19 insertions(+), 3 deletions(-) delete mode 120000 CHANGES/9899.bugfix.rst delete mode 100644 CHANGES/9987.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index 121ef0b2db7..cc03af28632 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,24 @@ .. 
towncrier release notes start +3.11.5 (2024-11-19) +=================== + +Bug fixes +--------- + +- Fixed the ``ANY`` method not appearing in :meth:`~aiohttp.web.UrlDispatcher.routes` -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9899`, :issue:`9987`. + + + + +---- + + 3.11.4 (2024-11-18) =================== diff --git a/CHANGES/9899.bugfix.rst b/CHANGES/9899.bugfix.rst deleted file mode 120000 index e27324697dc..00000000000 --- a/CHANGES/9899.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -9987.bugfix.rst \ No newline at end of file diff --git a/CHANGES/9987.bugfix.rst b/CHANGES/9987.bugfix.rst deleted file mode 100644 index c2f172ac612..00000000000 --- a/CHANGES/9987.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed the ``ANY`` method not appearing in :meth:`~aiohttp.web.UrlDispatcher.routes` -- by :user:`bdraco`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 98b2b436b5d..22610ffdb83 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.5.dev0" +__version__ = "3.11.5" from typing import TYPE_CHECKING, Tuple From 25b49dceadfc318df0ad695a29b65b9b82f29925 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 19 Nov 2024 09:07:54 -0600 Subject: [PATCH 0996/1511] Increment version to 3.11.6.dev0 (#9993) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 22610ffdb83..d4a9b99b54c 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.5" +__version__ = "3.11.6.dev0" from typing import TYPE_CHECKING, Tuple From 81c33ec147a9ee21edb80970a37e68df4e039567 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 19 Nov 2024 15:41:18 +0000 Subject: [PATCH 0997/1511] [PR #9991/7bbbd126 backport][3.11] Increase allowed benchmark run time to 7 minutes (#9995) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 24107f89e10..2a9dbfa273b 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -244,7 +244,7 @@ jobs: needs: gen_llhttp runs-on: ubuntu-latest - timeout-minutes: 5 + timeout-minutes: 7 steps: - name: Checkout project uses: actions/checkout@v4 From f6c7050001982440df35f98f9106d71acfbbdd9e Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 19 Nov 2024 15:41:25 +0000 Subject: [PATCH 0998/1511] [PR #9991/7bbbd126 backport][3.12] Increase allowed benchmark run time to 7 minutes (#9996) Co-authored-by: J. Nick Koston <nick@koston.org> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 24107f89e10..2a9dbfa273b 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -244,7 +244,7 @@ jobs: needs: gen_llhttp runs-on: ubuntu-latest - timeout-minutes: 5 + timeout-minutes: 7 steps: - name: Checkout project uses: actions/checkout@v4 From b7e3b300369d2b8c1a1bd1e269bf23b51dc0d8ce Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 19 Nov 2024 12:43:51 -0600 Subject: [PATCH 0999/1511] [PR #9997/8c36b51 backport][3.11] Restore the ``force_close`` method to the ``ResponseHandler`` (#9998) --- CHANGES/9997.bugfix.rst | 1 + aiohttp/client_proto.py | 3 +++ tests/test_client_proto.py | 14 +++++++++++++- 3 files changed, 17 insertions(+), 1 deletion(-) create mode 100644 CHANGES/9997.bugfix.rst diff --git a/CHANGES/9997.bugfix.rst b/CHANGES/9997.bugfix.rst new file mode 100644 index 00000000000..2081ab6855b --- /dev/null +++ b/CHANGES/9997.bugfix.rst @@ -0,0 +1 @@ +Restored the ``force_close`` method to the ``ResponseHandler`` -- by :user:`bdraco`. 
diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index 2c1fc6af3ef..b899908d786 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -60,6 +60,9 @@ def should_close(self) -> bool: or self._tail ) + def force_close(self) -> None: + self._should_close = True + def close(self) -> None: transport = self.transport if transport is not None: diff --git a/tests/test_client_proto.py b/tests/test_client_proto.py index ba45d6a6839..a70dc62e135 100644 --- a/tests/test_client_proto.py +++ b/tests/test_client_proto.py @@ -1,3 +1,4 @@ +import asyncio from unittest import mock from yarl import URL @@ -9,7 +10,18 @@ from aiohttp.helpers import TimerNoop -async def test_oserror(loop) -> None: +async def test_force_close(loop: asyncio.AbstractEventLoop) -> None: + """Ensure that the force_close method sets the should_close attribute to True. + + This is used externally in aiodocker + https://github.com/aio-libs/aiodocker/issues/920 + """ + proto = ResponseHandler(loop=loop) + proto.force_close() + assert proto.should_close + + +async def test_oserror(loop: asyncio.AbstractEventLoop) -> None: proto = ResponseHandler(loop=loop) transport = mock.Mock() proto.connection_made(transport) From c266fc770ab167da6e9af11ca2f345b694270b7b Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 19 Nov 2024 12:52:40 -0600 Subject: [PATCH 1000/1511] [PR #9997/8c36b51 backport][3.12] Restore the ``force_close`` method to the ``ResponseHandler`` (#9999) --- CHANGES/9997.bugfix.rst | 1 + aiohttp/client_proto.py | 3 +++ tests/test_client_proto.py | 14 +++++++++++++- 3 files changed, 17 insertions(+), 1 deletion(-) create mode 100644 CHANGES/9997.bugfix.rst diff --git a/CHANGES/9997.bugfix.rst b/CHANGES/9997.bugfix.rst new file mode 100644 index 00000000000..2081ab6855b --- /dev/null +++ b/CHANGES/9997.bugfix.rst @@ -0,0 +1 @@ +Restored the ``force_close`` method to the ``ResponseHandler`` -- by :user:`bdraco`. 
diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index 2c1fc6af3ef..b899908d786 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -60,6 +60,9 @@ def should_close(self) -> bool: or self._tail ) + def force_close(self) -> None: + self._should_close = True + def close(self) -> None: transport = self.transport if transport is not None: diff --git a/tests/test_client_proto.py b/tests/test_client_proto.py index ba45d6a6839..a70dc62e135 100644 --- a/tests/test_client_proto.py +++ b/tests/test_client_proto.py @@ -1,3 +1,4 @@ +import asyncio from unittest import mock from yarl import URL @@ -9,7 +10,18 @@ from aiohttp.helpers import TimerNoop -async def test_oserror(loop) -> None: +async def test_force_close(loop: asyncio.AbstractEventLoop) -> None: + """Ensure that the force_close method sets the should_close attribute to True. + + This is used externally in aiodocker + https://github.com/aio-libs/aiodocker/issues/920 + """ + proto = ResponseHandler(loop=loop) + proto.force_close() + assert proto.should_close + + +async def test_oserror(loop: asyncio.AbstractEventLoop) -> None: proto = ResponseHandler(loop=loop) transport = mock.Mock() proto.connection_made(transport) From ff9602d5bea624a6aa05e64a3dea56499a9d7c52 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 19 Nov 2024 13:04:14 -0600 Subject: [PATCH 1001/1511] Release 3.11.6 (#10000) --- CHANGES.rst | 18 ++++++++++++++++++ CHANGES/9997.bugfix.rst | 1 - aiohttp/__init__.py | 2 +- 3 files changed, 19 insertions(+), 2 deletions(-) delete mode 100644 CHANGES/9997.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index cc03af28632..4a8fc39b1f5 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,24 @@ .. towncrier release notes start +3.11.6 (2024-11-19) +=================== + +Bug fixes +--------- + +- Restored the ``force_close`` method to the ``ResponseHandler`` -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`9997`. 
+ + + + +---- + + 3.11.5 (2024-11-19) =================== diff --git a/CHANGES/9997.bugfix.rst b/CHANGES/9997.bugfix.rst deleted file mode 100644 index 2081ab6855b..00000000000 --- a/CHANGES/9997.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Restored the ``force_close`` method to the ``ResponseHandler`` -- by :user:`bdraco`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index d4a9b99b54c..03f92752f81 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.6.dev0" +__version__ = "3.11.6" from typing import TYPE_CHECKING, Tuple From d030c059478730458219db170b2c51c5bad8237b Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 19 Nov 2024 13:34:16 -0600 Subject: [PATCH 1002/1511] Increment version to 3.11.7.dev0 (#10002) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 03f92752f81..c03badec692 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.6" +__version__ = "3.11.7.dev0" from typing import TYPE_CHECKING, Tuple From 496f649c9900fd0ea1d9babf7204f2200b4cae39 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 20 Nov 2024 22:01:48 +0000 Subject: [PATCH 1003/1511] [PR #10004/a334eef7 backport][3.11] Avoid building `skip_headers` in `ClientSession._request` if it will be thrown away (#10006) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/client.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/aiohttp/client.py b/aiohttp/client.py index 31c76ff98af..56fd3925fa1 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -540,10 +540,15 @@ async def _request( if url.scheme not in self._connector.allowed_protocol_schema_set: raise NonHttpUrlClientError(url) - skip_headers = set(self._skip_auto_headers) + skip_headers: Optional[Iterable[istr]] if skip_auto_headers is not None: - for i in skip_auto_headers: - skip_headers.add(istr(i)) + skip_headers = { + istr(i) for i in skip_auto_headers + } | self._skip_auto_headers + elif self._skip_auto_headers: + skip_headers = self._skip_auto_headers + else: + skip_headers = None if proxy is None: proxy = self._default_proxy @@ -670,7 +675,7 @@ async def _request( url, params=params, headers=headers, - skip_auto_headers=skip_headers if skip_headers else None, + skip_auto_headers=skip_headers, data=data, cookies=all_cookies, auth=auth, From 1f68eedae5fe818febf2fb73ab736871b7a59d9c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 20 Nov 2024 22:01:54 +0000 Subject: [PATCH 1004/1511] [PR #10004/a334eef7 backport][3.12] Avoid building `skip_headers` in `ClientSession._request` if it will be thrown away (#10007) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/client.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/aiohttp/client.py b/aiohttp/client.py index 31c76ff98af..56fd3925fa1 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -540,10 +540,15 @@ async def _request( if url.scheme not in self._connector.allowed_protocol_schema_set: raise NonHttpUrlClientError(url) - skip_headers = set(self._skip_auto_headers) + skip_headers: Optional[Iterable[istr]] if skip_auto_headers is not None: - for i in skip_auto_headers: - skip_headers.add(istr(i)) + skip_headers = { + istr(i) for i in skip_auto_headers + } | self._skip_auto_headers + elif self._skip_auto_headers: + skip_headers = self._skip_auto_headers + else: + skip_headers = None if proxy is None: proxy = self._default_proxy @@ -670,7 +675,7 @@ async def _request( url, params=params, headers=headers, - skip_auto_headers=skip_headers if skip_headers else None, + skip_auto_headers=skip_headers, data=data, cookies=all_cookies, auth=auth, From c82c58aa97e660a548ea2d57529eaf4b51840cbe Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Wed, 20 Nov 2024 16:15:01 -0600 Subject: [PATCH 1005/1511] [PR #10003/78d1be5 backport][3.11] Fix client connection header not reflecting connector `force_close` value (#10009) --- CHANGES/10003.bugfix.rst | 1 + aiohttp/client_reqrep.py | 29 +++---------- tests/test_benchmarks_client_request.py | 6 +++ tests/test_client_request.py | 58 +++++++++---------------- tests/test_web_functional.py | 5 +-- 5 files changed, 37 insertions(+), 62 deletions(-) create mode 100644 CHANGES/10003.bugfix.rst diff --git a/CHANGES/10003.bugfix.rst b/CHANGES/10003.bugfix.rst new file mode 100644 index 00000000000..69aa554591d --- /dev/null +++ b/CHANGES/10003.bugfix.rst @@ -0,0 +1 @@ +Fixed the HTTP client not considering the connector's ``force_close`` value when setting the ``Connection`` header -- by :user:`bdraco`. 
diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 267b509b0e6..a0fa093d92e 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -634,15 +634,6 @@ def update_proxy( proxy_headers = CIMultiDict(proxy_headers) self.proxy_headers = proxy_headers - def keep_alive(self) -> bool: - if self.version >= HttpVersion11: - return self.headers.get(hdrs.CONNECTION) != "close" - if self.version == HttpVersion10: - # no headers means we close for Http 1.0 - return self.headers.get(hdrs.CONNECTION) == "keep-alive" - # keep alive not supported at all - return False - async def write_bytes( self, writer: AbstractStreamWriter, conn: "Connection" ) -> None: @@ -737,21 +728,15 @@ async def send(self, conn: "Connection") -> "ClientResponse": ): self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream" - # set the connection header - connection = self.headers.get(hdrs.CONNECTION) - if not connection: - if self.keep_alive(): - if self.version == HttpVersion10: - connection = "keep-alive" - else: - if self.version == HttpVersion11: - connection = "close" - - if connection is not None: - self.headers[hdrs.CONNECTION] = connection + v = self.version + if hdrs.CONNECTION not in self.headers: + if conn._connector.force_close: + if v == HttpVersion11: + self.headers[hdrs.CONNECTION] = "close" + elif v == HttpVersion10: + self.headers[hdrs.CONNECTION] = "keep-alive" # status + headers - v = self.version status_line = f"{self.method} {path} HTTP/{v.major}.{v.minor}" await writer.write_headers(status_line, self.headers) task: Optional["asyncio.Task[None]"] diff --git a/tests/test_benchmarks_client_request.py b/tests/test_benchmarks_client_request.py index 0cdf1f2d776..65667995185 100644 --- a/tests/test_benchmarks_client_request.py +++ b/tests/test_benchmarks_client_request.py @@ -100,10 +100,16 @@ async def _drain_helper(self) -> None: def start_timeout(self) -> None: """Swallow start_timeout.""" + class MockConnector: + + def __init__(self) -> None: + 
self.force_close = False + class MockConnection: def __init__(self) -> None: self.transport = None self.protocol = MockProtocol() + self._connector = MockConnector() conn = MockConnection() diff --git a/tests/test_client_request.py b/tests/test_client_request.py index 20ccf6c03d1..324eddf7f6e 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -23,7 +23,7 @@ _gen_default_accept_encoding, _merge_ssl_params, ) -from aiohttp.http import HttpVersion +from aiohttp.http import HttpVersion10, HttpVersion11 from aiohttp.test_utils import make_mocked_coro @@ -141,30 +141,6 @@ def test_version_err(make_request) -> None: make_request("get", "http://python.org/", version="1.c") -def test_keep_alive(make_request) -> None: - req = make_request("get", "http://python.org/", version=(0, 9)) - assert not req.keep_alive() - - req = make_request("get", "http://python.org/", version=(1, 0)) - assert not req.keep_alive() - - req = make_request( - "get", - "http://python.org/", - version=(1, 0), - headers={"connection": "keep-alive"}, - ) - assert req.keep_alive() - - req = make_request("get", "http://python.org/", version=(1, 1)) - assert req.keep_alive() - - req = make_request( - "get", "http://python.org/", version=(1, 1), headers={"connection": "close"} - ) - assert not req.keep_alive() - - def test_host_port_default_http(make_request) -> None: req = make_request("get", "http://python.org/") assert req.host == "python.org" @@ -628,32 +604,40 @@ def test_gen_netloc_no_port(make_request) -> None: ) -async def test_connection_header(loop, conn) -> None: +async def test_connection_header( + loop: asyncio.AbstractEventLoop, conn: mock.Mock +) -> None: req = ClientRequest("get", URL("http://python.org"), loop=loop) - req.keep_alive = mock.Mock() req.headers.clear() - req.keep_alive.return_value = True - req.version = HttpVersion(1, 1) + req.version = HttpVersion11 req.headers.clear() - await req.send(conn) + with mock.patch.object(conn._connector, "force_close", 
False): + await req.send(conn) assert req.headers.get("CONNECTION") is None - req.version = HttpVersion(1, 0) + req.version = HttpVersion10 req.headers.clear() - await req.send(conn) + with mock.patch.object(conn._connector, "force_close", False): + await req.send(conn) assert req.headers.get("CONNECTION") == "keep-alive" - req.keep_alive.return_value = False - req.version = HttpVersion(1, 1) + req.version = HttpVersion11 req.headers.clear() - await req.send(conn) + with mock.patch.object(conn._connector, "force_close", True): + await req.send(conn) assert req.headers.get("CONNECTION") == "close" - await req.close() + req.version = HttpVersion10 + req.headers.clear() + with mock.patch.object(conn._connector, "force_close", True): + await req.send(conn) + assert not req.headers.get("CONNECTION") -async def test_no_content_length(loop, conn) -> None: +async def test_no_content_length( + loop: asyncio.AbstractEventLoop, conn: mock.Mock +) -> None: req = ClientRequest("get", URL("http://python.org"), loop=loop) resp = await req.send(conn) assert req.headers.get("CONTENT-LENGTH") is None diff --git a/tests/test_web_functional.py b/tests/test_web_functional.py index e46a23c5857..a3a990141a1 100644 --- a/tests/test_web_functional.py +++ b/tests/test_web_functional.py @@ -696,9 +696,8 @@ async def handler(request): await resp.release() -@pytest.mark.xfail -async def test_http10_keep_alive_default(aiohttp_client) -> None: - async def handler(request): +async def test_http10_keep_alive_default(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: return web.Response() app = web.Application() From 503d6f42bbfba871cb0d60e1082c12289aae683d Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Wed, 20 Nov 2024 16:15:03 -0600 Subject: [PATCH 1006/1511] [PR #10003/78d1be5 backport][3.12] Fix client connection header not reflecting connector `force_close` value (#10010) --- CHANGES/10003.bugfix.rst | 1 + aiohttp/client_reqrep.py | 29 +++---------- tests/test_benchmarks_client_request.py | 6 +++ tests/test_client_request.py | 58 +++++++++---------------- tests/test_web_functional.py | 5 +-- 5 files changed, 37 insertions(+), 62 deletions(-) create mode 100644 CHANGES/10003.bugfix.rst diff --git a/CHANGES/10003.bugfix.rst b/CHANGES/10003.bugfix.rst new file mode 100644 index 00000000000..69aa554591d --- /dev/null +++ b/CHANGES/10003.bugfix.rst @@ -0,0 +1 @@ +Fixed the HTTP client not considering the connector's ``force_close`` value when setting the ``Connection`` header -- by :user:`bdraco`. diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 267b509b0e6..a0fa093d92e 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -634,15 +634,6 @@ def update_proxy( proxy_headers = CIMultiDict(proxy_headers) self.proxy_headers = proxy_headers - def keep_alive(self) -> bool: - if self.version >= HttpVersion11: - return self.headers.get(hdrs.CONNECTION) != "close" - if self.version == HttpVersion10: - # no headers means we close for Http 1.0 - return self.headers.get(hdrs.CONNECTION) == "keep-alive" - # keep alive not supported at all - return False - async def write_bytes( self, writer: AbstractStreamWriter, conn: "Connection" ) -> None: @@ -737,21 +728,15 @@ async def send(self, conn: "Connection") -> "ClientResponse": ): self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream" - # set the connection header - connection = self.headers.get(hdrs.CONNECTION) - if not connection: - if self.keep_alive(): - if self.version == HttpVersion10: - connection = "keep-alive" - else: - if self.version == HttpVersion11: - connection = "close" - - if connection is not None: - self.headers[hdrs.CONNECTION] = 
connection + v = self.version + if hdrs.CONNECTION not in self.headers: + if conn._connector.force_close: + if v == HttpVersion11: + self.headers[hdrs.CONNECTION] = "close" + elif v == HttpVersion10: + self.headers[hdrs.CONNECTION] = "keep-alive" # status + headers - v = self.version status_line = f"{self.method} {path} HTTP/{v.major}.{v.minor}" await writer.write_headers(status_line, self.headers) task: Optional["asyncio.Task[None]"] diff --git a/tests/test_benchmarks_client_request.py b/tests/test_benchmarks_client_request.py index 0cdf1f2d776..65667995185 100644 --- a/tests/test_benchmarks_client_request.py +++ b/tests/test_benchmarks_client_request.py @@ -100,10 +100,16 @@ async def _drain_helper(self) -> None: def start_timeout(self) -> None: """Swallow start_timeout.""" + class MockConnector: + + def __init__(self) -> None: + self.force_close = False + class MockConnection: def __init__(self) -> None: self.transport = None self.protocol = MockProtocol() + self._connector = MockConnector() conn = MockConnection() diff --git a/tests/test_client_request.py b/tests/test_client_request.py index 20ccf6c03d1..324eddf7f6e 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -23,7 +23,7 @@ _gen_default_accept_encoding, _merge_ssl_params, ) -from aiohttp.http import HttpVersion +from aiohttp.http import HttpVersion10, HttpVersion11 from aiohttp.test_utils import make_mocked_coro @@ -141,30 +141,6 @@ def test_version_err(make_request) -> None: make_request("get", "http://python.org/", version="1.c") -def test_keep_alive(make_request) -> None: - req = make_request("get", "http://python.org/", version=(0, 9)) - assert not req.keep_alive() - - req = make_request("get", "http://python.org/", version=(1, 0)) - assert not req.keep_alive() - - req = make_request( - "get", - "http://python.org/", - version=(1, 0), - headers={"connection": "keep-alive"}, - ) - assert req.keep_alive() - - req = make_request("get", "http://python.org/", version=(1, 1)) - 
assert req.keep_alive() - - req = make_request( - "get", "http://python.org/", version=(1, 1), headers={"connection": "close"} - ) - assert not req.keep_alive() - - def test_host_port_default_http(make_request) -> None: req = make_request("get", "http://python.org/") assert req.host == "python.org" @@ -628,32 +604,40 @@ def test_gen_netloc_no_port(make_request) -> None: ) -async def test_connection_header(loop, conn) -> None: +async def test_connection_header( + loop: asyncio.AbstractEventLoop, conn: mock.Mock +) -> None: req = ClientRequest("get", URL("http://python.org"), loop=loop) - req.keep_alive = mock.Mock() req.headers.clear() - req.keep_alive.return_value = True - req.version = HttpVersion(1, 1) + req.version = HttpVersion11 req.headers.clear() - await req.send(conn) + with mock.patch.object(conn._connector, "force_close", False): + await req.send(conn) assert req.headers.get("CONNECTION") is None - req.version = HttpVersion(1, 0) + req.version = HttpVersion10 req.headers.clear() - await req.send(conn) + with mock.patch.object(conn._connector, "force_close", False): + await req.send(conn) assert req.headers.get("CONNECTION") == "keep-alive" - req.keep_alive.return_value = False - req.version = HttpVersion(1, 1) + req.version = HttpVersion11 req.headers.clear() - await req.send(conn) + with mock.patch.object(conn._connector, "force_close", True): + await req.send(conn) assert req.headers.get("CONNECTION") == "close" - await req.close() + req.version = HttpVersion10 + req.headers.clear() + with mock.patch.object(conn._connector, "force_close", True): + await req.send(conn) + assert not req.headers.get("CONNECTION") -async def test_no_content_length(loop, conn) -> None: +async def test_no_content_length( + loop: asyncio.AbstractEventLoop, conn: mock.Mock +) -> None: req = ClientRequest("get", URL("http://python.org"), loop=loop) resp = await req.send(conn) assert req.headers.get("CONTENT-LENGTH") is None diff --git a/tests/test_web_functional.py 
b/tests/test_web_functional.py index e46a23c5857..a3a990141a1 100644 --- a/tests/test_web_functional.py +++ b/tests/test_web_functional.py @@ -696,9 +696,8 @@ async def handler(request): await resp.release() -@pytest.mark.xfail -async def test_http10_keep_alive_default(aiohttp_client) -> None: - async def handler(request): +async def test_http10_keep_alive_default(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: return web.Response() app = web.Application() From 5bcf07d0af0a4b619500365fe5bcf69c904bfc30 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 20 Nov 2024 23:41:30 +0000 Subject: [PATCH 1007/1511] [PR #9961/c984a44b backport][3.11] Disable Python 3.14 builds (#10012) Co-authored-by: Andrew Svetlov <andrew.svetlov@gmail.com> --- .github/workflows/ci-cd.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 2a9dbfa273b..b468a3f3f4c 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -139,10 +139,10 @@ jobs: no-extensions: 'Y' os: ubuntu experimental: false - - os: ubuntu - pyver: "3.14" - experimental: true - no-extensions: 'Y' + # - os: ubuntu + # pyver: "3.14" + # experimental: true + # no-extensions: 'Y' fail-fast: true runs-on: ${{ matrix.os }}-latest continue-on-error: ${{ matrix.experimental }} From 996c20f825cb946a4e6a7b166345c07e1044b82c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 20 Nov 2024 23:51:41 +0000 Subject: [PATCH 1008/1511] [PR #9961/c984a44b backport][3.12] Disable Python 3.14 builds (#10013) Co-authored-by: Andrew Svetlov <andrew.svetlov@gmail.com> --- .github/workflows/ci-cd.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 2a9dbfa273b..b468a3f3f4c 100644 --- 
a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -139,10 +139,10 @@ jobs: no-extensions: 'Y' os: ubuntu experimental: false - - os: ubuntu - pyver: "3.14" - experimental: true - no-extensions: 'Y' + # - os: ubuntu + # pyver: "3.14" + # experimental: true + # no-extensions: 'Y' fail-fast: true runs-on: ${{ matrix.os }}-latest continue-on-error: ${{ matrix.experimental }} From ed15e8879981158d0560960f42fafd4fa238b9ed Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 21 Nov 2024 03:38:01 +0000 Subject: [PATCH 1009/1511] [PR #10014/50d23aee backport][3.11] Improve performance of serializing headers (#10016) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/10014.misc.rst | 1 + aiohttp/_http_writer.pyx | 28 ++++++++++++++-------------- 2 files changed, 15 insertions(+), 14 deletions(-) create mode 100644 CHANGES/10014.misc.rst diff --git a/CHANGES/10014.misc.rst b/CHANGES/10014.misc.rst new file mode 100644 index 00000000000..8a27657cdb9 --- /dev/null +++ b/CHANGES/10014.misc.rst @@ -0,0 +1 @@ +Improved performance of serializing HTTP headers -- by :user:`bdraco`. diff --git a/aiohttp/_http_writer.pyx b/aiohttp/_http_writer.pyx index d19c20d76cc..287371334f8 100644 --- a/aiohttp/_http_writer.pyx +++ b/aiohttp/_http_writer.pyx @@ -100,10 +100,9 @@ cdef inline int _write_str(Writer* writer, str s): # --------------- _serialize_headers ---------------------- cdef str to_str(object s): - typ = type(s) - if typ is str: + if type(s) is str: return <str>s - elif typ is _istr: + elif type(s) is _istr: return PyObject_Str(s) elif not isinstance(s, str): raise TypeError("Cannot serialize non-str key {!r}".format(s)) @@ -111,19 +110,14 @@ cdef str to_str(object s): return str(s) -cdef void _safe_header(str string) except *: - if "\r" in string or "\n" in string: - raise ValueError( - "Newline or carriage return character detected in HTTP status message or " - "header. 
This is a potential security issue." - ) - def _serialize_headers(str status_line, headers): cdef Writer writer cdef object key cdef object val cdef bytes ret + cdef str key_str + cdef str val_str _init_writer(&writer) @@ -136,16 +130,22 @@ def _serialize_headers(str status_line, headers): raise for key, val in headers.items(): - _safe_header(to_str(key)) - _safe_header(to_str(val)) + key_str = to_str(key) + val_str = to_str(val) + + if "\r" in key_str or "\n" in key_str or "\r" in val_str or "\n" in val_str: + raise ValueError( + "Newline or carriage return character detected in HTTP status message or " + "header. This is a potential security issue." + ) - if _write_str(&writer, to_str(key)) < 0: + if _write_str(&writer, key_str) < 0: raise if _write_byte(&writer, b':') < 0: raise if _write_byte(&writer, b' ') < 0: raise - if _write_str(&writer, to_str(val)) < 0: + if _write_str(&writer, val_str) < 0: raise if _write_byte(&writer, b'\r') < 0: raise From 246dffcd77c5347fe3f404e40263586439df488f Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 21 Nov 2024 03:42:16 +0000 Subject: [PATCH 1010/1511] [PR #10014/50d23aee backport][3.12] Improve performance of serializing headers (#10017) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/10014.misc.rst | 1 + aiohttp/_http_writer.pyx | 28 ++++++++++++++-------------- 2 files changed, 15 insertions(+), 14 deletions(-) create mode 100644 CHANGES/10014.misc.rst diff --git a/CHANGES/10014.misc.rst b/CHANGES/10014.misc.rst new file mode 100644 index 00000000000..8a27657cdb9 --- /dev/null +++ b/CHANGES/10014.misc.rst @@ -0,0 +1 @@ +Improved performance of serializing HTTP headers -- by :user:`bdraco`. 
diff --git a/aiohttp/_http_writer.pyx b/aiohttp/_http_writer.pyx index d19c20d76cc..287371334f8 100644 --- a/aiohttp/_http_writer.pyx +++ b/aiohttp/_http_writer.pyx @@ -100,10 +100,9 @@ cdef inline int _write_str(Writer* writer, str s): # --------------- _serialize_headers ---------------------- cdef str to_str(object s): - typ = type(s) - if typ is str: + if type(s) is str: return <str>s - elif typ is _istr: + elif type(s) is _istr: return PyObject_Str(s) elif not isinstance(s, str): raise TypeError("Cannot serialize non-str key {!r}".format(s)) @@ -111,19 +110,14 @@ cdef str to_str(object s): return str(s) -cdef void _safe_header(str string) except *: - if "\r" in string or "\n" in string: - raise ValueError( - "Newline or carriage return character detected in HTTP status message or " - "header. This is a potential security issue." - ) - def _serialize_headers(str status_line, headers): cdef Writer writer cdef object key cdef object val cdef bytes ret + cdef str key_str + cdef str val_str _init_writer(&writer) @@ -136,16 +130,22 @@ def _serialize_headers(str status_line, headers): raise for key, val in headers.items(): - _safe_header(to_str(key)) - _safe_header(to_str(val)) + key_str = to_str(key) + val_str = to_str(val) + + if "\r" in key_str or "\n" in key_str or "\r" in val_str or "\n" in val_str: + raise ValueError( + "Newline or carriage return character detected in HTTP status message or " + "header. This is a potential security issue." + ) - if _write_str(&writer, to_str(key)) < 0: + if _write_str(&writer, key_str) < 0: raise if _write_byte(&writer, b':') < 0: raise if _write_byte(&writer, b' ') < 0: raise - if _write_str(&writer, to_str(val)) < 0: + if _write_str(&writer, val_str) < 0: raise if _write_byte(&writer, b'\r') < 0: raise From bf04a1bf53df93d2ab381e151225db3cf466082f Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Thu, 21 Nov 2024 08:28:34 -0600 Subject: [PATCH 1011/1511] Release 3.11.7 (#10019) --- CHANGES.rst | 30 ++++++++++++++++++++++++++++++ CHANGES/10003.bugfix.rst | 1 - CHANGES/10014.misc.rst | 1 - aiohttp/__init__.py | 2 +- 4 files changed, 31 insertions(+), 3 deletions(-) delete mode 100644 CHANGES/10003.bugfix.rst delete mode 100644 CHANGES/10014.misc.rst diff --git a/CHANGES.rst b/CHANGES.rst index 4a8fc39b1f5..e204f07b370 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,36 @@ .. towncrier release notes start +3.11.7 (2024-11-21) +=================== + +Bug fixes +--------- + +- Fixed the HTTP client not considering the connector's ``force_close`` value when setting the ``Connection`` header -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10003`. + + + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of serializing HTTP headers -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10014`. + + + + +---- + + 3.11.6 (2024-11-19) =================== diff --git a/CHANGES/10003.bugfix.rst b/CHANGES/10003.bugfix.rst deleted file mode 100644 index 69aa554591d..00000000000 --- a/CHANGES/10003.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed the HTTP client not considering the connector's ``force_close`` value when setting the ``Connection`` header -- by :user:`bdraco`. diff --git a/CHANGES/10014.misc.rst b/CHANGES/10014.misc.rst deleted file mode 100644 index 8a27657cdb9..00000000000 --- a/CHANGES/10014.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performance of serializing HTTP headers -- by :user:`bdraco`. 
diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index c03badec692..8c5b96c99de 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.7.dev0" +__version__ = "3.11.7" from typing import TYPE_CHECKING, Tuple From 285d6e85b43b4a79e6ee21f280ec444e0e1cfb05 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Thu, 21 Nov 2024 09:09:11 -0600 Subject: [PATCH 1012/1511] Increment version to 3.11.8.dev0 (#10021) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 8c5b96c99de..838c31a5fcd 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.7" +__version__ = "3.11.8.dev0" from typing import TYPE_CHECKING, Tuple From c8426a8900a1ac13b7f4adbb7de7982018ea6348 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 21 Nov 2024 15:40:44 +0000 Subject: [PATCH 1013/1511] [PR #10018/e79b2d5d backport][3.11] Add url dispatcher benchmark for resolving root route for github simulated routes tree (#10023) Co-authored-by: Andrew Svetlov <andrew.svetlov@gmail.com> --- tests/test_benchmarks_web_urldispatcher.py | 78 +++++++++++++++------- 1 file changed, 55 insertions(+), 23 deletions(-) diff --git a/tests/test_benchmarks_web_urldispatcher.py b/tests/test_benchmarks_web_urldispatcher.py index 5d151d984af..936ed6320ed 100644 --- a/tests/test_benchmarks_web_urldispatcher.py +++ b/tests/test_benchmarks_web_urldispatcher.py @@ -6,9 +6,10 @@ import random import string from pathlib import Path -from typing import NoReturn, Optional +from typing import NoReturn, Optional, cast from unittest import mock +import pytest from multidict import CIMultiDict, CIMultiDictProxy from pytest_codspeed import BenchmarkFixture from yarl import URL @@ -18,6 +19,20 @@ from aiohttp.http import HttpVersion, RawRequestMessage +@pytest.fixture +def github_urls() -> 
list[str]: + """GitHub api urls.""" + # The fixture provides OpenAPI generated info for github. + # To update the local data file please run the following command: + # $ curl https://raw.githubusercontent.com/github/rest-api-description/refs/heads/main/descriptions/api.github.com/api.github.com.json | jq ".paths | keys" > github-urls.json + + here = Path(__file__).parent + with (here / "github-urls.json").open() as f: + urls = json.load(f) + + return cast(list[str], urls) + + def _mock_request(method: str, path: str) -> web.Request: message = RawRequestMessage( method, @@ -366,23 +381,15 @@ def _run() -> None: def test_resolve_gitapi( loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture, + github_urls: list[str], ) -> None: - """Resolve DynamicResource for simulated github API. - - The benchmark uses OpenAPI generated info for github. - To update the local data file please run the following command: - $ curl https://raw.githubusercontent.com/github/rest-api-description/refs/heads/main/descriptions/api.github.com/api.github.com.json | jq ".paths | keys" > github-urls.json - """ + """Resolve DynamicResource for simulated github API.""" async def handler(request: web.Request) -> NoReturn: assert False - here = Path(__file__).parent - with (here / "github-urls.json").open() as f: - urls = json.load(f) - app = web.Application() - for url in urls: + for url in github_urls: app.router.add_get(url, handler) app.freeze() router = app.router @@ -425,21 +432,13 @@ def _run() -> None: def test_resolve_gitapi_subapps( loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture, + github_urls: list[str], ) -> None: - """Resolve DynamicResource for simulated github API, grouped in subapps. - - The benchmark uses OpenAPI generated info for github. 
- To update the local data file please run the following command: - $ curl https://raw.githubusercontent.com/github/rest-api-description/refs/heads/main/descriptions/api.github.com/api.github.com.json | jq ".paths | keys" > github-urls.json - """ + """Resolve DynamicResource for simulated github API, grouped in subapps.""" async def handler(request: web.Request) -> NoReturn: assert False - here = Path(__file__).parent - with (here / "github-urls.json").open() as f: - urls = json.load(f) - subapps = { "gists": web.Application(), "orgs": web.Application(), @@ -451,7 +450,7 @@ async def handler(request: web.Request) -> NoReturn: } app = web.Application() - for url in urls: + for url in github_urls: parts = url.split("/") subapp = subapps.get(parts[1]) if subapp is not None: @@ -501,6 +500,39 @@ def _run() -> None: loop.run_until_complete(run_url_dispatcher_benchmark()) +def test_resolve_gitapi_root( + loop: asyncio.AbstractEventLoop, + benchmark: BenchmarkFixture, + github_urls: list[str], +) -> None: + """Resolve the plain root for simulated github API.""" + + async def handler(request: web.Request) -> NoReturn: + assert False + + app = web.Application() + for url in github_urls: + app.router.add_get(url, handler) + app.freeze() + router = app.router + + request = _mock_request(method="GET", path="/") + + async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + ret = None + for i in range(250): + ret = await router.resolve(request) + return ret + + ret = loop.run_until_complete(run_url_dispatcher_benchmark()) + assert ret is not None + assert ret.get_info()["path"] == "/", ret.get_info() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_url_dispatcher_benchmark()) + + def test_resolve_prefix_resources_many_prefix_many_plain( loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture, From 53992e924bf805677f559cb736d6aed58210ed4c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" 
<45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 21 Nov 2024 15:54:07 +0000 Subject: [PATCH 1014/1511] [PR #10018/e79b2d5d backport][3.12] Add url dispatcher benchmark for resolving root route for github simulated routes tree (#10024) Co-authored-by: Andrew Svetlov <andrew.svetlov@gmail.com> --- tests/test_benchmarks_web_urldispatcher.py | 78 +++++++++++++++------- 1 file changed, 55 insertions(+), 23 deletions(-) diff --git a/tests/test_benchmarks_web_urldispatcher.py b/tests/test_benchmarks_web_urldispatcher.py index 5d151d984af..936ed6320ed 100644 --- a/tests/test_benchmarks_web_urldispatcher.py +++ b/tests/test_benchmarks_web_urldispatcher.py @@ -6,9 +6,10 @@ import random import string from pathlib import Path -from typing import NoReturn, Optional +from typing import NoReturn, Optional, cast from unittest import mock +import pytest from multidict import CIMultiDict, CIMultiDictProxy from pytest_codspeed import BenchmarkFixture from yarl import URL @@ -18,6 +19,20 @@ from aiohttp.http import HttpVersion, RawRequestMessage +@pytest.fixture +def github_urls() -> list[str]: + """GitHub api urls.""" + # The fixture provides OpenAPI generated info for github. + # To update the local data file please run the following command: + # $ curl https://raw.githubusercontent.com/github/rest-api-description/refs/heads/main/descriptions/api.github.com/api.github.com.json | jq ".paths | keys" > github-urls.json + + here = Path(__file__).parent + with (here / "github-urls.json").open() as f: + urls = json.load(f) + + return cast(list[str], urls) + + def _mock_request(method: str, path: str) -> web.Request: message = RawRequestMessage( method, @@ -366,23 +381,15 @@ def _run() -> None: def test_resolve_gitapi( loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture, + github_urls: list[str], ) -> None: - """Resolve DynamicResource for simulated github API. - - The benchmark uses OpenAPI generated info for github. 
- To update the local data file please run the following command: - $ curl https://raw.githubusercontent.com/github/rest-api-description/refs/heads/main/descriptions/api.github.com/api.github.com.json | jq ".paths | keys" > github-urls.json - """ + """Resolve DynamicResource for simulated github API.""" async def handler(request: web.Request) -> NoReturn: assert False - here = Path(__file__).parent - with (here / "github-urls.json").open() as f: - urls = json.load(f) - app = web.Application() - for url in urls: + for url in github_urls: app.router.add_get(url, handler) app.freeze() router = app.router @@ -425,21 +432,13 @@ def _run() -> None: def test_resolve_gitapi_subapps( loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture, + github_urls: list[str], ) -> None: - """Resolve DynamicResource for simulated github API, grouped in subapps. - - The benchmark uses OpenAPI generated info for github. - To update the local data file please run the following command: - $ curl https://raw.githubusercontent.com/github/rest-api-description/refs/heads/main/descriptions/api.github.com/api.github.com.json | jq ".paths | keys" > github-urls.json - """ + """Resolve DynamicResource for simulated github API, grouped in subapps.""" async def handler(request: web.Request) -> NoReturn: assert False - here = Path(__file__).parent - with (here / "github-urls.json").open() as f: - urls = json.load(f) - subapps = { "gists": web.Application(), "orgs": web.Application(), @@ -451,7 +450,7 @@ async def handler(request: web.Request) -> NoReturn: } app = web.Application() - for url in urls: + for url in github_urls: parts = url.split("/") subapp = subapps.get(parts[1]) if subapp is not None: @@ -501,6 +500,39 @@ def _run() -> None: loop.run_until_complete(run_url_dispatcher_benchmark()) +def test_resolve_gitapi_root( + loop: asyncio.AbstractEventLoop, + benchmark: BenchmarkFixture, + github_urls: list[str], +) -> None: + """Resolve the plain root for simulated github API.""" + + async 
def handler(request: web.Request) -> NoReturn: + assert False + + app = web.Application() + for url in github_urls: + app.router.add_get(url, handler) + app.freeze() + router = app.router + + request = _mock_request(method="GET", path="/") + + async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + ret = None + for i in range(250): + ret = await router.resolve(request) + return ret + + ret = loop.run_until_complete(run_url_dispatcher_benchmark()) + assert ret is not None + assert ret.get_info()["path"] == "/", ret.get_info() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_url_dispatcher_benchmark()) + + def test_resolve_prefix_resources_many_prefix_many_plain( loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture, From dc61477e3a85345c1fac8cfb9d3619c11ef65ce3 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 24 Nov 2024 10:33:32 -0800 Subject: [PATCH 1015/1511] [PR #10030/cdc01cd backport][3.11] Reduce initialization overhead in `ClientResponse` (#10031) --- CHANGES/10030.misc.rst | 1 + aiohttp/client_reqrep.py | 34 ++++++++++++++++++---------------- tests/test_client_response.py | 18 ++++++++++++++++++ 3 files changed, 37 insertions(+), 16 deletions(-) create mode 100644 CHANGES/10030.misc.rst diff --git a/CHANGES/10030.misc.rst b/CHANGES/10030.misc.rst new file mode 100644 index 00000000000..68ed7d058d6 --- /dev/null +++ b/CHANGES/10030.misc.rst @@ -0,0 +1 @@ +Improved performance of creating :class:`aiohttp.ClientResponse` objects -- by :user:`bdraco`. 
diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index a0fa093d92e..ef9c95f0f59 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -819,18 +819,25 @@ class ClientResponse(HeadersMixin): status: int = None # type: ignore[assignment] # Status-Code reason: Optional[str] = None # Reason-Phrase - content: StreamReader = None # type: ignore[assignment] # Payload stream + content: StreamReader = None # type: ignore[assignment] # Payload stream + _body: Optional[bytes] = None _headers: CIMultiDictProxy[str] = None # type: ignore[assignment] + _history: Tuple["ClientResponse", ...] = () _raw_headers: RawHeaders = None # type: ignore[assignment] - _connection = None # current connection + _connection: Optional["Connection"] = None # current connection + _continue: Optional["asyncio.Future[bool]"] = None _source_traceback: Optional[traceback.StackSummary] = None + _session: Optional["ClientSession"] = None # set up by ClientRequest after ClientResponse object creation # post-init stage allows to not change ctor signature _closed = True # to allow __del__ for non-initialized properly response _released = False _in_context = False - __writer = None + + _resolve_charset: Callable[["ClientResponse", bytes], str] = lambda *_: "utf-8" + + __writer: Optional["asyncio.Task[None]"] = None def __init__( self, @@ -845,34 +852,29 @@ def __init__( loop: asyncio.AbstractEventLoop, session: "ClientSession", ) -> None: - assert isinstance(url, URL) + # URL forbids subclasses, so a simple type check is enough. + assert type(url) is URL self.method = method self.cookies = SimpleCookie() self._real_url = url self._url = url.with_fragment(None) if url.raw_fragment else url - self._body: Optional[bytes] = None if writer is not None: self._writer = writer - self._continue = continue100 # None by default - self._closed = True - self._history: Tuple[ClientResponse, ...] 
= () + if continue100 is not None: + self._continue = continue100 self._request_info = request_info self._timer = timer if timer is not None else TimerNoop() self._cache: Dict[str, Any] = {} self._traces = traces self._loop = loop - # store a reference to session #1985 - self._session: Optional[ClientSession] = session # Save reference to _resolve_charset, so that get_encoding() will still # work after the response has finished reading the body. - if session is None: - # TODO: Fix session=None in tests (see ClientRequest.__init__). - self._resolve_charset: Callable[["ClientResponse", bytes], str] = ( - lambda *_: "utf-8" - ) - else: + # TODO: Fix session=None in tests (see ClientRequest.__init__). + if session is not None: + # store a reference to session #1985 + self._session = session self._resolve_charset = session._resolve_charset if loop.get_debug(): self._source_traceback = traceback.extract_stack(sys._getframe(1)) diff --git a/tests/test_client_response.py b/tests/test_client_response.py index be25a87e425..24b3c667326 100644 --- a/tests/test_client_response.py +++ b/tests/test_client_response.py @@ -981,6 +981,24 @@ def test_content_disposition_no_header() -> None: assert response.content_disposition is None +def test_default_encoding_is_utf8() -> None: + response = ClientResponse( + "get", + URL("http://def-cl-resp.org"), + request_info=mock.Mock(), + writer=WriterMock(), + continue100=None, + timer=TimerNoop(), + traces=[], + loop=mock.Mock(), + session=None, # type: ignore[arg-type] + ) + response._headers = CIMultiDictProxy(CIMultiDict({})) + response._body = b"" + + assert response.get_encoding() == "utf-8" + + def test_response_request_info() -> None: url = "http://def-cl-resp.org" headers = {"Content-Type": "application/json;charset=cp1251"} From 426c6cc68025847a6ee324bfee1d4cf69c43566e Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sun, 24 Nov 2024 10:34:51 -0800 Subject: [PATCH 1016/1511] [PR #10030/cdc01cd backport][3.12] Reduce initialization overhead in `ClientResponse` (#10032) --- CHANGES/10030.misc.rst | 1 + aiohttp/client_reqrep.py | 34 ++++++++++++++++++---------------- tests/test_client_response.py | 18 ++++++++++++++++++ 3 files changed, 37 insertions(+), 16 deletions(-) create mode 100644 CHANGES/10030.misc.rst diff --git a/CHANGES/10030.misc.rst b/CHANGES/10030.misc.rst new file mode 100644 index 00000000000..68ed7d058d6 --- /dev/null +++ b/CHANGES/10030.misc.rst @@ -0,0 +1 @@ +Improved performance of creating :class:`aiohttp.ClientResponse` objects -- by :user:`bdraco`. diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index a0fa093d92e..ef9c95f0f59 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -819,18 +819,25 @@ class ClientResponse(HeadersMixin): status: int = None # type: ignore[assignment] # Status-Code reason: Optional[str] = None # Reason-Phrase - content: StreamReader = None # type: ignore[assignment] # Payload stream + content: StreamReader = None # type: ignore[assignment] # Payload stream + _body: Optional[bytes] = None _headers: CIMultiDictProxy[str] = None # type: ignore[assignment] + _history: Tuple["ClientResponse", ...] 
= () _raw_headers: RawHeaders = None # type: ignore[assignment] - _connection = None # current connection + _connection: Optional["Connection"] = None # current connection + _continue: Optional["asyncio.Future[bool]"] = None _source_traceback: Optional[traceback.StackSummary] = None + _session: Optional["ClientSession"] = None # set up by ClientRequest after ClientResponse object creation # post-init stage allows to not change ctor signature _closed = True # to allow __del__ for non-initialized properly response _released = False _in_context = False - __writer = None + + _resolve_charset: Callable[["ClientResponse", bytes], str] = lambda *_: "utf-8" + + __writer: Optional["asyncio.Task[None]"] = None def __init__( self, @@ -845,34 +852,29 @@ def __init__( loop: asyncio.AbstractEventLoop, session: "ClientSession", ) -> None: - assert isinstance(url, URL) + # URL forbids subclasses, so a simple type check is enough. + assert type(url) is URL self.method = method self.cookies = SimpleCookie() self._real_url = url self._url = url.with_fragment(None) if url.raw_fragment else url - self._body: Optional[bytes] = None if writer is not None: self._writer = writer - self._continue = continue100 # None by default - self._closed = True - self._history: Tuple[ClientResponse, ...] = () + if continue100 is not None: + self._continue = continue100 self._request_info = request_info self._timer = timer if timer is not None else TimerNoop() self._cache: Dict[str, Any] = {} self._traces = traces self._loop = loop - # store a reference to session #1985 - self._session: Optional[ClientSession] = session # Save reference to _resolve_charset, so that get_encoding() will still # work after the response has finished reading the body. - if session is None: - # TODO: Fix session=None in tests (see ClientRequest.__init__). - self._resolve_charset: Callable[["ClientResponse", bytes], str] = ( - lambda *_: "utf-8" - ) - else: + # TODO: Fix session=None in tests (see ClientRequest.__init__). 
+ if session is not None: + # store a reference to session #1985 + self._session = session self._resolve_charset = session._resolve_charset if loop.get_debug(): self._source_traceback = traceback.extract_stack(sys._getframe(1)) diff --git a/tests/test_client_response.py b/tests/test_client_response.py index be25a87e425..24b3c667326 100644 --- a/tests/test_client_response.py +++ b/tests/test_client_response.py @@ -981,6 +981,24 @@ def test_content_disposition_no_header() -> None: assert response.content_disposition is None +def test_default_encoding_is_utf8() -> None: + response = ClientResponse( + "get", + URL("http://def-cl-resp.org"), + request_info=mock.Mock(), + writer=WriterMock(), + continue100=None, + timer=TimerNoop(), + traces=[], + loop=mock.Mock(), + session=None, # type: ignore[arg-type] + ) + response._headers = CIMultiDictProxy(CIMultiDict({})) + response._body = b"" + + assert response.get_encoding() == "utf-8" + + def test_response_request_info() -> None: url = "http://def-cl-resp.org" headers = {"Content-Type": "application/json;charset=cp1251"} From 3d2b8290287c6d5eee2df3da77209dc7d41106c4 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 24 Nov 2024 11:16:40 -0800 Subject: [PATCH 1017/1511] [PR #10029/5f5729d backport][3.11] Defer creation of cookies for client responses until needed (#10033) --- CHANGES/10029.misc.rst | 1 + aiohttp/client.py | 2 +- aiohttp/client_reqrep.py | 25 +++++++++++++++++++------ tests/test_client_response.py | 26 ++++++++++++++++++++++++-- 4 files changed, 45 insertions(+), 9 deletions(-) create mode 100644 CHANGES/10029.misc.rst diff --git a/CHANGES/10029.misc.rst b/CHANGES/10029.misc.rst new file mode 100644 index 00000000000..d98729ecac8 --- /dev/null +++ b/CHANGES/10029.misc.rst @@ -0,0 +1 @@ +Improved performance of creating :class:`aiohttp.ClientResponse` objects when there are no cookies -- by :user:`bdraco`. 
diff --git a/aiohttp/client.py b/aiohttp/client.py index 56fd3925fa1..e04a6ff989a 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -744,7 +744,7 @@ async def _request( raise raise ClientOSError(*exc.args) from exc - if cookies := resp.cookies: + if cookies := resp._cookies: self._cookie_jar.update_cookies(cookies, resp.url) # redirects diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index ef9c95f0f59..a072ad37d76 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -826,6 +826,7 @@ class ClientResponse(HeadersMixin): _raw_headers: RawHeaders = None # type: ignore[assignment] _connection: Optional["Connection"] = None # current connection + _cookies: Optional[SimpleCookie] = None _continue: Optional["asyncio.Future[bool]"] = None _source_traceback: Optional[traceback.StackSummary] = None _session: Optional["ClientSession"] = None @@ -856,7 +857,6 @@ def __init__( assert type(url) is URL self.method = method - self.cookies = SimpleCookie() self._real_url = url self._url = url.with_fragment(None) if url.raw_fragment else url @@ -905,6 +905,16 @@ def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None: else: writer.add_done_callback(self.__reset_writer) + @property + def cookies(self) -> SimpleCookie: + if self._cookies is None: + self._cookies = SimpleCookie() + return self._cookies + + @cookies.setter + def cookies(self, cookies: SimpleCookie) -> None: + self._cookies = cookies + @reify def url(self) -> URL: return self._url @@ -1068,11 +1078,14 @@ async def start(self, connection: "Connection") -> "ClientResponse": self.content = payload # cookies - for hdr in self.headers.getall(hdrs.SET_COOKIE, ()): - try: - self.cookies.load(hdr) - except CookieError as exc: - client_logger.warning("Can not load response cookies: %s", exc) + if cookie_hdrs := self.headers.getall(hdrs.SET_COOKIE, ()): + cookies = SimpleCookie() + for hdr in cookie_hdrs: + try: + cookies.load(hdr) + except CookieError as exc: + 
client_logger.warning("Can not load response cookies: %s", exc) + self._cookies = cookies return self def _response_eof(self) -> None: diff --git a/tests/test_client_response.py b/tests/test_client_response.py index 24b3c667326..18ba6c5149d 100644 --- a/tests/test_client_response.py +++ b/tests/test_client_response.py @@ -1,5 +1,6 @@ # Tests for aiohttp/client.py +import asyncio import gc import sys from typing import Callable @@ -10,7 +11,7 @@ from yarl import URL import aiohttp -from aiohttp import http +from aiohttp import ClientSession, http from aiohttp.client_reqrep import ClientResponse, RequestInfo from aiohttp.helpers import TimerNoop from aiohttp.test_utils import make_mocked_coro @@ -1139,7 +1140,28 @@ def side_effect(*args, **kwargs): ) -def test_response_real_url(loop, session) -> None: +def test_response_cookies( + loop: asyncio.AbstractEventLoop, session: ClientSession +) -> None: + response = ClientResponse( + "get", + URL("http://python.org"), + request_info=mock.Mock(), + writer=WriterMock(), + continue100=None, + timer=TimerNoop(), + traces=[], + loop=loop, + session=session, + ) + cookies = response.cookies + # Ensure the same cookies object is returned each time + assert response.cookies is cookies + + +def test_response_real_url( + loop: asyncio.AbstractEventLoop, session: ClientSession +) -> None: url = URL("http://def-cl-resp.org/#urlfragment") response = ClientResponse( "get", From 24eb11deed7f7fdcac3d47b4e3c299c8cac580ac Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sun, 24 Nov 2024 11:21:25 -0800 Subject: [PATCH 1018/1511] [PR #10029/5f5729d backport][3.12] Defer creation of cookies for client responses until needed (#10034) --- CHANGES/10029.misc.rst | 1 + aiohttp/client.py | 2 +- aiohttp/client_reqrep.py | 25 +++++++++++++++++++------ tests/test_client_response.py | 26 ++++++++++++++++++++++++-- 4 files changed, 45 insertions(+), 9 deletions(-) create mode 100644 CHANGES/10029.misc.rst diff --git a/CHANGES/10029.misc.rst b/CHANGES/10029.misc.rst new file mode 100644 index 00000000000..d98729ecac8 --- /dev/null +++ b/CHANGES/10029.misc.rst @@ -0,0 +1 @@ +Improved performance of creating :class:`aiohttp.ClientResponse` objects when there are no cookies -- by :user:`bdraco`. diff --git a/aiohttp/client.py b/aiohttp/client.py index 56fd3925fa1..e04a6ff989a 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -744,7 +744,7 @@ async def _request( raise raise ClientOSError(*exc.args) from exc - if cookies := resp.cookies: + if cookies := resp._cookies: self._cookie_jar.update_cookies(cookies, resp.url) # redirects diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index ef9c95f0f59..a072ad37d76 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -826,6 +826,7 @@ class ClientResponse(HeadersMixin): _raw_headers: RawHeaders = None # type: ignore[assignment] _connection: Optional["Connection"] = None # current connection + _cookies: Optional[SimpleCookie] = None _continue: Optional["asyncio.Future[bool]"] = None _source_traceback: Optional[traceback.StackSummary] = None _session: Optional["ClientSession"] = None @@ -856,7 +857,6 @@ def __init__( assert type(url) is URL self.method = method - self.cookies = SimpleCookie() self._real_url = url self._url = url.with_fragment(None) if url.raw_fragment else url @@ -905,6 +905,16 @@ def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None: else: writer.add_done_callback(self.__reset_writer) + @property + 
def cookies(self) -> SimpleCookie: + if self._cookies is None: + self._cookies = SimpleCookie() + return self._cookies + + @cookies.setter + def cookies(self, cookies: SimpleCookie) -> None: + self._cookies = cookies + @reify def url(self) -> URL: return self._url @@ -1068,11 +1078,14 @@ async def start(self, connection: "Connection") -> "ClientResponse": self.content = payload # cookies - for hdr in self.headers.getall(hdrs.SET_COOKIE, ()): - try: - self.cookies.load(hdr) - except CookieError as exc: - client_logger.warning("Can not load response cookies: %s", exc) + if cookie_hdrs := self.headers.getall(hdrs.SET_COOKIE, ()): + cookies = SimpleCookie() + for hdr in cookie_hdrs: + try: + cookies.load(hdr) + except CookieError as exc: + client_logger.warning("Can not load response cookies: %s", exc) + self._cookies = cookies return self def _response_eof(self) -> None: diff --git a/tests/test_client_response.py b/tests/test_client_response.py index 24b3c667326..18ba6c5149d 100644 --- a/tests/test_client_response.py +++ b/tests/test_client_response.py @@ -1,5 +1,6 @@ # Tests for aiohttp/client.py +import asyncio import gc import sys from typing import Callable @@ -10,7 +11,7 @@ from yarl import URL import aiohttp -from aiohttp import http +from aiohttp import ClientSession, http from aiohttp.client_reqrep import ClientResponse, RequestInfo from aiohttp.helpers import TimerNoop from aiohttp.test_utils import make_mocked_coro @@ -1139,7 +1140,28 @@ def side_effect(*args, **kwargs): ) -def test_response_real_url(loop, session) -> None: +def test_response_cookies( + loop: asyncio.AbstractEventLoop, session: ClientSession +) -> None: + response = ClientResponse( + "get", + URL("http://python.org"), + request_info=mock.Mock(), + writer=WriterMock(), + continue100=None, + timer=TimerNoop(), + traces=[], + loop=loop, + session=session, + ) + cookies = response.cookies + # Ensure the same cookies object is returned each time + assert response.cookies is cookies + + +def 
test_response_real_url( + loop: asyncio.AbstractEventLoop, session: ClientSession +) -> None: url = URL("http://def-cl-resp.org/#urlfragment") response = ClientResponse( "get", From 5e4ad95e394305fc29aff21b1470cd94e6e2e237 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 25 Nov 2024 00:18:39 +0000 Subject: [PATCH 1019/1511] [PR #10038/6f4e9615 backport][3.12] Small speed up to `StreamWriter.__init__` (#10040) Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/abc.py | 4 ++-- aiohttp/http_writer.py | 16 ++++++---------- 2 files changed, 8 insertions(+), 12 deletions(-) diff --git a/aiohttp/abc.py b/aiohttp/abc.py index 868f0e94898..d6f9f782b0f 100644 --- a/aiohttp/abc.py +++ b/aiohttp/abc.py @@ -195,8 +195,8 @@ def filter_cookies(self, request_url: URL) -> "BaseCookie[str]": class AbstractStreamWriter(ABC): """Abstract stream writer.""" - buffer_size = 0 - output_size = 0 + buffer_size: int = 0 + output_size: int = 0 length: Optional[int] = 0 @abstractmethod diff --git a/aiohttp/http_writer.py b/aiohttp/http_writer.py index c6c80edc3c4..c66fda3d8d0 100644 --- a/aiohttp/http_writer.py +++ b/aiohttp/http_writer.py @@ -38,6 +38,12 @@ class HttpVersion(NamedTuple): class StreamWriter(AbstractStreamWriter): + + length: Optional[int] = None + chunked: bool = False + _eof: bool = False + _compress: Optional[ZLibCompressor] = None + def __init__( self, protocol: BaseProtocol, @@ -46,17 +52,7 @@ def __init__( on_headers_sent: _T_OnHeadersSent = None, ) -> None: self._protocol = protocol - self.loop = loop - self.length = None - self.chunked = False - self.buffer_size = 0 - self.output_size = 0 - - self._eof = False - self._compress: Optional[ZLibCompressor] = None - self._drain_waiter = None - self._on_chunk_sent: _T_OnChunkSent = on_chunk_sent self._on_headers_sent: _T_OnHeadersSent = on_headers_sent From 65dab0ee40109b97fca2b938ba76c8f68968ce49 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" 
<45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 25 Nov 2024 00:30:10 +0000 Subject: [PATCH 1020/1511] [PR #10038/6f4e9615 backport][3.11] Small speed up to `StreamWriter.__init__` (#10039) Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/abc.py | 4 ++-- aiohttp/http_writer.py | 16 ++++++---------- 2 files changed, 8 insertions(+), 12 deletions(-) diff --git a/aiohttp/abc.py b/aiohttp/abc.py index 868f0e94898..d6f9f782b0f 100644 --- a/aiohttp/abc.py +++ b/aiohttp/abc.py @@ -195,8 +195,8 @@ def filter_cookies(self, request_url: URL) -> "BaseCookie[str]": class AbstractStreamWriter(ABC): """Abstract stream writer.""" - buffer_size = 0 - output_size = 0 + buffer_size: int = 0 + output_size: int = 0 length: Optional[int] = 0 @abstractmethod diff --git a/aiohttp/http_writer.py b/aiohttp/http_writer.py index c6c80edc3c4..c66fda3d8d0 100644 --- a/aiohttp/http_writer.py +++ b/aiohttp/http_writer.py @@ -38,6 +38,12 @@ class HttpVersion(NamedTuple): class StreamWriter(AbstractStreamWriter): + + length: Optional[int] = None + chunked: bool = False + _eof: bool = False + _compress: Optional[ZLibCompressor] = None + def __init__( self, protocol: BaseProtocol, @@ -46,17 +52,7 @@ def __init__( on_headers_sent: _T_OnHeadersSent = None, ) -> None: self._protocol = protocol - self.loop = loop - self.length = None - self.chunked = False - self.buffer_size = 0 - self.output_size = 0 - - self._eof = False - self._compress: Optional[ZLibCompressor] = None - self._drain_waiter = None - self._on_chunk_sent: _T_OnChunkSent = on_chunk_sent self._on_headers_sent: _T_OnHeadersSent = on_headers_sent From 3dfd7ae86413e164d3c924cac925980fa9ceddff Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 25 Nov 2024 11:23:19 +0000 Subject: [PATCH 1021/1511] Bump pypa/cibuildwheel from 2.21.3 to 2.22.0 (#10042) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps 
[pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.21.3 to 2.22.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pypa/cibuildwheel/releases">pypa/cibuildwheel's releases</a>.</em></p> <blockquote> <h2>Version 2.22.0</h2> <ul> <li>🌟 Added a new <code>CIBW_ENABLE</code>/<code>enable</code> feature that replaces <code>CIBW_FREETHREADED_SUPPORT</code>/<code>free-threaded-support</code> and <code>CIBW_PRERELEASE_PYTHONS</code> with a system that supports both. In cibuildwheel 3, this will also include a PyPy setting and the deprecated options will be removed. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2048">#2048</a>)</li> <li>🌟 <a href="https://peps.python.org/pep-0735/">Dependency groups</a> are now supported for tests. Use <code>CIBW_TEST_GROUPS</code>/<code>test-groups</code> to specify groups in <code>[dependency-groups]</code> for testing. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2063">#2063</a>)</li> <li>🌟 Support for the experimental Ubuntu-based ARMv7l manylinux image (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2052">#2052</a>)</li> <li>✨ Show a warning when cibuildwheel is run from Python 3.10 or older; cibuildwheel 3.0 will require Python 3.11 or newer as host (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2050">#2050</a>)</li> <li>🐛 Fix issue with stderr interfering with checking the docker version (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2074">#2074</a>)</li> <li>🛠 Python 3.9 is now used in <code>CIBW_BEFORE_ALL</code>/<code>before-all</code> on linux, replacing 3.8, which is now EoL (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2043">#2043</a>)</li> <li>🛠 Error messages for producing a pure-Python wheel are slightly more informative (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2044">#2044</a>)</li> <li>🛠 Better error when <code>uname -m</code> fails on ARM (<a 
href="https://redirect.github.com/pypa/cibuildwheel/issues/2049">#2049</a>)</li> <li>🛠 Better error when repair fails and docs for abi3audit on Windows (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2058">#2058</a>)</li> <li>🛠 Better error when <code>manylinux-interpreters ensure</code> fails (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2066">#2066</a>)</li> <li>🛠 Update Pyodide to 0.26.4, and adapt to the unbundled pyodide-build (now 0.29) (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2090">#2090</a>)</li> <li>🛠 Now cibuildwheel uses dependency-groups for development dependencies (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2064">#2064</a>, <a href="https://redirect.github.com/pypa/cibuildwheel/issues/2085">#2085</a>)</li> <li>📚 Docs updates and tidy ups (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2061">#2061</a>, <a href="https://redirect.github.com/pypa/cibuildwheel/issues/2067">#2067</a>, <a href="https://redirect.github.com/pypa/cibuildwheel/issues/2072">#2072</a>)</li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md">pypa/cibuildwheel's changelog</a>.</em></p> <blockquote> <h3>v2.22.0</h3> <p><em>23 November 2024</em></p> <ul> <li>🌟 Added a new <code>CIBW_ENABLE</code>/<code>enable</code> feature that replaces <code>CIBW_FREETHREADED_SUPPORT</code>/<code>free-threaded-support</code> and <code>CIBW_PRERELEASE_PYTHONS</code> with a system that supports both. In cibuildwheel 3, this will also include a PyPy setting and the deprecated options will be removed. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2048">#2048</a>)</li> <li>🌟 <a href="https://peps.python.org/pep-0735/">Dependency groups</a> are now supported for tests. 
Use <code>CIBW_TEST_GROUPS</code>/<code>test-groups</code> to specify groups in <code>[dependency-groups]</code> for testing. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2063">#2063</a>)</li> <li>🌟 Support for the experimental Ubuntu-based ARMv7l manylinux image (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2052">#2052</a>)</li> <li>✨ Show a warning when cibuildwheel is run from Python 3.10 or older; cibuildwheel 3.0 will require Python 3.11 or newer as host (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2050">#2050</a>)</li> <li>🐛 Fix issue with stderr interfering with checking the docker version (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2074">#2074</a>)</li> <li>🛠 Python 3.9 is now used in <code>CIBW_BEFORE_ALL</code>/<code>before-all</code> on linux, replacing 3.8, which is now EoL (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2043">#2043</a>)</li> <li>🛠 Error messages for producing a pure-Python wheel are slightly more informative (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2044">#2044</a>)</li> <li>🛠 Better error when <code>uname -m</code> fails on ARM (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2049">#2049</a>)</li> <li>🛠 Better error when repair fails and docs for abi3audit on Windows (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2058">#2058</a>)</li> <li>🛠 Better error when <code>manylinux-interpreters ensure</code> fails (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2066">#2066</a>)</li> <li>🛠 Update Pyodide to 0.26.4, and adapt to the unbundled pyodide-build (now 0.29) (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2090">#2090</a>)</li> <li>🛠 Now cibuildwheel uses dependency-groups for development dependencies (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2064">#2064</a>, <a href="https://redirect.github.com/pypa/cibuildwheel/issues/2085">#2085</a>)</li> <li>📚 
Docs updates and tidy ups (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2061">#2061</a>, <a href="https://redirect.github.com/pypa/cibuildwheel/issues/2067">#2067</a>, <a href="https://redirect.github.com/pypa/cibuildwheel/issues/2072">#2072</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/cibuildwheel/commit/ee63bf16da6cddfb925f542f2c7b59ad50e93969"><code>ee63bf1</code></a> Bump version: v2.22.0</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/d3eeba79672a0a2eb2881d547ec1366129c236ab"><code>d3eeba7</code></a> chore: bump Ruff to 0.8.0 (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2092">#2092</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/109020ed2d0b528335ac75cb0cf2a3ff2e25fa61"><code>109020e</code></a> Updates for Pyodide builds after <code>pyodide-build</code> was unvendored (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2090">#2090</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/fd990007be2cc7eb1beb5501970039bf4e1dde57"><code>fd99000</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2087">#2087</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/e158f22d511580ea7e1c2853fdb002555c15e993"><code>e158f22</code></a> ci: update gitlab for dependency-groups (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2089">#2089</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/8f21eb17be321024854494f69d8b45610e791c65"><code>8f21eb1</code></a> chore: use dependency-groups (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2064">#2064</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/aac31ae2fb6f1bd1224a90f60f2e65cd419a54a3"><code>aac31ae</code></a> docs: fix update scripts usage (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2067">#2067</a>)</li> <li><a 
href="https://github.com/pypa/cibuildwheel/commit/b882b84e470860fb5fa558f32edad35e1d8f6bf1"><code>b882b84</code></a> fix: update the macOS image used on Cirrus CI (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2085">#2085</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/243085f047937c9fe7d228bcaf0e684a64053a22"><code>243085f</code></a> [Bot] Update dependencies (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2086">#2086</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/df6f8863900b4519a8dfee992548b11b4329d12b"><code>df6f886</code></a> feat: add manylinux armv7l (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2052">#2052</a>)</li> <li>Additional commits viewable in <a href="https://github.com/pypa/cibuildwheel/compare/v2.21.3...v2.22.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pypa/cibuildwheel&package-manager=github_actions&previous-version=2.21.3&new-version=2.22.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index b468a3f3f4c..765047b933f 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -396,7 +396,7 @@ jobs: run: | make cythonize - name: Build wheels - uses: pypa/cibuildwheel@v2.21.3 + uses: pypa/cibuildwheel@v2.22.0 env: CIBW_ARCHS_MACOS: 
x86_64 arm64 universal2 - uses: actions/upload-artifact@v3 From d411bc511d4348e543a5690786c7100dc47d8eb9 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 25 Nov 2024 14:29:45 -0800 Subject: [PATCH 1022/1511] [PR #10043/5255cec backport][3.11] Avoid constructing headers mulitidict twice for ``web.Response`` (#10045) --- CHANGES/10043.misc.rst | 1 + aiohttp/web_response.py | 14 ++++++++++++-- 2 files changed, 13 insertions(+), 2 deletions(-) create mode 100644 CHANGES/10043.misc.rst diff --git a/CHANGES/10043.misc.rst b/CHANGES/10043.misc.rst new file mode 100644 index 00000000000..cfd4e88ee24 --- /dev/null +++ b/CHANGES/10043.misc.rst @@ -0,0 +1 @@ +Improved performance of constructing :class:`aiohttp.web.Response` with headers -- by :user:`bdraco`. diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 59c9b54784a..d3d8afe5433 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -86,7 +86,15 @@ def __init__( status: int = 200, reason: Optional[str] = None, headers: Optional[LooseHeaders] = None, + _real_headers: Optional[CIMultiDict[str]] = None, ) -> None: + """Initialize a new stream response object. + + _real_headers is an internal parameter used to pass a pre-populated + headers object. It is used by the `Response` class to avoid copying + the headers when creating a new response object. It is not intended + to be used by external code. 
+ """ self._body = None self._keep_alive: Optional[bool] = None self._chunked = False @@ -102,7 +110,9 @@ def __init__( self._body_length = 0 self._state: Dict[str, Any] = {} - if headers is not None: + if _real_headers is not None: + self._headers = _real_headers + elif headers is not None: self._headers: CIMultiDict[str] = CIMultiDict(headers) else: self._headers = CIMultiDict() @@ -660,7 +670,7 @@ def __init__( content_type += "; charset=" + charset real_headers[hdrs.CONTENT_TYPE] = content_type - super().__init__(status=status, reason=reason, headers=real_headers) + super().__init__(status=status, reason=reason, _real_headers=real_headers) if text is not None: self.text = text From d8e9a6b784e248da823db3f1ed57fbb21212ee9b Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 25 Nov 2024 14:42:56 -0800 Subject: [PATCH 1023/1511] [PR #10043/5255cec backport][3.12] Avoid constructing headers mulitidict twice for ``web.Response`` (#10046) --- CHANGES/10043.misc.rst | 1 + aiohttp/web_response.py | 14 ++++++++++++-- 2 files changed, 13 insertions(+), 2 deletions(-) create mode 100644 CHANGES/10043.misc.rst diff --git a/CHANGES/10043.misc.rst b/CHANGES/10043.misc.rst new file mode 100644 index 00000000000..cfd4e88ee24 --- /dev/null +++ b/CHANGES/10043.misc.rst @@ -0,0 +1 @@ +Improved performance of constructing :class:`aiohttp.web.Response` with headers -- by :user:`bdraco`. diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 59c9b54784a..d3d8afe5433 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -86,7 +86,15 @@ def __init__( status: int = 200, reason: Optional[str] = None, headers: Optional[LooseHeaders] = None, + _real_headers: Optional[CIMultiDict[str]] = None, ) -> None: + """Initialize a new stream response object. + + _real_headers is an internal parameter used to pass a pre-populated + headers object. 
It is used by the `Response` class to avoid copying + the headers when creating a new response object. It is not intended + to be used by external code. + """ self._body = None self._keep_alive: Optional[bool] = None self._chunked = False @@ -102,7 +110,9 @@ def __init__( self._body_length = 0 self._state: Dict[str, Any] = {} - if headers is not None: + if _real_headers is not None: + self._headers = _real_headers + elif headers is not None: self._headers: CIMultiDict[str] = CIMultiDict(headers) else: self._headers = CIMultiDict() @@ -660,7 +670,7 @@ def __init__( content_type += "; charset=" + charset real_headers[hdrs.CONTENT_TYPE] = content_type - super().__init__(status=status, reason=reason, headers=real_headers) + super().__init__(status=status, reason=reason, _real_headers=real_headers) if text is not None: self.text = text From 653302e225428a07e15e758b28422262bd98d5de Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 26 Nov 2024 17:07:38 -0600 Subject: [PATCH 1024/1511] [PR #10049/006fbc37 backport][3.11] Improve client performance when there are no auto headers to skip (#10051) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/10049.misc.rst | 1 + aiohttp/client_reqrep.py | 37 +++++++++++++++++++++--------------- tests/test_client_request.py | 1 + 3 files changed, 24 insertions(+), 15 deletions(-) create mode 100644 CHANGES/10049.misc.rst diff --git a/CHANGES/10049.misc.rst b/CHANGES/10049.misc.rst new file mode 100644 index 00000000000..58f61d48420 --- /dev/null +++ b/CHANGES/10049.misc.rst @@ -0,0 +1 @@ +Improved performance of making requests when there are no auto headers to skip -- by :user:`bdraco`. 
diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index a072ad37d76..e97c40ce0e5 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -271,6 +271,8 @@ class ClientRequest: __writer = None # async task for streaming data _continue = None # waiter future for '100 Continue' response + _skip_auto_headers: Optional["CIMultiDict[None]"] = None + # N.B. # Adding __del__ method with self._writer closing doesn't make sense # because _writer is instance method, thus it keeps a reference to self. @@ -358,6 +360,10 @@ def __init__( def __reset_writer(self, _: object = None) -> None: self.__writer = None + @property + def skip_auto_headers(self) -> CIMultiDict[None]: + return self._skip_auto_headers or CIMultiDict() + @property def _writer(self) -> Optional["asyncio.Task[None]"]: return self.__writer @@ -469,20 +475,19 @@ def update_headers(self, headers: Optional[LooseHeaders]) -> None: def update_auto_headers(self, skip_auto_headers: Optional[Iterable[str]]) -> None: if skip_auto_headers is not None: - self.skip_auto_headers = CIMultiDict( + self._skip_auto_headers = CIMultiDict( (hdr, None) for hdr in sorted(skip_auto_headers) ) used_headers = self.headers.copy() - used_headers.extend(self.skip_auto_headers) # type: ignore[arg-type] + used_headers.extend(self._skip_auto_headers) # type: ignore[arg-type] else: # Fast path when there are no headers to skip # which is the most common case. 
- self.skip_auto_headers = CIMultiDict() used_headers = self.headers for hdr, val in self.DEFAULT_HEADERS.items(): if hdr not in used_headers: - self.headers.add(hdr, val) + self.headers[hdr] = val if hdrs.USER_AGENT not in used_headers: self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE @@ -584,21 +589,20 @@ def update_body_from_data(self, body: Any) -> None: self.body = body # enable chunked encoding if needed - if not self.chunked: - if hdrs.CONTENT_LENGTH not in self.headers: - size = body.size - if size is None: - self.chunked = True - else: - if hdrs.CONTENT_LENGTH not in self.headers: - self.headers[hdrs.CONTENT_LENGTH] = str(size) + if not self.chunked and hdrs.CONTENT_LENGTH not in self.headers: + if (size := body.size) is not None: + self.headers[hdrs.CONTENT_LENGTH] = str(size) + else: + self.chunked = True # copy payload headers assert body.headers + headers = self.headers + skip_headers = self._skip_auto_headers for key, value in body.headers.items(): - if key in self.headers or key in self.skip_auto_headers: + if key in headers or (skip_headers is not None and key in skip_headers): continue - self.headers[key] = value + headers[key] = value def update_expect_continue(self, expect: bool = False) -> None: if expect: @@ -723,7 +727,10 @@ async def send(self, conn: "Connection") -> "ClientResponse": # set default content-type if ( self.method in self.POST_METHODS - and hdrs.CONTENT_TYPE not in self.skip_auto_headers + and ( + self._skip_auto_headers is None + or hdrs.CONTENT_TYPE not in self._skip_auto_headers + ) and hdrs.CONTENT_TYPE not in self.headers ): self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream" diff --git a/tests/test_client_request.py b/tests/test_client_request.py index 324eddf7f6e..f86ff5d7587 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -687,6 +687,7 @@ async def test_content_type_skip_auto_header_bytes(loop, conn) -> None: skip_auto_headers={"Content-Type"}, loop=loop, ) + assert 
req.skip_auto_headers == CIMultiDict({"CONTENT-TYPE": None}) resp = await req.send(conn) assert "CONTENT-TYPE" not in req.headers resp.close() From c9eb8e7b080999249651380c99f9f404c7ca25b6 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 26 Nov 2024 23:14:29 +0000 Subject: [PATCH 1025/1511] [PR #10049/006fbc37 backport][3.12] Improve client performance when there are no auto headers to skip (#10052) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/10049.misc.rst | 1 + aiohttp/client_reqrep.py | 37 +++++++++++++++++++++--------------- tests/test_client_request.py | 1 + 3 files changed, 24 insertions(+), 15 deletions(-) create mode 100644 CHANGES/10049.misc.rst diff --git a/CHANGES/10049.misc.rst b/CHANGES/10049.misc.rst new file mode 100644 index 00000000000..58f61d48420 --- /dev/null +++ b/CHANGES/10049.misc.rst @@ -0,0 +1 @@ +Improved performance of making requests when there are no auto headers to skip -- by :user:`bdraco`. diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index a072ad37d76..e97c40ce0e5 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -271,6 +271,8 @@ class ClientRequest: __writer = None # async task for streaming data _continue = None # waiter future for '100 Continue' response + _skip_auto_headers: Optional["CIMultiDict[None]"] = None + # N.B. # Adding __del__ method with self._writer closing doesn't make sense # because _writer is instance method, thus it keeps a reference to self. 
@@ -358,6 +360,10 @@ def __init__( def __reset_writer(self, _: object = None) -> None: self.__writer = None + @property + def skip_auto_headers(self) -> CIMultiDict[None]: + return self._skip_auto_headers or CIMultiDict() + @property def _writer(self) -> Optional["asyncio.Task[None]"]: return self.__writer @@ -469,20 +475,19 @@ def update_headers(self, headers: Optional[LooseHeaders]) -> None: def update_auto_headers(self, skip_auto_headers: Optional[Iterable[str]]) -> None: if skip_auto_headers is not None: - self.skip_auto_headers = CIMultiDict( + self._skip_auto_headers = CIMultiDict( (hdr, None) for hdr in sorted(skip_auto_headers) ) used_headers = self.headers.copy() - used_headers.extend(self.skip_auto_headers) # type: ignore[arg-type] + used_headers.extend(self._skip_auto_headers) # type: ignore[arg-type] else: # Fast path when there are no headers to skip # which is the most common case. - self.skip_auto_headers = CIMultiDict() used_headers = self.headers for hdr, val in self.DEFAULT_HEADERS.items(): if hdr not in used_headers: - self.headers.add(hdr, val) + self.headers[hdr] = val if hdrs.USER_AGENT not in used_headers: self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE @@ -584,21 +589,20 @@ def update_body_from_data(self, body: Any) -> None: self.body = body # enable chunked encoding if needed - if not self.chunked: - if hdrs.CONTENT_LENGTH not in self.headers: - size = body.size - if size is None: - self.chunked = True - else: - if hdrs.CONTENT_LENGTH not in self.headers: - self.headers[hdrs.CONTENT_LENGTH] = str(size) + if not self.chunked and hdrs.CONTENT_LENGTH not in self.headers: + if (size := body.size) is not None: + self.headers[hdrs.CONTENT_LENGTH] = str(size) + else: + self.chunked = True # copy payload headers assert body.headers + headers = self.headers + skip_headers = self._skip_auto_headers for key, value in body.headers.items(): - if key in self.headers or key in self.skip_auto_headers: + if key in headers or (skip_headers is not None and 
key in skip_headers): continue - self.headers[key] = value + headers[key] = value def update_expect_continue(self, expect: bool = False) -> None: if expect: @@ -723,7 +727,10 @@ async def send(self, conn: "Connection") -> "ClientResponse": # set default content-type if ( self.method in self.POST_METHODS - and hdrs.CONTENT_TYPE not in self.skip_auto_headers + and ( + self._skip_auto_headers is None + or hdrs.CONTENT_TYPE not in self._skip_auto_headers + ) and hdrs.CONTENT_TYPE not in self.headers ): self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream" diff --git a/tests/test_client_request.py b/tests/test_client_request.py index 324eddf7f6e..f86ff5d7587 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -687,6 +687,7 @@ async def test_content_type_skip_auto_header_bytes(loop, conn) -> None: skip_auto_headers={"Content-Type"}, loop=loop, ) + assert req.skip_auto_headers == CIMultiDict({"CONTENT-TYPE": None}) resp = await req.send(conn) assert "CONTENT-TYPE" not in req.headers resp.close() From 1a6fafe8f05acb705029e16c9ba43df7aaba2473 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 26 Nov 2024 16:00:37 -0800 Subject: [PATCH 1026/1511] [PR #10037/2e369db backport][3.11] Refactor requests and responses to use classvar defaults to avoid multiple `__init__`s (#10053) --- CHANGES/10037.misc.rst | 1 + aiohttp/helpers.py | 3 ++- aiohttp/web_app.py | 2 ++ aiohttp/web_request.py | 21 ++++++--------------- aiohttp/web_response.py | 32 ++++++++++++++++---------------- aiohttp/web_ws.py | 32 ++++++++++++++++---------------- 6 files changed, 43 insertions(+), 48 deletions(-) create mode 100644 CHANGES/10037.misc.rst diff --git a/CHANGES/10037.misc.rst b/CHANGES/10037.misc.rst new file mode 100644 index 00000000000..655c804c995 --- /dev/null +++ b/CHANGES/10037.misc.rst @@ -0,0 +1 @@ +Improved performances of creating objects during the HTTP request lifecycle -- by :user:`bdraco`. 
diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index 522cce2972b..8038931ebec 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -694,10 +694,11 @@ def ceil_timeout( class HeadersMixin: + """Mixin for handling headers.""" + ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"]) _headers: MultiMapping[str] - _content_type: Optional[str] = None _content_dict: Optional[Dict[str, str]] = None _stored_content_type: Union[str, None, _SENTINEL] = sentinel diff --git a/aiohttp/web_app.py b/aiohttp/web_app.py index 5d542ab9222..4bdc54034de 100644 --- a/aiohttp/web_app.py +++ b/aiohttp/web_app.py @@ -498,6 +498,8 @@ def _make_request( task: "asyncio.Task[None]", _cls: Type[Request] = Request, ) -> Request: + if TYPE_CHECKING: + assert self._loop is not None return _cls( message, payload, diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index 502a93d247a..f11d49020a0 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -146,6 +146,8 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin): "_transport_peername", ] ) + _post: Optional[MultiDictProxy[Union[str, bytes, FileField]]] = None + _read_bytes: Optional[bytes] = None def __init__( self, @@ -162,8 +164,6 @@ def __init__( host: Optional[str] = None, remote: Optional[str] = None, ) -> None: - if state is None: - state = {} self._message = message self._protocol = protocol self._payload_writer = payload_writer @@ -187,20 +187,18 @@ def __init__( self._cache["scheme"] = url.scheme self._rel_url = url.relative() else: - self._rel_url = message.url + self._rel_url = url if scheme is not None: self._cache["scheme"] = scheme if host is not None: self._cache["host"] = host - self._post: Optional[MultiDictProxy[Union[str, bytes, FileField]]] = None - self._read_bytes: Optional[bytes] = None - self._state = state + self._state = {} if state is None else state self._task = task self._client_max_size = client_max_size self._loop = loop - transport = 
self._protocol.transport + transport = protocol.transport assert transport is not None self._transport_sslcontext = transport.get_extra_info("sslcontext") self._transport_peername = transport.get_extra_info("peername") @@ -847,14 +845,7 @@ class Request(BaseRequest): ATTRS = BaseRequest.ATTRS | frozenset(["_match_info"]) - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - # matchdict, route_name, handler - # or information about traversal lookup - - # initialized after route resolving - self._match_info: Optional[UrlMappingMatchInfo] = None + _match_info: Optional["UrlMappingMatchInfo"] = None if DEBUG: diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index d3d8afe5433..e05799ca7b7 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -76,9 +76,20 @@ class ContentCoding(enum.Enum): class StreamResponse(BaseClass, HeadersMixin): - _length_check = True - _body: Union[None, bytes, bytearray, Payload] + _length_check = True + _body = None + _keep_alive: Optional[bool] = None + _chunked: bool = False + _compression: bool = False + _compression_strategy: int = zlib.Z_DEFAULT_STRATEGY + _compression_force: Optional[ContentCoding] = None + _req: Optional["BaseRequest"] = None + _payload_writer: Optional[AbstractStreamWriter] = None + _eof_sent: bool = False + _must_be_empty_body: Optional[bool] = None + _body_length = 0 + _cookies: Optional[SimpleCookie] = None def __init__( self, @@ -95,19 +106,6 @@ def __init__( the headers when creating a new response object. It is not intended to be used by external code. 
""" - self._body = None - self._keep_alive: Optional[bool] = None - self._chunked = False - self._compression = False - self._compression_strategy: int = zlib.Z_DEFAULT_STRATEGY - self._compression_force: Optional[ContentCoding] = None - self._cookies: Optional[SimpleCookie] = None - - self._req: Optional[BaseRequest] = None - self._payload_writer: Optional[AbstractStreamWriter] = None - self._eof_sent = False - self._must_be_empty_body: Optional[bool] = None - self._body_length = 0 self._state: Dict[str, Any] = {} if _real_headers is not None: @@ -613,6 +611,9 @@ def __eq__(self, other: object) -> bool: class Response(StreamResponse): + + _compressed_body: Optional[bytes] = None + def __init__( self, *, @@ -677,7 +678,6 @@ def __init__( else: self.body = body - self._compressed_body: Optional[bytes] = None self._zlib_executor_size = zlib_executor_size self._zlib_executor = zlib_executor diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index c18f88eaf00..0fb1549a3aa 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -61,7 +61,22 @@ def __bool__(self) -> bool: class WebSocketResponse(StreamResponse): - _length_check = False + _length_check: bool = False + _ws_protocol: Optional[str] = None + _writer: Optional[WebSocketWriter] = None + _reader: Optional[WebSocketDataQueue] = None + _closed: bool = False + _closing: bool = False + _conn_lost: int = 0 + _close_code: Optional[int] = None + _loop: Optional[asyncio.AbstractEventLoop] = None + _waiting: bool = False + _close_wait: Optional[asyncio.Future[None]] = None + _exception: Optional[BaseException] = None + _heartbeat_when: float = 0.0 + _heartbeat_cb: Optional[asyncio.TimerHandle] = None + _pong_response_cb: Optional[asyncio.TimerHandle] = None + _ping_task: Optional[asyncio.Task[None]] = None def __init__( self, @@ -78,30 +93,15 @@ def __init__( ) -> None: super().__init__(status=101) self._protocols = protocols - self._ws_protocol: Optional[str] = None - self._writer: Optional[WebSocketWriter] = None - 
self._reader: Optional[WebSocketDataQueue] = None - self._closed = False - self._closing = False - self._conn_lost = 0 - self._close_code: Optional[int] = None - self._loop: Optional[asyncio.AbstractEventLoop] = None - self._waiting: bool = False - self._close_wait: Optional[asyncio.Future[None]] = None - self._exception: Optional[BaseException] = None self._timeout = timeout self._receive_timeout = receive_timeout self._autoclose = autoclose self._autoping = autoping self._heartbeat = heartbeat - self._heartbeat_when = 0.0 - self._heartbeat_cb: Optional[asyncio.TimerHandle] = None if heartbeat is not None: self._pong_heartbeat = heartbeat / 2.0 - self._pong_response_cb: Optional[asyncio.TimerHandle] = None self._compress: Union[bool, int] = compress self._max_msg_size = max_msg_size - self._ping_task: Optional[asyncio.Task[None]] = None self._writer_limit = writer_limit def _cancel_heartbeat(self) -> None: From e5dd82a16b413b0cebeb97fad3be563935313f11 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 26 Nov 2024 16:04:27 -0800 Subject: [PATCH 1027/1511] [PR #10037/2e369db backport][3.12] Refactor requests and responses to use classvar defaults to avoid multiple `__init__`s (#10054) --- CHANGES/10037.misc.rst | 1 + aiohttp/helpers.py | 3 ++- aiohttp/web_app.py | 2 ++ aiohttp/web_request.py | 21 ++++++--------------- aiohttp/web_response.py | 32 ++++++++++++++++---------------- aiohttp/web_ws.py | 32 ++++++++++++++++---------------- 6 files changed, 43 insertions(+), 48 deletions(-) create mode 100644 CHANGES/10037.misc.rst diff --git a/CHANGES/10037.misc.rst b/CHANGES/10037.misc.rst new file mode 100644 index 00000000000..655c804c995 --- /dev/null +++ b/CHANGES/10037.misc.rst @@ -0,0 +1 @@ +Improved performances of creating objects during the HTTP request lifecycle -- by :user:`bdraco`. 
diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index 522cce2972b..8038931ebec 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -694,10 +694,11 @@ def ceil_timeout( class HeadersMixin: + """Mixin for handling headers.""" + ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"]) _headers: MultiMapping[str] - _content_type: Optional[str] = None _content_dict: Optional[Dict[str, str]] = None _stored_content_type: Union[str, None, _SENTINEL] = sentinel diff --git a/aiohttp/web_app.py b/aiohttp/web_app.py index 5d542ab9222..4bdc54034de 100644 --- a/aiohttp/web_app.py +++ b/aiohttp/web_app.py @@ -498,6 +498,8 @@ def _make_request( task: "asyncio.Task[None]", _cls: Type[Request] = Request, ) -> Request: + if TYPE_CHECKING: + assert self._loop is not None return _cls( message, payload, diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index 502a93d247a..f11d49020a0 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -146,6 +146,8 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin): "_transport_peername", ] ) + _post: Optional[MultiDictProxy[Union[str, bytes, FileField]]] = None + _read_bytes: Optional[bytes] = None def __init__( self, @@ -162,8 +164,6 @@ def __init__( host: Optional[str] = None, remote: Optional[str] = None, ) -> None: - if state is None: - state = {} self._message = message self._protocol = protocol self._payload_writer = payload_writer @@ -187,20 +187,18 @@ def __init__( self._cache["scheme"] = url.scheme self._rel_url = url.relative() else: - self._rel_url = message.url + self._rel_url = url if scheme is not None: self._cache["scheme"] = scheme if host is not None: self._cache["host"] = host - self._post: Optional[MultiDictProxy[Union[str, bytes, FileField]]] = None - self._read_bytes: Optional[bytes] = None - self._state = state + self._state = {} if state is None else state self._task = task self._client_max_size = client_max_size self._loop = loop - transport = 
self._protocol.transport + transport = protocol.transport assert transport is not None self._transport_sslcontext = transport.get_extra_info("sslcontext") self._transport_peername = transport.get_extra_info("peername") @@ -847,14 +845,7 @@ class Request(BaseRequest): ATTRS = BaseRequest.ATTRS | frozenset(["_match_info"]) - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - # matchdict, route_name, handler - # or information about traversal lookup - - # initialized after route resolving - self._match_info: Optional[UrlMappingMatchInfo] = None + _match_info: Optional["UrlMappingMatchInfo"] = None if DEBUG: diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index d3d8afe5433..e05799ca7b7 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -76,9 +76,20 @@ class ContentCoding(enum.Enum): class StreamResponse(BaseClass, HeadersMixin): - _length_check = True - _body: Union[None, bytes, bytearray, Payload] + _length_check = True + _body = None + _keep_alive: Optional[bool] = None + _chunked: bool = False + _compression: bool = False + _compression_strategy: int = zlib.Z_DEFAULT_STRATEGY + _compression_force: Optional[ContentCoding] = None + _req: Optional["BaseRequest"] = None + _payload_writer: Optional[AbstractStreamWriter] = None + _eof_sent: bool = False + _must_be_empty_body: Optional[bool] = None + _body_length = 0 + _cookies: Optional[SimpleCookie] = None def __init__( self, @@ -95,19 +106,6 @@ def __init__( the headers when creating a new response object. It is not intended to be used by external code. 
""" - self._body = None - self._keep_alive: Optional[bool] = None - self._chunked = False - self._compression = False - self._compression_strategy: int = zlib.Z_DEFAULT_STRATEGY - self._compression_force: Optional[ContentCoding] = None - self._cookies: Optional[SimpleCookie] = None - - self._req: Optional[BaseRequest] = None - self._payload_writer: Optional[AbstractStreamWriter] = None - self._eof_sent = False - self._must_be_empty_body: Optional[bool] = None - self._body_length = 0 self._state: Dict[str, Any] = {} if _real_headers is not None: @@ -613,6 +611,9 @@ def __eq__(self, other: object) -> bool: class Response(StreamResponse): + + _compressed_body: Optional[bytes] = None + def __init__( self, *, @@ -677,7 +678,6 @@ def __init__( else: self.body = body - self._compressed_body: Optional[bytes] = None self._zlib_executor_size = zlib_executor_size self._zlib_executor = zlib_executor diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index c18f88eaf00..0fb1549a3aa 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -61,7 +61,22 @@ def __bool__(self) -> bool: class WebSocketResponse(StreamResponse): - _length_check = False + _length_check: bool = False + _ws_protocol: Optional[str] = None + _writer: Optional[WebSocketWriter] = None + _reader: Optional[WebSocketDataQueue] = None + _closed: bool = False + _closing: bool = False + _conn_lost: int = 0 + _close_code: Optional[int] = None + _loop: Optional[asyncio.AbstractEventLoop] = None + _waiting: bool = False + _close_wait: Optional[asyncio.Future[None]] = None + _exception: Optional[BaseException] = None + _heartbeat_when: float = 0.0 + _heartbeat_cb: Optional[asyncio.TimerHandle] = None + _pong_response_cb: Optional[asyncio.TimerHandle] = None + _ping_task: Optional[asyncio.Task[None]] = None def __init__( self, @@ -78,30 +93,15 @@ def __init__( ) -> None: super().__init__(status=101) self._protocols = protocols - self._ws_protocol: Optional[str] = None - self._writer: Optional[WebSocketWriter] = None - 
self._reader: Optional[WebSocketDataQueue] = None - self._closed = False - self._closing = False - self._conn_lost = 0 - self._close_code: Optional[int] = None - self._loop: Optional[asyncio.AbstractEventLoop] = None - self._waiting: bool = False - self._close_wait: Optional[asyncio.Future[None]] = None - self._exception: Optional[BaseException] = None self._timeout = timeout self._receive_timeout = receive_timeout self._autoclose = autoclose self._autoping = autoping self._heartbeat = heartbeat - self._heartbeat_when = 0.0 - self._heartbeat_cb: Optional[asyncio.TimerHandle] = None if heartbeat is not None: self._pong_heartbeat = heartbeat / 2.0 - self._pong_response_cb: Optional[asyncio.TimerHandle] = None self._compress: Union[bool, int] = compress self._max_msg_size = max_msg_size - self._ping_task: Optional[asyncio.Task[None]] = None self._writer_limit = writer_limit def _cancel_heartbeat(self) -> None: From 7e628f4edaa6a529381d0e2d39b97c756dae5341 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 26 Nov 2024 20:12:46 -0800 Subject: [PATCH 1028/1511] [PR #8699/11f0e7f backport][3.11] Reduce code indent in ResponseHandler.data_received (#10056) --- aiohttp/client_proto.py | 105 ++++++++++++++++++------------------- tests/test_client_proto.py | 84 ++++++++++++++++++++++++++++- 2 files changed, 135 insertions(+), 54 deletions(-) diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index b899908d786..79f033e3e12 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -242,7 +242,7 @@ def data_received(self, data: bytes) -> None: if not data: return - # custom payload parser + # custom payload parser - currently always WebSocketReader if self._payload_parser is not None: eof, tail = self._payload_parser.feed_data(data) if eof: @@ -252,57 +252,56 @@ def data_received(self, data: bytes) -> None: if tail: self.data_received(tail) return - else: - if self._upgraded or self._parser is None: - # i.e. 
websocket connection, websocket parser is not set yet - self._tail += data + + if self._upgraded or self._parser is None: + # i.e. websocket connection, websocket parser is not set yet + self._tail += data + return + + # parse http messages + try: + messages, upgraded, tail = self._parser.feed_data(data) + except BaseException as underlying_exc: + if self.transport is not None: + # connection.release() could be called BEFORE + # data_received(), the transport is already + # closed in this case + self.transport.close() + # should_close is True after the call + if isinstance(underlying_exc, HttpProcessingError): + exc = HttpProcessingError( + code=underlying_exc.code, + message=underlying_exc.message, + headers=underlying_exc.headers, + ) else: - # parse http messages - try: - messages, upgraded, tail = self._parser.feed_data(data) - except BaseException as underlying_exc: - if self.transport is not None: - # connection.release() could be called BEFORE - # data_received(), the transport is already - # closed in this case - self.transport.close() - # should_close is True after the call - if isinstance(underlying_exc, HttpProcessingError): - exc = HttpProcessingError( - code=underlying_exc.code, - message=underlying_exc.message, - headers=underlying_exc.headers, - ) - else: - exc = HttpProcessingError() - self.set_exception(exc, underlying_exc) - return - - self._upgraded = upgraded - - payload: Optional[StreamReader] = None - for message, payload in messages: - if message.should_close: - self._should_close = True - - self._payload = payload - - if self._skip_payload or message.code in EMPTY_BODY_STATUS_CODES: - self.feed_data((message, EMPTY_PAYLOAD), 0) - else: - self.feed_data((message, payload), 0) - if payload is not None: - # new message(s) was processed - # register timeout handler unsubscribing - # either on end-of-stream or immediately for - # EMPTY_PAYLOAD - if payload is not EMPTY_PAYLOAD: - payload.on_eof(self._drop_timeout) - else: - self._drop_timeout() + 
exc = HttpProcessingError() + self.set_exception(exc, underlying_exc) + return - if tail: - if upgraded: - self.data_received(tail) - else: - self._tail = tail + self._upgraded = upgraded + + payload: Optional[StreamReader] = None + for message, payload in messages: + if message.should_close: + self._should_close = True + + self._payload = payload + + if self._skip_payload or message.code in EMPTY_BODY_STATUS_CODES: + self.feed_data((message, EMPTY_PAYLOAD), 0) + else: + self.feed_data((message, payload), 0) + + if payload is not None: + # new message(s) was processed + # register timeout handler unsubscribing + # either on end-of-stream or immediately for + # EMPTY_PAYLOAD + if payload is not EMPTY_PAYLOAD: + payload.on_eof(self._drop_timeout) + else: + self._drop_timeout() + + if upgraded and tail: + self.data_received(tail) diff --git a/tests/test_client_proto.py b/tests/test_client_proto.py index a70dc62e135..af1286dc310 100644 --- a/tests/test_client_proto.py +++ b/tests/test_client_proto.py @@ -72,7 +72,89 @@ async def test_uncompleted_message(loop) -> None: assert dict(exc.message.headers) == {"Location": "http://python.org/"} -async def test_client_protocol_readuntil_eof(loop) -> None: +async def test_data_received_after_close(loop: asyncio.AbstractEventLoop) -> None: + proto = ResponseHandler(loop=loop) + transport = mock.Mock() + proto.connection_made(transport) + proto.set_response_params(read_until_eof=True) + proto.close() + assert transport.close.called + transport.close.reset_mock() + proto.data_received(b"HTTP\r\n\r\n") + assert proto.should_close + assert not transport.close.called + assert isinstance(proto.exception(), http.HttpProcessingError) + + +async def test_multiple_responses_one_byte_at_a_time( + loop: asyncio.AbstractEventLoop, +) -> None: + proto = ResponseHandler(loop=loop) + proto.connection_made(mock.Mock()) + conn = mock.Mock(protocol=proto) + proto.set_response_params(read_until_eof=True) + + for _ in range(2): + messages = ( + 
b"HTTP/1.1 200 OK\r\nContent-Length: 2\r\n\r\nab" + b"HTTP/1.1 200 OK\r\nContent-Length: 2\r\n\r\ncd" + b"HTTP/1.1 200 OK\r\nContent-Length: 2\r\n\r\nef" + ) + for i in range(len(messages)): + proto.data_received(messages[i : i + 1]) + + expected = [b"ab", b"cd", b"ef"] + for payload in expected: + response = ClientResponse( + "get", + URL("http://def-cl-resp.org"), + writer=mock.Mock(), + continue100=None, + timer=TimerNoop(), + request_info=mock.Mock(), + traces=[], + loop=loop, + session=mock.Mock(), + ) + await response.start(conn) + await response.read() == payload + + +async def test_unexpected_exception_during_data_received( + loop: asyncio.AbstractEventLoop, +) -> None: + proto = ResponseHandler(loop=loop) + + class PatchableHttpResponseParser(http.HttpResponseParser): + """Subclass of HttpResponseParser to make it patchable.""" + + with mock.patch( + "aiohttp.client_proto.HttpResponseParser", PatchableHttpResponseParser + ): + proto.connection_made(mock.Mock()) + conn = mock.Mock(protocol=proto) + proto.set_response_params(read_until_eof=True) + proto.data_received(b"HTTP/1.1 200 OK\r\nContent-Length: 2\r\n\r\nab") + response = ClientResponse( + "get", + URL("http://def-cl-resp.org"), + writer=mock.Mock(), + continue100=None, + timer=TimerNoop(), + request_info=mock.Mock(), + traces=[], + loop=loop, + session=mock.Mock(), + ) + await response.start(conn) + await response.read() == b"ab" + with mock.patch.object(proto._parser, "feed_data", side_effect=ValueError): + proto.data_received(b"HTTP/1.1 200 OK\r\nContent-Length: 2\r\n\r\ncd") + + assert isinstance(proto.exception(), http.HttpProcessingError) + + +async def test_client_protocol_readuntil_eof(loop: asyncio.AbstractEventLoop) -> None: proto = ResponseHandler(loop=loop) transport = mock.Mock() proto.connection_made(transport) From 4409466813839221d6953e6886d09c569ad52b27 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Tue, 26 Nov 2024 20:17:28 -0800 Subject: [PATCH 1029/1511] [PR #8699/11f0e7f backport][3.12] Reduce code indent in ResponseHandler.data_received (#10057) --- aiohttp/client_proto.py | 105 ++++++++++++++++++------------------- tests/test_client_proto.py | 84 ++++++++++++++++++++++++++++- 2 files changed, 135 insertions(+), 54 deletions(-) diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index b899908d786..79f033e3e12 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -242,7 +242,7 @@ def data_received(self, data: bytes) -> None: if not data: return - # custom payload parser + # custom payload parser - currently always WebSocketReader if self._payload_parser is not None: eof, tail = self._payload_parser.feed_data(data) if eof: @@ -252,57 +252,56 @@ def data_received(self, data: bytes) -> None: if tail: self.data_received(tail) return - else: - if self._upgraded or self._parser is None: - # i.e. websocket connection, websocket parser is not set yet - self._tail += data + + if self._upgraded or self._parser is None: + # i.e. 
websocket connection, websocket parser is not set yet + self._tail += data + return + + # parse http messages + try: + messages, upgraded, tail = self._parser.feed_data(data) + except BaseException as underlying_exc: + if self.transport is not None: + # connection.release() could be called BEFORE + # data_received(), the transport is already + # closed in this case + self.transport.close() + # should_close is True after the call + if isinstance(underlying_exc, HttpProcessingError): + exc = HttpProcessingError( + code=underlying_exc.code, + message=underlying_exc.message, + headers=underlying_exc.headers, + ) else: - # parse http messages - try: - messages, upgraded, tail = self._parser.feed_data(data) - except BaseException as underlying_exc: - if self.transport is not None: - # connection.release() could be called BEFORE - # data_received(), the transport is already - # closed in this case - self.transport.close() - # should_close is True after the call - if isinstance(underlying_exc, HttpProcessingError): - exc = HttpProcessingError( - code=underlying_exc.code, - message=underlying_exc.message, - headers=underlying_exc.headers, - ) - else: - exc = HttpProcessingError() - self.set_exception(exc, underlying_exc) - return - - self._upgraded = upgraded - - payload: Optional[StreamReader] = None - for message, payload in messages: - if message.should_close: - self._should_close = True - - self._payload = payload - - if self._skip_payload or message.code in EMPTY_BODY_STATUS_CODES: - self.feed_data((message, EMPTY_PAYLOAD), 0) - else: - self.feed_data((message, payload), 0) - if payload is not None: - # new message(s) was processed - # register timeout handler unsubscribing - # either on end-of-stream or immediately for - # EMPTY_PAYLOAD - if payload is not EMPTY_PAYLOAD: - payload.on_eof(self._drop_timeout) - else: - self._drop_timeout() + exc = HttpProcessingError() + self.set_exception(exc, underlying_exc) + return - if tail: - if upgraded: - 
self.data_received(tail) - else: - self._tail = tail + self._upgraded = upgraded + + payload: Optional[StreamReader] = None + for message, payload in messages: + if message.should_close: + self._should_close = True + + self._payload = payload + + if self._skip_payload or message.code in EMPTY_BODY_STATUS_CODES: + self.feed_data((message, EMPTY_PAYLOAD), 0) + else: + self.feed_data((message, payload), 0) + + if payload is not None: + # new message(s) was processed + # register timeout handler unsubscribing + # either on end-of-stream or immediately for + # EMPTY_PAYLOAD + if payload is not EMPTY_PAYLOAD: + payload.on_eof(self._drop_timeout) + else: + self._drop_timeout() + + if upgraded and tail: + self.data_received(tail) diff --git a/tests/test_client_proto.py b/tests/test_client_proto.py index a70dc62e135..af1286dc310 100644 --- a/tests/test_client_proto.py +++ b/tests/test_client_proto.py @@ -72,7 +72,89 @@ async def test_uncompleted_message(loop) -> None: assert dict(exc.message.headers) == {"Location": "http://python.org/"} -async def test_client_protocol_readuntil_eof(loop) -> None: +async def test_data_received_after_close(loop: asyncio.AbstractEventLoop) -> None: + proto = ResponseHandler(loop=loop) + transport = mock.Mock() + proto.connection_made(transport) + proto.set_response_params(read_until_eof=True) + proto.close() + assert transport.close.called + transport.close.reset_mock() + proto.data_received(b"HTTP\r\n\r\n") + assert proto.should_close + assert not transport.close.called + assert isinstance(proto.exception(), http.HttpProcessingError) + + +async def test_multiple_responses_one_byte_at_a_time( + loop: asyncio.AbstractEventLoop, +) -> None: + proto = ResponseHandler(loop=loop) + proto.connection_made(mock.Mock()) + conn = mock.Mock(protocol=proto) + proto.set_response_params(read_until_eof=True) + + for _ in range(2): + messages = ( + b"HTTP/1.1 200 OK\r\nContent-Length: 2\r\n\r\nab" + b"HTTP/1.1 200 OK\r\nContent-Length: 2\r\n\r\ncd" + 
b"HTTP/1.1 200 OK\r\nContent-Length: 2\r\n\r\nef" + ) + for i in range(len(messages)): + proto.data_received(messages[i : i + 1]) + + expected = [b"ab", b"cd", b"ef"] + for payload in expected: + response = ClientResponse( + "get", + URL("http://def-cl-resp.org"), + writer=mock.Mock(), + continue100=None, + timer=TimerNoop(), + request_info=mock.Mock(), + traces=[], + loop=loop, + session=mock.Mock(), + ) + await response.start(conn) + await response.read() == payload + + +async def test_unexpected_exception_during_data_received( + loop: asyncio.AbstractEventLoop, +) -> None: + proto = ResponseHandler(loop=loop) + + class PatchableHttpResponseParser(http.HttpResponseParser): + """Subclass of HttpResponseParser to make it patchable.""" + + with mock.patch( + "aiohttp.client_proto.HttpResponseParser", PatchableHttpResponseParser + ): + proto.connection_made(mock.Mock()) + conn = mock.Mock(protocol=proto) + proto.set_response_params(read_until_eof=True) + proto.data_received(b"HTTP/1.1 200 OK\r\nContent-Length: 2\r\n\r\nab") + response = ClientResponse( + "get", + URL("http://def-cl-resp.org"), + writer=mock.Mock(), + continue100=None, + timer=TimerNoop(), + request_info=mock.Mock(), + traces=[], + loop=loop, + session=mock.Mock(), + ) + await response.start(conn) + await response.read() == b"ab" + with mock.patch.object(proto._parser, "feed_data", side_effect=ValueError): + proto.data_received(b"HTTP/1.1 200 OK\r\nContent-Length: 2\r\n\r\ncd") + + assert isinstance(proto.exception(), http.HttpProcessingError) + + +async def test_client_protocol_readuntil_eof(loop: asyncio.AbstractEventLoop) -> None: proto = ResponseHandler(loop=loop) transport = mock.Mock() proto.connection_made(transport) From a5a6981fe96790d5924df72db1f42bb021ed5265 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Tue, 26 Nov 2024 21:12:22 -0800 Subject: [PATCH 1030/1511] [PR #10058/12372d7 backport][3.11] Remove unreachable content length check for chunked encoding (#10060) --- aiohttp/web_response.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index e05799ca7b7..cd2be24f1a3 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -181,14 +181,13 @@ def output_length(self) -> int: def enable_chunked_encoding(self, chunk_size: Optional[int] = None) -> None: """Enables automatic chunked transfer encoding.""" - self._chunked = True - if hdrs.CONTENT_LENGTH in self._headers: raise RuntimeError( "You can't enable chunked encoding when a content length is set" ) if chunk_size is not None: warnings.warn("Chunk size is deprecated #1615", DeprecationWarning) + self._chunked = True def enable_compression( self, @@ -493,8 +492,6 @@ async def _prepare_headers(self) -> None: if not self._must_be_empty_body: writer.enable_chunking() headers[hdrs.TRANSFER_ENCODING] = "chunked" - if hdrs.CONTENT_LENGTH in headers: - del headers[hdrs.CONTENT_LENGTH] elif self._length_check: # Disabled for WebSockets writer.length = self.content_length if writer.length is None: From d88c30c00a87425277dc26ca00e4746fd5b1d6cb Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Tue, 26 Nov 2024 21:12:28 -0800 Subject: [PATCH 1031/1511] [PR #10058/12372d7 backport][3.12] Remove unreachable content length check for chunked encoding (#10061) --- aiohttp/web_response.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index e05799ca7b7..cd2be24f1a3 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -181,14 +181,13 @@ def output_length(self) -> int: def enable_chunked_encoding(self, chunk_size: Optional[int] = None) -> None: """Enables automatic chunked transfer encoding.""" - self._chunked = True - if hdrs.CONTENT_LENGTH in self._headers: raise RuntimeError( "You can't enable chunked encoding when a content length is set" ) if chunk_size is not None: warnings.warn("Chunk size is deprecated #1615", DeprecationWarning) + self._chunked = True def enable_compression( self, @@ -493,8 +492,6 @@ async def _prepare_headers(self) -> None: if not self._must_be_empty_body: writer.enable_chunking() headers[hdrs.TRANSFER_ENCODING] = "chunked" - if hdrs.CONTENT_LENGTH in headers: - del headers[hdrs.CONTENT_LENGTH] elif self._length_check: # Disabled for WebSockets writer.length = self.content_length if writer.length is None: From 1b78cae44d97e3eb5ea15a25319d5a5a3daf8c9f Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 27 Nov 2024 05:16:47 +0000 Subject: [PATCH 1032/1511] [PR #10059/aac6f741 backport][3.11] Combine executor jobs in FileResponse sendfile_fallback (#10062) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/web_fileresponse.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index e7951acea16..3b2bc2caf12 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -88,6 +88,10 @@ def __init__( self._path = pathlib.Path(path) self._chunk_size = chunk_size + def _seek_and_read(self, fobj: IO[Any], offset: int, chunk_size: int) -> bytes: + fobj.seek(offset) + return fobj.read(chunk_size) # type: ignore[no-any-return] + async def _sendfile_fallback( self, writer: AbstractStreamWriter, fobj: IO[Any], offset: int, count: int ) -> AbstractStreamWriter: @@ -96,10 +100,9 @@ async def _sendfile_fallback( chunk_size = self._chunk_size loop = asyncio.get_event_loop() - - await loop.run_in_executor(None, fobj.seek, offset) - - chunk = await loop.run_in_executor(None, fobj.read, chunk_size) + chunk = await loop.run_in_executor( + None, self._seek_and_read, fobj, offset, chunk_size + ) while chunk: await writer.write(chunk) count = count - chunk_size From 6ee57820b26db57f58720a7afc41854393b9f359 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 27 Nov 2024 05:20:37 +0000 Subject: [PATCH 1033/1511] [PR #10059/aac6f741 backport][3.12] Combine executor jobs in FileResponse sendfile_fallback (#10063) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/web_fileresponse.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index e7951acea16..3b2bc2caf12 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -88,6 +88,10 @@ def __init__( self._path = pathlib.Path(path) self._chunk_size = chunk_size + def _seek_and_read(self, fobj: IO[Any], offset: int, chunk_size: int) -> bytes: + fobj.seek(offset) + return fobj.read(chunk_size) # type: ignore[no-any-return] + async def _sendfile_fallback( self, writer: AbstractStreamWriter, fobj: IO[Any], offset: int, count: int ) -> AbstractStreamWriter: @@ -96,10 +100,9 @@ async def _sendfile_fallback( chunk_size = self._chunk_size loop = asyncio.get_event_loop() - - await loop.run_in_executor(None, fobj.seek, offset) - - chunk = await loop.run_in_executor(None, fobj.read, chunk_size) + chunk = await loop.run_in_executor( + None, self._seek_and_read, fobj, offset, chunk_size + ) while chunk: await writer.write(chunk) count = count - chunk_size From 1f8ade0748521753e2206aad2ff6af6f11c438e6 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Wed, 27 Nov 2024 09:13:24 -0800 Subject: [PATCH 1034/1511] [PR #10055/c11fe96 backport][3.12] Downgrade logging of invalid HTTP methods on first request to debug level (#10065) --- CHANGES/10055.misc.rst | 3 + aiohttp/_http_parser.pyx | 4 +- aiohttp/http_exceptions.py | 7 +++ aiohttp/http_parser.py | 5 +- aiohttp/web_protocol.py | 14 ++++- tests/test_http_parser.py | 2 +- tests/test_web_server.py | 123 ++++++++++++++++++++++++++++++++++++- 7 files changed, 152 insertions(+), 6 deletions(-) create mode 100644 CHANGES/10055.misc.rst diff --git a/CHANGES/10055.misc.rst b/CHANGES/10055.misc.rst new file mode 100644 index 00000000000..3a5fa074f77 --- /dev/null +++ b/CHANGES/10055.misc.rst @@ -0,0 +1,3 @@ +Downgraded logging of invalid HTTP method exceptions on the first request to debug level -- by :user:`bdraco`. + +HTTP requests starting with an invalid method are relatively common, especially when connected to the public internet, because browsers or other clients may try to speak SSL to a plain-text server or vice-versa. These exceptions can quickly fill the log with noise when nothing is wrong. 
diff --git a/aiohttp/_http_parser.pyx b/aiohttp/_http_parser.pyx index dd317edaf79..988e4247f93 100644 --- a/aiohttp/_http_parser.pyx +++ b/aiohttp/_http_parser.pyx @@ -23,6 +23,7 @@ from aiohttp.helpers import DEBUG, set_exception from .http_exceptions import ( BadHttpMessage, + BadHttpMethod, BadStatusLine, ContentLengthError, InvalidHeader, @@ -831,8 +832,9 @@ cdef parser_error_from_errno(cparser.llhttp_t* parser, data, pointer): cparser.HPE_INVALID_EOF_STATE, cparser.HPE_INVALID_TRANSFER_ENCODING}: return BadHttpMessage(err_msg) + elif errno == cparser.HPE_INVALID_METHOD: + return BadHttpMethod(error=err_msg) elif errno in {cparser.HPE_INVALID_STATUS, - cparser.HPE_INVALID_METHOD, cparser.HPE_INVALID_VERSION}: return BadStatusLine(error=err_msg) elif errno == cparser.HPE_INVALID_URL: diff --git a/aiohttp/http_exceptions.py b/aiohttp/http_exceptions.py index c43ee0d9659..b8dda999acf 100644 --- a/aiohttp/http_exceptions.py +++ b/aiohttp/http_exceptions.py @@ -101,5 +101,12 @@ def __init__(self, line: str = "", error: Optional[str] = None) -> None: self.line = line +class BadHttpMethod(BadStatusLine): + """Invalid HTTP method in status line.""" + + def __init__(self, line: str = "", error: Optional[str] = None) -> None: + super().__init__(line, error or f"Bad HTTP method in status line {line!r}") + + class InvalidURLError(BadHttpMessage): pass diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index 148a30b2ca1..1b8b5b4d49e 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -38,6 +38,7 @@ ) from .http_exceptions import ( BadHttpMessage, + BadHttpMethod, BadStatusLine, ContentEncodingError, ContentLengthError, @@ -576,7 +577,7 @@ def parse_message(self, lines: List[bytes]) -> RawRequestMessage: try: method, path, version = line.split(" ", maxsplit=2) except ValueError: - raise BadStatusLine(line) from None + raise BadHttpMethod(line) from None if len(path) > self.max_line_size: raise LineTooLong( @@ -585,7 +586,7 @@ def 
parse_message(self, lines: List[bytes]) -> RawRequestMessage: # method if not TOKENRE.fullmatch(method): - raise BadStatusLine(method) + raise BadHttpMethod(method) # version match = VERSRE.fullmatch(version) diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index 2201eef30ad..fe2ae8a1269 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -35,6 +35,7 @@ RawRequestMessage, StreamWriter, ) +from .http_exceptions import BadHttpMethod from .log import access_logger, server_logger from .streams import EMPTY_PAYLOAD, StreamReader from .tcp_helpers import tcp_keepalive @@ -687,7 +688,18 @@ def handle_error( Returns HTTP response with specific status code. Logs additional information. It always closes current connection. """ - self.log_exception("Error handling request", exc_info=exc) + if ( + self._manager + and self._manager.requests_count == 1 + and isinstance(exc, BadHttpMethod) + ): + # BadHttpMethod is common when a client sends non-HTTP + # or encrypted traffic to an HTTP port. This is expected + # to happen when connected to the public internet so we log + # it at the debug level as to not fill logs with noise. 
+ self.logger.debug("Error handling request", exc_info=exc) + else: + self.log_exception("Error handling request", exc_info=exc) # some data already got sent, connection is broken if request.writer.output_size > 0: diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index a8305da84f7..58fef625f82 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -965,7 +965,7 @@ def test_http_request_parser_two_slashes(parser) -> None: def test_http_request_parser_bad_method( parser, rfc9110_5_6_2_token_delim: bytes ) -> None: - with pytest.raises(http_exceptions.BadStatusLine): + with pytest.raises(http_exceptions.BadHttpMethod): parser.feed_data(rfc9110_5_6_2_token_delim + b'ET" /get HTTP/1.1\r\n\r\n') diff --git a/tests/test_web_server.py b/tests/test_web_server.py index 9e2d078c1a0..e77df623020 100644 --- a/tests/test_web_server.py +++ b/tests/test_web_server.py @@ -1,11 +1,14 @@ import asyncio import socket from contextlib import suppress +from typing import NoReturn from unittest import mock import pytest from aiohttp import client, web +from aiohttp.http_exceptions import BadHttpMethod, BadStatusLine +from aiohttp.pytest_plugin import AiohttpClient, AiohttpRawServer async def test_simple_server(aiohttp_raw_server, aiohttp_client) -> None: @@ -56,7 +59,125 @@ async def handler(request): logger.exception.assert_called_with("Error handling request", exc_info=exc) -async def test_raw_server_handler_timeout(aiohttp_raw_server, aiohttp_client) -> None: +async def test_raw_server_logs_invalid_method_with_loop_debug( + aiohttp_raw_server: AiohttpRawServer, + aiohttp_client: AiohttpClient, + loop: asyncio.AbstractEventLoop, +) -> None: + exc = BadHttpMethod(b"\x16\x03\x03\x01F\x01".decode(), "error") + + async def handler(request: web.BaseRequest) -> NoReturn: + raise exc + + loop = asyncio.get_event_loop() + loop.set_debug(True) + logger = mock.Mock() + server = await aiohttp_raw_server(handler, logger=logger) + cli = await 
aiohttp_client(server) + resp = await cli.get("/path/to") + assert resp.status == 500 + assert resp.headers["Content-Type"].startswith("text/plain") + + txt = await resp.text() + assert "Traceback (most recent call last):\n" in txt + + # BadHttpMethod should be logged as debug + # on the first request since the client may + # be probing for TLS/SSL support which is + # expected to fail + logger.debug.assert_called_with("Error handling request", exc_info=exc) + + +async def test_raw_server_logs_invalid_method_without_loop_debug( + aiohttp_raw_server: AiohttpRawServer, + aiohttp_client: AiohttpClient, + loop: asyncio.AbstractEventLoop, +) -> None: + exc = BadHttpMethod(b"\x16\x03\x03\x01F\x01".decode(), "error") + + async def handler(request: web.BaseRequest) -> NoReturn: + raise exc + + loop = asyncio.get_event_loop() + loop.set_debug(False) + logger = mock.Mock() + server = await aiohttp_raw_server(handler, logger=logger, debug=False) + cli = await aiohttp_client(server) + resp = await cli.get("/path/to") + assert resp.status == 500 + assert resp.headers["Content-Type"].startswith("text/plain") + + txt = await resp.text() + assert "Traceback (most recent call last):\n" not in txt + + # BadHttpMethod should be logged as debug + # on the first request since the client may + # be probing for TLS/SSL support which is + # expected to fail + logger.debug.assert_called_with("Error handling request", exc_info=exc) + + +async def test_raw_server_logs_invalid_method_second_request( + aiohttp_raw_server: AiohttpRawServer, + aiohttp_client: AiohttpClient, + loop: asyncio.AbstractEventLoop, +) -> None: + exc = BadHttpMethod(b"\x16\x03\x03\x01F\x01".decode(), "error") + request_count = 0 + + async def handler(request: web.BaseRequest) -> web.Response: + nonlocal request_count + request_count += 1 + if request_count == 2: + raise exc + return web.Response() + + loop = asyncio.get_event_loop() + loop.set_debug(False) + logger = mock.Mock() + server = await 
aiohttp_raw_server(handler, logger=logger) + cli = await aiohttp_client(server) + resp = await cli.get("/path/to") + assert resp.status == 200 + resp = await cli.get("/path/to") + assert resp.status == 500 + assert resp.headers["Content-Type"].startswith("text/plain") + # BadHttpMethod should be logged as an exception + # if its not the first request since we know + # that the client already was speaking HTTP + logger.exception.assert_called_with("Error handling request", exc_info=exc) + + +async def test_raw_server_logs_bad_status_line_as_exception( + aiohttp_raw_server: AiohttpRawServer, + aiohttp_client: AiohttpClient, + loop: asyncio.AbstractEventLoop, +) -> None: + exc = BadStatusLine(b"\x16\x03\x03\x01F\x01".decode(), "error") + + async def handler(request: web.BaseRequest) -> NoReturn: + raise exc + + loop = asyncio.get_event_loop() + loop.set_debug(False) + logger = mock.Mock() + server = await aiohttp_raw_server(handler, logger=logger, debug=False) + cli = await aiohttp_client(server) + resp = await cli.get("/path/to") + assert resp.status == 500 + assert resp.headers["Content-Type"].startswith("text/plain") + + txt = await resp.text() + assert "Traceback (most recent call last):\n" not in txt + + logger.exception.assert_called_with("Error handling request", exc_info=exc) + + +async def test_raw_server_handler_timeout( + aiohttp_raw_server: AiohttpRawServer, aiohttp_client: AiohttpClient +) -> None: + loop = asyncio.get_event_loop() + loop.set_debug(True) exc = asyncio.TimeoutError("error") async def handler(request): From 13152c39f9360cef7ba1d78252afdc91cf220441 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Wed, 27 Nov 2024 09:17:40 -0800 Subject: [PATCH 1035/1511] [PR #10055/c11fe96 backport][3.11] Downgrade logging of invalid HTTP methods on first request to debug level (#10064) --- CHANGES/10055.misc.rst | 3 + aiohttp/_http_parser.pyx | 4 +- aiohttp/http_exceptions.py | 7 +++ aiohttp/http_parser.py | 5 +- aiohttp/web_protocol.py | 14 ++++- tests/test_http_parser.py | 2 +- tests/test_web_server.py | 123 ++++++++++++++++++++++++++++++++++++- 7 files changed, 152 insertions(+), 6 deletions(-) create mode 100644 CHANGES/10055.misc.rst diff --git a/CHANGES/10055.misc.rst b/CHANGES/10055.misc.rst new file mode 100644 index 00000000000..3a5fa074f77 --- /dev/null +++ b/CHANGES/10055.misc.rst @@ -0,0 +1,3 @@ +Downgraded logging of invalid HTTP method exceptions on the first request to debug level -- by :user:`bdraco`. + +HTTP requests starting with an invalid method are relatively common, especially when connected to the public internet, because browsers or other clients may try to speak SSL to a plain-text server or vice-versa. These exceptions can quickly fill the log with noise when nothing is wrong. 
diff --git a/aiohttp/_http_parser.pyx b/aiohttp/_http_parser.pyx index dd317edaf79..988e4247f93 100644 --- a/aiohttp/_http_parser.pyx +++ b/aiohttp/_http_parser.pyx @@ -23,6 +23,7 @@ from aiohttp.helpers import DEBUG, set_exception from .http_exceptions import ( BadHttpMessage, + BadHttpMethod, BadStatusLine, ContentLengthError, InvalidHeader, @@ -831,8 +832,9 @@ cdef parser_error_from_errno(cparser.llhttp_t* parser, data, pointer): cparser.HPE_INVALID_EOF_STATE, cparser.HPE_INVALID_TRANSFER_ENCODING}: return BadHttpMessage(err_msg) + elif errno == cparser.HPE_INVALID_METHOD: + return BadHttpMethod(error=err_msg) elif errno in {cparser.HPE_INVALID_STATUS, - cparser.HPE_INVALID_METHOD, cparser.HPE_INVALID_VERSION}: return BadStatusLine(error=err_msg) elif errno == cparser.HPE_INVALID_URL: diff --git a/aiohttp/http_exceptions.py b/aiohttp/http_exceptions.py index c43ee0d9659..b8dda999acf 100644 --- a/aiohttp/http_exceptions.py +++ b/aiohttp/http_exceptions.py @@ -101,5 +101,12 @@ def __init__(self, line: str = "", error: Optional[str] = None) -> None: self.line = line +class BadHttpMethod(BadStatusLine): + """Invalid HTTP method in status line.""" + + def __init__(self, line: str = "", error: Optional[str] = None) -> None: + super().__init__(line, error or f"Bad HTTP method in status line {line!r}") + + class InvalidURLError(BadHttpMessage): pass diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index 148a30b2ca1..1b8b5b4d49e 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -38,6 +38,7 @@ ) from .http_exceptions import ( BadHttpMessage, + BadHttpMethod, BadStatusLine, ContentEncodingError, ContentLengthError, @@ -576,7 +577,7 @@ def parse_message(self, lines: List[bytes]) -> RawRequestMessage: try: method, path, version = line.split(" ", maxsplit=2) except ValueError: - raise BadStatusLine(line) from None + raise BadHttpMethod(line) from None if len(path) > self.max_line_size: raise LineTooLong( @@ -585,7 +586,7 @@ def 
parse_message(self, lines: List[bytes]) -> RawRequestMessage: # method if not TOKENRE.fullmatch(method): - raise BadStatusLine(method) + raise BadHttpMethod(method) # version match = VERSRE.fullmatch(version) diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index 2201eef30ad..fe2ae8a1269 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -35,6 +35,7 @@ RawRequestMessage, StreamWriter, ) +from .http_exceptions import BadHttpMethod from .log import access_logger, server_logger from .streams import EMPTY_PAYLOAD, StreamReader from .tcp_helpers import tcp_keepalive @@ -687,7 +688,18 @@ def handle_error( Returns HTTP response with specific status code. Logs additional information. It always closes current connection. """ - self.log_exception("Error handling request", exc_info=exc) + if ( + self._manager + and self._manager.requests_count == 1 + and isinstance(exc, BadHttpMethod) + ): + # BadHttpMethod is common when a client sends non-HTTP + # or encrypted traffic to an HTTP port. This is expected + # to happen when connected to the public internet so we log + # it at the debug level as to not fill logs with noise. 
+ self.logger.debug("Error handling request", exc_info=exc) + else: + self.log_exception("Error handling request", exc_info=exc) # some data already got sent, connection is broken if request.writer.output_size > 0: diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index a8305da84f7..58fef625f82 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -965,7 +965,7 @@ def test_http_request_parser_two_slashes(parser) -> None: def test_http_request_parser_bad_method( parser, rfc9110_5_6_2_token_delim: bytes ) -> None: - with pytest.raises(http_exceptions.BadStatusLine): + with pytest.raises(http_exceptions.BadHttpMethod): parser.feed_data(rfc9110_5_6_2_token_delim + b'ET" /get HTTP/1.1\r\n\r\n') diff --git a/tests/test_web_server.py b/tests/test_web_server.py index 9e2d078c1a0..e77df623020 100644 --- a/tests/test_web_server.py +++ b/tests/test_web_server.py @@ -1,11 +1,14 @@ import asyncio import socket from contextlib import suppress +from typing import NoReturn from unittest import mock import pytest from aiohttp import client, web +from aiohttp.http_exceptions import BadHttpMethod, BadStatusLine +from aiohttp.pytest_plugin import AiohttpClient, AiohttpRawServer async def test_simple_server(aiohttp_raw_server, aiohttp_client) -> None: @@ -56,7 +59,125 @@ async def handler(request): logger.exception.assert_called_with("Error handling request", exc_info=exc) -async def test_raw_server_handler_timeout(aiohttp_raw_server, aiohttp_client) -> None: +async def test_raw_server_logs_invalid_method_with_loop_debug( + aiohttp_raw_server: AiohttpRawServer, + aiohttp_client: AiohttpClient, + loop: asyncio.AbstractEventLoop, +) -> None: + exc = BadHttpMethod(b"\x16\x03\x03\x01F\x01".decode(), "error") + + async def handler(request: web.BaseRequest) -> NoReturn: + raise exc + + loop = asyncio.get_event_loop() + loop.set_debug(True) + logger = mock.Mock() + server = await aiohttp_raw_server(handler, logger=logger) + cli = await 
aiohttp_client(server) + resp = await cli.get("/path/to") + assert resp.status == 500 + assert resp.headers["Content-Type"].startswith("text/plain") + + txt = await resp.text() + assert "Traceback (most recent call last):\n" in txt + + # BadHttpMethod should be logged as debug + # on the first request since the client may + # be probing for TLS/SSL support which is + # expected to fail + logger.debug.assert_called_with("Error handling request", exc_info=exc) + + +async def test_raw_server_logs_invalid_method_without_loop_debug( + aiohttp_raw_server: AiohttpRawServer, + aiohttp_client: AiohttpClient, + loop: asyncio.AbstractEventLoop, +) -> None: + exc = BadHttpMethod(b"\x16\x03\x03\x01F\x01".decode(), "error") + + async def handler(request: web.BaseRequest) -> NoReturn: + raise exc + + loop = asyncio.get_event_loop() + loop.set_debug(False) + logger = mock.Mock() + server = await aiohttp_raw_server(handler, logger=logger, debug=False) + cli = await aiohttp_client(server) + resp = await cli.get("/path/to") + assert resp.status == 500 + assert resp.headers["Content-Type"].startswith("text/plain") + + txt = await resp.text() + assert "Traceback (most recent call last):\n" not in txt + + # BadHttpMethod should be logged as debug + # on the first request since the client may + # be probing for TLS/SSL support which is + # expected to fail + logger.debug.assert_called_with("Error handling request", exc_info=exc) + + +async def test_raw_server_logs_invalid_method_second_request( + aiohttp_raw_server: AiohttpRawServer, + aiohttp_client: AiohttpClient, + loop: asyncio.AbstractEventLoop, +) -> None: + exc = BadHttpMethod(b"\x16\x03\x03\x01F\x01".decode(), "error") + request_count = 0 + + async def handler(request: web.BaseRequest) -> web.Response: + nonlocal request_count + request_count += 1 + if request_count == 2: + raise exc + return web.Response() + + loop = asyncio.get_event_loop() + loop.set_debug(False) + logger = mock.Mock() + server = await 
aiohttp_raw_server(handler, logger=logger) + cli = await aiohttp_client(server) + resp = await cli.get("/path/to") + assert resp.status == 200 + resp = await cli.get("/path/to") + assert resp.status == 500 + assert resp.headers["Content-Type"].startswith("text/plain") + # BadHttpMethod should be logged as an exception + # if its not the first request since we know + # that the client already was speaking HTTP + logger.exception.assert_called_with("Error handling request", exc_info=exc) + + +async def test_raw_server_logs_bad_status_line_as_exception( + aiohttp_raw_server: AiohttpRawServer, + aiohttp_client: AiohttpClient, + loop: asyncio.AbstractEventLoop, +) -> None: + exc = BadStatusLine(b"\x16\x03\x03\x01F\x01".decode(), "error") + + async def handler(request: web.BaseRequest) -> NoReturn: + raise exc + + loop = asyncio.get_event_loop() + loop.set_debug(False) + logger = mock.Mock() + server = await aiohttp_raw_server(handler, logger=logger, debug=False) + cli = await aiohttp_client(server) + resp = await cli.get("/path/to") + assert resp.status == 500 + assert resp.headers["Content-Type"].startswith("text/plain") + + txt = await resp.text() + assert "Traceback (most recent call last):\n" not in txt + + logger.exception.assert_called_with("Error handling request", exc_info=exc) + + +async def test_raw_server_handler_timeout( + aiohttp_raw_server: AiohttpRawServer, aiohttp_client: AiohttpClient +) -> None: + loop = asyncio.get_event_loop() + loop.set_debug(True) exc = asyncio.TimeoutError("error") async def handler(request): From 5ddf7208524f9ba5957d9a84d3e205f4f2692e28 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Wed, 27 Nov 2024 09:34:23 -0800 Subject: [PATCH 1036/1511] Release 3.11.8 (#10066) --- CHANGES.rst | 60 ++++++++++++++++++++++++++++++++++++++++++ CHANGES/10029.misc.rst | 1 - CHANGES/10030.misc.rst | 1 - CHANGES/10037.misc.rst | 1 - CHANGES/10043.misc.rst | 1 - CHANGES/10049.misc.rst | 1 - CHANGES/10055.misc.rst | 3 --- aiohttp/__init__.py | 2 +- 8 files changed, 61 insertions(+), 9 deletions(-) delete mode 100644 CHANGES/10029.misc.rst delete mode 100644 CHANGES/10030.misc.rst delete mode 100644 CHANGES/10037.misc.rst delete mode 100644 CHANGES/10043.misc.rst delete mode 100644 CHANGES/10049.misc.rst delete mode 100644 CHANGES/10055.misc.rst diff --git a/CHANGES.rst b/CHANGES.rst index e204f07b370..8a003a78c45 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,66 @@ .. towncrier release notes start +3.11.8 (2024-11-27) +=================== + +Miscellaneous internal changes +------------------------------ + +- Improved performance of creating :class:`aiohttp.ClientResponse` objects when there are no cookies -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10029`. + + + +- Improved performance of creating :class:`aiohttp.ClientResponse` objects -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10030`. + + + +- Improved performances of creating objects during the HTTP request lifecycle -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10037`. + + + +- Improved performance of constructing :class:`aiohttp.web.Response` with headers -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10043`. + + + +- Improved performance of making requests when there are no auto headers to skip -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10049`. + + + +- Downgraded logging of invalid HTTP method exceptions on the first request to debug level -- by :user:`bdraco`. 
+ + HTTP requests starting with an invalid method are relatively common, especially when connected to the public internet, because browsers or other clients may try to speak SSL to a plain-text server or vice-versa. These exceptions can quickly fill the log with noise when nothing is wrong. + + + *Related issues and pull requests on GitHub:* + :issue:`10055`. + + + + +---- + + 3.11.7 (2024-11-21) =================== diff --git a/CHANGES/10029.misc.rst b/CHANGES/10029.misc.rst deleted file mode 100644 index d98729ecac8..00000000000 --- a/CHANGES/10029.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performance of creating :class:`aiohttp.ClientResponse` objects when there are no cookies -- by :user:`bdraco`. diff --git a/CHANGES/10030.misc.rst b/CHANGES/10030.misc.rst deleted file mode 100644 index 68ed7d058d6..00000000000 --- a/CHANGES/10030.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performance of creating :class:`aiohttp.ClientResponse` objects -- by :user:`bdraco`. diff --git a/CHANGES/10037.misc.rst b/CHANGES/10037.misc.rst deleted file mode 100644 index 655c804c995..00000000000 --- a/CHANGES/10037.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performances of creating objects during the HTTP request lifecycle -- by :user:`bdraco`. diff --git a/CHANGES/10043.misc.rst b/CHANGES/10043.misc.rst deleted file mode 100644 index cfd4e88ee24..00000000000 --- a/CHANGES/10043.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performance of constructing :class:`aiohttp.web.Response` with headers -- by :user:`bdraco`. diff --git a/CHANGES/10049.misc.rst b/CHANGES/10049.misc.rst deleted file mode 100644 index 58f61d48420..00000000000 --- a/CHANGES/10049.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performance of making requests when there are no auto headers to skip -- by :user:`bdraco`. 
diff --git a/CHANGES/10055.misc.rst b/CHANGES/10055.misc.rst deleted file mode 100644 index 3a5fa074f77..00000000000 --- a/CHANGES/10055.misc.rst +++ /dev/null @@ -1,3 +0,0 @@ -Downgraded logging of invalid HTTP method exceptions on the first request to debug level -- by :user:`bdraco`. - -HTTP requests starting with an invalid method are relatively common, especially when connected to the public internet, because browsers or other clients may try to speak SSL to a plain-text server or vice-versa. These exceptions can quickly fill the log with noise when nothing is wrong. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 838c31a5fcd..32273ac23b0 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.8.dev0" +__version__ = "3.11.8" from typing import TYPE_CHECKING, Tuple From a37c4f28b7e3a4d19de283b36133abf9f4cb73cc Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Wed, 27 Nov 2024 10:57:28 -0800 Subject: [PATCH 1037/1511] Increment version to 3.11.9.dev0 (#10069) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 32273ac23b0..7896648923e 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.8" +__version__ = "3.11.9.dev0" from typing import TYPE_CHECKING, Tuple From 8b08c9e4c733961a7fbb14446fd3c9a5a9a55229 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sun, 1 Dec 2024 15:12:54 -0600 Subject: [PATCH 1038/1511] [PR #10078/0d6a83e backport][3.11] Bump yarl to 1.18.3 (#10079) --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index ec2e6399590..1e7c0bbe6c1 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -44,5 +44,5 @@ typing-extensions==4.12.2 # via multidict uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in -yarl==1.17.1 +yarl==1.18.3 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 20b1705e6a7..d32acc7b773 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -306,7 +306,7 @@ wait-for-it==2.2.2 # via -r requirements/test.in wheel==0.44.0 # via pip-tools -yarl==1.17.1 +yarl==1.18.3 # via -r requirements/runtime-deps.in zipp==3.20.2 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 2c7da214ab2..168ce639d19 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -277,7 +277,7 @@ wait-for-it==2.2.2 # via -r requirements/test.in wheel==0.44.0 # via pip-tools -yarl==1.17.1 +yarl==1.18.3 # via -r requirements/runtime-deps.in zipp==3.20.2 # via diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 553347e35dd..cf7f0e396f6 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -38,5 +38,5 @@ pycparser==2.22 # via cffi typing-extensions==4.12.2 # via multidict -yarl==1.17.1 +yarl==1.18.3 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 95551d58600..33510f18682 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -155,5 +155,5 @@ uvloop==0.21.0 ; 
platform_system != "Windows" and implementation_name == "cpytho # via -r requirements/base.in wait-for-it==2.2.2 # via -r requirements/test.in -yarl==1.17.1 +yarl==1.18.3 # via -r requirements/runtime-deps.in From b849326eeeba6855e80e1be19c5c0a8bcb21bb10 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 1 Dec 2024 15:26:26 -0600 Subject: [PATCH 1039/1511] [PR #10078/0d6a83e backport][3.12] Bump yarl to 1.18.3 (#10080) --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index ec2e6399590..1e7c0bbe6c1 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -44,5 +44,5 @@ typing-extensions==4.12.2 # via multidict uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in -yarl==1.17.1 +yarl==1.18.3 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 20b1705e6a7..d32acc7b773 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -306,7 +306,7 @@ wait-for-it==2.2.2 # via -r requirements/test.in wheel==0.44.0 # via pip-tools -yarl==1.17.1 +yarl==1.18.3 # via -r requirements/runtime-deps.in zipp==3.20.2 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 2c7da214ab2..168ce639d19 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -277,7 +277,7 @@ wait-for-it==2.2.2 # via -r requirements/test.in wheel==0.44.0 # via pip-tools -yarl==1.17.1 +yarl==1.18.3 # via -r requirements/runtime-deps.in zipp==3.20.2 # via diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 553347e35dd..cf7f0e396f6 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -38,5 +38,5 @@ pycparser==2.22 # via cffi 
typing-extensions==4.12.2 # via multidict -yarl==1.17.1 +yarl==1.18.3 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 95551d58600..33510f18682 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -155,5 +155,5 @@ uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpytho # via -r requirements/base.in wait-for-it==2.2.2 # via -r requirements/test.in -yarl==1.17.1 +yarl==1.18.3 # via -r requirements/runtime-deps.in From da9210bde9540a49a75c1f3ba3bc332e3ee47823 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 1 Dec 2024 21:32:25 +0000 Subject: [PATCH 1040/1511] [PR #10076/fad44f6b backport][3.11] Fix request count check for BadHttpMethod (#10081) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/10055.bugfix.rst | 1 + CHANGES/10076.bugfix.rst | 1 + aiohttp/web_protocol.py | 7 ++----- aiohttp/web_server.py | 2 ++ tests/test_web_server.py | 14 ++++++++++++++ 5 files changed, 20 insertions(+), 5 deletions(-) create mode 120000 CHANGES/10055.bugfix.rst create mode 100644 CHANGES/10076.bugfix.rst diff --git a/CHANGES/10055.bugfix.rst b/CHANGES/10055.bugfix.rst new file mode 120000 index 00000000000..b1f45d8b887 --- /dev/null +++ b/CHANGES/10055.bugfix.rst @@ -0,0 +1 @@ +10076.bugfix.rst \ No newline at end of file diff --git a/CHANGES/10076.bugfix.rst b/CHANGES/10076.bugfix.rst new file mode 100644 index 00000000000..c577366bbe8 --- /dev/null +++ b/CHANGES/10076.bugfix.rst @@ -0,0 +1 @@ +Fixed invalid method logging unexpected being logged at exception level on subsequent connections -- by :user:`bdraco`. diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index fe2ae8a1269..e8bb41abf97 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -192,6 +192,7 @@ def __init__( ): super().__init__(loop) + # _request_count is the number of requests processed with the same connection. 
self._request_count = 0 self._keepalive = False self._current_request: Optional[BaseRequest] = None @@ -688,11 +689,7 @@ def handle_error( Returns HTTP response with specific status code. Logs additional information. It always closes current connection. """ - if ( - self._manager - and self._manager.requests_count == 1 - and isinstance(exc, BadHttpMethod) - ): + if self._request_count == 1 and isinstance(exc, BadHttpMethod): # BadHttpMethod is common when a client sends non-HTTP # or encrypted traffic to an HTTP port. This is expected # to happen when connected to the public internet so we log diff --git a/aiohttp/web_server.py b/aiohttp/web_server.py index b6ac25ac1a5..328aca1e405 100644 --- a/aiohttp/web_server.py +++ b/aiohttp/web_server.py @@ -25,6 +25,8 @@ def __init__( self._loop = loop or asyncio.get_running_loop() self._connections: Dict[RequestHandler, asyncio.Transport] = {} self._kwargs = kwargs + # requests_count is the number of requests being processed by the server + # for the lifetime of the server. 
self.requests_count = 0 self.request_handler = handler self.request_factory = request_factory or self._make_request diff --git a/tests/test_web_server.py b/tests/test_web_server.py index e77df623020..7b9b87a374a 100644 --- a/tests/test_web_server.py +++ b/tests/test_web_server.py @@ -86,6 +86,20 @@ async def handler(request: web.BaseRequest) -> NoReturn: # be probing for TLS/SSL support which is # expected to fail logger.debug.assert_called_with("Error handling request", exc_info=exc) + logger.debug.reset_mock() + + # Now make another connection to the server + # to make sure that the exception is logged + # at debug on a second fresh connection + cli2 = await aiohttp_client(server) + resp = await cli2.get("/path/to") + assert resp.status == 500 + assert resp.headers["Content-Type"].startswith("text/plain") + # BadHttpMethod should be logged as debug + # on the first request since the client may + # be probing for TLS/SSL support which is + # expected to fail + logger.debug.assert_called_with("Error handling request", exc_info=exc) async def test_raw_server_logs_invalid_method_without_loop_debug( From 2b2301fcdc70010a3c39ff060901f2d1e65bbdfe Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 1 Dec 2024 21:51:00 +0000 Subject: [PATCH 1041/1511] [PR #10076/fad44f6b backport][3.12] Fix request count check for BadHttpMethod (#10082) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/10055.bugfix.rst | 1 + CHANGES/10076.bugfix.rst | 1 + aiohttp/web_protocol.py | 7 ++----- aiohttp/web_server.py | 2 ++ tests/test_web_server.py | 14 ++++++++++++++ 5 files changed, 20 insertions(+), 5 deletions(-) create mode 120000 CHANGES/10055.bugfix.rst create mode 100644 CHANGES/10076.bugfix.rst diff --git a/CHANGES/10055.bugfix.rst b/CHANGES/10055.bugfix.rst new file mode 120000 index 00000000000..b1f45d8b887 --- /dev/null +++ b/CHANGES/10055.bugfix.rst @@ -0,0 +1 @@ +10076.bugfix.rst \ No newline at end of file diff --git a/CHANGES/10076.bugfix.rst b/CHANGES/10076.bugfix.rst new file mode 100644 index 00000000000..c577366bbe8 --- /dev/null +++ b/CHANGES/10076.bugfix.rst @@ -0,0 +1 @@ +Fixed invalid method logging unexpected being logged at exception level on subsequent connections -- by :user:`bdraco`. diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index fe2ae8a1269..e8bb41abf97 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -192,6 +192,7 @@ def __init__( ): super().__init__(loop) + # _request_count is the number of requests processed with the same connection. self._request_count = 0 self._keepalive = False self._current_request: Optional[BaseRequest] = None @@ -688,11 +689,7 @@ def handle_error( Returns HTTP response with specific status code. Logs additional information. It always closes current connection. """ - if ( - self._manager - and self._manager.requests_count == 1 - and isinstance(exc, BadHttpMethod) - ): + if self._request_count == 1 and isinstance(exc, BadHttpMethod): # BadHttpMethod is common when a client sends non-HTTP # or encrypted traffic to an HTTP port. 
This is expected # to happen when connected to the public internet so we log diff --git a/aiohttp/web_server.py b/aiohttp/web_server.py index b6ac25ac1a5..328aca1e405 100644 --- a/aiohttp/web_server.py +++ b/aiohttp/web_server.py @@ -25,6 +25,8 @@ def __init__( self._loop = loop or asyncio.get_running_loop() self._connections: Dict[RequestHandler, asyncio.Transport] = {} self._kwargs = kwargs + # requests_count is the number of requests being processed by the server + # for the lifetime of the server. self.requests_count = 0 self.request_handler = handler self.request_factory = request_factory or self._make_request diff --git a/tests/test_web_server.py b/tests/test_web_server.py index e77df623020..7b9b87a374a 100644 --- a/tests/test_web_server.py +++ b/tests/test_web_server.py @@ -86,6 +86,20 @@ async def handler(request: web.BaseRequest) -> NoReturn: # be probing for TLS/SSL support which is # expected to fail logger.debug.assert_called_with("Error handling request", exc_info=exc) + logger.debug.reset_mock() + + # Now make another connection to the server + # to make sure that the exception is logged + # at debug on a second fresh connection + cli2 = await aiohttp_client(server) + resp = await cli2.get("/path/to") + assert resp.status == 500 + assert resp.headers["Content-Type"].startswith("text/plain") + # BadHttpMethod should be logged as debug + # on the first request since the client may + # be probing for TLS/SSL support which is + # expected to fail + logger.debug.assert_called_with("Error handling request", exc_info=exc) async def test_raw_server_logs_invalid_method_without_loop_debug( From 9df00a132a6ce64e97c455dc26076345e79f68c7 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 1 Dec 2024 21:55:51 +0000 Subject: [PATCH 1042/1511] [PR #10073/349b7565 backport][3.11] Improve performance of parsing headers (#10083) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/10073.misc.rst | 1 + aiohttp/_http_parser.pyx | 56 ++++++++++++++++++---------------------- 2 files changed, 26 insertions(+), 31 deletions(-) create mode 100644 CHANGES/10073.misc.rst diff --git a/CHANGES/10073.misc.rst b/CHANGES/10073.misc.rst new file mode 100644 index 00000000000..2a7e3514ea3 --- /dev/null +++ b/CHANGES/10073.misc.rst @@ -0,0 +1 @@ +Improved performance of parsing headers when using the C parser -- by :user:`bdraco`. diff --git a/aiohttp/_http_parser.pyx b/aiohttp/_http_parser.pyx index 988e4247f93..19dc3e63b74 100644 --- a/aiohttp/_http_parser.pyx +++ b/aiohttp/_http_parser.pyx @@ -71,7 +71,7 @@ cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD cdef object StreamReader = _StreamReader cdef object DeflateBuffer = _DeflateBuffer - +cdef bytes EMPTY_BYTES = b"" cdef inline object extend(object buf, const char* at, size_t length): cdef Py_ssize_t s @@ -277,8 +277,9 @@ cdef class HttpParser: cparser.llhttp_t* _cparser cparser.llhttp_settings_t* _csettings - bytearray _raw_name - bytearray _raw_value + bytes _raw_name + object _name + bytes _raw_value bint _has_value object _protocol @@ -296,7 +297,7 @@ cdef class HttpParser: bytearray _buf str _path str _reason - object _headers + list _headers list _raw_headers bint _upgraded list _messages @@ -350,8 +351,8 @@ cdef class HttpParser: self._payload_exception = payload_exception self._messages = [] - self._raw_name = bytearray() - self._raw_value = bytearray() + self._raw_name = EMPTY_BYTES + self._raw_value = EMPTY_BYTES self._has_value = False self._max_line_size = max_line_size @@ -378,42 +379,35 @@ cdef class HttpParser: self._limit = limit cdef _process_header(self): - if self._raw_name: - raw_name = bytes(self._raw_name) - raw_value = bytes(self._raw_value) - - name = find_header(raw_name) - value = raw_value.decode('utf-8', 'surrogateescape') + cdef str value + if self._raw_name is not EMPTY_BYTES: + name = 
find_header(self._raw_name) + value = self._raw_value.decode('utf-8', 'surrogateescape') - self._headers.add(name, value) + self._headers.append((name, value)) if name is CONTENT_ENCODING: self._content_encoding = value - PyByteArray_Resize(self._raw_name, 0) - PyByteArray_Resize(self._raw_value, 0) self._has_value = False - self._raw_headers.append((raw_name, raw_value)) + self._raw_headers.append((self._raw_name, self._raw_value)) + self._raw_name = EMPTY_BYTES + self._raw_value = EMPTY_BYTES cdef _on_header_field(self, char* at, size_t length): - cdef Py_ssize_t size - cdef char *buf if self._has_value: self._process_header() - size = PyByteArray_Size(self._raw_name) - PyByteArray_Resize(self._raw_name, size + length) - buf = PyByteArray_AsString(self._raw_name) - memcpy(buf + size, at, length) + if self._raw_name is EMPTY_BYTES: + self._raw_name = at[:length] + else: + self._raw_name += at[:length] cdef _on_header_value(self, char* at, size_t length): - cdef Py_ssize_t size - cdef char *buf - - size = PyByteArray_Size(self._raw_value) - PyByteArray_Resize(self._raw_value, size + length) - buf = PyByteArray_AsString(self._raw_value) - memcpy(buf + size, at, length) + if self._raw_value is EMPTY_BYTES: + self._raw_value = at[:length] + else: + self._raw_value += at[:length] self._has_value = True cdef _on_headers_complete(self): @@ -424,7 +418,7 @@ cdef class HttpParser: chunked = self._cparser.flags & cparser.F_CHUNKED raw_headers = tuple(self._raw_headers) - headers = CIMultiDictProxy(self._headers) + headers = CIMultiDictProxy(CIMultiDict(self._headers)) if self._cparser.type == cparser.HTTP_REQUEST: allowed = upgrade and headers.get("upgrade", "").lower() in ALLOWED_UPGRADES @@ -672,7 +666,7 @@ cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1: cdef HttpParser pyparser = <HttpParser>parser.data pyparser._started = True - pyparser._headers = CIMultiDict() + pyparser._headers = [] pyparser._raw_headers = [] PyByteArray_Resize(pyparser._buf, 0) 
pyparser._path = None From bf0a9bf40d7e11031aa5b15802bb300de1793823 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 1 Dec 2024 22:00:45 +0000 Subject: [PATCH 1043/1511] [PR #10073/349b7565 backport][3.12] Improve performance of parsing headers (#10084) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/10073.misc.rst | 1 + aiohttp/_http_parser.pyx | 56 ++++++++++++++++++---------------------- 2 files changed, 26 insertions(+), 31 deletions(-) create mode 100644 CHANGES/10073.misc.rst diff --git a/CHANGES/10073.misc.rst b/CHANGES/10073.misc.rst new file mode 100644 index 00000000000..2a7e3514ea3 --- /dev/null +++ b/CHANGES/10073.misc.rst @@ -0,0 +1 @@ +Improved performance of parsing headers when using the C parser -- by :user:`bdraco`. diff --git a/aiohttp/_http_parser.pyx b/aiohttp/_http_parser.pyx index 988e4247f93..19dc3e63b74 100644 --- a/aiohttp/_http_parser.pyx +++ b/aiohttp/_http_parser.pyx @@ -71,7 +71,7 @@ cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD cdef object StreamReader = _StreamReader cdef object DeflateBuffer = _DeflateBuffer - +cdef bytes EMPTY_BYTES = b"" cdef inline object extend(object buf, const char* at, size_t length): cdef Py_ssize_t s @@ -277,8 +277,9 @@ cdef class HttpParser: cparser.llhttp_t* _cparser cparser.llhttp_settings_t* _csettings - bytearray _raw_name - bytearray _raw_value + bytes _raw_name + object _name + bytes _raw_value bint _has_value object _protocol @@ -296,7 +297,7 @@ cdef class HttpParser: bytearray _buf str _path str _reason - object _headers + list _headers list _raw_headers bint _upgraded list _messages @@ -350,8 +351,8 @@ cdef class HttpParser: self._payload_exception = payload_exception self._messages = [] - self._raw_name = bytearray() - self._raw_value = bytearray() + self._raw_name = EMPTY_BYTES + self._raw_value = EMPTY_BYTES self._has_value = False self._max_line_size = max_line_size @@ 
-378,42 +379,35 @@ cdef class HttpParser: self._limit = limit cdef _process_header(self): - if self._raw_name: - raw_name = bytes(self._raw_name) - raw_value = bytes(self._raw_value) - - name = find_header(raw_name) - value = raw_value.decode('utf-8', 'surrogateescape') + cdef str value + if self._raw_name is not EMPTY_BYTES: + name = find_header(self._raw_name) + value = self._raw_value.decode('utf-8', 'surrogateescape') - self._headers.add(name, value) + self._headers.append((name, value)) if name is CONTENT_ENCODING: self._content_encoding = value - PyByteArray_Resize(self._raw_name, 0) - PyByteArray_Resize(self._raw_value, 0) self._has_value = False - self._raw_headers.append((raw_name, raw_value)) + self._raw_headers.append((self._raw_name, self._raw_value)) + self._raw_name = EMPTY_BYTES + self._raw_value = EMPTY_BYTES cdef _on_header_field(self, char* at, size_t length): - cdef Py_ssize_t size - cdef char *buf if self._has_value: self._process_header() - size = PyByteArray_Size(self._raw_name) - PyByteArray_Resize(self._raw_name, size + length) - buf = PyByteArray_AsString(self._raw_name) - memcpy(buf + size, at, length) + if self._raw_name is EMPTY_BYTES: + self._raw_name = at[:length] + else: + self._raw_name += at[:length] cdef _on_header_value(self, char* at, size_t length): - cdef Py_ssize_t size - cdef char *buf - - size = PyByteArray_Size(self._raw_value) - PyByteArray_Resize(self._raw_value, size + length) - buf = PyByteArray_AsString(self._raw_value) - memcpy(buf + size, at, length) + if self._raw_value is EMPTY_BYTES: + self._raw_value = at[:length] + else: + self._raw_value += at[:length] self._has_value = True cdef _on_headers_complete(self): @@ -424,7 +418,7 @@ cdef class HttpParser: chunked = self._cparser.flags & cparser.F_CHUNKED raw_headers = tuple(self._raw_headers) - headers = CIMultiDictProxy(self._headers) + headers = CIMultiDictProxy(CIMultiDict(self._headers)) if self._cparser.type == cparser.HTTP_REQUEST: allowed = upgrade and 
headers.get("upgrade", "").lower() in ALLOWED_UPGRADES @@ -672,7 +666,7 @@ cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1: cdef HttpParser pyparser = <HttpParser>parser.data pyparser._started = True - pyparser._headers = CIMultiDict() + pyparser._headers = [] pyparser._raw_headers = [] PyByteArray_Resize(pyparser._buf, 0) pyparser._path = None From 79561a69d4030d245ec5803069d7803c1ef663c6 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 1 Dec 2024 16:18:38 -0600 Subject: [PATCH 1044/1511] Release 3.11.9 (#10085) --- CHANGES.rst | 30 ++++++++++++++++++++++++++++++ CHANGES/10055.bugfix.rst | 1 - CHANGES/10073.misc.rst | 1 - CHANGES/10076.bugfix.rst | 1 - aiohttp/__init__.py | 2 +- 5 files changed, 31 insertions(+), 4 deletions(-) delete mode 120000 CHANGES/10055.bugfix.rst delete mode 100644 CHANGES/10073.misc.rst delete mode 100644 CHANGES/10076.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index 8a003a78c45..8352236c320 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,36 @@ .. towncrier release notes start +3.11.9 (2024-12-01) +=================== + +Bug fixes +--------- + +- Fixed invalid method logging unexpected being logged at exception level on subsequent connections -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10055`, :issue:`10076`. + + + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of parsing headers when using the C parser -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10073`. 
+ + + + +---- + + 3.11.8 (2024-11-27) =================== diff --git a/CHANGES/10055.bugfix.rst b/CHANGES/10055.bugfix.rst deleted file mode 120000 index b1f45d8b887..00000000000 --- a/CHANGES/10055.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -10076.bugfix.rst \ No newline at end of file diff --git a/CHANGES/10073.misc.rst b/CHANGES/10073.misc.rst deleted file mode 100644 index 2a7e3514ea3..00000000000 --- a/CHANGES/10073.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performance of parsing headers when using the C parser -- by :user:`bdraco`. diff --git a/CHANGES/10076.bugfix.rst b/CHANGES/10076.bugfix.rst deleted file mode 100644 index c577366bbe8..00000000000 --- a/CHANGES/10076.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed invalid method logging unexpected being logged at exception level on subsequent connections -- by :user:`bdraco`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 7896648923e..5615e5349ae 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.9.dev0" +__version__ = "3.11.9" from typing import TYPE_CHECKING, Tuple From ae2b050dffda8e82bcab78aa0fb455494166b718 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 1 Dec 2024 17:44:22 -0600 Subject: [PATCH 1045/1511] Increment version to 3.11.10.dev0 (#10087) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 5615e5349ae..0024853acaf 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.9" +__version__ = "3.11.10.dev0" from typing import TYPE_CHECKING, Tuple From 86bb6ad30c9411a3dc4320b356959a3dc0189eca Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 2 Dec 2024 01:22:11 +0000 Subject: [PATCH 1046/1511] [PR #10088/29c3ca93 backport][3.11] Avoid calling len on the same data in the stream reader twice (#10090) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/streams.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/aiohttp/streams.py b/aiohttp/streams.py index b97846171b1..029d577b88c 100644 --- a/aiohttp/streams.py +++ b/aiohttp/streams.py @@ -517,8 +517,9 @@ def _read_nowait_chunk(self, n: int) -> bytes: else: data = self._buffer.popleft() - self._size -= len(data) - self._cursor += len(data) + data_len = len(data) + self._size -= data_len + self._cursor += data_len chunk_splits = self._http_chunk_splits # Prevent memory leak: drop useless chunk splits From 6865d6b93a6a7a1da3202b22cdc65a6a2096dc8e Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 2 Dec 2024 01:26:35 +0000 Subject: [PATCH 1047/1511] [PR #10088/29c3ca93 backport][3.12] Avoid calling len on the same data in the stream reader twice (#10091) Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/streams.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/aiohttp/streams.py b/aiohttp/streams.py index b97846171b1..029d577b88c 100644 --- a/aiohttp/streams.py +++ b/aiohttp/streams.py @@ -517,8 +517,9 @@ def _read_nowait_chunk(self, n: int) -> bytes: else: data = self._buffer.popleft() - self._size -= len(data) - self._cursor += len(data) + data_len = len(data) + self._size -= data_len + self._cursor += data_len chunk_splits = self._http_chunk_splits # Prevent memory leak: drop useless chunk splits From d872e3429f6f0ddd9aba5e847f58a57b04ca8dd5 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 2 Dec 2024 14:48:32 +0000 Subject: [PATCH 1048/1511] [PR #10074/f733258e backport][3.12] Support absolute url to override base url (#10094) **This is a backport of PR #10074 as merged into master (f733258ed9e5e71b7b97511f5654efd6799cac46).** Co-authored-by: vivodi <103735539+vivodi@users.noreply.github.com> --- CHANGES/10074.feature.rst | 2 ++ CONTRIBUTORS.txt | 
1 + aiohttp/client.py | 6 ++---- docs/client_reference.rst | 4 ++++ tests/test_client_session.py | 18 ++++++++++++++++++ 5 files changed, 27 insertions(+), 4 deletions(-) create mode 100644 CHANGES/10074.feature.rst diff --git a/CHANGES/10074.feature.rst b/CHANGES/10074.feature.rst new file mode 100644 index 00000000000..d956c38af57 --- /dev/null +++ b/CHANGES/10074.feature.rst @@ -0,0 +1,2 @@ +Added support for overriding the base URL with an absolute one in client sessions +-- by :user:`vivodi`. diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 6adb3b97fb1..c3abc66bebf 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -52,6 +52,7 @@ Arseny Timoniq Artem Yushkovskiy Arthur Darcet Austin Scola +Bai Haoran Ben Bader Ben Greiner Ben Kallus diff --git a/aiohttp/client.py b/aiohttp/client.py index e04a6ff989a..7539310aa8a 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -461,11 +461,9 @@ def request( def _build_url(self, str_or_url: StrOrURL) -> URL: url = URL(str_or_url) - if self._base_url is None: - return url - else: - assert not url.absolute + if self._base_url and not url.absolute: return self._base_url.join(url) + return url async def _request( self, diff --git a/docs/client_reference.rst b/docs/client_reference.rst index c9031de5383..7e7cdf12184 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -79,6 +79,10 @@ The client session supports the context manager protocol for self closing. .. versionadded:: 3.8 + .. versionchanged:: 3.12 + + Added support for overriding the base URL with an absolute one in client sessions. + :param aiohttp.BaseConnector connector: BaseConnector sub-class instance to support connection pooling. 
diff --git a/tests/test_client_session.py b/tests/test_client_session.py index 65f80b6abe9..a2c4833b83e 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -1086,6 +1086,24 @@ async def test_requote_redirect_setter() -> None: URL("http://example.com/test1/test2?q=foo#bar"), id="base_url=URL('http://example.com/test1/') url='test2?q=foo#bar'", ), + pytest.param( + URL("http://example.com/test1/"), + "http://foo.com/bar", + URL("http://foo.com/bar"), + id="base_url=URL('http://example.com/test1/') url='http://foo.com/bar'", + ), + pytest.param( + URL("http://example.com"), + "http://foo.com/bar", + URL("http://foo.com/bar"), + id="base_url=URL('http://example.com') url='http://foo.com/bar'", + ), + pytest.param( + URL("http://example.com/test1/"), + "http://foo.com", + URL("http://foo.com"), + id="base_url=URL('http://example.com/test1/') url='http://foo.com'", + ), ], ) async def test_build_url_returns_expected_url( From 094ffb6e06feaea3d26b3d9084a40aafcb1a92ef Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 3 Dec 2024 20:14:37 -0600 Subject: [PATCH 1049/1511] [PR #10095/fcce1bf6 backport][3.12] Add a benchmark for web.FileResponse (#10098) **This is a backport of PR #10095 as merged into master (fcce1bf6a5c339a3d63ab1678dcb6658ffc7d570).** We didn't have any benchmarks for file responses. From the benchmarks it turns out most of the time is creating and processing the executor jobs. If we combine the stat into a job that returns the open fileobj it will likely be faster and solve https://github.com/aio-libs/aiohttp/issues/8013 Co-authored-by: J. 
Nick Koston <nick@koston.org> --- tests/test_benchmarks_web_fileresponse.py | 64 +++++++++++++++++++++++ 1 file changed, 64 insertions(+) create mode 100644 tests/test_benchmarks_web_fileresponse.py diff --git a/tests/test_benchmarks_web_fileresponse.py b/tests/test_benchmarks_web_fileresponse.py new file mode 100644 index 00000000000..7cdbda2efbb --- /dev/null +++ b/tests/test_benchmarks_web_fileresponse.py @@ -0,0 +1,64 @@ +"""codspeed benchmarks for the web file responses.""" + +import asyncio +import pathlib + +from pytest_codspeed import BenchmarkFixture + +from aiohttp import web +from aiohttp.pytest_plugin import AiohttpClient + + +def test_simple_web_file_response( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark creating 100 simple web.FileResponse.""" + response_count = 100 + filepath = pathlib.Path(__file__).parent / "sample.txt" + + async def handler(request: web.Request) -> web.FileResponse: + return web.FileResponse(path=filepath) + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_file_resonse_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(response_count): + await client.get("/") + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_file_resonse_benchmark()) + + +def test_simple_web_file_sendfile_fallback_response( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark creating 100 simple web.FileResponse without sendfile.""" + response_count = 100 + filepath = pathlib.Path(__file__).parent / "sample.txt" + + async def handler(request: web.Request) -> web.FileResponse: + transport = request.transport + assert transport is not None + transport._sendfile_compatible = False # type: ignore[attr-defined] + return web.FileResponse(path=filepath) + + app = web.Application() + app.router.add_route("GET", "/", 
handler) + + async def run_file_resonse_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(response_count): + await client.get("/") + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_file_resonse_benchmark()) From c41ffc7fea9d93378f933035e33240d278197b52 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 3 Dec 2024 20:14:47 -0600 Subject: [PATCH 1050/1511] [PR #10095/fcce1bf6 backport][3.11] Add a benchmark for web.FileResponse (#10097) **This is a backport of PR #10095 as merged into master (fcce1bf6a5c339a3d63ab1678dcb6658ffc7d570).** We didn't have any benchmarks for file responses. From the benchmarks it turns out most of the time is creating and processing the executor jobs. If we combine the stat into a job that returns the open fileobj it will likely be faster and solve https://github.com/aio-libs/aiohttp/issues/8013 Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_benchmarks_web_fileresponse.py | 64 +++++++++++++++++++++++ 1 file changed, 64 insertions(+) create mode 100644 tests/test_benchmarks_web_fileresponse.py diff --git a/tests/test_benchmarks_web_fileresponse.py b/tests/test_benchmarks_web_fileresponse.py new file mode 100644 index 00000000000..7cdbda2efbb --- /dev/null +++ b/tests/test_benchmarks_web_fileresponse.py @@ -0,0 +1,64 @@ +"""codspeed benchmarks for the web file responses.""" + +import asyncio +import pathlib + +from pytest_codspeed import BenchmarkFixture + +from aiohttp import web +from aiohttp.pytest_plugin import AiohttpClient + + +def test_simple_web_file_response( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark creating 100 simple web.FileResponse.""" + response_count = 100 + filepath = pathlib.Path(__file__).parent / "sample.txt" + + async def handler(request: web.Request) -> web.FileResponse: + return 
web.FileResponse(path=filepath) + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_file_resonse_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(response_count): + await client.get("/") + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_file_resonse_benchmark()) + + +def test_simple_web_file_sendfile_fallback_response( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark creating 100 simple web.FileResponse without sendfile.""" + response_count = 100 + filepath = pathlib.Path(__file__).parent / "sample.txt" + + async def handler(request: web.Request) -> web.FileResponse: + transport = request.transport + assert transport is not None + transport._sendfile_compatible = False # type: ignore[attr-defined] + return web.FileResponse(path=filepath) + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_file_resonse_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(response_count): + await client.get("/") + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_file_resonse_benchmark()) From 23a4b31ce98522b526a435bc339429f38f5a6295 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 4 Dec 2024 19:37:47 +0000 Subject: [PATCH 1051/1511] [PR #10102/7557b03d backport][3.11] Fix deprecated calls to `guess_type` for paths with Python 3.13 (#10103) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/10102.bugfix.rst | 1 + aiohttp/payload.py | 7 ++++++- aiohttp/web_fileresponse.py | 9 ++++++--- 3 files changed, 13 insertions(+), 4 deletions(-) create mode 100644 CHANGES/10102.bugfix.rst diff --git a/CHANGES/10102.bugfix.rst b/CHANGES/10102.bugfix.rst new file mode 100644 index 00000000000..86dda8684dd --- /dev/null +++ b/CHANGES/10102.bugfix.rst @@ -0,0 +1 @@ +Replaced deprecated call to :func:`mimetypes.guess_type` with :func:`mimetypes.guess_file_type` when using Python 3.13+ -- by :user:`bdraco`. diff --git a/aiohttp/payload.py b/aiohttp/payload.py index c8c01814698..3f6d3672db2 100644 --- a/aiohttp/payload.py +++ b/aiohttp/payload.py @@ -4,6 +4,7 @@ import json import mimetypes import os +import sys import warnings from abc import ABC, abstractmethod from itertools import chain @@ -169,7 +170,11 @@ def __init__( if content_type is not sentinel and content_type is not None: self._headers[hdrs.CONTENT_TYPE] = content_type elif self._filename is not None: - content_type = mimetypes.guess_type(self._filename)[0] + if sys.version_info >= (3, 13): + guesser = mimetypes.guess_file_type + else: + guesser = mimetypes.guess_type + content_type = guesser(self._filename)[0] if content_type is None: content_type = self._default_content_type self._headers[hdrs.CONTENT_TYPE] = content_type diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index 3b2bc2caf12..ff191415ed5 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -1,6 +1,7 @@ import asyncio import os import pathlib +import sys from contextlib import suppress from mimetypes import MimeTypes from stat import S_ISREG @@ -317,9 +318,11 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter # extension of the request path. The encoding returned by guess_type # can be ignored since the map was cleared above. 
if hdrs.CONTENT_TYPE not in self.headers: - self.content_type = ( - CONTENT_TYPES.guess_type(self._path)[0] or FALLBACK_CONTENT_TYPE - ) + if sys.version_info >= (3, 13): + guesser = CONTENT_TYPES.guess_file_type + else: + guesser = CONTENT_TYPES.guess_type + self.content_type = guesser(self._path)[0] or FALLBACK_CONTENT_TYPE if file_encoding: self.headers[hdrs.CONTENT_ENCODING] = file_encoding From f180fc1987721523c41425d102277c778a6ad762 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 4 Dec 2024 19:46:24 +0000 Subject: [PATCH 1052/1511] [PR #10102/7557b03d backport][3.12] Fix deprecated calls to `guess_type` for paths with Python 3.13 (#10104) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/10102.bugfix.rst | 1 + aiohttp/payload.py | 7 ++++++- aiohttp/web_fileresponse.py | 9 ++++++--- 3 files changed, 13 insertions(+), 4 deletions(-) create mode 100644 CHANGES/10102.bugfix.rst diff --git a/CHANGES/10102.bugfix.rst b/CHANGES/10102.bugfix.rst new file mode 100644 index 00000000000..86dda8684dd --- /dev/null +++ b/CHANGES/10102.bugfix.rst @@ -0,0 +1 @@ +Replaced deprecated call to :func:`mimetypes.guess_type` with :func:`mimetypes.guess_file_type` when using Python 3.13+ -- by :user:`bdraco`. 
diff --git a/aiohttp/payload.py b/aiohttp/payload.py index c8c01814698..3f6d3672db2 100644 --- a/aiohttp/payload.py +++ b/aiohttp/payload.py @@ -4,6 +4,7 @@ import json import mimetypes import os +import sys import warnings from abc import ABC, abstractmethod from itertools import chain @@ -169,7 +170,11 @@ def __init__( if content_type is not sentinel and content_type is not None: self._headers[hdrs.CONTENT_TYPE] = content_type elif self._filename is not None: - content_type = mimetypes.guess_type(self._filename)[0] + if sys.version_info >= (3, 13): + guesser = mimetypes.guess_file_type + else: + guesser = mimetypes.guess_type + content_type = guesser(self._filename)[0] if content_type is None: content_type = self._default_content_type self._headers[hdrs.CONTENT_TYPE] = content_type diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index 3b2bc2caf12..ff191415ed5 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -1,6 +1,7 @@ import asyncio import os import pathlib +import sys from contextlib import suppress from mimetypes import MimeTypes from stat import S_ISREG @@ -317,9 +318,11 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter # extension of the request path. The encoding returned by guess_type # can be ignored since the map was cleared above. 
if hdrs.CONTENT_TYPE not in self.headers: - self.content_type = ( - CONTENT_TYPES.guess_type(self._path)[0] or FALLBACK_CONTENT_TYPE - ) + if sys.version_info >= (3, 13): + guesser = CONTENT_TYPES.guess_file_type + else: + guesser = CONTENT_TYPES.guess_type + self.content_type = guesser(self._path)[0] or FALLBACK_CONTENT_TYPE if file_encoding: self.headers[hdrs.CONTENT_ENCODING] = file_encoding From 07d17590833b9d3785e95f6ec991a4f996fdaf9e Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 4 Dec 2024 20:28:53 +0000 Subject: [PATCH 1053/1511] [PR #10101/678993a4 backport][3.11] Fix race in `FileResponse` if file is replaced during `prepare` (#10105) Co-authored-by: J. Nick Koston <nick@koston.org> fixes #8013 --- CHANGES/10101.bugfix.rst | 1 + aiohttp/web_fileresponse.py | 79 ++++++++++++++++++++++++--------- tests/test_web_urldispatcher.py | 7 +-- 3 files changed, 63 insertions(+), 24 deletions(-) create mode 100644 CHANGES/10101.bugfix.rst diff --git a/CHANGES/10101.bugfix.rst b/CHANGES/10101.bugfix.rst new file mode 100644 index 00000000000..e06195ac028 --- /dev/null +++ b/CHANGES/10101.bugfix.rst @@ -0,0 +1 @@ +Fixed race condition in :class:`aiohttp.web.FileResponse` that could have resulted in an incorrect response if the file was replaced on the file system during ``prepare`` -- by :user:`bdraco`. 
diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index ff191415ed5..7c5149bb62a 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -1,4 +1,5 @@ import asyncio +import io import os import pathlib import sys @@ -16,6 +17,7 @@ Iterator, List, Optional, + Set, Tuple, Union, cast, @@ -73,6 +75,9 @@ CONTENT_TYPES.add_type(content_type, extension) # type: ignore[attr-defined] +_CLOSE_FUTURES: Set[asyncio.Future[None]] = set() + + class FileResponse(StreamResponse): """A response object can be used to send files.""" @@ -161,10 +166,10 @@ async def _precondition_failed( self.content_length = 0 return await super().prepare(request) - def _get_file_path_stat_encoding( + def _open_file_path_stat_encoding( self, accept_encoding: str - ) -> Tuple[pathlib.Path, os.stat_result, Optional[str]]: - """Return the file path, stat result, and encoding. + ) -> Tuple[Optional[io.BufferedReader], os.stat_result, Optional[str]]: + """Return the io object, stat result, and encoding. If an uncompressed file is returned, the encoding is set to :py:data:`None`. @@ -182,10 +187,27 @@ def _get_file_path_stat_encoding( # Do not follow symlinks and ignore any non-regular files. st = compressed_path.lstat() if S_ISREG(st.st_mode): - return compressed_path, st, file_encoding + fobj = compressed_path.open("rb") + with suppress(OSError): + # fstat() may not be available on all platforms + # Once we open the file, we want the fstat() to ensure + # the file has not changed between the first stat() + # and the open(). 
+ st = os.stat(fobj.fileno()) + return fobj, st, file_encoding # Fallback to the uncompressed file - return file_path, file_path.stat(), None + st = file_path.stat() + if not S_ISREG(st.st_mode): + return None, st, None + fobj = file_path.open("rb") + with suppress(OSError): + # fstat() may not be available on all platforms + # Once we open the file, we want the fstat() to ensure + # the file has not changed between the first stat() + # and the open(). + st = os.stat(fobj.fileno()) + return fobj, st, None async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: loop = asyncio.get_running_loop() @@ -193,20 +215,44 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1 accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() try: - file_path, st, file_encoding = await loop.run_in_executor( - None, self._get_file_path_stat_encoding, accept_encoding + fobj, st, file_encoding = await loop.run_in_executor( + None, self._open_file_path_stat_encoding, accept_encoding ) + except PermissionError: + self.set_status(HTTPForbidden.status_code) + return await super().prepare(request) except OSError: # Most likely to be FileNotFoundError or OSError for circular # symlinks in python >= 3.13, so respond with 404. self.set_status(HTTPNotFound.status_code) return await super().prepare(request) - # Forbid special files like sockets, pipes, devices, etc. - if not S_ISREG(st.st_mode): - self.set_status(HTTPForbidden.status_code) - return await super().prepare(request) + try: + # Forbid special files like sockets, pipes, devices, etc. 
+ if not fobj or not S_ISREG(st.st_mode): + self.set_status(HTTPForbidden.status_code) + return await super().prepare(request) + return await self._prepare_open_file(request, fobj, st, file_encoding) + finally: + if fobj: + # We do not await here because we do not want to wait + # for the executor to finish before returning the response + # so the connection can begin servicing another request + # as soon as possible. + close_future = loop.run_in_executor(None, fobj.close) + # Hold a strong reference to the future to prevent it from being + # garbage collected before it completes. + _CLOSE_FUTURES.add(close_future) + close_future.add_done_callback(_CLOSE_FUTURES.remove) + + async def _prepare_open_file( + self, + request: "BaseRequest", + fobj: io.BufferedReader, + st: os.stat_result, + file_encoding: Optional[str], + ) -> Optional[AbstractStreamWriter]: etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}" last_modified = st.st_mtime @@ -349,18 +395,9 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter if count == 0 or must_be_empty_body(request.method, self.status): return await super().prepare(request) - try: - fobj = await loop.run_in_executor(None, file_path.open, "rb") - except PermissionError: - self.set_status(HTTPForbidden.status_code) - return await super().prepare(request) - if start: # be aware that start could be None or int=0 here. 
offset = start else: offset = 0 - try: - return await self._sendfile(request, fobj, offset, count) - finally: - await asyncio.shield(loop.run_in_executor(None, fobj.close)) + return await self._sendfile(request, fobj, offset, count) diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 92066f09b7d..ee60b6917c5 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -585,16 +585,17 @@ async def test_access_mock_special_resource( my_special.touch() real_result = my_special.stat() - real_stat = pathlib.Path.stat + real_stat = os.stat - def mock_stat(self: pathlib.Path, **kwargs: Any) -> os.stat_result: - s = real_stat(self, **kwargs) + def mock_stat(path: Any, **kwargs: Any) -> os.stat_result: + s = real_stat(path, **kwargs) if os.path.samestat(s, real_result): mock_mode = S_IFIFO | S_IMODE(s.st_mode) s = os.stat_result([mock_mode] + list(s)[1:]) return s monkeypatch.setattr("pathlib.Path.stat", mock_stat) + monkeypatch.setattr("os.stat", mock_stat) app = web.Application() app.router.add_static("/", str(tmp_path)) From fa6804c189805ed0179693b12e1ca2eca99a7863 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 4 Dec 2024 20:38:09 +0000 Subject: [PATCH 1054/1511] [PR #10101/678993a4 backport][3.12] Fix race in `FileResponse` if file is replaced during `prepare` (#10106) Co-authored-by: J. 
Nick Koston <nick@koston.org> fixes #8013 --- CHANGES/10101.bugfix.rst | 1 + aiohttp/web_fileresponse.py | 79 ++++++++++++++++++++++++--------- tests/test_web_urldispatcher.py | 7 +-- 3 files changed, 63 insertions(+), 24 deletions(-) create mode 100644 CHANGES/10101.bugfix.rst diff --git a/CHANGES/10101.bugfix.rst b/CHANGES/10101.bugfix.rst new file mode 100644 index 00000000000..e06195ac028 --- /dev/null +++ b/CHANGES/10101.bugfix.rst @@ -0,0 +1 @@ +Fixed race condition in :class:`aiohttp.web.FileResponse` that could have resulted in an incorrect response if the file was replaced on the file system during ``prepare`` -- by :user:`bdraco`. diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index ff191415ed5..7c5149bb62a 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -1,4 +1,5 @@ import asyncio +import io import os import pathlib import sys @@ -16,6 +17,7 @@ Iterator, List, Optional, + Set, Tuple, Union, cast, @@ -73,6 +75,9 @@ CONTENT_TYPES.add_type(content_type, extension) # type: ignore[attr-defined] +_CLOSE_FUTURES: Set[asyncio.Future[None]] = set() + + class FileResponse(StreamResponse): """A response object can be used to send files.""" @@ -161,10 +166,10 @@ async def _precondition_failed( self.content_length = 0 return await super().prepare(request) - def _get_file_path_stat_encoding( + def _open_file_path_stat_encoding( self, accept_encoding: str - ) -> Tuple[pathlib.Path, os.stat_result, Optional[str]]: - """Return the file path, stat result, and encoding. + ) -> Tuple[Optional[io.BufferedReader], os.stat_result, Optional[str]]: + """Return the io object, stat result, and encoding. If an uncompressed file is returned, the encoding is set to :py:data:`None`. @@ -182,10 +187,27 @@ def _get_file_path_stat_encoding( # Do not follow symlinks and ignore any non-regular files. 
st = compressed_path.lstat() if S_ISREG(st.st_mode): - return compressed_path, st, file_encoding + fobj = compressed_path.open("rb") + with suppress(OSError): + # fstat() may not be available on all platforms + # Once we open the file, we want the fstat() to ensure + # the file has not changed between the first stat() + # and the open(). + st = os.stat(fobj.fileno()) + return fobj, st, file_encoding # Fallback to the uncompressed file - return file_path, file_path.stat(), None + st = file_path.stat() + if not S_ISREG(st.st_mode): + return None, st, None + fobj = file_path.open("rb") + with suppress(OSError): + # fstat() may not be available on all platforms + # Once we open the file, we want the fstat() to ensure + # the file has not changed between the first stat() + # and the open(). + st = os.stat(fobj.fileno()) + return fobj, st, None async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: loop = asyncio.get_running_loop() @@ -193,20 +215,44 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1 accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() try: - file_path, st, file_encoding = await loop.run_in_executor( - None, self._get_file_path_stat_encoding, accept_encoding + fobj, st, file_encoding = await loop.run_in_executor( + None, self._open_file_path_stat_encoding, accept_encoding ) + except PermissionError: + self.set_status(HTTPForbidden.status_code) + return await super().prepare(request) except OSError: # Most likely to be FileNotFoundError or OSError for circular # symlinks in python >= 3.13, so respond with 404. self.set_status(HTTPNotFound.status_code) return await super().prepare(request) - # Forbid special files like sockets, pipes, devices, etc. 
- if not S_ISREG(st.st_mode): - self.set_status(HTTPForbidden.status_code) - return await super().prepare(request) + try: + # Forbid special files like sockets, pipes, devices, etc. + if not fobj or not S_ISREG(st.st_mode): + self.set_status(HTTPForbidden.status_code) + return await super().prepare(request) + return await self._prepare_open_file(request, fobj, st, file_encoding) + finally: + if fobj: + # We do not await here because we do not want to wait + # for the executor to finish before returning the response + # so the connection can begin servicing another request + # as soon as possible. + close_future = loop.run_in_executor(None, fobj.close) + # Hold a strong reference to the future to prevent it from being + # garbage collected before it completes. + _CLOSE_FUTURES.add(close_future) + close_future.add_done_callback(_CLOSE_FUTURES.remove) + + async def _prepare_open_file( + self, + request: "BaseRequest", + fobj: io.BufferedReader, + st: os.stat_result, + file_encoding: Optional[str], + ) -> Optional[AbstractStreamWriter]: etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}" last_modified = st.st_mtime @@ -349,18 +395,9 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter if count == 0 or must_be_empty_body(request.method, self.status): return await super().prepare(request) - try: - fobj = await loop.run_in_executor(None, file_path.open, "rb") - except PermissionError: - self.set_status(HTTPForbidden.status_code) - return await super().prepare(request) - if start: # be aware that start could be None or int=0 here. 
offset = start else: offset = 0 - try: - return await self._sendfile(request, fobj, offset, count) - finally: - await asyncio.shield(loop.run_in_executor(None, fobj.close)) + return await self._sendfile(request, fobj, offset, count) diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 92066f09b7d..ee60b6917c5 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -585,16 +585,17 @@ async def test_access_mock_special_resource( my_special.touch() real_result = my_special.stat() - real_stat = pathlib.Path.stat + real_stat = os.stat - def mock_stat(self: pathlib.Path, **kwargs: Any) -> os.stat_result: - s = real_stat(self, **kwargs) + def mock_stat(path: Any, **kwargs: Any) -> os.stat_result: + s = real_stat(path, **kwargs) if os.path.samestat(s, real_result): mock_mode = S_IFIFO | S_IMODE(s.st_mode) s = os.stat_result([mock_mode] + list(s)[1:]) return s monkeypatch.setattr("pathlib.Path.stat", mock_stat) + monkeypatch.setattr("os.stat", mock_stat) app = web.Application() app.router.add_static("/", str(tmp_path)) From ae153abfcc95d06a5a8c8189f47fea25b198f340 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 4 Dec 2024 21:07:46 +0000 Subject: [PATCH 1055/1511] [PR #10107/84bb77d1 backport][3.11] Use internal `self._headers` var in `FileResponse` (#10108) Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/web_fileresponse.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index 7c5149bb62a..53a27feb098 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -315,7 +315,7 @@ async def _prepare_open_file( # # Will do the same below. 
Many servers ignore this and do not # send a Content-Range header with HTTP 416 - self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}" + self._headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}" self.set_status(HTTPRequestRangeNotSatisfiable.status_code) return await super().prepare(request) @@ -351,7 +351,7 @@ async def _prepare_open_file( # suffix-byte-range-spec with a non-zero suffix-length, # then the byte-range-set is satisfiable. Otherwise, the # byte-range-set is unsatisfiable. - self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}" + self._headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}" self.set_status(HTTPRequestRangeNotSatisfiable.status_code) return await super().prepare(request) @@ -363,7 +363,7 @@ async def _prepare_open_file( # If the Content-Type header is not already set, guess it based on the # extension of the request path. The encoding returned by guess_type # can be ignored since the map was cleared above. - if hdrs.CONTENT_TYPE not in self.headers: + if hdrs.CONTENT_TYPE not in self._headers: if sys.version_info >= (3, 13): guesser = CONTENT_TYPES.guess_file_type else: @@ -371,8 +371,8 @@ async def _prepare_open_file( self.content_type = guesser(self._path)[0] or FALLBACK_CONTENT_TYPE if file_encoding: - self.headers[hdrs.CONTENT_ENCODING] = file_encoding - self.headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING + self._headers[hdrs.CONTENT_ENCODING] = file_encoding + self._headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING # Disable compression if we are already sending # a compressed file since we don't want to double # compress. 
@@ -382,12 +382,12 @@ async def _prepare_open_file( self.last_modified = st.st_mtime # type: ignore[assignment] self.content_length = count - self.headers[hdrs.ACCEPT_RANGES] = "bytes" + self._headers[hdrs.ACCEPT_RANGES] = "bytes" real_start = cast(int, start) if status == HTTPPartialContent.status_code: - self.headers[hdrs.CONTENT_RANGE] = "bytes {}-{}/{}".format( + self._headers[hdrs.CONTENT_RANGE] = "bytes {}-{}/{}".format( real_start, real_start + count - 1, file_size ) From 8745cf4fbf6024d133891a29ab451686f26f505a Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 4 Dec 2024 21:12:41 +0000 Subject: [PATCH 1056/1511] [PR #10107/84bb77d1 backport][3.12] Use internal `self._headers` var in `FileResponse` (#10109) Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/web_fileresponse.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index 7c5149bb62a..53a27feb098 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -315,7 +315,7 @@ async def _prepare_open_file( # # Will do the same below. Many servers ignore this and do not # send a Content-Range header with HTTP 416 - self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}" + self._headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}" self.set_status(HTTPRequestRangeNotSatisfiable.status_code) return await super().prepare(request) @@ -351,7 +351,7 @@ async def _prepare_open_file( # suffix-byte-range-spec with a non-zero suffix-length, # then the byte-range-set is satisfiable. Otherwise, the # byte-range-set is unsatisfiable. 
- self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}" + self._headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}" self.set_status(HTTPRequestRangeNotSatisfiable.status_code) return await super().prepare(request) @@ -363,7 +363,7 @@ async def _prepare_open_file( # If the Content-Type header is not already set, guess it based on the # extension of the request path. The encoding returned by guess_type # can be ignored since the map was cleared above. - if hdrs.CONTENT_TYPE not in self.headers: + if hdrs.CONTENT_TYPE not in self._headers: if sys.version_info >= (3, 13): guesser = CONTENT_TYPES.guess_file_type else: @@ -371,8 +371,8 @@ async def _prepare_open_file( self.content_type = guesser(self._path)[0] or FALLBACK_CONTENT_TYPE if file_encoding: - self.headers[hdrs.CONTENT_ENCODING] = file_encoding - self.headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING + self._headers[hdrs.CONTENT_ENCODING] = file_encoding + self._headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING # Disable compression if we are already sending # a compressed file since we don't want to double # compress. @@ -382,12 +382,12 @@ async def _prepare_open_file( self.last_modified = st.st_mtime # type: ignore[assignment] self.content_length = count - self.headers[hdrs.ACCEPT_RANGES] = "bytes" + self._headers[hdrs.ACCEPT_RANGES] = "bytes" real_start = cast(int, start) if status == HTTPPartialContent.status_code: - self.headers[hdrs.CONTENT_RANGE] = "bytes {}-{}/{}".format( + self._headers[hdrs.CONTENT_RANGE] = "bytes {}-{}/{}".format( real_start, real_start + count - 1, file_size ) From 78473b9c695903fe4021688b74cf7506b95e6463 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 5 Dec 2024 15:55:34 +0000 Subject: [PATCH 1057/1511] [PR #10114/94569554 backport][3.11] Add 304 benchmark for FileResponse (#10115) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- tests/test_benchmarks_web_fileresponse.py | 51 ++++++++++++++++++++--- 1 file changed, 46 insertions(+), 5 deletions(-) diff --git a/tests/test_benchmarks_web_fileresponse.py b/tests/test_benchmarks_web_fileresponse.py index 7cdbda2efbb..01aa7448c86 100644 --- a/tests/test_benchmarks_web_fileresponse.py +++ b/tests/test_benchmarks_web_fileresponse.py @@ -3,9 +3,10 @@ import asyncio import pathlib +from multidict import CIMultiDict from pytest_codspeed import BenchmarkFixture -from aiohttp import web +from aiohttp import ClientResponse, web from aiohttp.pytest_plugin import AiohttpClient @@ -24,7 +25,7 @@ async def handler(request: web.Request) -> web.FileResponse: app = web.Application() app.router.add_route("GET", "/", handler) - async def run_file_resonse_benchmark() -> None: + async def run_file_response_benchmark() -> None: client = await aiohttp_client(app) for _ in range(response_count): await client.get("/") @@ -32,7 +33,7 @@ async def run_file_resonse_benchmark() -> None: @benchmark def _run() -> None: - loop.run_until_complete(run_file_resonse_benchmark()) + loop.run_until_complete(run_file_response_benchmark()) def test_simple_web_file_sendfile_fallback_response( @@ -53,7 +54,7 @@ async def handler(request: web.Request) -> web.FileResponse: app = web.Application() app.router.add_route("GET", "/", handler) - async def run_file_resonse_benchmark() -> None: + async def run_file_response_benchmark() -> None: client = await aiohttp_client(app) for _ in range(response_count): await client.get("/") @@ -61,4 +62,44 @@ async def run_file_resonse_benchmark() -> None: @benchmark def _run() -> None: - loop.run_until_complete(run_file_resonse_benchmark()) + loop.run_until_complete(run_file_response_benchmark()) + + +def test_simple_web_file_response_not_modified( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark web.FileResponse that return a 304.""" + 
response_count = 100 + filepath = pathlib.Path(__file__).parent / "sample.txt" + + async def handler(request: web.Request) -> web.FileResponse: + return web.FileResponse(path=filepath) + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def make_last_modified_header() -> CIMultiDict[str]: + client = await aiohttp_client(app) + resp = await client.get("/") + last_modified = resp.headers["Last-Modified"] + headers = CIMultiDict({"If-Modified-Since": last_modified}) + return headers + + async def run_file_response_benchmark( + headers: CIMultiDict[str], + ) -> ClientResponse: + client = await aiohttp_client(app) + for _ in range(response_count): + resp = await client.get("/", headers=headers) + + await client.close() + return resp # type: ignore[possibly-undefined] + + headers = loop.run_until_complete(make_last_modified_header()) + + @benchmark + def _run() -> None: + resp = loop.run_until_complete(run_file_response_benchmark(headers)) + assert resp.status == 304 From cd0c2c8b410665056966606f0d085736cc2cabad Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 5 Dec 2024 16:01:05 +0000 Subject: [PATCH 1058/1511] [PR #10114/94569554 backport][3.12] Add 304 benchmark for FileResponse (#10116) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- tests/test_benchmarks_web_fileresponse.py | 51 ++++++++++++++++++++--- 1 file changed, 46 insertions(+), 5 deletions(-) diff --git a/tests/test_benchmarks_web_fileresponse.py b/tests/test_benchmarks_web_fileresponse.py index 7cdbda2efbb..01aa7448c86 100644 --- a/tests/test_benchmarks_web_fileresponse.py +++ b/tests/test_benchmarks_web_fileresponse.py @@ -3,9 +3,10 @@ import asyncio import pathlib +from multidict import CIMultiDict from pytest_codspeed import BenchmarkFixture -from aiohttp import web +from aiohttp import ClientResponse, web from aiohttp.pytest_plugin import AiohttpClient @@ -24,7 +25,7 @@ async def handler(request: web.Request) -> web.FileResponse: app = web.Application() app.router.add_route("GET", "/", handler) - async def run_file_resonse_benchmark() -> None: + async def run_file_response_benchmark() -> None: client = await aiohttp_client(app) for _ in range(response_count): await client.get("/") @@ -32,7 +33,7 @@ async def run_file_resonse_benchmark() -> None: @benchmark def _run() -> None: - loop.run_until_complete(run_file_resonse_benchmark()) + loop.run_until_complete(run_file_response_benchmark()) def test_simple_web_file_sendfile_fallback_response( @@ -53,7 +54,7 @@ async def handler(request: web.Request) -> web.FileResponse: app = web.Application() app.router.add_route("GET", "/", handler) - async def run_file_resonse_benchmark() -> None: + async def run_file_response_benchmark() -> None: client = await aiohttp_client(app) for _ in range(response_count): await client.get("/") @@ -61,4 +62,44 @@ async def run_file_resonse_benchmark() -> None: @benchmark def _run() -> None: - loop.run_until_complete(run_file_resonse_benchmark()) + loop.run_until_complete(run_file_response_benchmark()) + + +def test_simple_web_file_response_not_modified( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark web.FileResponse that return a 304.""" + 
response_count = 100 + filepath = pathlib.Path(__file__).parent / "sample.txt" + + async def handler(request: web.Request) -> web.FileResponse: + return web.FileResponse(path=filepath) + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def make_last_modified_header() -> CIMultiDict[str]: + client = await aiohttp_client(app) + resp = await client.get("/") + last_modified = resp.headers["Last-Modified"] + headers = CIMultiDict({"If-Modified-Since": last_modified}) + return headers + + async def run_file_response_benchmark( + headers: CIMultiDict[str], + ) -> ClientResponse: + client = await aiohttp_client(app) + for _ in range(response_count): + resp = await client.get("/", headers=headers) + + await client.close() + return resp # type: ignore[possibly-undefined] + + headers = loop.run_until_complete(make_last_modified_header()) + + @benchmark + def _run() -> None: + resp = loop.run_until_complete(run_file_response_benchmark(headers)) + assert resp.status == 304 From 6207a6d84fdfc2aef0c8bd50958d65e8b5ae3fe7 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 5 Dec 2024 11:53:06 -0600 Subject: [PATCH 1059/1511] [PR #10113/01302134 backport][3.12] Restore 304 performance after fixing `FileResponse` replace race (#10118) --- CHANGES/10113.bugfix.rst | 1 + aiohttp/web_fileresponse.py | 163 ++++++++++++++++++++---------------- 2 files changed, 92 insertions(+), 72 deletions(-) create mode 120000 CHANGES/10113.bugfix.rst diff --git a/CHANGES/10113.bugfix.rst b/CHANGES/10113.bugfix.rst new file mode 120000 index 00000000000..89cef58729f --- /dev/null +++ b/CHANGES/10113.bugfix.rst @@ -0,0 +1 @@ +10101.bugfix.rst \ No newline at end of file diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index 53a27feb098..2c54c988920 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -4,6 +4,7 @@ import pathlib import sys from contextlib import suppress 
+from enum import Enum, auto from mimetypes import MimeTypes from stat import S_ISREG from types import MappingProxyType @@ -69,6 +70,16 @@ } ) + +class _FileResponseResult(Enum): + """The result of the file response.""" + + SEND_FILE = auto() # Ie a regular file to send + NOT_ACCEPTABLE = auto() # Ie a socket, or non-regular file + PRE_CONDITION_FAILED = auto() # Ie If-Match or If-None-Match failed + NOT_MODIFIED = auto() # 304 Not Modified + + # Add custom pairs and clear the encodings map so guess_type ignores them. CONTENT_TYPES.encodings_map.clear() for content_type, extension in ADDITIONAL_CONTENT_TYPES.items(): @@ -166,10 +177,12 @@ async def _precondition_failed( self.content_length = 0 return await super().prepare(request) - def _open_file_path_stat_encoding( - self, accept_encoding: str - ) -> Tuple[Optional[io.BufferedReader], os.stat_result, Optional[str]]: - """Return the io object, stat result, and encoding. + def _make_response( + self, request: "BaseRequest", accept_encoding: str + ) -> Tuple[ + _FileResponseResult, Optional[io.BufferedReader], os.stat_result, Optional[str] + ]: + """Return the response result, io object, stat result, and encoding. If an uncompressed file is returned, the encoding is set to :py:data:`None`. @@ -177,6 +190,52 @@ def _open_file_path_stat_encoding( This method should be called from a thread executor since it calls os.stat which may block. 
""" + file_path, st, file_encoding = self._get_file_path_stat_encoding( + accept_encoding + ) + if not file_path: + return _FileResponseResult.NOT_ACCEPTABLE, None, st, None + + etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}" + + # https://www.rfc-editor.org/rfc/rfc9110#section-13.1.1-2 + if (ifmatch := request.if_match) is not None and not self._etag_match( + etag_value, ifmatch, weak=False + ): + return _FileResponseResult.PRE_CONDITION_FAILED, None, st, file_encoding + + if ( + (unmodsince := request.if_unmodified_since) is not None + and ifmatch is None + and st.st_mtime > unmodsince.timestamp() + ): + return _FileResponseResult.PRE_CONDITION_FAILED, None, st, file_encoding + + # https://www.rfc-editor.org/rfc/rfc9110#section-13.1.2-2 + if (ifnonematch := request.if_none_match) is not None and self._etag_match( + etag_value, ifnonematch, weak=True + ): + return _FileResponseResult.NOT_MODIFIED, None, st, file_encoding + + if ( + (modsince := request.if_modified_since) is not None + and ifnonematch is None + and st.st_mtime <= modsince.timestamp() + ): + return _FileResponseResult.NOT_MODIFIED, None, st, file_encoding + + fobj = file_path.open("rb") + with suppress(OSError): + # fstat() may not be available on all platforms + # Once we open the file, we want the fstat() to ensure + # the file has not changed between the first stat() + # and the open(). + st = os.stat(fobj.fileno()) + return _FileResponseResult.SEND_FILE, fobj, st, file_encoding + + def _get_file_path_stat_encoding( + self, accept_encoding: str + ) -> Tuple[Optional[pathlib.Path], os.stat_result, Optional[str]]: file_path = self._path for file_extension, file_encoding in ENCODING_EXTENSIONS.items(): if file_encoding not in accept_encoding: @@ -187,27 +246,13 @@ def _open_file_path_stat_encoding( # Do not follow symlinks and ignore any non-regular files. 
st = compressed_path.lstat() if S_ISREG(st.st_mode): - fobj = compressed_path.open("rb") - with suppress(OSError): - # fstat() may not be available on all platforms - # Once we open the file, we want the fstat() to ensure - # the file has not changed between the first stat() - # and the open(). - st = os.stat(fobj.fileno()) - return fobj, st, file_encoding + return compressed_path, st, file_encoding # Fallback to the uncompressed file st = file_path.stat() if not S_ISREG(st.st_mode): return None, st, None - fobj = file_path.open("rb") - with suppress(OSError): - # fstat() may not be available on all platforms - # Once we open the file, we want the fstat() to ensure - # the file has not changed between the first stat() - # and the open(). - st = os.stat(fobj.fileno()) - return fobj, st, None + return file_path, st, None async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: loop = asyncio.get_running_loop() @@ -215,8 +260,8 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1 accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() try: - fobj, st, file_encoding = await loop.run_in_executor( - None, self._open_file_path_stat_encoding, accept_encoding + response_result, fobj, st, file_encoding = await loop.run_in_executor( + None, self._make_response, request, accept_encoding ) except PermissionError: self.set_status(HTTPForbidden.status_code) @@ -227,24 +272,32 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter self.set_status(HTTPNotFound.status_code) return await super().prepare(request) - try: - # Forbid special files like sockets, pipes, devices, etc. - if not fobj or not S_ISREG(st.st_mode): - self.set_status(HTTPForbidden.status_code) - return await super().prepare(request) + # Forbid special files like sockets, pipes, devices, etc. 
+ if response_result is _FileResponseResult.NOT_ACCEPTABLE: + self.set_status(HTTPForbidden.status_code) + return await super().prepare(request) + + if response_result is _FileResponseResult.PRE_CONDITION_FAILED: + return await self._precondition_failed(request) + if response_result is _FileResponseResult.NOT_MODIFIED: + etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}" + last_modified = st.st_mtime + return await self._not_modified(request, etag_value, last_modified) + + assert fobj is not None + try: return await self._prepare_open_file(request, fobj, st, file_encoding) finally: - if fobj: - # We do not await here because we do not want to wait - # for the executor to finish before returning the response - # so the connection can begin servicing another request - # as soon as possible. - close_future = loop.run_in_executor(None, fobj.close) - # Hold a strong reference to the future to prevent it from being - # garbage collected before it completes. - _CLOSE_FUTURES.add(close_future) - close_future.add_done_callback(_CLOSE_FUTURES.remove) + # We do not await here because we do not want to wait + # for the executor to finish before returning the response + # so the connection can begin servicing another request + # as soon as possible. + close_future = loop.run_in_executor(None, fobj.close) + # Hold a strong reference to the future to prevent it from being + # garbage collected before it completes. 
+ _CLOSE_FUTURES.add(close_future) + close_future.add_done_callback(_CLOSE_FUTURES.remove) async def _prepare_open_file( self, @@ -253,43 +306,9 @@ async def _prepare_open_file( st: os.stat_result, file_encoding: Optional[str], ) -> Optional[AbstractStreamWriter]: - etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}" - last_modified = st.st_mtime - - # https://www.rfc-editor.org/rfc/rfc9110#section-13.1.1-2 - ifmatch = request.if_match - if ifmatch is not None and not self._etag_match( - etag_value, ifmatch, weak=False - ): - return await self._precondition_failed(request) - - unmodsince = request.if_unmodified_since - if ( - unmodsince is not None - and ifmatch is None - and st.st_mtime > unmodsince.timestamp() - ): - return await self._precondition_failed(request) - - # https://www.rfc-editor.org/rfc/rfc9110#section-13.1.2-2 - ifnonematch = request.if_none_match - if ifnonematch is not None and self._etag_match( - etag_value, ifnonematch, weak=True - ): - return await self._not_modified(request, etag_value, last_modified) - - modsince = request.if_modified_since - if ( - modsince is not None - and ifnonematch is None - and st.st_mtime <= modsince.timestamp() - ): - return await self._not_modified(request, etag_value, last_modified) - status = self._status file_size = st.st_size count = file_size - start = None ifrange = request.if_range @@ -378,7 +397,7 @@ async def _prepare_open_file( # compress. 
self._compression = False - self.etag = etag_value # type: ignore[assignment] + self.etag = f"{st.st_mtime_ns:x}-{st.st_size:x}" # type: ignore[assignment] self.last_modified = st.st_mtime # type: ignore[assignment] self.content_length = count From bcae5617932a779c79887a0bc6968bf738047892 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 5 Dec 2024 11:53:31 -0600 Subject: [PATCH 1060/1511] [PR #10113/01302134 backport][3.11] Restore 304 performance after fixing `FileResponse` replace race (#10117) --- CHANGES/10113.bugfix.rst | 1 + aiohttp/web_fileresponse.py | 163 ++++++++++++++++++++---------------- 2 files changed, 92 insertions(+), 72 deletions(-) create mode 120000 CHANGES/10113.bugfix.rst diff --git a/CHANGES/10113.bugfix.rst b/CHANGES/10113.bugfix.rst new file mode 120000 index 00000000000..89cef58729f --- /dev/null +++ b/CHANGES/10113.bugfix.rst @@ -0,0 +1 @@ +10101.bugfix.rst \ No newline at end of file diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index 53a27feb098..2c54c988920 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -4,6 +4,7 @@ import pathlib import sys from contextlib import suppress +from enum import Enum, auto from mimetypes import MimeTypes from stat import S_ISREG from types import MappingProxyType @@ -69,6 +70,16 @@ } ) + +class _FileResponseResult(Enum): + """The result of the file response.""" + + SEND_FILE = auto() # Ie a regular file to send + NOT_ACCEPTABLE = auto() # Ie a socket, or non-regular file + PRE_CONDITION_FAILED = auto() # Ie If-Match or If-None-Match failed + NOT_MODIFIED = auto() # 304 Not Modified + + # Add custom pairs and clear the encodings map so guess_type ignores them. 
CONTENT_TYPES.encodings_map.clear() for content_type, extension in ADDITIONAL_CONTENT_TYPES.items(): @@ -166,10 +177,12 @@ async def _precondition_failed( self.content_length = 0 return await super().prepare(request) - def _open_file_path_stat_encoding( - self, accept_encoding: str - ) -> Tuple[Optional[io.BufferedReader], os.stat_result, Optional[str]]: - """Return the io object, stat result, and encoding. + def _make_response( + self, request: "BaseRequest", accept_encoding: str + ) -> Tuple[ + _FileResponseResult, Optional[io.BufferedReader], os.stat_result, Optional[str] + ]: + """Return the response result, io object, stat result, and encoding. If an uncompressed file is returned, the encoding is set to :py:data:`None`. @@ -177,6 +190,52 @@ def _open_file_path_stat_encoding( This method should be called from a thread executor since it calls os.stat which may block. """ + file_path, st, file_encoding = self._get_file_path_stat_encoding( + accept_encoding + ) + if not file_path: + return _FileResponseResult.NOT_ACCEPTABLE, None, st, None + + etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}" + + # https://www.rfc-editor.org/rfc/rfc9110#section-13.1.1-2 + if (ifmatch := request.if_match) is not None and not self._etag_match( + etag_value, ifmatch, weak=False + ): + return _FileResponseResult.PRE_CONDITION_FAILED, None, st, file_encoding + + if ( + (unmodsince := request.if_unmodified_since) is not None + and ifmatch is None + and st.st_mtime > unmodsince.timestamp() + ): + return _FileResponseResult.PRE_CONDITION_FAILED, None, st, file_encoding + + # https://www.rfc-editor.org/rfc/rfc9110#section-13.1.2-2 + if (ifnonematch := request.if_none_match) is not None and self._etag_match( + etag_value, ifnonematch, weak=True + ): + return _FileResponseResult.NOT_MODIFIED, None, st, file_encoding + + if ( + (modsince := request.if_modified_since) is not None + and ifnonematch is None + and st.st_mtime <= modsince.timestamp() + ): + return _FileResponseResult.NOT_MODIFIED, 
None, st, file_encoding + + fobj = file_path.open("rb") + with suppress(OSError): + # fstat() may not be available on all platforms + # Once we open the file, we want the fstat() to ensure + # the file has not changed between the first stat() + # and the open(). + st = os.stat(fobj.fileno()) + return _FileResponseResult.SEND_FILE, fobj, st, file_encoding + + def _get_file_path_stat_encoding( + self, accept_encoding: str + ) -> Tuple[Optional[pathlib.Path], os.stat_result, Optional[str]]: file_path = self._path for file_extension, file_encoding in ENCODING_EXTENSIONS.items(): if file_encoding not in accept_encoding: @@ -187,27 +246,13 @@ def _open_file_path_stat_encoding( # Do not follow symlinks and ignore any non-regular files. st = compressed_path.lstat() if S_ISREG(st.st_mode): - fobj = compressed_path.open("rb") - with suppress(OSError): - # fstat() may not be available on all platforms - # Once we open the file, we want the fstat() to ensure - # the file has not changed between the first stat() - # and the open(). - st = os.stat(fobj.fileno()) - return fobj, st, file_encoding + return compressed_path, st, file_encoding # Fallback to the uncompressed file st = file_path.stat() if not S_ISREG(st.st_mode): return None, st, None - fobj = file_path.open("rb") - with suppress(OSError): - # fstat() may not be available on all platforms - # Once we open the file, we want the fstat() to ensure - # the file has not changed between the first stat() - # and the open(). 
- st = os.stat(fobj.fileno()) - return fobj, st, None + return file_path, st, None async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: loop = asyncio.get_running_loop() @@ -215,8 +260,8 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1 accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() try: - fobj, st, file_encoding = await loop.run_in_executor( - None, self._open_file_path_stat_encoding, accept_encoding + response_result, fobj, st, file_encoding = await loop.run_in_executor( + None, self._make_response, request, accept_encoding ) except PermissionError: self.set_status(HTTPForbidden.status_code) @@ -227,24 +272,32 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter self.set_status(HTTPNotFound.status_code) return await super().prepare(request) - try: - # Forbid special files like sockets, pipes, devices, etc. - if not fobj or not S_ISREG(st.st_mode): - self.set_status(HTTPForbidden.status_code) - return await super().prepare(request) + # Forbid special files like sockets, pipes, devices, etc. + if response_result is _FileResponseResult.NOT_ACCEPTABLE: + self.set_status(HTTPForbidden.status_code) + return await super().prepare(request) + + if response_result is _FileResponseResult.PRE_CONDITION_FAILED: + return await self._precondition_failed(request) + if response_result is _FileResponseResult.NOT_MODIFIED: + etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}" + last_modified = st.st_mtime + return await self._not_modified(request, etag_value, last_modified) + + assert fobj is not None + try: return await self._prepare_open_file(request, fobj, st, file_encoding) finally: - if fobj: - # We do not await here because we do not want to wait - # for the executor to finish before returning the response - # so the connection can begin servicing another request - # as soon as possible. 
- close_future = loop.run_in_executor(None, fobj.close) - # Hold a strong reference to the future to prevent it from being - # garbage collected before it completes. - _CLOSE_FUTURES.add(close_future) - close_future.add_done_callback(_CLOSE_FUTURES.remove) + # We do not await here because we do not want to wait + # for the executor to finish before returning the response + # so the connection can begin servicing another request + # as soon as possible. + close_future = loop.run_in_executor(None, fobj.close) + # Hold a strong reference to the future to prevent it from being + # garbage collected before it completes. + _CLOSE_FUTURES.add(close_future) + close_future.add_done_callback(_CLOSE_FUTURES.remove) async def _prepare_open_file( self, @@ -253,43 +306,9 @@ async def _prepare_open_file( st: os.stat_result, file_encoding: Optional[str], ) -> Optional[AbstractStreamWriter]: - etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}" - last_modified = st.st_mtime - - # https://www.rfc-editor.org/rfc/rfc9110#section-13.1.1-2 - ifmatch = request.if_match - if ifmatch is not None and not self._etag_match( - etag_value, ifmatch, weak=False - ): - return await self._precondition_failed(request) - - unmodsince = request.if_unmodified_since - if ( - unmodsince is not None - and ifmatch is None - and st.st_mtime > unmodsince.timestamp() - ): - return await self._precondition_failed(request) - - # https://www.rfc-editor.org/rfc/rfc9110#section-13.1.2-2 - ifnonematch = request.if_none_match - if ifnonematch is not None and self._etag_match( - etag_value, ifnonematch, weak=True - ): - return await self._not_modified(request, etag_value, last_modified) - - modsince = request.if_modified_since - if ( - modsince is not None - and ifnonematch is None - and st.st_mtime <= modsince.timestamp() - ): - return await self._not_modified(request, etag_value, last_modified) - status = self._status file_size = st.st_size count = file_size - start = None ifrange = request.if_range @@ -378,7 +397,7 
@@ async def _prepare_open_file( # compress. self._compression = False - self.etag = etag_value # type: ignore[assignment] + self.etag = f"{st.st_mtime_ns:x}-{st.st_size:x}" # type: ignore[assignment] self.last_modified = st.st_mtime # type: ignore[assignment] self.content_length = count From db5e6bb97f51f35bfc5b44c42106fb5c55756129 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Thu, 5 Dec 2024 13:46:35 -0600 Subject: [PATCH 1061/1511] [PR #10122/703ce61 backport][3.11] Typing improvements for file responses (#10123) --- aiohttp/web_fileresponse.py | 32 ++++++++++++++------------------ 1 file changed, 14 insertions(+), 18 deletions(-) diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index 2c54c988920..be9cf87e069 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -250,9 +250,7 @@ def _get_file_path_stat_encoding( # Fallback to the uncompressed file st = file_path.stat() - if not S_ISREG(st.st_mode): - return None, st, None - return file_path, st, None + return file_path if S_ISREG(st.st_mode) else None, st, None async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: loop = asyncio.get_running_loop() @@ -307,12 +305,12 @@ async def _prepare_open_file( file_encoding: Optional[str], ) -> Optional[AbstractStreamWriter]: status = self._status - file_size = st.st_size - count = file_size - start = None + file_size: int = st.st_size + file_mtime: float = st.st_mtime + count: int = file_size + start: Optional[int] = None - ifrange = request.if_range - if ifrange is None or st.st_mtime <= ifrange.timestamp(): + if (ifrange := request.if_range) is None or file_mtime <= ifrange.timestamp(): # If-Range header check: # condition = cached date >= last modification date # return 206 if True else 200. 
@@ -323,7 +321,7 @@ async def _prepare_open_file( try: rng = request.http_range start = rng.start - end = rng.stop + end: Optional[int] = rng.stop except ValueError: # https://tools.ietf.org/html/rfc7233: # A server generating a 416 (Range Not Satisfiable) response to @@ -340,7 +338,7 @@ async def _prepare_open_file( # If a range request has been made, convert start, end slice # notation into file pointer offset and count - if start is not None or end is not None: + if start is not None: if start < 0 and end is None: # return tail of file start += file_size if start < 0: @@ -398,25 +396,23 @@ async def _prepare_open_file( self._compression = False self.etag = f"{st.st_mtime_ns:x}-{st.st_size:x}" # type: ignore[assignment] - self.last_modified = st.st_mtime # type: ignore[assignment] + self.last_modified = file_mtime # type: ignore[assignment] self.content_length = count self._headers[hdrs.ACCEPT_RANGES] = "bytes" - real_start = cast(int, start) - if status == HTTPPartialContent.status_code: + real_start = start + assert real_start is not None self._headers[hdrs.CONTENT_RANGE] = "bytes {}-{}/{}".format( real_start, real_start + count - 1, file_size ) # If we are sending 0 bytes calling sendfile() will throw a ValueError - if count == 0 or must_be_empty_body(request.method, self.status): + if count == 0 or must_be_empty_body(request.method, status): return await super().prepare(request) - if start: # be aware that start could be None or int=0 here. - offset = start - else: - offset = 0 + # be aware that start could be None or int=0 here. + offset = start or 0 return await self._sendfile(request, fobj, offset, count) From da4e00b078c07e0eb68cfa4d6d3dd13273c543a6 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Thu, 5 Dec 2024 13:50:53 -0600 Subject: [PATCH 1062/1511] [PR #10122/703ce61 backport][3.12] Typing improvements for file responses (#10124) --- aiohttp/web_fileresponse.py | 32 ++++++++++++++------------------ 1 file changed, 14 insertions(+), 18 deletions(-) diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index 2c54c988920..be9cf87e069 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -250,9 +250,7 @@ def _get_file_path_stat_encoding( # Fallback to the uncompressed file st = file_path.stat() - if not S_ISREG(st.st_mode): - return None, st, None - return file_path, st, None + return file_path if S_ISREG(st.st_mode) else None, st, None async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: loop = asyncio.get_running_loop() @@ -307,12 +305,12 @@ async def _prepare_open_file( file_encoding: Optional[str], ) -> Optional[AbstractStreamWriter]: status = self._status - file_size = st.st_size - count = file_size - start = None + file_size: int = st.st_size + file_mtime: float = st.st_mtime + count: int = file_size + start: Optional[int] = None - ifrange = request.if_range - if ifrange is None or st.st_mtime <= ifrange.timestamp(): + if (ifrange := request.if_range) is None or file_mtime <= ifrange.timestamp(): # If-Range header check: # condition = cached date >= last modification date # return 206 if True else 200. 
@@ -323,7 +321,7 @@ async def _prepare_open_file( try: rng = request.http_range start = rng.start - end = rng.stop + end: Optional[int] = rng.stop except ValueError: # https://tools.ietf.org/html/rfc7233: # A server generating a 416 (Range Not Satisfiable) response to @@ -340,7 +338,7 @@ async def _prepare_open_file( # If a range request has been made, convert start, end slice # notation into file pointer offset and count - if start is not None or end is not None: + if start is not None: if start < 0 and end is None: # return tail of file start += file_size if start < 0: @@ -398,25 +396,23 @@ async def _prepare_open_file( self._compression = False self.etag = f"{st.st_mtime_ns:x}-{st.st_size:x}" # type: ignore[assignment] - self.last_modified = st.st_mtime # type: ignore[assignment] + self.last_modified = file_mtime # type: ignore[assignment] self.content_length = count self._headers[hdrs.ACCEPT_RANGES] = "bytes" - real_start = cast(int, start) - if status == HTTPPartialContent.status_code: + real_start = start + assert real_start is not None self._headers[hdrs.CONTENT_RANGE] = "bytes {}-{}/{}".format( real_start, real_start + count - 1, file_size ) # If we are sending 0 bytes calling sendfile() will throw a ValueError - if count == 0 or must_be_empty_body(request.method, self.status): + if count == 0 or must_be_empty_body(request.method, status): return await super().prepare(request) - if start: # be aware that start could be None or int=0 here. - offset = start - else: - offset = 0 + # be aware that start could be None or int=0 here. + offset = start or 0 return await self._sendfile(request, fobj, offset, count) From 5ddff951a40d0e699b3037f8482b53bf1d9f3990 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 5 Dec 2024 22:27:46 +0000 Subject: [PATCH 1063/1511] [PR #10125/d58d2c3d backport][3.11] Disable zero copy writes in the ``StreamWriter`` (#10126) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/10125.bugfix.rst | 1 + aiohttp/http_writer.py | 2 +- tests/test_http_writer.py | 27 +++++++++++++-------------- 3 files changed, 15 insertions(+), 15 deletions(-) create mode 100644 CHANGES/10125.bugfix.rst diff --git a/CHANGES/10125.bugfix.rst b/CHANGES/10125.bugfix.rst new file mode 100644 index 00000000000..4ece1e68d96 --- /dev/null +++ b/CHANGES/10125.bugfix.rst @@ -0,0 +1 @@ +Disabled zero copy writes in the ``StreamWriter`` -- by :user:`bdraco`. diff --git a/aiohttp/http_writer.py b/aiohttp/http_writer.py index c66fda3d8d0..edd19ed65da 100644 --- a/aiohttp/http_writer.py +++ b/aiohttp/http_writer.py @@ -90,7 +90,7 @@ def _writelines(self, chunks: Iterable[bytes]) -> None: transport = self._protocol.transport if transport is None or transport.is_closing(): raise ClientConnectionResetError("Cannot write to closing transport") - transport.writelines(chunks) + transport.write(b"".join(chunks)) async def write( self, chunk: bytes, *, drain: bool = True, LIMIT: int = 0x10000 diff --git a/tests/test_http_writer.py b/tests/test_http_writer.py index 0ed0e615700..5f316fad2f7 100644 --- a/tests/test_http_writer.py +++ b/tests/test_http_writer.py @@ -104,16 +104,15 @@ async def test_write_large_payload_deflate_compression_data_in_eof( assert transport.write.called # type: ignore[attr-defined] chunks = [c[1][0] for c in list(transport.write.mock_calls)] # type: ignore[attr-defined] transport.write.reset_mock() # type: ignore[attr-defined] - assert not transport.writelines.called # type: ignore[attr-defined] # This payload compresses to 20447 bytes payload = b"".join( [bytes((*range(0, i), *range(i, 0, -1))) for i in range(255) for _ in range(64)] ) await msg.write_eof(payload) - assert not transport.write.called # type: ignore[attr-defined] - assert transport.writelines.called # type: ignore[attr-defined] - chunks.extend(transport.writelines.mock_calls[0][1][0]) # type: ignore[attr-defined] + chunks.extend([c[1][0] for c in 
list(transport.write.mock_calls)]) # type: ignore[attr-defined] + + assert all(chunks) content = b"".join(chunks) assert zlib.decompress(content) == (b"data" * 4096) + payload @@ -180,7 +179,7 @@ async def test_write_payload_deflate_compression_chunked( await msg.write(b"data") await msg.write_eof() - chunks = [b"".join(c[1][0]) for c in list(transport.writelines.mock_calls)] # type: ignore[attr-defined] + chunks = [c[1][0] for c in list(transport.write.mock_calls)] # type: ignore[attr-defined] assert all(chunks) content = b"".join(chunks) assert content == expected @@ -216,7 +215,7 @@ async def test_write_payload_deflate_compression_chunked_data_in_eof( await msg.write(b"data") await msg.write_eof(b"end") - chunks = [b"".join(c[1][0]) for c in list(transport.writelines.mock_calls)] # type: ignore[attr-defined] + chunks = [c[1][0] for c in list(transport.write.mock_calls)] # type: ignore[attr-defined] assert all(chunks) content = b"".join(chunks) assert content == expected @@ -235,16 +234,16 @@ async def test_write_large_payload_deflate_compression_chunked_data_in_eof( # This payload compresses to 1111 bytes payload = b"".join([bytes((*range(0, i), *range(i, 0, -1))) for i in range(255)]) await msg.write_eof(payload) - assert not transport.write.called # type: ignore[attr-defined] - chunks = [] - for write_lines_call in transport.writelines.mock_calls: # type: ignore[attr-defined] - chunked_payload = list(write_lines_call[1][0])[1:] - chunked_payload.pop() - chunks.extend(chunked_payload) + compressed = [] + chunks = [c[1][0] for c in list(transport.write.mock_calls)] # type: ignore[attr-defined] + chunked_body = b"".join(chunks) + split_body = chunked_body.split(b"\r\n") + while split_body: + if split_body.pop(0): + compressed.append(split_body.pop(0)) - assert all(chunks) - content = b"".join(chunks) + content = b"".join(compressed) assert zlib.decompress(content) == (b"data" * 4096) + payload From 6462f7553ff1e7f1778c5e1d315bd5ff890ceaa5 Mon Sep 17 00:00:00 2001 
From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 5 Dec 2024 22:35:01 +0000 Subject: [PATCH 1064/1511] [PR #10125/d58d2c3d backport][3.12] Disable zero copy writes in the ``StreamWriter`` (#10127) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/10125.bugfix.rst | 1 + aiohttp/http_writer.py | 2 +- tests/test_http_writer.py | 27 +++++++++++++-------------- 3 files changed, 15 insertions(+), 15 deletions(-) create mode 100644 CHANGES/10125.bugfix.rst diff --git a/CHANGES/10125.bugfix.rst b/CHANGES/10125.bugfix.rst new file mode 100644 index 00000000000..4ece1e68d96 --- /dev/null +++ b/CHANGES/10125.bugfix.rst @@ -0,0 +1 @@ +Disabled zero copy writes in the ``StreamWriter`` -- by :user:`bdraco`. diff --git a/aiohttp/http_writer.py b/aiohttp/http_writer.py index c66fda3d8d0..edd19ed65da 100644 --- a/aiohttp/http_writer.py +++ b/aiohttp/http_writer.py @@ -90,7 +90,7 @@ def _writelines(self, chunks: Iterable[bytes]) -> None: transport = self._protocol.transport if transport is None or transport.is_closing(): raise ClientConnectionResetError("Cannot write to closing transport") - transport.writelines(chunks) + transport.write(b"".join(chunks)) async def write( self, chunk: bytes, *, drain: bool = True, LIMIT: int = 0x10000 diff --git a/tests/test_http_writer.py b/tests/test_http_writer.py index 0ed0e615700..5f316fad2f7 100644 --- a/tests/test_http_writer.py +++ b/tests/test_http_writer.py @@ -104,16 +104,15 @@ async def test_write_large_payload_deflate_compression_data_in_eof( assert transport.write.called # type: ignore[attr-defined] chunks = [c[1][0] for c in list(transport.write.mock_calls)] # type: ignore[attr-defined] transport.write.reset_mock() # type: ignore[attr-defined] - assert not transport.writelines.called # type: ignore[attr-defined] # This payload compresses to 20447 bytes payload = b"".join( [bytes((*range(0, i), *range(i, 0, -1))) for i in range(255) for _ in range(64)] ) await msg.write_eof(payload) - 
assert not transport.write.called # type: ignore[attr-defined] - assert transport.writelines.called # type: ignore[attr-defined] - chunks.extend(transport.writelines.mock_calls[0][1][0]) # type: ignore[attr-defined] + chunks.extend([c[1][0] for c in list(transport.write.mock_calls)]) # type: ignore[attr-defined] + + assert all(chunks) content = b"".join(chunks) assert zlib.decompress(content) == (b"data" * 4096) + payload @@ -180,7 +179,7 @@ async def test_write_payload_deflate_compression_chunked( await msg.write(b"data") await msg.write_eof() - chunks = [b"".join(c[1][0]) for c in list(transport.writelines.mock_calls)] # type: ignore[attr-defined] + chunks = [c[1][0] for c in list(transport.write.mock_calls)] # type: ignore[attr-defined] assert all(chunks) content = b"".join(chunks) assert content == expected @@ -216,7 +215,7 @@ async def test_write_payload_deflate_compression_chunked_data_in_eof( await msg.write(b"data") await msg.write_eof(b"end") - chunks = [b"".join(c[1][0]) for c in list(transport.writelines.mock_calls)] # type: ignore[attr-defined] + chunks = [c[1][0] for c in list(transport.write.mock_calls)] # type: ignore[attr-defined] assert all(chunks) content = b"".join(chunks) assert content == expected @@ -235,16 +234,16 @@ async def test_write_large_payload_deflate_compression_chunked_data_in_eof( # This payload compresses to 1111 bytes payload = b"".join([bytes((*range(0, i), *range(i, 0, -1))) for i in range(255)]) await msg.write_eof(payload) - assert not transport.write.called # type: ignore[attr-defined] - chunks = [] - for write_lines_call in transport.writelines.mock_calls: # type: ignore[attr-defined] - chunked_payload = list(write_lines_call[1][0])[1:] - chunked_payload.pop() - chunks.extend(chunked_payload) + compressed = [] + chunks = [c[1][0] for c in list(transport.write.mock_calls)] # type: ignore[attr-defined] + chunked_body = b"".join(chunks) + split_body = chunked_body.split(b"\r\n") + while split_body: + if split_body.pop(0): + 
compressed.append(split_body.pop(0)) - assert all(chunks) - content = b"".join(chunks) + content = b"".join(compressed) assert zlib.decompress(content) == (b"data" * 4096) + payload From 0d7352aeca2ac52a4e18aa786bf2aefd47129bbe Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Thu, 5 Dec 2024 16:44:47 -0600 Subject: [PATCH 1065/1511] Release 3.11.10 (#10128) --- CHANGES.rst | 34 ++++++++++++++++++++++++++++++++++ CHANGES/10101.bugfix.rst | 1 - CHANGES/10102.bugfix.rst | 1 - CHANGES/10113.bugfix.rst | 1 - CHANGES/10125.bugfix.rst | 1 - aiohttp/__init__.py | 2 +- 6 files changed, 35 insertions(+), 5 deletions(-) delete mode 100644 CHANGES/10101.bugfix.rst delete mode 100644 CHANGES/10102.bugfix.rst delete mode 120000 CHANGES/10113.bugfix.rst delete mode 100644 CHANGES/10125.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index 8352236c320..586d70c9697 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,40 @@ .. towncrier release notes start +3.11.10 (2024-12-05) +==================== + +Bug fixes +--------- + +- Fixed race condition in :class:`aiohttp.web.FileResponse` that could have resulted in an incorrect response if the file was replaced on the file system during ``prepare`` -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10101`, :issue:`10113`. + + + +- Replaced deprecated call to :func:`mimetypes.guess_type` with :func:`mimetypes.guess_file_type` when using Python 3.13+ -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10102`. + + + +- Disabled zero copy writes in the ``StreamWriter`` -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10125`. 
+ + + + +---- + + 3.11.9 (2024-12-01) =================== diff --git a/CHANGES/10101.bugfix.rst b/CHANGES/10101.bugfix.rst deleted file mode 100644 index e06195ac028..00000000000 --- a/CHANGES/10101.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed race condition in :class:`aiohttp.web.FileResponse` that could have resulted in an incorrect response if the file was replaced on the file system during ``prepare`` -- by :user:`bdraco`. diff --git a/CHANGES/10102.bugfix.rst b/CHANGES/10102.bugfix.rst deleted file mode 100644 index 86dda8684dd..00000000000 --- a/CHANGES/10102.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Replaced deprecated call to :func:`mimetypes.guess_type` with :func:`mimetypes.guess_file_type` when using Python 3.13+ -- by :user:`bdraco`. diff --git a/CHANGES/10113.bugfix.rst b/CHANGES/10113.bugfix.rst deleted file mode 120000 index 89cef58729f..00000000000 --- a/CHANGES/10113.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -10101.bugfix.rst \ No newline at end of file diff --git a/CHANGES/10125.bugfix.rst b/CHANGES/10125.bugfix.rst deleted file mode 100644 index 4ece1e68d96..00000000000 --- a/CHANGES/10125.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Disabled zero copy writes in the ``StreamWriter`` -- by :user:`bdraco`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 0024853acaf..8c80ff3ab7d 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.10.dev0" +__version__ = "3.11.10" from typing import TYPE_CHECKING, Tuple From 8ce1c14ef6eafbd1b0afb59ad0f47cbc01e5194c Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Thu, 5 Dec 2024 18:08:26 -0600 Subject: [PATCH 1066/1511] Increment version to 3.11.11.dev0 (#10130) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 8c80ff3ab7d..f4d732b8674 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.10" +__version__ = "3.11.11.dev0" from typing import TYPE_CHECKING, Tuple From 7b868436beb3aa1aaf1b6d90da3fe830aa97f632 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 6 Dec 2024 00:56:24 +0000 Subject: [PATCH 1067/1511] [PR #10131/7f92bebb backport][3.11] Bump Python version for benchmarks to 3.13 (#10132) Co-authored-by: J. Nick Koston <nick@koston.org> --- .github/workflows/ci-cd.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 765047b933f..95238b93687 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -250,11 +250,11 @@ jobs: uses: actions/checkout@v4 with: submodules: true - - name: Setup Python 3.12 + - name: Setup Python 3.13 id: python-install uses: actions/setup-python@v5 with: - python-version: 3.12 + python-version: 3.13 cache: pip cache-dependency-path: requirements/*.txt - name: Update pip, wheel, setuptools, build, twine From 0e4a0e4829894e8aabe175120ea74311a25a1a8e Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 6 Dec 2024 04:07:44 +0000 Subject: [PATCH 1068/1511] [PR #10131/7f92bebb backport][3.12] Bump Python version for benchmarks to 3.13 (#10133) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- .github/workflows/ci-cd.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 765047b933f..95238b93687 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -250,11 +250,11 @@ jobs: uses: actions/checkout@v4 with: submodules: true - - name: Setup Python 3.12 + - name: Setup Python 3.13 id: python-install uses: actions/setup-python@v5 with: - python-version: 3.12 + python-version: 3.13 cache: pip cache-dependency-path: requirements/*.txt - name: Update pip, wheel, setuptools, build, twine From 87f0f4866c9e7710d0d66367a94425e0e7e3d642 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 6 Dec 2024 10:43:25 +0000 Subject: [PATCH 1069/1511] Bump actions/cache from 4.1.2 to 4.2.0 (#10136) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/cache](https://github.com/actions/cache) from 4.1.2 to 4.2.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/actions/cache/releases">actions/cache's releases</a>.</em></p> <blockquote> <h2>v4.2.0</h2> <h2>⚠️ Important Changes</h2> <p>The cache backend service has been rewritten from the ground up for improved performance and reliability. <a href="https://github.com/actions/cache">actions/cache</a> now integrates with the new cache service (v2) APIs.</p> <p>The new service will gradually roll out as of <strong>February 1st, 2025</strong>. The legacy service will also be sunset on the same date. Changes in these release are <strong>fully backward compatible</strong>.</p> <p><strong>We are deprecating some versions of this action</strong>. 
We recommend upgrading to version <code>v4</code> or <code>v3</code> as soon as possible before <strong>February 1st, 2025.</strong> (Upgrade instructions below).</p> <p>If you are using pinned SHAs, please use the SHAs of versions <code>v4.2.0</code> or <code>v3.4.0</code></p> <p>If you do not upgrade, all workflow runs using any of the deprecated <a href="https://github.com/actions/cache">actions/cache</a> will fail.</p> <p>Upgrading to the recommended versions will not break your workflows.</p> <p>Read more about the change & access the migration guide: <a href="https://github.com/actions/cache/discussions/1510">reference to the announcement</a>.</p> <h3>Minor changes</h3> <p>Minor and patch version updates for these dependencies:</p> <ul> <li><code>@​actions/core</code>: <code>1.11.1</code></li> <li><code>@​actions/io</code>: <code>1.1.3</code></li> <li><code>@​vercel/ncc</code>: <code>0.38.3</code></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/actions/cache/compare/v4...v4.2.0">https://github.com/actions/cache/compare/v4...v4.2.0</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/actions/cache/blob/main/RELEASES.md">actions/cache's changelog</a>.</em></p> <blockquote> <h3>4.2.0</h3> <p>TLDR; The cache backend service has been rewritten from the ground up for improved performance and reliability. <a href="https://github.com/actions/cache">actions/cache</a> now integrates with the new cache service (v2) APIs.</p> <p>The new service will gradually roll out as of <strong>February 1st, 2025</strong>. The legacy service will also be sunset on the same date. Changes in these release are <strong>fully backward compatible</strong>.</p> <p><strong>We are deprecating some versions of this action</strong>. 
We recommend upgrading to version <code>v4</code> or <code>v3</code> as soon as possible before <strong>February 1st, 2025.</strong> (Upgrade instructions below).</p> <p>If you are using pinned SHAs, please use the SHAs of versions <code>v4.2.0</code> or <code>v3.4.0</code></p> <p>If you do not upgrade, all workflow runs using any of the deprecated <a href="https://github.com/actions/cache">actions/cache</a> will fail.</p> <p>Upgrading to the recommended versions will not break your workflows.</p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/actions/cache/commit/1bd1e32a3bdc45362d1e726936510720a7c30a57"><code>1bd1e32</code></a> Merge pull request <a href="https://redirect.github.com/actions/cache/issues/1509">#1509</a> from actions/Link-/cache-4.2.0</li> <li><a href="https://github.com/actions/cache/commit/882d7ced4c9b8af53ed67bfa36ee600195e62940"><code>882d7ce</code></a> Add 3.4.0 release notes</li> <li><a href="https://github.com/actions/cache/commit/f2695d7a42dd0d7ad21976ed7ab91dbdbc3c2216"><code>f2695d7</code></a> Rerun CI</li> <li><a href="https://github.com/actions/cache/commit/f46ceeb60d3da27b7cbac269520a4b1bfb15f199"><code>f46ceeb</code></a> Add licensed output</li> <li><a href="https://github.com/actions/cache/commit/e6f5858749f178cf4a01b0d777917ba913710560"><code>e6f5858</code></a> Add lodash to list of reviewed licenses</li> <li><a href="https://github.com/actions/cache/commit/4ae6f21c0d820c73db2589af6983e001d8c19c1d"><code>4ae6f21</code></a> Add reviewed licensed packages</li> <li><a href="https://github.com/actions/cache/commit/c16df86586baf94b0deaa873e22eb739c59e5b15"><code>c16df86</code></a> Add licensed output</li> <li><a href="https://github.com/actions/cache/commit/b109c12f3bdd6fb6a7dd42b202df645243efbd2f"><code>b109c12</code></a> Upgrade <code>@​actions/core</code> to 1.11.1 and other deps</li> <li><a 
href="https://github.com/actions/cache/commit/b7d227d702af06e6be1fa308c014252c10bbc267"><code>b7d227d</code></a> Upgrade <code>@​vercel/ncc</code> to 0.38.3</li> <li><a href="https://github.com/actions/cache/commit/faf639248d95d2a6c5884b8e6588e233eb3b10a0"><code>faf6392</code></a> Update RELEASES.md</li> <li>Additional commits viewable in <a href="https://github.com/actions/cache/compare/v4.1.2...v4.2.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/cache&package-manager=github_actions&previous-version=4.1.2&new-version=4.2.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 95238b93687..d5e119b779d 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -47,7 +47,7 @@ jobs: with: python-version: 3.11 - name: Cache PyPI - uses: actions/cache@v4.1.2 + uses: actions/cache@v4.2.0 with: key: pip-lint-${{ hashFiles('requirements/*.txt') }} path: ~/.cache/pip @@ -99,7 +99,7 @@ jobs: with: submodules: true - name: Cache llhttp generated files - uses: actions/cache@v4.1.2 + uses: actions/cache@v4.2.0 id: cache with: key: llhttp-${{ hashFiles('vendor/llhttp/package*.json', 'vendor/llhttp/src/**/*') }} @@ -163,7 +163,7 @@ jobs: echo "dir=$(pip cache dir)" >> "${GITHUB_OUTPUT}" shell: bash - name: Cache PyPI - uses: actions/cache@v4.1.2 + uses: actions/cache@v4.2.0 with: key: pip-ci-${{ runner.os }}-${{ matrix.pyver }}-${{ matrix.no-extensions }}-${{ hashFiles('requirements/*.txt') }} path: ${{ steps.pip-cache.outputs.dir }} From 489b6649a37b1850ef7c3dc042252cfb90f7daf5 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" 
<45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 6 Dec 2024 17:34:12 +0000 Subject: [PATCH 1070/1511] [PR #10138/dbd77ad6 backport][3.11] Bump sphinx to 8.1.3 along with required dependencies (#10139) Co-authored-by: J. Nick Koston <nick@koston.org> --- requirements/constraints.txt | 14 +++++++------- requirements/dev.txt | 14 +++++++------- requirements/doc-spelling.txt | 14 +++++++------- requirements/doc.txt | 14 +++++++------- 4 files changed, 28 insertions(+), 28 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index d32acc7b773..740e3e2d559 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -14,7 +14,7 @@ aiohttp-theme==0.1.7 # via -r requirements/doc.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in -alabaster==0.7.13 +alabaster==1.0.0 # via sphinx annotated-types==0.7.0 # via pydantic @@ -236,22 +236,22 @@ slotscheck==0.19.1 # via -r requirements/lint.in snowballstemmer==2.2.0 # via sphinx -sphinx==7.1.2 +sphinx==8.1.3 # via # -r requirements/doc.in # sphinxcontrib-spelling # sphinxcontrib-towncrier -sphinxcontrib-applehelp==1.0.4 +sphinxcontrib-applehelp==2.0.0 # via sphinx -sphinxcontrib-devhelp==1.0.2 +sphinxcontrib-devhelp==2.0.0 # via sphinx -sphinxcontrib-htmlhelp==2.0.1 +sphinxcontrib-htmlhelp==2.1.0 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx -sphinxcontrib-qthelp==1.0.3 +sphinxcontrib-qthelp==2.0.0 # via sphinx -sphinxcontrib-serializinghtml==1.1.5 +sphinxcontrib-serializinghtml==2.0.0 # via sphinx sphinxcontrib-spelling==8.0.0 ; platform_system != "Windows" # via -r requirements/doc-spelling.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 168ce639d19..72e49ed9edf 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -14,7 +14,7 @@ aiohttp-theme==0.1.7 # via -r requirements/doc.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in -alabaster==0.7.13 +alabaster==1.0.0 # via sphinx annotated-types==0.7.0 # via pydantic @@ 
-210,21 +210,21 @@ slotscheck==0.19.1 # via -r requirements/lint.in snowballstemmer==2.2.0 # via sphinx -sphinx==7.1.2 +sphinx==8.1.3 # via # -r requirements/doc.in # sphinxcontrib-towncrier -sphinxcontrib-applehelp==1.0.4 +sphinxcontrib-applehelp==2.0.0 # via sphinx -sphinxcontrib-devhelp==1.0.2 +sphinxcontrib-devhelp==2.0.0 # via sphinx -sphinxcontrib-htmlhelp==2.0.1 +sphinxcontrib-htmlhelp==2.1.0 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx -sphinxcontrib-qthelp==1.0.3 +sphinxcontrib-qthelp==2.0.0 # via sphinx -sphinxcontrib-serializinghtml==1.1.5 +sphinxcontrib-serializinghtml==2.0.0 # via sphinx sphinxcontrib-towncrier==0.4.0a0 # via -r requirements/doc.in diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index df393012548..892ae6b164c 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -6,7 +6,7 @@ # aiohttp-theme==0.1.7 # via -r requirements/doc.in -alabaster==0.7.13 +alabaster==1.0.0 # via sphinx babel==2.16.0 # via sphinx @@ -46,22 +46,22 @@ requests==2.32.3 # via sphinx snowballstemmer==2.2.0 # via sphinx -sphinx==7.1.2 +sphinx==8.1.3 # via # -r requirements/doc.in # sphinxcontrib-spelling # sphinxcontrib-towncrier -sphinxcontrib-applehelp==1.0.4 +sphinxcontrib-applehelp==2.0.0 # via sphinx -sphinxcontrib-devhelp==1.0.2 +sphinxcontrib-devhelp==2.0.0 # via sphinx -sphinxcontrib-htmlhelp==2.0.1 +sphinxcontrib-htmlhelp==2.1.0 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx -sphinxcontrib-qthelp==1.0.3 +sphinxcontrib-qthelp==2.0.0 # via sphinx -sphinxcontrib-serializinghtml==1.1.5 +sphinxcontrib-serializinghtml==2.0.0 # via sphinx sphinxcontrib-spelling==8.0.0 ; platform_system != "Windows" # via -r requirements/doc-spelling.in diff --git a/requirements/doc.txt b/requirements/doc.txt index 43b7c6b7e8b..f7f98330e1f 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -6,7 +6,7 @@ # aiohttp-theme==0.1.7 # via -r requirements/doc.in -alabaster==0.7.13 +alabaster==1.0.0 # via sphinx 
babel==2.16.0 # via sphinx @@ -44,21 +44,21 @@ requests==2.32.3 # via sphinx snowballstemmer==2.2.0 # via sphinx -sphinx==7.1.2 +sphinx==8.1.3 # via # -r requirements/doc.in # sphinxcontrib-towncrier -sphinxcontrib-applehelp==1.0.4 +sphinxcontrib-applehelp==2.0.0 # via sphinx -sphinxcontrib-devhelp==1.0.2 +sphinxcontrib-devhelp==2.0.0 # via sphinx -sphinxcontrib-htmlhelp==2.0.1 +sphinxcontrib-htmlhelp==2.1.0 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx -sphinxcontrib-qthelp==1.0.3 +sphinxcontrib-qthelp==2.0.0 # via sphinx -sphinxcontrib-serializinghtml==1.1.5 +sphinxcontrib-serializinghtml==2.0.0 # via sphinx sphinxcontrib-towncrier==0.4.0a0 # via -r requirements/doc.in From f52f60a4ba8f023d08c4b319a0d8c0e7d290d4cf Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 6 Dec 2024 17:39:22 +0000 Subject: [PATCH 1071/1511] [PR #10138/dbd77ad6 backport][3.12] Bump sphinx to 8.1.3 along with required dependencies (#10140) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- requirements/constraints.txt | 14 +++++++------- requirements/dev.txt | 14 +++++++------- requirements/doc-spelling.txt | 14 +++++++------- requirements/doc.txt | 14 +++++++------- 4 files changed, 28 insertions(+), 28 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index d32acc7b773..740e3e2d559 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -14,7 +14,7 @@ aiohttp-theme==0.1.7 # via -r requirements/doc.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in -alabaster==0.7.13 +alabaster==1.0.0 # via sphinx annotated-types==0.7.0 # via pydantic @@ -236,22 +236,22 @@ slotscheck==0.19.1 # via -r requirements/lint.in snowballstemmer==2.2.0 # via sphinx -sphinx==7.1.2 +sphinx==8.1.3 # via # -r requirements/doc.in # sphinxcontrib-spelling # sphinxcontrib-towncrier -sphinxcontrib-applehelp==1.0.4 +sphinxcontrib-applehelp==2.0.0 # via sphinx -sphinxcontrib-devhelp==1.0.2 +sphinxcontrib-devhelp==2.0.0 # via sphinx -sphinxcontrib-htmlhelp==2.0.1 +sphinxcontrib-htmlhelp==2.1.0 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx -sphinxcontrib-qthelp==1.0.3 +sphinxcontrib-qthelp==2.0.0 # via sphinx -sphinxcontrib-serializinghtml==1.1.5 +sphinxcontrib-serializinghtml==2.0.0 # via sphinx sphinxcontrib-spelling==8.0.0 ; platform_system != "Windows" # via -r requirements/doc-spelling.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 168ce639d19..72e49ed9edf 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -14,7 +14,7 @@ aiohttp-theme==0.1.7 # via -r requirements/doc.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in -alabaster==0.7.13 +alabaster==1.0.0 # via sphinx annotated-types==0.7.0 # via pydantic @@ -210,21 +210,21 @@ slotscheck==0.19.1 # via -r requirements/lint.in snowballstemmer==2.2.0 # via sphinx -sphinx==7.1.2 +sphinx==8.1.3 # via # -r requirements/doc.in # sphinxcontrib-towncrier -sphinxcontrib-applehelp==1.0.4 
+sphinxcontrib-applehelp==2.0.0 # via sphinx -sphinxcontrib-devhelp==1.0.2 +sphinxcontrib-devhelp==2.0.0 # via sphinx -sphinxcontrib-htmlhelp==2.0.1 +sphinxcontrib-htmlhelp==2.1.0 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx -sphinxcontrib-qthelp==1.0.3 +sphinxcontrib-qthelp==2.0.0 # via sphinx -sphinxcontrib-serializinghtml==1.1.5 +sphinxcontrib-serializinghtml==2.0.0 # via sphinx sphinxcontrib-towncrier==0.4.0a0 # via -r requirements/doc.in diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index df393012548..892ae6b164c 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -6,7 +6,7 @@ # aiohttp-theme==0.1.7 # via -r requirements/doc.in -alabaster==0.7.13 +alabaster==1.0.0 # via sphinx babel==2.16.0 # via sphinx @@ -46,22 +46,22 @@ requests==2.32.3 # via sphinx snowballstemmer==2.2.0 # via sphinx -sphinx==7.1.2 +sphinx==8.1.3 # via # -r requirements/doc.in # sphinxcontrib-spelling # sphinxcontrib-towncrier -sphinxcontrib-applehelp==1.0.4 +sphinxcontrib-applehelp==2.0.0 # via sphinx -sphinxcontrib-devhelp==1.0.2 +sphinxcontrib-devhelp==2.0.0 # via sphinx -sphinxcontrib-htmlhelp==2.0.1 +sphinxcontrib-htmlhelp==2.1.0 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx -sphinxcontrib-qthelp==1.0.3 +sphinxcontrib-qthelp==2.0.0 # via sphinx -sphinxcontrib-serializinghtml==1.1.5 +sphinxcontrib-serializinghtml==2.0.0 # via sphinx sphinxcontrib-spelling==8.0.0 ; platform_system != "Windows" # via -r requirements/doc-spelling.in diff --git a/requirements/doc.txt b/requirements/doc.txt index 43b7c6b7e8b..f7f98330e1f 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -6,7 +6,7 @@ # aiohttp-theme==0.1.7 # via -r requirements/doc.in -alabaster==0.7.13 +alabaster==1.0.0 # via sphinx babel==2.16.0 # via sphinx @@ -44,21 +44,21 @@ requests==2.32.3 # via sphinx snowballstemmer==2.2.0 # via sphinx -sphinx==7.1.2 +sphinx==8.1.3 # via # -r requirements/doc.in # sphinxcontrib-towncrier 
-sphinxcontrib-applehelp==1.0.4 +sphinxcontrib-applehelp==2.0.0 # via sphinx -sphinxcontrib-devhelp==1.0.2 +sphinxcontrib-devhelp==2.0.0 # via sphinx -sphinxcontrib-htmlhelp==2.0.1 +sphinxcontrib-htmlhelp==2.1.0 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx -sphinxcontrib-qthelp==1.0.3 +sphinxcontrib-qthelp==2.0.0 # via sphinx -sphinxcontrib-serializinghtml==1.1.5 +sphinxcontrib-serializinghtml==2.0.0 # via sphinx sphinxcontrib-towncrier==0.4.0a0 # via -r requirements/doc.in From eb42db85a461f3cc883c3e41a43bb02c9b87e308 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Sun, 8 Dec 2024 15:26:09 +0000 Subject: [PATCH 1072/1511] Fix type of SSLContext for some static type checkers (#10099) (#10144) (cherry picked from commit 6200513d8fd34a820a4d10d238ca92d9f73ce7ee) Co-authored-by: AlanBogarin <bogarin01alan@gmail.com> --- CHANGES/10099.bugfix.rst | 1 + CONTRIBUTORS.txt | 1 + aiohttp/client_exceptions.py | 10 +++++++--- aiohttp/client_reqrep.py | 12 ++++++++---- aiohttp/connector.py | 14 +++++++++----- aiohttp/web.py | 10 +++++++--- aiohttp/web_runner.py | 12 +++++++----- aiohttp/worker.py | 15 ++++++++++----- docs/spelling_wordlist.txt | 1 + 9 files changed, 51 insertions(+), 25 deletions(-) create mode 100644 CHANGES/10099.bugfix.rst diff --git a/CHANGES/10099.bugfix.rst b/CHANGES/10099.bugfix.rst new file mode 100644 index 00000000000..718420a6ad5 --- /dev/null +++ b/CHANGES/10099.bugfix.rst @@ -0,0 +1 @@ +Fixed type of ``SSLContext`` for some static type checkers (e.g. pyright). 
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index c3abc66bebf..94d003a1719 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -9,6 +9,7 @@ Adam Mills Adrian Krupa Adrián Chaves Ahmed Tahri +Alan Bogarin Alan Tse Alec Hanefeld Alejandro Gómez diff --git a/aiohttp/client_exceptions.py b/aiohttp/client_exceptions.py index 667da8d5084..1d298e9a8cf 100644 --- a/aiohttp/client_exceptions.py +++ b/aiohttp/client_exceptions.py @@ -8,13 +8,17 @@ from .typedefs import StrOrURL -try: +if TYPE_CHECKING: import ssl SSLContext = ssl.SSLContext -except ImportError: # pragma: no cover - ssl = SSLContext = None # type: ignore[assignment] +else: + try: + import ssl + SSLContext = ssl.SSLContext + except ImportError: # pragma: no cover + ssl = SSLContext = None # type: ignore[assignment] if TYPE_CHECKING: from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index e97c40ce0e5..43b48063c6e 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -72,12 +72,16 @@ RawHeaders, ) -try: +if TYPE_CHECKING: import ssl from ssl import SSLContext -except ImportError: # pragma: no cover - ssl = None # type: ignore[assignment] - SSLContext = object # type: ignore[misc,assignment] +else: + try: + import ssl + from ssl import SSLContext + except ImportError: # pragma: no cover + ssl = None # type: ignore[assignment] + SSLContext = object # type: ignore[misc,assignment] __all__ = ("ClientRequest", "ClientResponse", "RequestInfo", "Fingerprint") diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 93bc2513b20..a9123f82bc0 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -60,14 +60,18 @@ ) from .resolver import DefaultResolver -try: +if TYPE_CHECKING: import ssl SSLContext = ssl.SSLContext -except ImportError: # pragma: no cover - ssl = None # type: ignore[assignment] - SSLContext = object # type: ignore[misc,assignment] - +else: + try: + import ssl + + 
SSLContext = ssl.SSLContext + except ImportError: # pragma: no cover + ssl = None # type: ignore[assignment] + SSLContext = object # type: ignore[misc,assignment] EMPTY_SCHEMA_SET = frozenset({""}) HTTP_SCHEMA_SET = frozenset({"http", "https"}) diff --git a/aiohttp/web.py b/aiohttp/web.py index f975b665331..d6ab6f6fad4 100644 --- a/aiohttp/web.py +++ b/aiohttp/web.py @@ -9,6 +9,7 @@ from contextlib import suppress from importlib import import_module from typing import ( + TYPE_CHECKING, Any, Awaitable, Callable, @@ -287,10 +288,13 @@ ) -try: +if TYPE_CHECKING: from ssl import SSLContext -except ImportError: # pragma: no cover - SSLContext = Any # type: ignore[misc,assignment] +else: + try: + from ssl import SSLContext + except ImportError: # pragma: no cover + SSLContext = object # type: ignore[misc,assignment] # Only display warning when using -Wdefault, -We, -X dev or similar. warnings.filterwarnings("ignore", category=NotAppKeyWarning, append=True) diff --git a/aiohttp/web_runner.py b/aiohttp/web_runner.py index f8933383435..bcfec727c84 100644 --- a/aiohttp/web_runner.py +++ b/aiohttp/web_runner.py @@ -3,7 +3,7 @@ import socket import warnings from abc import ABC, abstractmethod -from typing import Any, List, Optional, Set +from typing import TYPE_CHECKING, Any, List, Optional, Set from yarl import URL @@ -11,11 +11,13 @@ from .web_app import Application from .web_server import Server -try: +if TYPE_CHECKING: from ssl import SSLContext -except ImportError: - SSLContext = object # type: ignore[misc,assignment] - +else: + try: + from ssl import SSLContext + except ImportError: # pragma: no cover + SSLContext = object # type: ignore[misc,assignment] __all__ = ( "BaseSite", diff --git a/aiohttp/worker.py b/aiohttp/worker.py index 9b307697336..8ed121ac955 100644 --- a/aiohttp/worker.py +++ b/aiohttp/worker.py @@ -6,7 +6,7 @@ import signal import sys from types import FrameType -from typing import Any, Awaitable, Callable, Optional, Union # noqa +from typing import 
TYPE_CHECKING, Any, Optional from gunicorn.config import AccessLogFormat as GunicornAccessLogFormat from gunicorn.workers import base @@ -17,13 +17,18 @@ from .web_app import Application from .web_log import AccessLogger -try: +if TYPE_CHECKING: import ssl SSLContext = ssl.SSLContext -except ImportError: # pragma: no cover - ssl = None # type: ignore[assignment] - SSLContext = object # type: ignore[misc,assignment] +else: + try: + import ssl + + SSLContext = ssl.SSLContext + except ImportError: # pragma: no cover + ssl = None # type: ignore[assignment] + SSLContext = object # type: ignore[misc,assignment] __all__ = ("GunicornWebWorker", "GunicornUVLoopWebWorker") diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index a1f3d944584..c4e10b44987 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -245,6 +245,7 @@ py pydantic pyenv pyflakes +pyright pytest Pytest Quickstart From b770b1ac2cde2ec77d234a04d771df4f7c573626 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Sun, 8 Dec 2024 15:30:15 +0000 Subject: [PATCH 1073/1511] Fix type of SSLContext for some static type checkers (#10099) (#10145) (cherry picked from commit 6200513d8fd34a820a4d10d238ca92d9f73ce7ee) Co-authored-by: AlanBogarin <bogarin01alan@gmail.com> --- CHANGES/10099.bugfix.rst | 1 + CONTRIBUTORS.txt | 1 + aiohttp/client_exceptions.py | 10 +++++++--- aiohttp/client_reqrep.py | 12 ++++++++---- aiohttp/connector.py | 14 +++++++++----- aiohttp/web.py | 10 +++++++--- aiohttp/web_runner.py | 12 +++++++----- aiohttp/worker.py | 15 ++++++++++----- docs/spelling_wordlist.txt | 1 + 9 files changed, 51 insertions(+), 25 deletions(-) create mode 100644 CHANGES/10099.bugfix.rst diff --git a/CHANGES/10099.bugfix.rst b/CHANGES/10099.bugfix.rst new file mode 100644 index 00000000000..718420a6ad5 --- /dev/null +++ b/CHANGES/10099.bugfix.rst @@ -0,0 +1 @@ +Fixed type of ``SSLContext`` for some static type checkers (e.g. pyright). 
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 6adb3b97fb1..ded6c463e40 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -9,6 +9,7 @@ Adam Mills Adrian Krupa Adrián Chaves Ahmed Tahri +Alan Bogarin Alan Tse Alec Hanefeld Alejandro Gómez diff --git a/aiohttp/client_exceptions.py b/aiohttp/client_exceptions.py index 667da8d5084..1d298e9a8cf 100644 --- a/aiohttp/client_exceptions.py +++ b/aiohttp/client_exceptions.py @@ -8,13 +8,17 @@ from .typedefs import StrOrURL -try: +if TYPE_CHECKING: import ssl SSLContext = ssl.SSLContext -except ImportError: # pragma: no cover - ssl = SSLContext = None # type: ignore[assignment] +else: + try: + import ssl + SSLContext = ssl.SSLContext + except ImportError: # pragma: no cover + ssl = SSLContext = None # type: ignore[assignment] if TYPE_CHECKING: from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index e97c40ce0e5..43b48063c6e 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -72,12 +72,16 @@ RawHeaders, ) -try: +if TYPE_CHECKING: import ssl from ssl import SSLContext -except ImportError: # pragma: no cover - ssl = None # type: ignore[assignment] - SSLContext = object # type: ignore[misc,assignment] +else: + try: + import ssl + from ssl import SSLContext + except ImportError: # pragma: no cover + ssl = None # type: ignore[assignment] + SSLContext = object # type: ignore[misc,assignment] __all__ = ("ClientRequest", "ClientResponse", "RequestInfo", "Fingerprint") diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 93bc2513b20..a9123f82bc0 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -60,14 +60,18 @@ ) from .resolver import DefaultResolver -try: +if TYPE_CHECKING: import ssl SSLContext = ssl.SSLContext -except ImportError: # pragma: no cover - ssl = None # type: ignore[assignment] - SSLContext = object # type: ignore[misc,assignment] - +else: + try: + import ssl + + 
SSLContext = ssl.SSLContext + except ImportError: # pragma: no cover + ssl = None # type: ignore[assignment] + SSLContext = object # type: ignore[misc,assignment] EMPTY_SCHEMA_SET = frozenset({""}) HTTP_SCHEMA_SET = frozenset({"http", "https"}) diff --git a/aiohttp/web.py b/aiohttp/web.py index f975b665331..d6ab6f6fad4 100644 --- a/aiohttp/web.py +++ b/aiohttp/web.py @@ -9,6 +9,7 @@ from contextlib import suppress from importlib import import_module from typing import ( + TYPE_CHECKING, Any, Awaitable, Callable, @@ -287,10 +288,13 @@ ) -try: +if TYPE_CHECKING: from ssl import SSLContext -except ImportError: # pragma: no cover - SSLContext = Any # type: ignore[misc,assignment] +else: + try: + from ssl import SSLContext + except ImportError: # pragma: no cover + SSLContext = object # type: ignore[misc,assignment] # Only display warning when using -Wdefault, -We, -X dev or similar. warnings.filterwarnings("ignore", category=NotAppKeyWarning, append=True) diff --git a/aiohttp/web_runner.py b/aiohttp/web_runner.py index f8933383435..bcfec727c84 100644 --- a/aiohttp/web_runner.py +++ b/aiohttp/web_runner.py @@ -3,7 +3,7 @@ import socket import warnings from abc import ABC, abstractmethod -from typing import Any, List, Optional, Set +from typing import TYPE_CHECKING, Any, List, Optional, Set from yarl import URL @@ -11,11 +11,13 @@ from .web_app import Application from .web_server import Server -try: +if TYPE_CHECKING: from ssl import SSLContext -except ImportError: - SSLContext = object # type: ignore[misc,assignment] - +else: + try: + from ssl import SSLContext + except ImportError: # pragma: no cover + SSLContext = object # type: ignore[misc,assignment] __all__ = ( "BaseSite", diff --git a/aiohttp/worker.py b/aiohttp/worker.py index 9b307697336..8ed121ac955 100644 --- a/aiohttp/worker.py +++ b/aiohttp/worker.py @@ -6,7 +6,7 @@ import signal import sys from types import FrameType -from typing import Any, Awaitable, Callable, Optional, Union # noqa +from typing import 
TYPE_CHECKING, Any, Optional from gunicorn.config import AccessLogFormat as GunicornAccessLogFormat from gunicorn.workers import base @@ -17,13 +17,18 @@ from .web_app import Application from .web_log import AccessLogger -try: +if TYPE_CHECKING: import ssl SSLContext = ssl.SSLContext -except ImportError: # pragma: no cover - ssl = None # type: ignore[assignment] - SSLContext = object # type: ignore[misc,assignment] +else: + try: + import ssl + + SSLContext = ssl.SSLContext + except ImportError: # pragma: no cover + ssl = None # type: ignore[assignment] + SSLContext = object # type: ignore[misc,assignment] __all__ = ("GunicornWebWorker", "GunicornUVLoopWebWorker") diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index a1f3d944584..c4e10b44987 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -245,6 +245,7 @@ py pydantic pyenv pyflakes +pyright pytest Pytest Quickstart From 51cdda86baead26080f774c400f81492b62022f6 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Mon, 9 Dec 2024 20:12:25 +0000 Subject: [PATCH 1074/1511] Add host parameter to aiohttp_server fixture (#10120) (#10121) (#10150) Co-authored-by: ChristianWBrock <christian.brock AT posteo.net> (cherry picked from commit 7f8e2d35ad4d0d5fed82721060bafd1bafa264b8) Co-authored-by: christianwbrock <christian.brock@posteo.net> --- CHANGES/10120.feature.rst | 1 + aiohttp/pytest_plugin.py | 8 ++++++-- 2 files changed, 7 insertions(+), 2 deletions(-) create mode 100644 CHANGES/10120.feature.rst diff --git a/CHANGES/10120.feature.rst b/CHANGES/10120.feature.rst new file mode 100644 index 00000000000..98cee5650d6 --- /dev/null +++ b/CHANGES/10120.feature.rst @@ -0,0 +1 @@ +Added ``host`` parameter to ``aiohttp_server`` fixture -- by :user:`christianwbrock`. 
diff --git a/aiohttp/pytest_plugin.py b/aiohttp/pytest_plugin.py index 7ce60faa4a4..158fd684b7a 100644 --- a/aiohttp/pytest_plugin.py +++ b/aiohttp/pytest_plugin.py @@ -301,9 +301,13 @@ def aiohttp_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpServer]: servers = [] async def go( - app: Application, *, port: Optional[int] = None, **kwargs: Any + app: Application, + *, + host: str = "127.0.0.1", + port: Optional[int] = None, + **kwargs: Any, ) -> TestServer: - server = TestServer(app, port=port) + server = TestServer(app, host=host, port=port) await server.start_server(loop=loop, **kwargs) servers.append(server) return server From 7f389138e7ac33687c7c486cde2d2056f9b0f6fe Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 11 Dec 2024 15:50:34 +0100 Subject: [PATCH 1075/1511] [PR #10154/3f07b1a3 backport][3.11] Update StreamResponse.write annotation for strict-bytes (#10157) **This is a backport of PR #10154 as merged into master (3f07b1a38beffc350adbf51a605d27fe306de66c).** ## What do these changes do? Mypy will add a `--strict-bytes` flag. https://github.com/python/mypy/pull/18263 With that `bytearray` and `memoryview` are no longer subclasses of `bytes` and must be listed explicitly instead if they are supported. ## Are there changes in behavior for the user? -- ## Related issue number -- ## Checklist - [ ] I think the code is well written - [ ] Unit tests for the changes exist - [ ] Documentation reflects the changes - [ ] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. - [ ] Add a new news fragment into the `CHANGES/` folder * name it `<issue_or_pr_num>.<type>.rst` (e.g. 
`588.bugfix.rst`) * if you don't have an issue number, change it to the pull request number after creating the PR * `.bugfix`: A bug fix for something the maintainers deemed an improper undesired behavior that got corrected to match pre-agreed expectations. * `.feature`: A new behavior, public APIs. That sort of stuff. * `.deprecation`: A declaration of future API removals and breaking changes in behavior. * `.breaking`: When something public is removed in a breaking way. Could be deprecated in an earlier release. * `.doc`: Notable updates to the documentation structure or build process. * `.packaging`: Notes for downstreams about unobvious side effects and tooling. Changes in the test invocation considerations and runtime assumptions. * `.contrib`: Stuff that affects the contributor experience. e.g. Running tests, building the docs, setting up the development environment. * `.misc`: Changes that are hard to assign to any of the above categories. * Make sure to use full sentences with correct case and punctuation, for example: ```rst Fixed issue with non-ascii contents in doctest text files -- by :user:`contributor-gh-handle`. ``` Use the past tense or the present tense a non-imperative mood, referring to what's changed compared to the last released version of this project. Co-authored-by: Marc Mueller <30130371+cdce8p@users.noreply.github.com> --- CHANGES/10154.bugfix.rst | 1 + aiohttp/abc.py | 3 ++- aiohttp/http_writer.py | 8 ++++++-- aiohttp/web_response.py | 2 +- 4 files changed, 10 insertions(+), 4 deletions(-) create mode 100644 CHANGES/10154.bugfix.rst diff --git a/CHANGES/10154.bugfix.rst b/CHANGES/10154.bugfix.rst new file mode 100644 index 00000000000..382d9e56e6c --- /dev/null +++ b/CHANGES/10154.bugfix.rst @@ -0,0 +1 @@ +Updated :meth:`aiohttp.web.StreamResponse.write` annotation to also allow :class:`bytearray` and :class:`memoryview` as inputs -- by :user:`cdce8p`. 
diff --git a/aiohttp/abc.py b/aiohttp/abc.py index d6f9f782b0f..989f0a561ff 100644 --- a/aiohttp/abc.py +++ b/aiohttp/abc.py @@ -17,6 +17,7 @@ Optional, Tuple, TypedDict, + Union, ) from multidict import CIMultiDict @@ -200,7 +201,7 @@ class AbstractStreamWriter(ABC): length: Optional[int] = 0 @abstractmethod - async def write(self, chunk: bytes) -> None: + async def write(self, chunk: Union[bytes, bytearray, memoryview]) -> None: """Write chunk into stream.""" @abstractmethod diff --git a/aiohttp/http_writer.py b/aiohttp/http_writer.py index edd19ed65da..28b14f7a791 100644 --- a/aiohttp/http_writer.py +++ b/aiohttp/http_writer.py @@ -72,7 +72,7 @@ def enable_compression( ) -> None: self._compress = ZLibCompressor(encoding=encoding, strategy=strategy) - def _write(self, chunk: bytes) -> None: + def _write(self, chunk: Union[bytes, bytearray, memoryview]) -> None: size = len(chunk) self.buffer_size += size self.output_size += size @@ -93,7 +93,11 @@ def _writelines(self, chunks: Iterable[bytes]) -> None: transport.write(b"".join(chunks)) async def write( - self, chunk: bytes, *, drain: bool = True, LIMIT: int = 0x10000 + self, + chunk: Union[bytes, bytearray, memoryview], + *, + drain: bool = True, + LIMIT: int = 0x10000, ) -> None: """Writes chunk of data to a stream. 
diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index cd2be24f1a3..e498a905caf 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -537,7 +537,7 @@ async def _write_headers(self) -> None: status_line = f"HTTP/{version[0]}.{version[1]} {self._status} {self._reason}" await writer.write_headers(status_line, self._headers) - async def write(self, data: bytes) -> None: + async def write(self, data: Union[bytes, bytearray, memoryview]) -> None: assert isinstance( data, (bytes, bytearray, memoryview) ), "data argument must be byte-ish (%r)" % type(data) From 5d9d83065c8ce9a4fb6548cf8d5993f793315010 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 11 Dec 2024 15:50:56 +0100 Subject: [PATCH 1076/1511] [PR #10154/3f07b1a3 backport][3.12] Update StreamResponse.write annotation for strict-bytes (#10158) **This is a backport of PR #10154 as merged into master (3f07b1a38beffc350adbf51a605d27fe306de66c).** ## What do these changes do? Mypy will add a `--strict-bytes` flag. https://github.com/python/mypy/pull/18263 With that `bytearray` and `memoryview` are no longer subclasses of `bytes` and must be listed explicitly instead if they are supported. ## Are there changes in behavior for the user? -- ## Related issue number -- ## Checklist - [ ] I think the code is well written - [ ] Unit tests for the changes exist - [ ] Documentation reflects the changes - [ ] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. - [ ] Add a new news fragment into the `CHANGES/` folder * name it `<issue_or_pr_num>.<type>.rst` (e.g. `588.bugfix.rst`) * if you don't have an issue number, change it to the pull request number after creating the PR * `.bugfix`: A bug fix for something the maintainers deemed an improper undesired behavior that got corrected to match pre-agreed expectations. 
* `.feature`: A new behavior, public APIs. That sort of stuff. * `.deprecation`: A declaration of future API removals and breaking changes in behavior. * `.breaking`: When something public is removed in a breaking way. Could be deprecated in an earlier release. * `.doc`: Notable updates to the documentation structure or build process. * `.packaging`: Notes for downstreams about unobvious side effects and tooling. Changes in the test invocation considerations and runtime assumptions. * `.contrib`: Stuff that affects the contributor experience. e.g. Running tests, building the docs, setting up the development environment. * `.misc`: Changes that are hard to assign to any of the above categories. * Make sure to use full sentences with correct case and punctuation, for example: ```rst Fixed issue with non-ascii contents in doctest text files -- by :user:`contributor-gh-handle`. ``` Use the past tense or the present tense a non-imperative mood, referring to what's changed compared to the last released version of this project. Co-authored-by: Marc Mueller <30130371+cdce8p@users.noreply.github.com> --- CHANGES/10154.bugfix.rst | 1 + aiohttp/abc.py | 3 ++- aiohttp/http_writer.py | 8 ++++++-- aiohttp/web_response.py | 2 +- 4 files changed, 10 insertions(+), 4 deletions(-) create mode 100644 CHANGES/10154.bugfix.rst diff --git a/CHANGES/10154.bugfix.rst b/CHANGES/10154.bugfix.rst new file mode 100644 index 00000000000..382d9e56e6c --- /dev/null +++ b/CHANGES/10154.bugfix.rst @@ -0,0 +1 @@ +Updated :meth:`aiohttp.web.StreamResponse.write` annotation to also allow :class:`bytearray` and :class:`memoryview` as inputs -- by :user:`cdce8p`. 
diff --git a/aiohttp/abc.py b/aiohttp/abc.py index d6f9f782b0f..989f0a561ff 100644 --- a/aiohttp/abc.py +++ b/aiohttp/abc.py @@ -17,6 +17,7 @@ Optional, Tuple, TypedDict, + Union, ) from multidict import CIMultiDict @@ -200,7 +201,7 @@ class AbstractStreamWriter(ABC): length: Optional[int] = 0 @abstractmethod - async def write(self, chunk: bytes) -> None: + async def write(self, chunk: Union[bytes, bytearray, memoryview]) -> None: """Write chunk into stream.""" @abstractmethod diff --git a/aiohttp/http_writer.py b/aiohttp/http_writer.py index edd19ed65da..28b14f7a791 100644 --- a/aiohttp/http_writer.py +++ b/aiohttp/http_writer.py @@ -72,7 +72,7 @@ def enable_compression( ) -> None: self._compress = ZLibCompressor(encoding=encoding, strategy=strategy) - def _write(self, chunk: bytes) -> None: + def _write(self, chunk: Union[bytes, bytearray, memoryview]) -> None: size = len(chunk) self.buffer_size += size self.output_size += size @@ -93,7 +93,11 @@ def _writelines(self, chunks: Iterable[bytes]) -> None: transport.write(b"".join(chunks)) async def write( - self, chunk: bytes, *, drain: bool = True, LIMIT: int = 0x10000 + self, + chunk: Union[bytes, bytearray, memoryview], + *, + drain: bool = True, + LIMIT: int = 0x10000, ) -> None: """Writes chunk of data to a stream. 
diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index cd2be24f1a3..e498a905caf 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -537,7 +537,7 @@ async def _write_headers(self) -> None: status_line = f"HTTP/{version[0]}.{version[1]} {self._status} {self._reason}" await writer.write_headers(status_line, self._headers) - async def write(self, data: bytes) -> None: + async def write(self, data: Union[bytes, bytearray, memoryview]) -> None: assert isinstance( data, (bytes, bytearray, memoryview) ), "data argument must be byte-ish (%r)" % type(data) From f38d51ca28e504cc8637093d53734abc998777ab Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 12 Dec 2024 15:38:44 +0000 Subject: [PATCH 1077/1511] [PR #10146/a818e51c backport][3.12] Raise `TypeError` when setting `StreamResponse.last_modified` to an unsupported type (#10161) --- CHANGES/10146.misc.rst | 1 + aiohttp/web_response.py | 3 +++ tests/test_web_response.py | 7 +++++++ 3 files changed, 11 insertions(+) create mode 100644 CHANGES/10146.misc.rst diff --git a/CHANGES/10146.misc.rst b/CHANGES/10146.misc.rst new file mode 100644 index 00000000000..bee4ef68fb3 --- /dev/null +++ b/CHANGES/10146.misc.rst @@ -0,0 +1 @@ +Setting :attr:`aiohttp.web.StreamResponse.last_modified` to an unsupported type will now raise :exc:`TypeError` instead of silently failing -- by :user:`bdraco`. 
diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index e498a905caf..e2351ddf7b7 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -370,6 +370,9 @@ def last_modified( ) elif isinstance(value, str): self._headers[hdrs.LAST_MODIFIED] = value + else: + msg = f"Unsupported type for last_modified: {type(value).__name__}" + raise TypeError(msg) @property def etag(self) -> Optional[ETag]: diff --git a/tests/test_web_response.py b/tests/test_web_response.py index f4acf23f61b..24743e64635 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -255,6 +255,13 @@ def test_last_modified_reset() -> None: assert resp.last_modified is None +def test_last_modified_invalid_type() -> None: + resp = StreamResponse() + + with pytest.raises(TypeError, match="Unsupported type for last_modified: object"): + resp.last_modified = object() # type: ignore[assignment] + + @pytest.mark.parametrize( ["header_val", "expected"], [ From 99a373737135d924180121ca01500e479b52ba96 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 12 Dec 2024 16:42:06 +0100 Subject: [PATCH 1078/1511] [PR #10156/00700458 backport][3.12] Add ALPN extension to client SSL Context (#10164) **This is a backport of PR #10156 as merged into master (00700458eb7741f15861a8616dbf77a0d82dc31f).** ## What do these changes do? Add "http/1.1" ALPN extension to aiohttp client's SSL Context. ## Are there changes in behavior for the user? ## Is it a substantial burden for the maintainers to support this? ## Related issue number Fixes #10152 ## Checklist - [x] I think the code is well written - [x] Unit tests for the changes exist - [ ] Documentation reflects the changes - [x] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. 
- [x] Add a new news fragment into the `CHANGES/` folder Co-authored-by: Cycloctane <Octane-vs@outlook.com> --- CHANGES/10156.feature.rst | 3 +++ CONTRIBUTORS.txt | 1 + aiohttp/connector.py | 18 ++++++++++-------- tests/test_client_functional.py | 24 ++++++++++++++++++++++++ 4 files changed, 38 insertions(+), 8 deletions(-) create mode 100644 CHANGES/10156.feature.rst diff --git a/CHANGES/10156.feature.rst b/CHANGES/10156.feature.rst new file mode 100644 index 00000000000..0ff6b6b8bd8 --- /dev/null +++ b/CHANGES/10156.feature.rst @@ -0,0 +1,3 @@ +Enabled ALPN on default SSL contexts. This improves compatibility with some +proxies which don't work without this extension. +-- by :user:`Cycloctane`. diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 94d003a1719..035436c0426 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -366,6 +366,7 @@ William S. Wilson Ong wouter bolsterlee Xavier Halloran +Xi Rui Xiang Li Yang Zhou Yannick Koechlin diff --git a/aiohttp/connector.py b/aiohttp/connector.py index a9123f82bc0..7e0986df657 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -780,14 +780,16 @@ def _make_ssl_context(verified: bool) -> SSLContext: # No ssl support return None if verified: - return ssl.create_default_context() - sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) - sslcontext.options |= ssl.OP_NO_SSLv2 - sslcontext.options |= ssl.OP_NO_SSLv3 - sslcontext.check_hostname = False - sslcontext.verify_mode = ssl.CERT_NONE - sslcontext.options |= ssl.OP_NO_COMPRESSION - sslcontext.set_default_verify_paths() + sslcontext = ssl.create_default_context() + else: + sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) + sslcontext.options |= ssl.OP_NO_SSLv2 + sslcontext.options |= ssl.OP_NO_SSLv3 + sslcontext.check_hostname = False + sslcontext.verify_mode = ssl.CERT_NONE + sslcontext.options |= ssl.OP_NO_COMPRESSION + sslcontext.set_default_verify_paths() + sslcontext.set_alpn_protocols(("http/1.1",)) return sslcontext diff --git 
a/tests/test_client_functional.py b/tests/test_client_functional.py index b34ccdb600d..05af9ae25ad 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -603,6 +603,30 @@ async def handler(request): assert txt == "Test message" +async def test_ssl_client_alpn( + aiohttp_server: AiohttpServer, + aiohttp_client: AiohttpClient, + ssl_ctx: ssl.SSLContext, +) -> None: + + async def handler(request: web.Request) -> web.Response: + assert request.transport is not None + sslobj = request.transport.get_extra_info("ssl_object") + return web.Response(text=sslobj.selected_alpn_protocol()) + + app = web.Application() + app.router.add_route("GET", "/", handler) + ssl_ctx.set_alpn_protocols(("http/1.1",)) + server = await aiohttp_server(app, ssl=ssl_ctx) + + connector = aiohttp.TCPConnector(ssl=False) + client = await aiohttp_client(server, connector=connector) + resp = await client.get("/") + assert resp.status == 200 + txt = await resp.text() + assert txt == "http/1.1" + + async def test_tcp_connector_fingerprint_ok( aiohttp_server, aiohttp_client, From 3680479df7b72d1fcef1c76abf80727553fe573f Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 12 Dec 2024 16:42:26 +0100 Subject: [PATCH 1079/1511] [PR #10156/00700458 backport][3.11] Add ALPN extension to client SSL Context (#10163) **This is a backport of PR #10156 as merged into master (00700458eb7741f15861a8616dbf77a0d82dc31f).** ## What do these changes do? Add "http/1.1" ALPN extension to aiohttp client's SSL Context. ## Are there changes in behavior for the user? ## Is it a substantial burden for the maintainers to support this? ## Related issue number Fixes #10152 ## Checklist - [x] I think the code is well written - [x] Unit tests for the changes exist - [ ] Documentation reflects the changes - [x] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. 
* Please keep alphabetical order, the file is sorted by names. - [x] Add a new news fragment into the `CHANGES/` folder Co-authored-by: Cycloctane <Octane-vs@outlook.com> --- CHANGES/10156.feature.rst | 3 +++ CONTRIBUTORS.txt | 1 + aiohttp/connector.py | 18 ++++++++++-------- tests/test_client_functional.py | 24 ++++++++++++++++++++++++ 4 files changed, 38 insertions(+), 8 deletions(-) create mode 100644 CHANGES/10156.feature.rst diff --git a/CHANGES/10156.feature.rst b/CHANGES/10156.feature.rst new file mode 100644 index 00000000000..0ff6b6b8bd8 --- /dev/null +++ b/CHANGES/10156.feature.rst @@ -0,0 +1,3 @@ +Enabled ALPN on default SSL contexts. This improves compatibility with some +proxies which don't work without this extension. +-- by :user:`Cycloctane`. diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index ded6c463e40..5acc4de44fc 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -365,6 +365,7 @@ William S. Wilson Ong wouter bolsterlee Xavier Halloran +Xi Rui Xiang Li Yang Zhou Yannick Koechlin diff --git a/aiohttp/connector.py b/aiohttp/connector.py index a9123f82bc0..7e0986df657 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -780,14 +780,16 @@ def _make_ssl_context(verified: bool) -> SSLContext: # No ssl support return None if verified: - return ssl.create_default_context() - sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) - sslcontext.options |= ssl.OP_NO_SSLv2 - sslcontext.options |= ssl.OP_NO_SSLv3 - sslcontext.check_hostname = False - sslcontext.verify_mode = ssl.CERT_NONE - sslcontext.options |= ssl.OP_NO_COMPRESSION - sslcontext.set_default_verify_paths() + sslcontext = ssl.create_default_context() + else: + sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) + sslcontext.options |= ssl.OP_NO_SSLv2 + sslcontext.options |= ssl.OP_NO_SSLv3 + sslcontext.check_hostname = False + sslcontext.verify_mode = ssl.CERT_NONE + sslcontext.options |= ssl.OP_NO_COMPRESSION + sslcontext.set_default_verify_paths() + 
sslcontext.set_alpn_protocols(("http/1.1",)) return sslcontext diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index b34ccdb600d..05af9ae25ad 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -603,6 +603,30 @@ async def handler(request): assert txt == "Test message" +async def test_ssl_client_alpn( + aiohttp_server: AiohttpServer, + aiohttp_client: AiohttpClient, + ssl_ctx: ssl.SSLContext, +) -> None: + + async def handler(request: web.Request) -> web.Response: + assert request.transport is not None + sslobj = request.transport.get_extra_info("ssl_object") + return web.Response(text=sslobj.selected_alpn_protocol()) + + app = web.Application() + app.router.add_route("GET", "/", handler) + ssl_ctx.set_alpn_protocols(("http/1.1",)) + server = await aiohttp_server(app, ssl=ssl_ctx) + + connector = aiohttp.TCPConnector(ssl=False) + client = await aiohttp_client(server, connector=connector) + resp = await client.get("/") + assert resp.status == 200 + txt = await resp.text() + assert txt == "http/1.1" + + async def test_tcp_connector_fingerprint_ok( aiohttp_server, aiohttp_client, From c80be677c02d9bb63b4e96a3fd5b8d29bd11f0c6 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 17 Dec 2024 18:37:44 +0000 Subject: [PATCH 1080/1511] [PR #10151/7c12b1a9 backport][3.11] Fix infinite callback loop when time is not moving forward (#10173) Co-authored-by: Bruce Merry <1963944+bmerry@users.noreply.github.com> Fixes #123'). --> Fixes #10149. 
--- CHANGES/10149.misc.rst | 4 ++++ aiohttp/web_protocol.py | 2 +- tests/test_web_functional.py | 38 ++++++++++++++++++++++++++++++++++++ 3 files changed, 43 insertions(+), 1 deletion(-) create mode 100644 CHANGES/10149.misc.rst diff --git a/CHANGES/10149.misc.rst b/CHANGES/10149.misc.rst new file mode 100644 index 00000000000..61765a50fcf --- /dev/null +++ b/CHANGES/10149.misc.rst @@ -0,0 +1,4 @@ +Fixed an infinite loop that can occur when using aiohttp in combination +with `async-solipsism`_ -- by :user:`bmerry`. + +.. _async-solipsism: https://github.com/bmerry/async-solipsism diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index e8bb41abf97..3306b86bded 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -458,7 +458,7 @@ def _process_keepalive(self) -> None: loop = self._loop now = loop.time() close_time = self._next_keepalive_close_time - if now <= close_time: + if now < close_time: # Keep alive close check fired too early, reschedule self._keepalive_handle = loop.call_at(close_time, self._process_keepalive) return diff --git a/tests/test_web_functional.py b/tests/test_web_functional.py index a3a990141a1..e4979851300 100644 --- a/tests/test_web_functional.py +++ b/tests/test_web_functional.py @@ -2324,3 +2324,41 @@ async def handler(request: web.Request) -> web.Response: # Make 2nd request which will hit the race condition. 
async with client.get("/") as resp: assert resp.status == 200 + + +async def test_keepalive_expires_on_time(aiohttp_client: AiohttpClient) -> None: + """Test that the keepalive handle expires on time.""" + + async def handler(request: web.Request) -> web.Response: + body = await request.read() + assert b"" == body + return web.Response(body=b"OK") + + app = web.Application() + app.router.add_route("GET", "/", handler) + + connector = aiohttp.TCPConnector(limit=1) + client = await aiohttp_client(app, connector=connector) + + loop = asyncio.get_running_loop() + now = loop.time() + + # Patch loop time so we can control when the keepalive timeout is processed + with mock.patch.object(loop, "time") as loop_time_mock: + loop_time_mock.return_value = now + resp1 = await client.get("/") + await resp1.read() + request_handler = client.server.handler.connections[0] + + # Ensure the keep alive handle is set + assert request_handler._keepalive_handle is not None + + # Set the loop time to exactly the keepalive timeout + loop_time_mock.return_value = request_handler._next_keepalive_close_time + + # sleep twice to ensure the keep alive timeout is processed + await asyncio.sleep(0) + await asyncio.sleep(0) + + # Ensure the keep alive handle expires + assert request_handler._keepalive_handle is None From 5afac5580cd40f1b20b56cc5892aa9c6125a482a Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 17 Dec 2024 18:45:53 +0000 Subject: [PATCH 1081/1511] [PR #10151/7c12b1a9 backport][3.12] Fix infinite callback loop when time is not moving forward (#10174) Co-authored-by: Bruce Merry <1963944+bmerry@users.noreply.github.com> Fixes #123'). --> Fixes #10149. 
--- CHANGES/10149.misc.rst | 4 ++++ aiohttp/web_protocol.py | 2 +- tests/test_web_functional.py | 38 ++++++++++++++++++++++++++++++++++++ 3 files changed, 43 insertions(+), 1 deletion(-) create mode 100644 CHANGES/10149.misc.rst diff --git a/CHANGES/10149.misc.rst b/CHANGES/10149.misc.rst new file mode 100644 index 00000000000..61765a50fcf --- /dev/null +++ b/CHANGES/10149.misc.rst @@ -0,0 +1,4 @@ +Fixed an infinite loop that can occur when using aiohttp in combination +with `async-solipsism`_ -- by :user:`bmerry`. + +.. _async-solipsism: https://github.com/bmerry/async-solipsism diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index e8bb41abf97..3306b86bded 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -458,7 +458,7 @@ def _process_keepalive(self) -> None: loop = self._loop now = loop.time() close_time = self._next_keepalive_close_time - if now <= close_time: + if now < close_time: # Keep alive close check fired too early, reschedule self._keepalive_handle = loop.call_at(close_time, self._process_keepalive) return diff --git a/tests/test_web_functional.py b/tests/test_web_functional.py index a3a990141a1..e4979851300 100644 --- a/tests/test_web_functional.py +++ b/tests/test_web_functional.py @@ -2324,3 +2324,41 @@ async def handler(request: web.Request) -> web.Response: # Make 2nd request which will hit the race condition. 
async with client.get("/") as resp: assert resp.status == 200 + + +async def test_keepalive_expires_on_time(aiohttp_client: AiohttpClient) -> None: + """Test that the keepalive handle expires on time.""" + + async def handler(request: web.Request) -> web.Response: + body = await request.read() + assert b"" == body + return web.Response(body=b"OK") + + app = web.Application() + app.router.add_route("GET", "/", handler) + + connector = aiohttp.TCPConnector(limit=1) + client = await aiohttp_client(app, connector=connector) + + loop = asyncio.get_running_loop() + now = loop.time() + + # Patch loop time so we can control when the keepalive timeout is processed + with mock.patch.object(loop, "time") as loop_time_mock: + loop_time_mock.return_value = now + resp1 = await client.get("/") + await resp1.read() + request_handler = client.server.handler.connections[0] + + # Ensure the keep alive handle is set + assert request_handler._keepalive_handle is not None + + # Set the loop time to exactly the keepalive timeout + loop_time_mock.return_value = request_handler._next_keepalive_close_time + + # sleep twice to ensure the keep alive timeout is processed + await asyncio.sleep(0) + await asyncio.sleep(0) + + # Ensure the keep alive handle expires + assert request_handler._keepalive_handle is None From 8c96a62fca08313431afa6ff56f35626c1b97e8d Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Tue, 17 Dec 2024 11:50:53 -0700 Subject: [PATCH 1082/1511] [PR #10093/7b5d54a backport][3.11] Use `quote_cookie` setting from ClientSession's cookiejar in `tmp_cookie_jar` (#10175) Co-authored-by: pre-commit-ci[bot] Co-authored-by: Sam Bull <git@sambull.org> Co-authored-by: Cycloctane <Octane-vs@outlook.com> --- CHANGES/10093.bugfix.rst | 2 ++ aiohttp/abc.py | 5 +++++ aiohttp/client.py | 4 +++- aiohttp/cookiejar.py | 8 ++++++++ tests/test_client_session.py | 23 ++++++++++++++++++++--- tests/test_cookiejar.py | 1 + 6 files changed, 39 insertions(+), 4 deletions(-) create mode 100644 CHANGES/10093.bugfix.rst diff --git a/CHANGES/10093.bugfix.rst b/CHANGES/10093.bugfix.rst new file mode 100644 index 00000000000..4d7076115d9 --- /dev/null +++ b/CHANGES/10093.bugfix.rst @@ -0,0 +1,2 @@ +Update :py:meth:`~aiohttp.ClientSession.request` to reuse the ``quote_cookie`` setting from ``ClientSession._cookie_jar`` when processing cookies parameter. +-- by :user:`Cycloctane`. 
diff --git a/aiohttp/abc.py b/aiohttp/abc.py index 989f0a561ff..5794a9108b0 100644 --- a/aiohttp/abc.py +++ b/aiohttp/abc.py @@ -176,6 +176,11 @@ class AbstractCookieJar(Sized, IterableBase): def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: self._loop = loop or asyncio.get_running_loop() + @property + @abstractmethod + def quote_cookie(self) -> bool: + """Return True if cookies should be quoted.""" + @abstractmethod def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None: """Clear all cookies if no predicate is passed.""" diff --git a/aiohttp/client.py b/aiohttp/client.py index e04a6ff989a..3b1dc08544f 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -658,7 +658,9 @@ async def _request( all_cookies = self._cookie_jar.filter_cookies(url) if cookies is not None: - tmp_cookie_jar = CookieJar() + tmp_cookie_jar = CookieJar( + quote_cookie=self._cookie_jar.quote_cookie + ) tmp_cookie_jar.update_cookies(cookies) req_cookies = tmp_cookie_jar.filter_cookies(url) if req_cookies: diff --git a/aiohttp/cookiejar.py b/aiohttp/cookiejar.py index ef04bda5ad6..f6b9a921767 100644 --- a/aiohttp/cookiejar.py +++ b/aiohttp/cookiejar.py @@ -117,6 +117,10 @@ def __init__( self._expire_heap: List[Tuple[float, Tuple[str, str, str]]] = [] self._expirations: Dict[Tuple[str, str, str], float] = {} + @property + def quote_cookie(self) -> bool: + return self._quote_cookie + def save(self, file_path: PathLike) -> None: file_path = pathlib.Path(file_path) with file_path.open(mode="wb") as f: @@ -474,6 +478,10 @@ def __iter__(self) -> "Iterator[Morsel[str]]": def __len__(self) -> int: return 0 + @property + def quote_cookie(self) -> bool: + return True + def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None: pass diff --git a/tests/test_client_session.py b/tests/test_client_session.py index 65f80b6abe9..6309c5daf2e 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -15,13 +15,14 @@ from 
yarl import URL import aiohttp -from aiohttp import client, hdrs, web +from aiohttp import CookieJar, client, hdrs, web from aiohttp.client import ClientSession from aiohttp.client_proto import ResponseHandler from aiohttp.client_reqrep import ClientRequest from aiohttp.connector import BaseConnector, Connection, TCPConnector, UnixConnector from aiohttp.helpers import DEBUG from aiohttp.http import RawResponseMessage +from aiohttp.pytest_plugin import AiohttpServer from aiohttp.test_utils import make_mocked_coro from aiohttp.tracing import Trace @@ -634,8 +635,24 @@ async def handler(request): assert resp_cookies["response"].value == "resp_value" -async def test_session_default_version(loop) -> None: - session = aiohttp.ClientSession(loop=loop) +async def test_cookies_with_not_quoted_cookie_jar( + aiohttp_server: AiohttpServer, +) -> None: + async def handler(_: web.Request) -> web.Response: + return web.Response() + + app = web.Application() + app.router.add_route("GET", "/", handler) + server = await aiohttp_server(app) + jar = CookieJar(quote_cookie=False) + cookies = {"name": "val=foobar"} + async with aiohttp.ClientSession(cookie_jar=jar) as sess: + resp = await sess.request("GET", server.make_url("/"), cookies=cookies) + assert resp.request_info.headers.get("Cookie", "") == "name=val=foobar" + + +async def test_session_default_version(loop: asyncio.AbstractEventLoop) -> None: + session = aiohttp.ClientSession() assert session.version == aiohttp.HttpVersion11 await session.close() diff --git a/tests/test_cookiejar.py b/tests/test_cookiejar.py index bdcf54fa796..0b440bc2ca6 100644 --- a/tests/test_cookiejar.py +++ b/tests/test_cookiejar.py @@ -807,6 +807,7 @@ async def make_jar(): async def test_dummy_cookie_jar() -> None: cookie = SimpleCookie("foo=bar; Domain=example.com;") dummy_jar = DummyCookieJar() + assert dummy_jar.quote_cookie is True assert len(dummy_jar) == 0 dummy_jar.update_cookies(cookie) assert len(dummy_jar) == 0 From 
86e21404c6681704f45aa31401959053a1b3b0df Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 17 Dec 2024 12:11:55 -0700 Subject: [PATCH 1083/1511] [PR #10093/7b5d54a backport][3.12] Use `quote_cookie` setting from ClientSession's cookiejar in `tmp_cookie_jar` (#10176) Co-authored-by: pre-commit-ci[bot] Co-authored-by: Sam Bull <git@sambull.org> Co-authored-by: Cycloctane <Octane-vs@outlook.com> --- CHANGES/10093.bugfix.rst | 2 ++ aiohttp/abc.py | 5 +++++ aiohttp/client.py | 4 +++- aiohttp/cookiejar.py | 8 ++++++++ tests/test_client_session.py | 23 ++++++++++++++++++++--- tests/test_cookiejar.py | 1 + 6 files changed, 39 insertions(+), 4 deletions(-) create mode 100644 CHANGES/10093.bugfix.rst diff --git a/CHANGES/10093.bugfix.rst b/CHANGES/10093.bugfix.rst new file mode 100644 index 00000000000..4d7076115d9 --- /dev/null +++ b/CHANGES/10093.bugfix.rst @@ -0,0 +1,2 @@ +Update :py:meth:`~aiohttp.ClientSession.request` to reuse the ``quote_cookie`` setting from ``ClientSession._cookie_jar`` when processing cookies parameter. +-- by :user:`Cycloctane`. 
diff --git a/aiohttp/abc.py b/aiohttp/abc.py index 989f0a561ff..5794a9108b0 100644 --- a/aiohttp/abc.py +++ b/aiohttp/abc.py @@ -176,6 +176,11 @@ class AbstractCookieJar(Sized, IterableBase): def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: self._loop = loop or asyncio.get_running_loop() + @property + @abstractmethod + def quote_cookie(self) -> bool: + """Return True if cookies should be quoted.""" + @abstractmethod def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None: """Clear all cookies if no predicate is passed.""" diff --git a/aiohttp/client.py b/aiohttp/client.py index 7539310aa8a..fbf691e89d1 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -656,7 +656,9 @@ async def _request( all_cookies = self._cookie_jar.filter_cookies(url) if cookies is not None: - tmp_cookie_jar = CookieJar() + tmp_cookie_jar = CookieJar( + quote_cookie=self._cookie_jar.quote_cookie + ) tmp_cookie_jar.update_cookies(cookies) req_cookies = tmp_cookie_jar.filter_cookies(url) if req_cookies: diff --git a/aiohttp/cookiejar.py b/aiohttp/cookiejar.py index ef04bda5ad6..f6b9a921767 100644 --- a/aiohttp/cookiejar.py +++ b/aiohttp/cookiejar.py @@ -117,6 +117,10 @@ def __init__( self._expire_heap: List[Tuple[float, Tuple[str, str, str]]] = [] self._expirations: Dict[Tuple[str, str, str], float] = {} + @property + def quote_cookie(self) -> bool: + return self._quote_cookie + def save(self, file_path: PathLike) -> None: file_path = pathlib.Path(file_path) with file_path.open(mode="wb") as f: @@ -474,6 +478,10 @@ def __iter__(self) -> "Iterator[Morsel[str]]": def __len__(self) -> int: return 0 + @property + def quote_cookie(self) -> bool: + return True + def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None: pass diff --git a/tests/test_client_session.py b/tests/test_client_session.py index a2c4833b83e..548af5db551 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -15,13 +15,14 @@ from 
yarl import URL import aiohttp -from aiohttp import client, hdrs, web +from aiohttp import CookieJar, client, hdrs, web from aiohttp.client import ClientSession from aiohttp.client_proto import ResponseHandler from aiohttp.client_reqrep import ClientRequest from aiohttp.connector import BaseConnector, Connection, TCPConnector, UnixConnector from aiohttp.helpers import DEBUG from aiohttp.http import RawResponseMessage +from aiohttp.pytest_plugin import AiohttpServer from aiohttp.test_utils import make_mocked_coro from aiohttp.tracing import Trace @@ -634,8 +635,24 @@ async def handler(request): assert resp_cookies["response"].value == "resp_value" -async def test_session_default_version(loop) -> None: - session = aiohttp.ClientSession(loop=loop) +async def test_cookies_with_not_quoted_cookie_jar( + aiohttp_server: AiohttpServer, +) -> None: + async def handler(_: web.Request) -> web.Response: + return web.Response() + + app = web.Application() + app.router.add_route("GET", "/", handler) + server = await aiohttp_server(app) + jar = CookieJar(quote_cookie=False) + cookies = {"name": "val=foobar"} + async with aiohttp.ClientSession(cookie_jar=jar) as sess: + resp = await sess.request("GET", server.make_url("/"), cookies=cookies) + assert resp.request_info.headers.get("Cookie", "") == "name=val=foobar" + + +async def test_session_default_version(loop: asyncio.AbstractEventLoop) -> None: + session = aiohttp.ClientSession() assert session.version == aiohttp.HttpVersion11 await session.close() diff --git a/tests/test_cookiejar.py b/tests/test_cookiejar.py index bdcf54fa796..0b440bc2ca6 100644 --- a/tests/test_cookiejar.py +++ b/tests/test_cookiejar.py @@ -807,6 +807,7 @@ async def make_jar(): async def test_dummy_cookie_jar() -> None: cookie = SimpleCookie("foo=bar; Domain=example.com;") dummy_jar = DummyCookieJar() + assert dummy_jar.quote_cookie is True assert len(dummy_jar) == 0 dummy_jar.update_cookies(cookie) assert len(dummy_jar) == 0 From 
d7e4e61607fb3e16ea2e93d4044b0538bae22d26 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 18 Dec 2024 12:01:32 +0000 Subject: [PATCH 1084/1511] [PR #10172/e45c3b8e backport][3.12] Ensure Response is True even when map is empty (#10177) **This is a backport of PR #10172 as merged into master (e45c3b8ea68e879493e6593d1097155ecc1878e2).** Fixes #10119 Technically a breaking change, but I can't imagine anyone depending on this. Co-authored-by: Robin <robin@rkslot.nl> --- CHANGES/10119.bugfix.rst | 1 + aiohttp/web_response.py | 3 +++ tests/test_web_request.py | 1 + tests/test_web_response.py | 1 + 4 files changed, 6 insertions(+) create mode 100644 CHANGES/10119.bugfix.rst diff --git a/CHANGES/10119.bugfix.rst b/CHANGES/10119.bugfix.rst new file mode 100644 index 00000000000..86d2511f5b5 --- /dev/null +++ b/CHANGES/10119.bugfix.rst @@ -0,0 +1 @@ +Response is now always True, instead of using MutableMapping behaviour (False when map is empty) diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index e2351ddf7b7..99636f2de59 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -609,6 +609,9 @@ def __hash__(self) -> int: def __eq__(self, other: object) -> bool: return self is other + def __bool__(self) -> bool: + return True + class Response(StreamResponse): diff --git a/tests/test_web_request.py b/tests/test_web_request.py index c22b3b17921..6c9e3826d73 100644 --- a/tests/test_web_request.py +++ b/tests/test_web_request.py @@ -311,6 +311,7 @@ def test_match_info() -> None: def test_request_is_mutable_mapping() -> None: req = make_mocked_request("GET", "/") assert isinstance(req, MutableMapping) + assert req # even when the MutableMapping is empty, request should always be True req["key"] = "value" assert "value" == req["key"] diff --git a/tests/test_web_response.py b/tests/test_web_response.py index 24743e64635..1e65f7364b6 100644 --- a/tests/test_web_response.py +++ 
b/tests/test_web_response.py @@ -106,6 +106,7 @@ def test_stream_response_eq() -> None: def test_stream_response_is_mutable_mapping() -> None: resp = StreamResponse() assert isinstance(resp, collections.abc.MutableMapping) + assert resp # even when the MutableMapping is empty, response should always be True resp["key"] = "value" assert "value" == resp["key"] From db56d743b2f11a8889938da4f044e73c0ad4bd30 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Wed, 18 Dec 2024 08:55:12 -1000 Subject: [PATCH 1085/1511] [PR #10171/5185f93 backport][3.11] Stream unpauses protocol before releasing connection (#10179) Co-authored-by: Javier Torres <javier@javiertorres.eu> --- CHANGES/10169.bugfix.rst | 3 +++ CONTRIBUTORS.txt | 1 + aiohttp/streams.py | 3 +++ tests/test_flowcontrol_streams.py | 23 +++++++++++++++++++++++ 4 files changed, 30 insertions(+) create mode 100644 CHANGES/10169.bugfix.rst diff --git a/CHANGES/10169.bugfix.rst b/CHANGES/10169.bugfix.rst new file mode 100644 index 00000000000..32e06783856 --- /dev/null +++ b/CHANGES/10169.bugfix.rst @@ -0,0 +1,3 @@ +Fixed a hang where a connection previously used for a streaming +download could be returned to the pool in a paused state. +-- by :user:`javitonino`. 
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 5acc4de44fc..589784b29cb 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -171,6 +171,7 @@ Jan Buchar Jan Gosmann Jarno Elonen Jashandeep Sohi +Javier Torres Jean-Baptiste Estival Jens Steinhauser Jeonghun Lee diff --git a/aiohttp/streams.py b/aiohttp/streams.py index 029d577b88c..6126fb5695d 100644 --- a/aiohttp/streams.py +++ b/aiohttp/streams.py @@ -220,6 +220,9 @@ def feed_eof(self) -> None: self._eof_waiter = None set_result(waiter, None) + if self._protocol._reading_paused: + self._protocol.resume_reading() + for cb in self._eof_callbacks: try: cb() diff --git a/tests/test_flowcontrol_streams.py b/tests/test_flowcontrol_streams.py index 68e623b6dd7..9874cc2511e 100644 --- a/tests/test_flowcontrol_streams.py +++ b/tests/test_flowcontrol_streams.py @@ -4,6 +4,7 @@ import pytest from aiohttp import streams +from aiohttp.base_protocol import BaseProtocol @pytest.fixture @@ -112,6 +113,15 @@ async def test_read_nowait(self, stream) -> None: assert res == b"" assert stream._protocol.resume_reading.call_count == 1 # type: ignore[attr-defined] + async def test_resumed_on_eof(self, stream: streams.StreamReader) -> None: + stream.feed_data(b"data") + assert stream._protocol.pause_reading.call_count == 1 # type: ignore[attr-defined] + assert stream._protocol.resume_reading.call_count == 0 # type: ignore[attr-defined] + stream._protocol._reading_paused = True + + stream.feed_eof() + assert stream._protocol.resume_reading.call_count == 1 # type: ignore[attr-defined] + async def test_flow_control_data_queue_waiter_cancelled( buffer: streams.FlowControlDataQueue, @@ -180,3 +190,16 @@ async def test_flow_control_data_queue_read_eof( buffer.feed_eof() with pytest.raises(streams.EofStream): await buffer.read() + + +async def test_stream_reader_eof_when_full() -> None: + loop = asyncio.get_event_loop() + protocol = BaseProtocol(loop=loop) + protocol.transport = asyncio.Transport() + stream = 
streams.StreamReader(protocol, 1024, loop=loop) + + data_len = stream._high_water + 1 + stream.feed_data(b"0" * data_len) + assert protocol._reading_paused + stream.feed_eof() + assert not protocol._reading_paused From a3a57167c918af1d1f1323177760e3ba8abcca5c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Wed, 18 Dec 2024 08:58:57 -1000 Subject: [PATCH 1086/1511] [PR #10171/5185f93 backport][3.12] Stream unpauses protocol before releasing connection (#10180) Co-authored-by: Javier Torres <javier@javiertorres.eu> --- CHANGES/10169.bugfix.rst | 3 +++ CONTRIBUTORS.txt | 1 + aiohttp/streams.py | 3 +++ tests/test_flowcontrol_streams.py | 23 +++++++++++++++++++++++ 4 files changed, 30 insertions(+) create mode 100644 CHANGES/10169.bugfix.rst diff --git a/CHANGES/10169.bugfix.rst b/CHANGES/10169.bugfix.rst new file mode 100644 index 00000000000..32e06783856 --- /dev/null +++ b/CHANGES/10169.bugfix.rst @@ -0,0 +1,3 @@ +Fixed a hang where a connection previously used for a streaming +download could be returned to the pool in a paused state. +-- by :user:`javitonino`. 
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 035436c0426..930815d8b62 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -172,6 +172,7 @@ Jan Buchar Jan Gosmann Jarno Elonen Jashandeep Sohi +Javier Torres Jean-Baptiste Estival Jens Steinhauser Jeonghun Lee diff --git a/aiohttp/streams.py b/aiohttp/streams.py index 029d577b88c..6126fb5695d 100644 --- a/aiohttp/streams.py +++ b/aiohttp/streams.py @@ -220,6 +220,9 @@ def feed_eof(self) -> None: self._eof_waiter = None set_result(waiter, None) + if self._protocol._reading_paused: + self._protocol.resume_reading() + for cb in self._eof_callbacks: try: cb() diff --git a/tests/test_flowcontrol_streams.py b/tests/test_flowcontrol_streams.py index 68e623b6dd7..9874cc2511e 100644 --- a/tests/test_flowcontrol_streams.py +++ b/tests/test_flowcontrol_streams.py @@ -4,6 +4,7 @@ import pytest from aiohttp import streams +from aiohttp.base_protocol import BaseProtocol @pytest.fixture @@ -112,6 +113,15 @@ async def test_read_nowait(self, stream) -> None: assert res == b"" assert stream._protocol.resume_reading.call_count == 1 # type: ignore[attr-defined] + async def test_resumed_on_eof(self, stream: streams.StreamReader) -> None: + stream.feed_data(b"data") + assert stream._protocol.pause_reading.call_count == 1 # type: ignore[attr-defined] + assert stream._protocol.resume_reading.call_count == 0 # type: ignore[attr-defined] + stream._protocol._reading_paused = True + + stream.feed_eof() + assert stream._protocol.resume_reading.call_count == 1 # type: ignore[attr-defined] + async def test_flow_control_data_queue_waiter_cancelled( buffer: streams.FlowControlDataQueue, @@ -180,3 +190,16 @@ async def test_flow_control_data_queue_read_eof( buffer.feed_eof() with pytest.raises(streams.EofStream): await buffer.read() + + +async def test_stream_reader_eof_when_full() -> None: + loop = asyncio.get_event_loop() + protocol = BaseProtocol(loop=loop) + protocol.transport = asyncio.Transport() + stream = 
streams.StreamReader(protocol, 1024, loop=loop) + + data_len = stream._high_water + 1 + stream.feed_data(b"0" * data_len) + assert protocol._reading_paused + stream.feed_eof() + assert not protocol._reading_paused From 8aaaba3ec798327ab5ab52c977fb7395b56c54a4 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Wed, 18 Dec 2024 09:35:05 -1000 Subject: [PATCH 1087/1511] Release 3.11.11 (#10181) --- CHANGES.rst | 74 +++++++++++++++++++++++++++++++++++++++ CHANGES/10093.bugfix.rst | 2 -- CHANGES/10099.bugfix.rst | 1 - CHANGES/10149.misc.rst | 4 --- CHANGES/10154.bugfix.rst | 1 - CHANGES/10156.feature.rst | 3 -- CHANGES/10169.bugfix.rst | 3 -- aiohttp/__init__.py | 2 +- 8 files changed, 75 insertions(+), 15 deletions(-) delete mode 100644 CHANGES/10093.bugfix.rst delete mode 100644 CHANGES/10099.bugfix.rst delete mode 100644 CHANGES/10149.misc.rst delete mode 100644 CHANGES/10154.bugfix.rst delete mode 100644 CHANGES/10156.feature.rst delete mode 100644 CHANGES/10169.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index 586d70c9697..b07cec6a093 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,80 @@ .. towncrier release notes start +3.11.11 (2024-12-18) +==================== + +Bug fixes +--------- + +- Updated :py:meth:`~aiohttp.ClientSession.request` to reuse the ``quote_cookie`` setting from ``ClientSession._cookie_jar`` when processing cookies parameter. + -- by :user:`Cycloctane`. + + + *Related issues and pull requests on GitHub:* + :issue:`10093`. + + + +- Fixed type of ``SSLContext`` for some static type checkers (e.g. pyright). + + + *Related issues and pull requests on GitHub:* + :issue:`10099`. + + + +- Updated :meth:`aiohttp.web.StreamResponse.write` annotation to also allow :class:`bytearray` and :class:`memoryview` as inputs -- by :user:`cdce8p`. + + + *Related issues and pull requests on GitHub:* + :issue:`10154`. 
+ + + +- Fixed a hang where a connection previously used for a streaming + download could be returned to the pool in a paused state. + -- by :user:`javitonino`. + + + *Related issues and pull requests on GitHub:* + :issue:`10169`. + + + + +Features +-------- + +- Enabled ALPN on default SSL contexts. This improves compatibility with some + proxies which don't work without this extension. + -- by :user:`Cycloctane`. + + + *Related issues and pull requests on GitHub:* + :issue:`10156`. + + + + +Miscellaneous internal changes +------------------------------ + +- Fixed an infinite loop that can occur when using aiohttp in combination + with `async-solipsism`_ -- by :user:`bmerry`. + + .. _async-solipsism: https://github.com/bmerry/async-solipsism + + + *Related issues and pull requests on GitHub:* + :issue:`10149`. + + + + +---- + + 3.11.10 (2024-12-05) ==================== diff --git a/CHANGES/10093.bugfix.rst b/CHANGES/10093.bugfix.rst deleted file mode 100644 index 4d7076115d9..00000000000 --- a/CHANGES/10093.bugfix.rst +++ /dev/null @@ -1,2 +0,0 @@ -Update :py:meth:`~aiohttp.ClientSession.request` to reuse the ``quote_cookie`` setting from ``ClientSession._cookie_jar`` when processing cookies parameter. --- by :user:`Cycloctane`. diff --git a/CHANGES/10099.bugfix.rst b/CHANGES/10099.bugfix.rst deleted file mode 100644 index 718420a6ad5..00000000000 --- a/CHANGES/10099.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed type of ``SSLContext`` for some static type checkers (e.g. pyright). diff --git a/CHANGES/10149.misc.rst b/CHANGES/10149.misc.rst deleted file mode 100644 index 61765a50fcf..00000000000 --- a/CHANGES/10149.misc.rst +++ /dev/null @@ -1,4 +0,0 @@ -Fixed an infinite loop that can occur when using aiohttp in combination -with `async-solipsism`_ -- by :user:`bmerry`. - -.. 
_async-solipsism: https://github.com/bmerry/async-solipsism diff --git a/CHANGES/10154.bugfix.rst b/CHANGES/10154.bugfix.rst deleted file mode 100644 index 382d9e56e6c..00000000000 --- a/CHANGES/10154.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Updated :meth:`aiohttp.web.StreamResponse.write` annotation to also allow :class:`bytearray` and :class:`memoryview` as inputs -- by :user:`cdce8p`. diff --git a/CHANGES/10156.feature.rst b/CHANGES/10156.feature.rst deleted file mode 100644 index 0ff6b6b8bd8..00000000000 --- a/CHANGES/10156.feature.rst +++ /dev/null @@ -1,3 +0,0 @@ -Enabled ALPN on default SSL contexts. This improves compatibility with some -proxies which don't work without this extension. --- by :user:`Cycloctane`. diff --git a/CHANGES/10169.bugfix.rst b/CHANGES/10169.bugfix.rst deleted file mode 100644 index 32e06783856..00000000000 --- a/CHANGES/10169.bugfix.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fixed a hang where a connection previously used for a streaming -download could be returned to the pool in a paused state. --- by :user:`javitonino`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index f4d732b8674..b9af3f829f7 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.11.dev0" +__version__ = "3.11.11" from typing import TYPE_CHECKING, Tuple From 47b8b576133212b9e94a5c98786f279d010d85f0 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Wed, 18 Dec 2024 11:54:28 -1000 Subject: [PATCH 1088/1511] Increment version to 3.11.12.dev0 (#10183) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index b9af3f829f7..0c3c65fdbe1 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.11" +__version__ = "3.11.12.dev0" from typing import TYPE_CHECKING, Tuple From a438bd3afc7834d33016dee747e29732fe0da841 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 22 Dec 2024 22:41:28 +0000 Subject: [PATCH 1089/1511] Bump actions/cache from 4.1.2 to 4.2.0 (#10190) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/cache](https://github.com/actions/cache) from 4.1.2 to 4.2.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/actions/cache/releases">actions/cache's releases</a>.</em></p> <blockquote> <h2>v4.2.0</h2> <h2>⚠️ Important Changes</h2> <p>The cache backend service has been rewritten from the ground up for improved performance and reliability. <a href="https://github.com/actions/cache">actions/cache</a> now integrates with the new cache service (v2) APIs.</p> <p>The new service will gradually roll out as of <strong>February 1st, 2025</strong>. The legacy service will also be sunset on the same date. Changes in these release are <strong>fully backward compatible</strong>.</p> <p><strong>We are deprecating some versions of this action</strong>. 
We recommend upgrading to version <code>v4</code> or <code>v3</code> as soon as possible before <strong>February 1st, 2025.</strong> (Upgrade instructions below).</p> <p>If you are using pinned SHAs, please use the SHAs of versions <code>v4.2.0</code> or <code>v3.4.0</code></p> <p>If you do not upgrade, all workflow runs using any of the deprecated <a href="https://github.com/actions/cache">actions/cache</a> will fail.</p> <p>Upgrading to the recommended versions will not break your workflows.</p> <p>Read more about the change & access the migration guide: <a href="https://github.com/actions/cache/discussions/1510">reference to the announcement</a>.</p> <h3>Minor changes</h3> <p>Minor and patch version updates for these dependencies:</p> <ul> <li><code>@​actions/core</code>: <code>1.11.1</code></li> <li><code>@​actions/io</code>: <code>1.1.3</code></li> <li><code>@​vercel/ncc</code>: <code>0.38.3</code></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/actions/cache/compare/v4...v4.2.0">https://github.com/actions/cache/compare/v4...v4.2.0</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/actions/cache/blob/main/RELEASES.md">actions/cache's changelog</a>.</em></p> <blockquote> <h3>4.2.0</h3> <p>TLDR; The cache backend service has been rewritten from the ground up for improved performance and reliability. <a href="https://github.com/actions/cache">actions/cache</a> now integrates with the new cache service (v2) APIs.</p> <p>The new service will gradually roll out as of <strong>February 1st, 2025</strong>. The legacy service will also be sunset on the same date. Changes in these release are <strong>fully backward compatible</strong>.</p> <p><strong>We are deprecating some versions of this action</strong>. 
We recommend upgrading to version <code>v4</code> or <code>v3</code> as soon as possible before <strong>February 1st, 2025.</strong> (Upgrade instructions below).</p> <p>If you are using pinned SHAs, please use the SHAs of versions <code>v4.2.0</code> or <code>v3.4.0</code></p> <p>If you do not upgrade, all workflow runs using any of the deprecated <a href="https://github.com/actions/cache">actions/cache</a> will fail.</p> <p>Upgrading to the recommended versions will not break your workflows.</p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/actions/cache/commit/1bd1e32a3bdc45362d1e726936510720a7c30a57"><code>1bd1e32</code></a> Merge pull request <a href="https://redirect.github.com/actions/cache/issues/1509">#1509</a> from actions/Link-/cache-4.2.0</li> <li><a href="https://github.com/actions/cache/commit/882d7ced4c9b8af53ed67bfa36ee600195e62940"><code>882d7ce</code></a> Add 3.4.0 release notes</li> <li><a href="https://github.com/actions/cache/commit/f2695d7a42dd0d7ad21976ed7ab91dbdbc3c2216"><code>f2695d7</code></a> Rerun CI</li> <li><a href="https://github.com/actions/cache/commit/f46ceeb60d3da27b7cbac269520a4b1bfb15f199"><code>f46ceeb</code></a> Add licensed output</li> <li><a href="https://github.com/actions/cache/commit/e6f5858749f178cf4a01b0d777917ba913710560"><code>e6f5858</code></a> Add lodash to list of reviewed licenses</li> <li><a href="https://github.com/actions/cache/commit/4ae6f21c0d820c73db2589af6983e001d8c19c1d"><code>4ae6f21</code></a> Add reviewed licensed packages</li> <li><a href="https://github.com/actions/cache/commit/c16df86586baf94b0deaa873e22eb739c59e5b15"><code>c16df86</code></a> Add licensed output</li> <li><a href="https://github.com/actions/cache/commit/b109c12f3bdd6fb6a7dd42b202df645243efbd2f"><code>b109c12</code></a> Upgrade <code>@​actions/core</code> to 1.11.1 and other deps</li> <li><a 
href="https://github.com/actions/cache/commit/b7d227d702af06e6be1fa308c014252c10bbc267"><code>b7d227d</code></a> Upgrade <code>@​vercel/ncc</code> to 0.38.3</li> <li><a href="https://github.com/actions/cache/commit/faf639248d95d2a6c5884b8e6588e233eb3b10a0"><code>faf6392</code></a> Update RELEASES.md</li> <li>Additional commits viewable in <a href="https://github.com/actions/cache/compare/v4.1.2...v4.2.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/cache&package-manager=github_actions&previous-version=4.1.2&new-version=4.2.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 95238b93687..d5e119b779d 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -47,7 +47,7 @@ jobs: with: python-version: 3.11 - name: Cache PyPI - uses: actions/cache@v4.1.2 + uses: actions/cache@v4.2.0 with: key: pip-lint-${{ hashFiles('requirements/*.txt') }} path: ~/.cache/pip @@ -99,7 +99,7 @@ jobs: with: submodules: true - name: Cache llhttp generated files - uses: actions/cache@v4.1.2 + uses: actions/cache@v4.2.0 id: cache with: key: llhttp-${{ hashFiles('vendor/llhttp/package*.json', 'vendor/llhttp/src/**/*') }} @@ -163,7 +163,7 @@ jobs: echo "dir=$(pip cache dir)" >> "${GITHUB_OUTPUT}" shell: bash - name: Cache PyPI - uses: actions/cache@v4.1.2 + uses: actions/cache@v4.2.0 with: key: pip-ci-${{ runner.os }}-${{ matrix.pyver }}-${{ matrix.no-extensions }}-${{ hashFiles('requirements/*.txt') }} path: ${{ steps.pip-cache.outputs.dir }} From d3ca8b1f3245fda82dcaf9f0d8d8cb67dad1d9aa Mon Sep 17 00:00:00 2001 From: "patchback[bot]" 
<45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 22 Dec 2024 23:34:39 +0000 Subject: [PATCH 1090/1511] [PR #10192/693e57f6 backport][3.12] Bump Dependabot updates to Python 3.10 (#10193) **This is a backport of PR #10192 as merged into master (693e57f602a2d455a72582197987f48042464e55).** Co-authored-by: Sam Bull <git@sambull.org> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/cython.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- requirements/lint.txt | 2 +- requirements/multidict.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 10 files changed, 10 insertions(+), 10 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 1e7c0bbe6c1..0f048bdd859 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile with Python 3.8 +# This file is autogenerated by pip-compile with Python 3.10 # by the following command: # # pip-compile --allow-unsafe --output-file=requirements/base.txt --strip-extras requirements/base.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 740e3e2d559..3f211a4d386 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile with python 3.8 +# This file is autogenerated by pip-compile with python 3.10 # To update, run: # # pip-compile --allow-unsafe --output-file=requirements/constraints.txt --resolver=backtracking --strip-extras requirements/constraints.in diff --git a/requirements/cython.txt b/requirements/cython.txt index f67cc903a0b..7e392bddf91 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile with python 3.8 +# This file is autogenerated by pip-compile with python 3.10 # by the following command: # # pip-compile --allow-unsafe 
--output-file=requirements/cython.txt --resolver=backtracking --strip-extras requirements/cython.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 72e49ed9edf..dcbbd95b2bd 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile with python 3.8 +# This file is autogenerated by pip-compile with python 3.10 # To update, run: # # pip-compile --allow-unsafe --output-file=requirements/dev.txt --resolver=backtracking --strip-extras requirements/dev.in diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 892ae6b164c..d3b456f3274 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile with Python 3.8 +# This file is autogenerated by pip-compile with Python 3.10 # by the following command: # # pip-compile --allow-unsafe --output-file=requirements/doc-spelling.txt --strip-extras requirements/doc-spelling.in diff --git a/requirements/doc.txt b/requirements/doc.txt index f7f98330e1f..a7c6e8cdfc8 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile with python 3.8 +# This file is autogenerated by pip-compile with python 3.10 # To update, run: # # pip-compile --allow-unsafe --output-file=requirements/doc.txt --resolver=backtracking --strip-extras requirements/doc.in diff --git a/requirements/lint.txt b/requirements/lint.txt index d7d97277bce..9e9160fbde9 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile with python 3.8 +# This file is autogenerated by pip-compile with python 3.10 # by the following command: # # pip-compile --allow-unsafe --output-file=requirements/lint.txt --resolver=backtracking --strip-extras requirements/lint.in diff --git a/requirements/multidict.txt b/requirements/multidict.txt index b8b44428920..e9f433aa07d 100644 --- 
a/requirements/multidict.txt +++ b/requirements/multidict.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile with python 3.8 +# This file is autogenerated by pip-compile with python 3.10 # by the following command: # # pip-compile --allow-unsafe --output-file=requirements/multidict.txt --resolver=backtracking --strip-extras requirements/multidict.in diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index cf7f0e396f6..2bf55e750fe 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile with Python 3.8 +# This file is autogenerated by pip-compile with Python 3.10 # by the following command: # # pip-compile --allow-unsafe --output-file=requirements/runtime-deps.txt --strip-extras requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 33510f18682..2c6677d1cb3 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile with python 3.8 +# This file is autogenerated by pip-compile with python 3.10 # by the following command: # # pip-compile --allow-unsafe --output-file=requirements/test.txt --resolver=backtracking --strip-extras requirements/test.in From 65ac69a7ed75917d64c5e38fdd9b192058f5d5e3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 23 Dec 2024 10:38:12 +0000 Subject: [PATCH 1091/1511] Bump six from 1.16.0 to 1.17.0 (#10204) Bumps [six](https://github.com/benjaminp/six) from 1.16.0 to 1.17.0. 
<details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/benjaminp/six/blob/main/CHANGES">six's changelog</a>.</em></p> <blockquote> <h2>1.17.0</h2> <ul> <li> <p>Pull request <a href="https://redirect.github.com/benjaminp/six/issues/388">#388</a>: Remove <code>URLopener</code> and <code>FancyURLopener</code> classes from <code>urllib.request</code> when running on Python 3.14 or greater.</p> </li> <li> <p>Pull request <a href="https://redirect.github.com/benjaminp/six/issues/365">#365</a>, issue <a href="https://redirect.github.com/benjaminp/six/issues/283">#283</a>: <code>six.moves.UserDict</code> now points to <code>UserDict.IterableUserDict</code> instead of <code>UserDict.UserDict</code> on Python 2.</p> </li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/benjaminp/six/commit/ebd9b3af90247b8858d415a05e96e9ee61e48d07"><code>ebd9b3a</code></a> six 1.17.0</li> <li><a href="https://github.com/benjaminp/six/commit/40d248e516ae9bb32221a417de49a3d2fd9e39c1"><code>40d248e</code></a> Expunge travis.</li> <li><a href="https://github.com/benjaminp/six/commit/dd7940e4b8760c620e61dd0e99ec0e60ee759fc8"><code>dd7940e</code></a> Add PyPI publication workflow. (<a href="https://redirect.github.com/benjaminp/six/issues/390">#390</a>)</li> <li><a href="https://github.com/benjaminp/six/commit/8cb688f32f24b6ebe94938b15e92ec5c75dd8f12"><code>8cb688f</code></a> Update copyright years.</li> <li><a href="https://github.com/benjaminp/six/commit/86f89c5da1488463a00bd090c8f9e29aeabf6a10"><code>86f89c5</code></a> Add a GitHub actions CI workflow. 
(<a href="https://redirect.github.com/benjaminp/six/issues/389">#389</a>)</li> <li><a href="https://github.com/benjaminp/six/commit/06430b9fc66e3912bea2c0a2d13b22d7054a9ac7"><code>06430b9</code></a> Make test_getoutput work on Windows.</li> <li><a href="https://github.com/benjaminp/six/commit/ceddaf2d0a45cf3a19ce0ac63a24ca4ffc76ba7c"><code>ceddaf2</code></a> Add a changelog for <a href="https://redirect.github.com/benjaminp/six/issues/314">#314</a>.</li> <li><a href="https://github.com/benjaminp/six/commit/9a05aab157c79647aa11c49f31a6bff5b18404a5"><code>9a05aab</code></a> Python 3.14 removed the URLopener and FancyURLopener classes from urllib.req...</li> <li><a href="https://github.com/benjaminp/six/commit/c1b416f24de52ebd8eaed3e379ab85ec9c3e935b"><code>c1b416f</code></a> Fix deprecation warning from setuptools (<a href="https://redirect.github.com/benjaminp/six/issues/382">#382</a>)</li> <li><a href="https://github.com/benjaminp/six/commit/02c3bcab0b94eb7a86f6606bc8b02e3ee0a72860"><code>02c3bca</code></a> tkinter.tix was removed from Python 3.13, skip the test (<a href="https://redirect.github.com/benjaminp/six/issues/377">#377</a>)</li> <li>Additional commits viewable in <a href="https://github.com/benjaminp/six/compare/1.16.0...1.17.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=six&package-manager=pip&previous-version=1.16.0&new-version=1.17.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 44 ++++++++---------------------------- requirements/dev.txt | 25 ++++++++------------ requirements/lint.txt | 7 +++--- requirements/test.txt | 14 +++++------- 4 files changed, 29 insertions(+), 61 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 3f211a4d386..1b0cf42c3e7 100644 --- a/requirements/constraints.txt +++ 
b/requirements/constraints.txt @@ -18,8 +18,6 @@ alabaster==1.0.0 # via sphinx annotated-types==0.7.0 # via pydantic -apipkg==1.5 - # via execnet async-timeout==4.0.3 ; python_version < "3.11" # via # -r requirements/runtime-deps.in @@ -69,12 +67,12 @@ docutils==0.20.1 # via sphinx exceptiongroup==1.2.2 # via pytest +execnet==2.1.1 + # via pytest-xdist filelock==3.16.1 # via # pytest-codspeed # virtualenv -execnet==2.1.1 - # via pytest-xdist freezegun==1.5.1 # via # -r requirements/lint.in @@ -96,12 +94,6 @@ idna==3.3 # yarl imagesize==1.4.1 # via sphinx -importlib-metadata==8.5.0 - # via - # build - # sphinx -importlib-resources==6.4.5 - # via towncrier incremental==24.7.2 # via towncrier iniconfig==2.0.0 @@ -148,11 +140,7 @@ propcache==0.2.0 # -r requirements/runtime-deps.in # yarl proxy-py==2.4.9 - # via - # -r requirements/lint.in - # -r requirements/test.in -py==1.11.0 - # via pytest + # via -r requirements/test.in pycares==4.4.0 # via aiodns pycparser==2.22 @@ -190,29 +178,17 @@ pytest-codspeed==3.0.0 pytest-cov==5.0.0 # via -r requirements/test.in pytest-mock==3.14.0 - # via -r requirements/test.in -pytest-xdist==3.6.1 - # via -r requirements/test.txt -python-dateutil==2.8.2 - # via freezegun -python-on-whales==0.71.0 # via # -r requirements/lint.in # -r requirements/test.in -pytest-cov==5.0.0 +pytest-xdist==3.6.1 # via -r requirements/test.in -pytest-mock==3.14.0 - # via - # -r requirements/lint.in - # -r requirements/test.in python-dateutil==2.9.0.post0 # via freezegun python-on-whales==0.73.0 # via # -r requirements/lint.in # -r requirements/test.in -pytz==2024.2 - # via babel pyyaml==6.0.2 # via pre-commit re-assert==1.1.0 @@ -225,12 +201,14 @@ requests==2.32.3 # python-on-whales # sphinx rich==13.9.3 - # via typer + # via + # pytest-codspeed + # typer setuptools-git==1.2 # via -r requirements/test.in shellingham==1.5.4 # via typer -six==1.16.0 +six==1.17.0 # via python-dateutil slotscheck==0.19.1 # via -r requirements/lint.in @@ -267,6 +245,7 @@ 
tomli==2.0.2 # pip-tools # pytest # slotscheck + # sphinx # towncrier towncrier==23.11.0 # via @@ -282,7 +261,6 @@ typer==0.12.5 # via python-on-whales typing-extensions==4.12.2 # via - # annotated-types # multidict # mypy # pydantic @@ -308,10 +286,6 @@ wheel==0.44.0 # via pip-tools yarl==1.18.3 # via -r requirements/runtime-deps.in -zipp==3.20.2 - # via - # importlib-metadata - # importlib-resources # The following packages are considered to be unsafe in a requirements file: pip==24.3.1 diff --git a/requirements/dev.txt b/requirements/dev.txt index dcbbd95b2bd..67f23c108a0 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -65,6 +65,8 @@ docutils==0.20.1 # via sphinx exceptiongroup==1.2.2 # via pytest +execnet==2.1.1 + # via pytest-xdist filelock==3.16.1 # via # pytest-codspeed @@ -90,12 +92,6 @@ idna==3.4 # yarl imagesize==1.4.1 # via sphinx -importlib-metadata==8.5.0 - # via - # build - # sphinx -importlib-resources==6.4.5 - # via towncrier incremental==24.7.2 # via towncrier iniconfig==2.0.0 @@ -169,6 +165,7 @@ pytest==8.3.3 # pytest-codspeed # pytest-cov # pytest-mock + # pytest-xdist pytest-codspeed==3.0.0 # via # -r requirements/lint.in @@ -179,14 +176,14 @@ pytest-mock==3.14.0 # via # -r requirements/lint.in # -r requirements/test.in +pytest-xdist==3.6.1 + # via -r requirements/test.in python-dateutil==2.9.0.post0 # via freezegun python-on-whales==0.73.0 # via # -r requirements/lint.in # -r requirements/test.in -pytz==2024.2 - # via babel pyyaml==6.0.2 # via pre-commit re-assert==1.1.0 @@ -199,12 +196,14 @@ requests==2.32.3 # python-on-whales # sphinx rich==13.9.3 - # via typer + # via + # pytest-codspeed + # typer setuptools-git==1.2 # via -r requirements/test.in shellingham==1.5.4 # via typer -six==1.16.0 +six==1.17.0 # via python-dateutil slotscheck==0.19.1 # via -r requirements/lint.in @@ -238,6 +237,7 @@ tomli==2.0.2 # pip-tools # pytest # slotscheck + # sphinx # towncrier towncrier==23.11.0 # via @@ -253,7 +253,6 @@ typer==0.12.5 # via 
python-on-whales typing-extensions==4.12.2 # via - # annotated-types # multidict # mypy # pydantic @@ -279,10 +278,6 @@ wheel==0.44.0 # via pip-tools yarl==1.18.3 # via -r requirements/runtime-deps.in -zipp==3.20.2 - # via - # importlib-metadata - # importlib-resources # The following packages are considered to be unsafe in a requirements file: pip==24.3.1 diff --git a/requirements/lint.txt b/requirements/lint.txt index 9e9160fbde9..4b835224534 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -91,10 +91,12 @@ pyyaml==6.0.2 requests==2.32.3 # via python-on-whales rich==13.9.3 - # via typer + # via + # pytest-codspeed + # typer shellingham==1.5.4 # via typer -six==1.16.0 +six==1.17.0 # via python-dateutil slotscheck==0.19.1 # via -r requirements/lint.in @@ -111,7 +113,6 @@ typer==0.12.5 # via python-on-whales typing-extensions==4.12.2 # via - # annotated-types # mypy # pydantic # pydantic-core diff --git a/requirements/test.txt b/requirements/test.txt index 2c6677d1cb3..9eadb32c016 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -82,8 +82,6 @@ propcache==0.2.0 # yarl proxy-py==2.4.9 # via -r requirements/test.in -py==1.11.0 - # via pytest pycares==4.4.0 # via aiodns pycparser==2.22 @@ -100,10 +98,9 @@ pytest==8.3.3 # pytest-codspeed # pytest-cov # pytest-mock -pytest-codspeed==3.0.0 - # via - # -r requirements/test.in # pytest-xdist +pytest-codspeed==3.0.0 + # via -r requirements/test.in pytest-cov==5.0.0 # via -r requirements/test.in pytest-mock==3.14.0 @@ -121,12 +118,14 @@ regex==2024.9.11 requests==2.32.3 # via python-on-whales rich==13.9.3 - # via typer + # via + # pytest-codspeed + # typer setuptools-git==1.2 # via -r requirements/test.in shellingham==1.5.4 # via typer -six==1.16.0 +six==1.17.0 # via python-dateutil tomli==2.0.2 # via @@ -141,7 +140,6 @@ typer==0.12.5 # via python-on-whales typing-extensions==4.12.2 # via - # annotated-types # multidict # mypy # pydantic From 41e224ee5805ab31ba86b73b9180e294c1e8cea3 Mon Sep 17 
00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 23 Dec 2024 10:38:58 +0000 Subject: [PATCH 1092/1511] Bump tqdm from 4.66.5 to 4.67.1 (#10205) Bumps [tqdm](https://github.com/tqdm/tqdm) from 4.66.5 to 4.67.1. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/tqdm/tqdm/releases">tqdm's releases</a>.</em></p> <blockquote> <h2>tqdm v4.67.1 stable</h2> <ul> <li>fix <code>gui</code> (<code>matplotlib</code> syntax) (<a href="https://redirect.github.com/tqdm/tqdm/issues/1629">#1629</a>)</li> <li>misc test & framework updates <ul> <li>bump <code>pytest-asyncio</code> (<a href="https://redirect.github.com/tqdm/tqdm/issues/1630">#1630</a>)</li> <li>fix <code>codecov</code> rate limit</li> <li>fix pybuild</li> <li>sync dependencies</li> </ul> </li> </ul> <h2>tqdm v4.67.0 stable</h2> <ul> <li><code>contrib.discord</code>: replace <code>disco-py</code> with <code>requests</code> (<a href="https://redirect.github.com/tqdm/tqdm/issues/1536">#1536</a>)</li> </ul> <h2>tqdm v4.66.6 stable</h2> <ul> <li>cli: zip-safe <code>--manpath</code>, <code>--comppath</code> (<a href="https://redirect.github.com/tqdm/tqdm/issues/1627">#1627</a>)</li> <li>misc framework updates (<a href="https://redirect.github.com/tqdm/tqdm/issues/1627">#1627</a>) <ul> <li>fix <code>pytest</code> <code>DeprecationWarning</code></li> <li>fix <code>snapcraft</code> build</li> <li>fix <code>nbval</code> <code>DeprecationWarning</code></li> <li>update & tidy workflows</li> <li>bump pre-commit</li> <li>docs: update URLs</li> </ul> </li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/tqdm/tqdm/commit/0ed5d7f18fa3153834cbac0aa57e8092b217cc16"><code>0ed5d7f</code></a> bump version, merge pull request <a href="https://redirect.github.com/tqdm/tqdm/issues/1629">#1629</a> from tqdm/fix-gui</li> <li><a 
href="https://github.com/tqdm/tqdm/commit/a2d5f1c9d1cbdbcf56f52dc4365ea4124e3e33f7"><code>a2d5f1c</code></a> tests: fix codecov rate limit</li> <li><a href="https://github.com/tqdm/tqdm/commit/cac7150d7c8a650c7e76004cd7f8643990932c7f"><code>cac7150</code></a> tests: bump pytest-asyncio</li> <li><a href="https://github.com/tqdm/tqdm/commit/6338f6216996918fdc9c9a73bf095acac54ce0bb"><code>6338f62</code></a> deps: fix pybuild</li> <li><a href="https://github.com/tqdm/tqdm/commit/342b15ed68ae7c5ec1082cadb1b563c7dfde610f"><code>342b15e</code></a> tests: sync deps</li> <li><a href="https://github.com/tqdm/tqdm/commit/c66458d9ac2ad096937406f79d105af891cee6e7"><code>c66458d</code></a> gui: fix matplotlib</li> <li><a href="https://github.com/tqdm/tqdm/commit/35a6ee9a4527bab5c0c7234531269e0c7fd0f2fd"><code>35a6ee9</code></a> bump version, merge pull request <a href="https://redirect.github.com/tqdm/tqdm/issues/1536">#1536</a> from guigoruiz1</li> <li><a href="https://github.com/tqdm/tqdm/commit/8aa9470e485a90679936d3781a4f953cf5afa8f4"><code>8aa9470</code></a> add discord requests dep</li> <li><a href="https://github.com/tqdm/tqdm/commit/1db24b4ff442c43752cf56a55b1782998c76801c"><code>1db24b4</code></a> better user-agent</li> <li><a href="https://github.com/tqdm/tqdm/commit/61365d8321ae4ca433d2c6cda770a73a8e0e62cb"><code>61365d8</code></a> handle rate limit</li> <li>Additional commits viewable in <a href="https://github.com/tqdm/tqdm/compare/v4.66.5...v4.67.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=tqdm&package-manager=pip&previous-version=4.66.5&new-version=4.67.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. 
You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 1b0cf42c3e7..1fbfb25a0c7 100644 --- a/requirements/constraints.txt +++ 
b/requirements/constraints.txt @@ -251,7 +251,7 @@ towncrier==23.11.0 # via # -r requirements/doc.in # sphinxcontrib-towncrier -tqdm==4.66.5 +tqdm==4.67.1 # via python-on-whales trustme==1.1.0 ; platform_machine != "i686" # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 67f23c108a0..fa2b7d93f4b 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -243,7 +243,7 @@ towncrier==23.11.0 # via # -r requirements/doc.in # sphinxcontrib-towncrier -tqdm==4.66.5 +tqdm==4.67.1 # via python-on-whales trustme==1.1.0 ; platform_machine != "i686" # via diff --git a/requirements/lint.txt b/requirements/lint.txt index 4b835224534..d8c44877560 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -105,7 +105,7 @@ tomli==2.0.2 # mypy # pytest # slotscheck -tqdm==4.66.5 +tqdm==4.67.1 # via python-on-whales trustme==1.1.0 # via -r requirements/lint.in diff --git a/requirements/test.txt b/requirements/test.txt index 9eadb32c016..7e866e07dd8 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -132,7 +132,7 @@ tomli==2.0.2 # coverage # mypy # pytest -tqdm==4.66.5 +tqdm==4.67.1 # via python-on-whales trustme==1.1.0 ; platform_machine != "i686" # via -r requirements/test.in From 5df6107bfca686e4baa29056ca0e61a4810a8232 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 23 Dec 2024 11:52:07 +0000 Subject: [PATCH 1093/1511] Bump tomli from 2.0.2 to 2.2.1 (#10213) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [tomli](https://github.com/hukkin/tomli) from 2.0.2 to 2.2.1. 
<details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/hukkin/tomli/blob/master/CHANGELOG.md">tomli's changelog</a>.</em></p> <blockquote> <h2>2.2.1</h2> <ul> <li>Fixed <ul> <li>Don't attempt to compile binary wheels for Python 3.8, 3.9 and 3.10 where cibuildwheel depends on a conflicting Tomli version</li> </ul> </li> </ul> <h2>2.2.0</h2> <ul> <li>Added <ul> <li>mypyc generated binary wheels for common platforms</li> </ul> </li> </ul> <h2>2.1.0</h2> <ul> <li>Deprecated <ul> <li>Instantiating <code>TOMLDecodeError</code> with free-form arguments. <code>msg</code>, <code>doc</code> and <code>pos</code> arguments should be given.</li> </ul> </li> <li>Added <ul> <li><code>msg</code>, <code>doc</code>, <code>pos</code>, <code>lineno</code> and <code>colno</code> attributes to <code>TOMLDecodeError</code></li> </ul> </li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/hukkin/tomli/commit/73c3d102eb81fe0d2b87f905df4f740f8878d8da"><code>73c3d10</code></a> Bump version: 2.2.0 → 2.2.1</li> <li><a href="https://github.com/hukkin/tomli/commit/7c2368d2cb6c2a49b0fca08765fbba65686efd88"><code>7c2368d</code></a> Don't build wheels for Python versions requiring tomli</li> <li><a href="https://github.com/hukkin/tomli/commit/c48e4e111c8558abcfe141f19fcaada3365f836c"><code>c48e4e1</code></a> Bump version: 2.1.0 → 2.2.0</li> <li><a href="https://github.com/hukkin/tomli/commit/76047414ff1a7c150bb10f0a171da13c5e6af743"><code>7604741</code></a> Update README</li> <li><a href="https://github.com/hukkin/tomli/commit/0724e2ab1858da7f5e05a9bffdb24c33589d951c"><code>0724e2a</code></a> Annotate global constants as Final to speed up compiled code</li> <li><a href="https://github.com/hukkin/tomli/commit/149547d2ec5b7d1badf8d0c1ab603735fa9b5a70"><code>149547d</code></a> Create binary wheels with mypyc (<a href="https://redirect.github.com/hukkin/tomli/issues/242">#242</a>)</li> <li><a 
href="https://github.com/hukkin/tomli/commit/443a0c1bc5da39b7ed84306912ee1900e6b72e2f"><code>443a0c1</code></a> pre-commit autoupdate and autofix</li> <li><a href="https://github.com/hukkin/tomli/commit/48461cfbaf0775c9025720f80d8afa4fa50c0183"><code>48461cf</code></a> Merge pull request <a href="https://redirect.github.com/hukkin/tomli/issues/240">#240</a> from hukkin/version-2.1.0</li> <li><a href="https://github.com/hukkin/tomli/commit/d6e045b1e6b36c56271d9f003794d915d713f962"><code>d6e045b</code></a> Bump version: 2.0.2 → 2.1.0</li> <li><a href="https://github.com/hukkin/tomli/commit/d1d6a8571b06b18005af8bf9078c1d37de5353e1"><code>d1d6a85</code></a> Add attributes to TOMLDecodeError. Deprecate free-form <code>__init__</code> args (<a href="https://redirect.github.com/hukkin/tomli/issues/238">#238</a>)</li> <li>Additional commits viewable in <a href="https://github.com/hukkin/tomli/compare/2.0.2...2.2.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=tomli&package-manager=pip&previous-version=2.0.2&new-version=2.2.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 13 ++----------- requirements/doc.txt | 13 ++----------- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 8 insertions(+), 26 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 1fbfb25a0c7..64e8a4ae474 100644 --- a/requirements/constraints.txt +++ 
b/requirements/constraints.txt @@ -235,7 +235,7 @@ sphinxcontrib-spelling==8.0.0 ; platform_system != "Windows" # via -r requirements/doc-spelling.in sphinxcontrib-towncrier==0.4.0a0 # via -r requirements/doc.in -tomli==2.0.2 +tomli==2.2.1 # via # build # cherry-picker diff --git a/requirements/dev.txt b/requirements/dev.txt index fa2b7d93f4b..2df3b5b583b 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -227,7 +227,7 @@ sphinxcontrib-serializinghtml==2.0.0 # via sphinx sphinxcontrib-towncrier==0.4.0a0 # via -r requirements/doc.in -tomli==2.0.2 +tomli==2.2.1 # via # build # cherry-picker diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index d3b456f3274..933ef575ebb 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -22,10 +22,6 @@ idna==3.4 # via requests imagesize==1.4.1 # via sphinx -importlib-metadata==8.5.0 - # via sphinx -importlib-resources==6.4.5 - # via towncrier incremental==24.7.2 # via towncrier jinja2==3.1.4 @@ -40,8 +36,6 @@ pyenchant==3.2.2 # via sphinxcontrib-spelling pygments==2.18.0 # via sphinx -pytz==2024.2 - # via babel requests==2.32.3 # via sphinx snowballstemmer==2.2.0 @@ -67,9 +61,10 @@ sphinxcontrib-spelling==8.0.0 ; platform_system != "Windows" # via -r requirements/doc-spelling.in sphinxcontrib-towncrier==0.4.0a0 # via -r requirements/doc.in -tomli==2.0.2 +tomli==2.2.1 # via # incremental + # sphinx # towncrier towncrier==23.11.0 # via @@ -77,10 +72,6 @@ towncrier==23.11.0 # sphinxcontrib-towncrier urllib3==2.2.3 # via requests -zipp==3.20.2 - # via - # importlib-metadata - # importlib-resources # The following packages are considered to be unsafe in a requirements file: setuptools==75.2.0 diff --git a/requirements/doc.txt b/requirements/doc.txt index a7c6e8cdfc8..68482c2c8db 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -22,10 +22,6 @@ idna==3.4 # via requests imagesize==1.4.1 # via sphinx -importlib-metadata==8.5.0 - # via sphinx 
-importlib-resources==6.4.5 - # via towncrier incremental==24.7.2 # via towncrier jinja2==3.1.4 @@ -38,8 +34,6 @@ packaging==24.1 # via sphinx pygments==2.18.0 # via sphinx -pytz==2024.2 - # via babel requests==2.32.3 # via sphinx snowballstemmer==2.2.0 @@ -62,9 +56,10 @@ sphinxcontrib-serializinghtml==2.0.0 # via sphinx sphinxcontrib-towncrier==0.4.0a0 # via -r requirements/doc.in -tomli==2.0.2 +tomli==2.2.1 # via # incremental + # sphinx # towncrier towncrier==23.11.0 # via @@ -72,10 +67,6 @@ towncrier==23.11.0 # sphinxcontrib-towncrier urllib3==2.2.3 # via requests -zipp==3.20.2 - # via - # importlib-metadata - # importlib-resources # The following packages are considered to be unsafe in a requirements file: setuptools==75.2.0 diff --git a/requirements/lint.txt b/requirements/lint.txt index d8c44877560..98f8b924153 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -100,7 +100,7 @@ six==1.17.0 # via python-dateutil slotscheck==0.19.1 # via -r requirements/lint.in -tomli==2.0.2 +tomli==2.2.1 # via # mypy # pytest diff --git a/requirements/test.txt b/requirements/test.txt index 7e866e07dd8..251077f4392 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -127,7 +127,7 @@ shellingham==1.5.4 # via typer six==1.17.0 # via python-dateutil -tomli==2.0.2 +tomli==2.2.1 # via # coverage # mypy From 69fdaf06c29ede5cf6967fcf85bab753acd6ee0b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 23 Dec 2024 12:10:37 +0000 Subject: [PATCH 1094/1511] Bump python-on-whales from 0.71.0 to 0.74.0 (#10206) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [python-on-whales](https://github.com/gabrieldemarmiesse/python-on-whales) from 0.71.0 to 0.74.0. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/gabrieldemarmiesse/python-on-whales/releases">python-on-whales's releases</a>.</em></p> <blockquote> <h2>v0.74.0</h2> <h2>Important breaking change:</h2> <p>The <code>python-on-whales</code> command is not available anymore, meaning that you cannot download the docker client binary automatically. You need to install it yourself by using the Docker installation guide: <a href="https://docs.docker.com/engine/install/">https://docs.docker.com/engine/install/</a> . Note that you need only the client, you can skip the installation of the full docker engine. For example, on Ubuntu, that means you just need to do</p> <pre><code>sudo apt-get install docker-ce-cli docker-buildx-plugin docker-compose-plugin </code></pre> <p>instead of</p> <pre><code>sudo apt-get install docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin </code></pre> <h2>What's Changed</h2> <ul> <li>Fix breakage of <code>ContainerCLI.create(env_files=path)</code> by <a href="https://github.com/LewisGaul"><code>@​LewisGaul</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/637">gabrieldemarmiesse/python-on-whales#637</a></li> <li>Remove download docker client functionality by <a href="https://github.com/LewisGaul"><code>@​LewisGaul</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/633">gabrieldemarmiesse/python-on-whales#633</a></li> <li>Update docs references to download-cli by <a href="https://github.com/LewisGaul"><code>@​LewisGaul</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/639">gabrieldemarmiesse/python-on-whales#639</a></li> <li>Display an error message if using the <code>python-on-whales</code> command by <a href="https://github.com/gabrieldemarmiesse"><code>@​gabrieldemarmiesse</code></a> in <a 
href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/640">gabrieldemarmiesse/python-on-whales#640</a></li> <li>Set <code>defer_build=True</code> pydantic config, improving import time significantly by <a href="https://github.com/LewisGaul"><code>@​LewisGaul</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/643">gabrieldemarmiesse/python-on-whales#643</a></li> <li>Add podman <code>is_infra</code> and <code>namespace</code> fields to <code>Container</code> object by <a href="https://github.com/LewisGaul"><code>@​LewisGaul</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/641">gabrieldemarmiesse/python-on-whales#641</a></li> <li>Fix Pod property type annotations to allow for missing fields in inspect output by <a href="https://github.com/LewisGaul"><code>@​LewisGaul</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/648">gabrieldemarmiesse/python-on-whales#648</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/gabrieldemarmiesse/python-on-whales/compare/v0.73.0...v0.74.0">https://github.com/gabrieldemarmiesse/python-on-whales/compare/v0.73.0...v0.74.0</a></p> <h2>v0.73.0</h2> <h2>What's Changed</h2> <ul> <li>:bug: Fix wrong links in <code>docker_client.md</code> by <a href="https://github.com/gabrieldemarmiesse"><code>@​gabrieldemarmiesse</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/613">gabrieldemarmiesse/python-on-whales#613</a></li> <li>Add bootstrap cli flag for create/inspect by <a href="https://github.com/fizzgig1888"><code>@​fizzgig1888</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/600">gabrieldemarmiesse/python-on-whales#600</a></li> <li>Add support for 'podman container init' by <a href="https://github.com/LewisGaul"><code>@​LewisGaul</code></a> in <a 
href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/615">gabrieldemarmiesse/python-on-whales#615</a></li> <li>Replace isort, black and flake8 with ruff by <a href="https://github.com/einarwar"><code>@​einarwar</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/617">gabrieldemarmiesse/python-on-whales#617</a></li> <li>Add Tuple[Union[str, int]] to ValidPortMapping by <a href="https://github.com/eclark0426"><code>@​eclark0426</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/619">gabrieldemarmiesse/python-on-whales#619</a></li> <li>Fix handling of 'podman pod create --share=...' by <a href="https://github.com/LewisGaul"><code>@​LewisGaul</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/620">gabrieldemarmiesse/python-on-whales#620</a></li> <li>Allow for multiple env-files by <a href="https://github.com/einarwar"><code>@​einarwar</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/616">gabrieldemarmiesse/python-on-whales#616</a></li> <li>Fix "Render docs" CI job by <a href="https://github.com/LewisGaul"><code>@​LewisGaul</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/628">gabrieldemarmiesse/python-on-whales#628</a></li> <li>Drop support for pydantic v1 by <a href="https://github.com/LewisGaul"><code>@​LewisGaul</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/630">gabrieldemarmiesse/python-on-whales#630</a></li> <li>add auto/light/dark mode for docs by <a href="https://github.com/s-light"><code>@​s-light</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/625">gabrieldemarmiesse/python-on-whales#625</a></li> <li>Update pod component to use generic Iterable and Mapping types by <a href="https://github.com/LewisGaul"><code>@​LewisGaul</code></a> in <a 
href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/599">gabrieldemarmiesse/python-on-whales#599</a></li> <li>Update container component to accept args as Iterable and Mapping by <a href="https://github.com/LewisGaul"><code>@​LewisGaul</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/627">gabrieldemarmiesse/python-on-whales#627</a></li> <li>added missing docstrings for docker.network.inspect(), docker.network.list(), docker.network.prune() by <a href="https://github.com/stock90975"><code>@​stock90975</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/631">gabrieldemarmiesse/python-on-whales#631</a></li> <li>Add "network exists" command by <a href="https://github.com/eclark0426"><code>@​eclark0426</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/632">gabrieldemarmiesse/python-on-whales#632</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/fizzgig1888"><code>@​fizzgig1888</code></a> made their first contribution in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/600">gabrieldemarmiesse/python-on-whales#600</a></li> <li><a href="https://github.com/eclark0426"><code>@​eclark0426</code></a> made their first contribution in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/619">gabrieldemarmiesse/python-on-whales#619</a></li> <li><a href="https://github.com/s-light"><code>@​s-light</code></a> made their first contribution in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/625">gabrieldemarmiesse/python-on-whales#625</a></li> <li><a href="https://github.com/stock90975"><code>@​stock90975</code></a> made their first contribution in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/631">gabrieldemarmiesse/python-on-whales#631</a></li> </ul> <!-- raw HTML omitted --> </blockquote> 
<p>... (truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/5fe0ea5e37053c51e1e5cae082a41ff0b1b892d2"><code>5fe0ea5</code></a> Bump version to 0.74.0 (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/652">#652</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/140865d6ac2b36b3d5a26b78f0b9cf8788fd7b0f"><code>140865d</code></a> Use uv in github actions and recommend uv for dev (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/650">#650</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/9a9f090c8669d5cfe61e5388e4c2d9eacbc2d964"><code>9a9f090</code></a> Fix Pod property type annotations to allow for missing fields in inspect outp...</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/b590c97ff0c85228329e70f04700d4ef65c23bc0"><code>b590c97</code></a> Add podman <code>is_infra</code> and <code>namespace</code> fields to <code>Container</code> object (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/641">#641</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/d992525ce6cd43d9ce14cc552886b07bc77ba9e2"><code>d992525</code></a> Set <code>defer_build=True</code> pydantic config, improving import time significantly (...</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/4531cedef39d5d12b10bbd0386eb685c954b64e3"><code>4531ced</code></a> Remove leftover Pydantic v1 handling (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/642">#642</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/43b4001331eaf1a1cc59b732c0cfea0103728b30"><code>43b4001</code></a> :bug: Fix buildx tests in the CI by upgrading the docker images (<a 
href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/646">#646</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/67f75f4689651d44d759c5ac8cb5a5cc7d64fe94"><code>67f75f4</code></a> Specify ubuntu-24.04 in the GitHub actions config (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/645">#645</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/40e1f2782eee49493264442d6350b732305f5fb2"><code>40e1f27</code></a> Display an error message if using the <code>python-on-whales</code> command (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/640">#640</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/0a1ecad38b214fbbf652f2f3576fc797f6faee94"><code>0a1ecad</code></a> Update docs references to download-cli (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/639">#639</a>)</li> <li>Additional commits viewable in <a href="https://github.com/gabrieldemarmiesse/python-on-whales/compare/v0.71.0...v0.74.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=python-on-whales&package-manager=pip&previous-version=0.71.0&new-version=0.74.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 15 ++------------- requirements/dev.txt | 15 ++------------- requirements/lint.txt | 29 ++++------------------------- requirements/test.txt | 26 +++----------------------- 4 files changed, 11 insertions(+), 74 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 64e8a4ae474..220ec39aa27 100644 --- a/requirements/constraints.txt +++ 
b/requirements/constraints.txt @@ -49,7 +49,6 @@ click==8.1.7 # pip-tools # slotscheck # towncrier - # typer # wait-for-it coverage==7.6.1 # via @@ -185,7 +184,7 @@ pytest-xdist==3.6.1 # via -r requirements/test.in python-dateutil==2.9.0.post0 # via freezegun -python-on-whales==0.73.0 +python-on-whales==0.74.0 # via # -r requirements/lint.in # -r requirements/test.in @@ -198,16 +197,11 @@ regex==2024.9.11 requests==2.32.3 # via # cherry-picker - # python-on-whales # sphinx rich==13.9.3 - # via - # pytest-codspeed - # typer + # via pytest-codspeed setuptools-git==1.2 # via -r requirements/test.in -shellingham==1.5.4 - # via typer six==1.17.0 # via python-dateutil slotscheck==0.19.1 @@ -251,14 +245,10 @@ towncrier==23.11.0 # via # -r requirements/doc.in # sphinxcontrib-towncrier -tqdm==4.67.1 - # via python-on-whales trustme==1.1.0 ; platform_machine != "i686" # via # -r requirements/lint.in # -r requirements/test.in -typer==0.12.5 - # via python-on-whales typing-extensions==4.12.2 # via # multidict @@ -267,7 +257,6 @@ typing-extensions==4.12.2 # pydantic-core # python-on-whales # rich - # typer uritemplate==4.1.1 # via gidgethub urllib3==2.2.3 diff --git a/requirements/dev.txt b/requirements/dev.txt index 2df3b5b583b..d163c33e3c6 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -49,7 +49,6 @@ click==8.1.7 # pip-tools # slotscheck # towncrier - # typer # wait-for-it coverage==7.6.1 # via @@ -180,7 +179,7 @@ pytest-xdist==3.6.1 # via -r requirements/test.in python-dateutil==2.9.0.post0 # via freezegun -python-on-whales==0.73.0 +python-on-whales==0.74.0 # via # -r requirements/lint.in # -r requirements/test.in @@ -193,16 +192,11 @@ regex==2024.9.11 requests==2.32.3 # via # cherry-picker - # python-on-whales # sphinx rich==13.9.3 - # via - # pytest-codspeed - # typer + # via pytest-codspeed setuptools-git==1.2 # via -r requirements/test.in -shellingham==1.5.4 - # via typer six==1.17.0 # via python-dateutil slotscheck==0.19.1 @@ -243,14 +237,10 @@ 
towncrier==23.11.0 # via # -r requirements/doc.in # sphinxcontrib-towncrier -tqdm==4.67.1 - # via python-on-whales trustme==1.1.0 ; platform_machine != "i686" # via # -r requirements/lint.in # -r requirements/test.in -typer==0.12.5 - # via python-on-whales typing-extensions==4.12.2 # via # multidict @@ -259,7 +249,6 @@ typing-extensions==4.12.2 # pydantic-core # python-on-whales # rich - # typer uritemplate==4.1.1 # via gidgethub urllib3==2.2.3 diff --git a/requirements/lint.txt b/requirements/lint.txt index 98f8b924153..152333d5689 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -10,8 +10,6 @@ annotated-types==0.7.0 # via pydantic async-timeout==4.0.3 # via valkey -certifi==2024.8.30 - # via requests cffi==1.17.1 # via # cryptography @@ -19,12 +17,8 @@ cffi==1.17.1 # pytest-codspeed cfgv==3.4.0 # via pre-commit -charset-normalizer==3.4.0 - # via requests click==8.1.7 - # via - # slotscheck - # typer + # via slotscheck cryptography==43.0.3 # via trustme distlib==0.3.9 @@ -40,9 +34,7 @@ freezegun==1.5.1 identify==2.6.1 # via pre-commit idna==3.7 - # via - # requests - # trustme + # via trustme iniconfig==2.0.0 # via pytest markdown-it-py==3.0.0 @@ -84,18 +76,12 @@ pytest-mock==3.14.0 # via -r requirements/lint.in python-dateutil==2.9.0.post0 # via freezegun -python-on-whales==0.73.0 +python-on-whales==0.74.0 # via -r requirements/lint.in pyyaml==6.0.2 # via pre-commit -requests==2.32.3 - # via python-on-whales rich==13.9.3 - # via - # pytest-codspeed - # typer -shellingham==1.5.4 - # via typer + # via pytest-codspeed six==1.17.0 # via python-dateutil slotscheck==0.19.1 @@ -105,12 +91,8 @@ tomli==2.2.1 # mypy # pytest # slotscheck -tqdm==4.67.1 - # via python-on-whales trustme==1.1.0 # via -r requirements/lint.in -typer==0.12.5 - # via python-on-whales typing-extensions==4.12.2 # via # mypy @@ -118,9 +100,6 @@ typing-extensions==4.12.2 # pydantic-core # python-on-whales # rich - # typer -urllib3==2.2.3 - # via requests uvloop==0.21.0 ; 
platform_system != "Windows" # via -r requirements/lint.in valkey==6.0.2 diff --git a/requirements/test.txt b/requirements/test.txt index 251077f4392..5b3575e95ca 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -18,19 +18,13 @@ attrs==24.2.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in -certifi==2024.8.30 - # via requests cffi==1.17.1 # via # cryptography # pycares # pytest-codspeed -charset-normalizer==3.4.0 - # via requests click==8.1.7 - # via - # typer - # wait-for-it + # via wait-for-it coverage==7.6.1 # via # -r requirements/test.in @@ -53,7 +47,6 @@ gunicorn==23.0.0 # via -r requirements/base.in idna==3.4 # via - # requests # trustme # yarl iniconfig==2.0.0 @@ -109,22 +102,16 @@ pytest-xdist==3.6.1 # via -r requirements/test.in python-dateutil==2.9.0.post0 # via freezegun -python-on-whales==0.73.0 +python-on-whales==0.74.0 # via -r requirements/test.in re-assert==1.1.0 # via -r requirements/test.in regex==2024.9.11 # via re-assert -requests==2.32.3 - # via python-on-whales rich==13.9.3 - # via - # pytest-codspeed - # typer + # via pytest-codspeed setuptools-git==1.2 # via -r requirements/test.in -shellingham==1.5.4 - # via typer six==1.17.0 # via python-dateutil tomli==2.2.1 @@ -132,12 +119,8 @@ tomli==2.2.1 # coverage # mypy # pytest -tqdm==4.67.1 - # via python-on-whales trustme==1.1.0 ; platform_machine != "i686" # via -r requirements/test.in -typer==0.12.5 - # via python-on-whales typing-extensions==4.12.2 # via # multidict @@ -146,9 +129,6 @@ typing-extensions==4.12.2 # pydantic-core # python-on-whales # rich - # typer -urllib3==2.2.3 - # via requests uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in wait-for-it==2.2.2 From 1812f06ce734f00d248698a4ed1fb9726c644ea3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 23 Dec 
2024 12:14:30 +0000 Subject: [PATCH 1095/1511] Bump wheel from 0.44.0 to 0.45.1 (#10208) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [wheel](https://github.com/pypa/wheel) from 0.44.0 to 0.45.1. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pypa/wheel/releases">wheel's releases</a>.</em></p> <blockquote> <h2>0.45.1</h2> <ul> <li>Fixed pure Python wheels converted from eggs and wininst files having the ABI tag in the file name</li> </ul> <h2>0.45.0</h2> <ul> <li> <p>Refactored the <code>convert</code> command to not need setuptools to be installed</p> </li> <li> <p>Don't configure setuptools logging unless running <code>bdist_wheel</code></p> </li> <li> <p>Added a redirection from <code>wheel.bdist_wheel.bdist_wheel</code> to <code>setuptools.command.bdist_wheel.bdist_wheel</code> to improve compatibility with <code>setuptools</code>' latest fixes.</p> <p>Projects are still advised to migrate away from the deprecated module and import the <code>setuptools</code>' implementation explicitly. 
(PR by <a href="https://github.com/abravalheri"><code>@​abravalheri</code></a>)</p> </li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/wheel/blob/main/docs/news.rst">wheel's changelog</a>.</em></p> <blockquote> <h1>Release Notes</h1> <p><strong>0.45.1 (2024-11-23)</strong></p> <ul> <li>Fixed pure Python wheels converted from eggs and wininst files having the ABI tag in the file name</li> </ul> <p><strong>0.45.0 (2024-11-08)</strong></p> <ul> <li> <p>Refactored the <code>convert</code> command to not need setuptools to be installed</p> </li> <li> <p>Don't configure setuptools logging unless running <code>bdist_wheel</code></p> </li> <li> <p>Added a redirection from <code>wheel.bdist_wheel.bdist_wheel</code> to <code>setuptools.command.bdist_wheel.bdist_wheel</code> to improve compatibility with <code>setuptools</code>' latest fixes.</p> <p>Projects are still advised to migrate away from the deprecated module and import the <code>setuptools</code>' implementation explicitly. 
(PR by <a href="https://github.com/abravalheri"><code>@​abravalheri</code></a>)</p> </li> </ul> <p><strong>0.44.0 (2024-08-04)</strong></p> <ul> <li>Canonicalized requirements in METADATA file (PR by Wim Jeantine-Glenn)</li> <li>Deprecated the <code>bdist_wheel</code> module, as the code was migrated to <code>setuptools</code> itself</li> </ul> <p><strong>0.43.0 (2024-03-11)</strong></p> <ul> <li>Dropped support for Python 3.7</li> <li>Updated vendored <code>packaging</code> to 24.0</li> </ul> <p><strong>0.42.0 (2023-11-26)</strong></p> <ul> <li>Allowed removing build tag with <code>wheel tags --build ""</code></li> <li>Fixed <code>wheel pack</code> and <code>wheel tags</code> writing updated <code>WHEEL</code> fields after a blank line, causing other tools to ignore them</li> <li>Fixed <code>wheel pack</code> and <code>wheel tags</code> writing <code>WHEEL</code> with CRLF line endings or a mix of CRLF and LF</li> <li>Fixed <code>wheel pack --build-number ""</code> not removing build tag from <code>WHEEL</code> (above changes by Benjamin Gilbert)</li> </ul> <p><strong>0.41.3 (2023-10-30)</strong></p> <ul> <li>Updated vendored <code>packaging</code> to 23.2</li> <li>Fixed ABI tag generation for CPython 3.13a1 on Windows (PR by Sam Gross)</li> </ul> <p><strong>0.41.2 (2023-08-22)</strong></p> <ul> <li>Fixed platform tag detection for GraalPy and 32-bit python running on an aarch64 kernel (PR by Matthieu Darbois)</li> <li>Fixed <code>wheel tags</code> to not list directories in <code>RECORD</code> files</li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/wheel/commit/7855525de4093257e7bfb434877265e227356566"><code>7855525</code></a> Created a new release</li> <li><a href="https://github.com/pypa/wheel/commit/d343391c20f8f6cc89a61a6f1573522c59d3d7a3"><code>d343391</code></a> Fixed wrong wheel file names in converted pure-Python eggs/wininsts</li> <li><a href="https://github.com/pypa/wheel/commit/d78f0e372199f8294556345d867af4d3cf118418"><code>d78f0e3</code></a> Created a new release</li> <li><a href="https://github.com/pypa/wheel/commit/f064c699209e36ec2948537b7cadabf84a110c30"><code>f064c69</code></a> Added license files for vendored <code>packaging</code></li> <li><a href="https://github.com/pypa/wheel/commit/68387afcd33cb514a4da811d2fc5de73c8797e48"><code>68387af</code></a> Only configure setuptools logging if bdist_wheel is imported (<a href="https://redirect.github.com/pypa/wheel/issues/641">#641</a>)</li> <li><a href="https://github.com/pypa/wheel/commit/c81f5c954a8ca7698e6df9de39cf0013295949fa"><code>c81f5c9</code></a> Refactored the <code>wheel convert</code> command to not require setuptools (<a href="https://redirect.github.com/pypa/wheel/issues/640">#640</a>)</li> <li><a href="https://github.com/pypa/wheel/commit/e43464d32feaddddb235ffe21b4bf13c1193465d"><code>e43464d</code></a> Adjusted target Python versions in GitHub CI</li> <li><a href="https://github.com/pypa/wheel/commit/e9894e71bc62e5808710bc8c2c268de51aef52d4"><code>e9894e7</code></a> Tweaked pytest settings to make the tracebacks easier to read</li> <li><a href="https://github.com/pypa/wheel/commit/baf6bf89562cb42a0ca71cc1e804600b161952eb"><code>baf6bf8</code></a> Removed Cirrus CI configuration</li> <li><a href="https://github.com/pypa/wheel/commit/28c1ba1e2a6d08edc03c73e29293a571888981f9"><code>28c1ba1</code></a> Improved compatibility with future versions of <code>setuptools</code> (<a 
href="https://redirect.github.com/pypa/wheel/issues/638">#638</a>)</li> <li>Additional commits viewable in <a href="https://github.com/pypa/wheel/compare/0.44.0...0.45.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=wheel&package-manager=pip&previous-version=0.44.0&new-version=0.45.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 220ec39aa27..fd0c0bc64d8 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -271,7 +271,7 @@ virtualenv==20.27.0 # via pre-commit wait-for-it==2.2.2 # via -r requirements/test.in -wheel==0.44.0 +wheel==0.45.1 # via pip-tools yarl==1.18.3 # via -r requirements/runtime-deps.in diff --git a/requirements/dev.txt b/requirements/dev.txt index d163c33e3c6..9790d207450 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -263,7 +263,7 @@ virtualenv==20.27.0 # via pre-commit wait-for-it==2.2.2 # via -r requirements/test.in -wheel==0.44.0 +wheel==0.45.1 # via pip-tools yarl==1.18.3 # via -r requirements/runtime-deps.in From cdfdb82dd3dbe5fae86dca57b3b3f45c8b760c94 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 23 Dec 2024 12:14:33 +0000 Subject: [PATCH 1096/1511] Bump docutils from 0.20.1 to 0.21.2 (#10207) Bumps 
[docutils](https://docutils.sourceforge.io) from 0.20.1 to 0.21.2. [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=docutils&package-manager=pip&previous-version=0.20.1&new-version=0.21.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index fd0c0bc64d8..3e831f20fea 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -62,7 +62,7 @@ cython==3.0.11 # via -r requirements/cython.in distlib==0.3.9 # via virtualenv -docutils==0.20.1 +docutils==0.21.2 # via sphinx exceptiongroup==1.2.2 # via pytest diff --git a/requirements/dev.txt b/requirements/dev.txt index 9790d207450..f84e0bdbcc8 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -60,7 +60,7 @@ cryptography==43.0.3 # trustme distlib==0.3.9 # via virtualenv -docutils==0.20.1 +docutils==0.21.2 # via sphinx exceptiongroup==1.2.2 # via pytest diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 933ef575ebb..7d429607f56 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -16,7 +16,7 @@ charset-normalizer==3.4.0 # via requests click==8.1.7 # via towncrier -docutils==0.20.1 
+docutils==0.21.2 # via sphinx idna==3.4 # via requests diff --git a/requirements/doc.txt b/requirements/doc.txt index 68482c2c8db..5285514af94 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -16,7 +16,7 @@ charset-normalizer==3.4.0 # via requests click==8.1.7 # via towncrier -docutils==0.20.1 +docutils==0.21.2 # via sphinx idna==3.4 # via requests From dc5df321b837592991ab69f604d3f73be0979676 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 23 Dec 2024 12:15:24 +0000 Subject: [PATCH 1097/1511] Bump virtualenv from 20.27.0 to 20.28.0 (#10210) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [//]: # (dependabot-start) ⚠️ **Dependabot is rebasing this PR** ⚠️ Rebasing might not happen immediately, so don't worry if this takes some time. Note: if you make any changes to this PR yourself, they will take precedence over the rebase. --- [//]: # (dependabot-end) Bumps [virtualenv](https://github.com/pypa/virtualenv) from 20.27.0 to 20.28.0. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pypa/virtualenv/releases">virtualenv's releases</a>.</em></p> <blockquote> <h2>20.28.0</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>fix: Update run_with_catch log flushing by <a href="https://github.com/neilramsay"><code>@​neilramsay</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2806">pypa/virtualenv#2806</a></li> <li>feat: Write CACHEDIR.TAG file by <a href="https://github.com/neilramsay"><code>@​neilramsay</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2805">pypa/virtualenv#2805</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pypa/virtualenv/compare/20.27.2...20.28.0">https://github.com/pypa/virtualenv/compare/20.27.2...20.28.0</a></p> <h2>20.27.2</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>release 20.27.1 by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2791">pypa/virtualenv#2791</a></li> <li>Upgrade setuptools to 75.3 by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2798">pypa/virtualenv#2798</a></li> <li>Upgrade setuptools to 75.5 and wheel to 0.45 by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2804">pypa/virtualenv#2804</a></li> <li>No longer forcibly echo off during windows batch activation by <a href="https://github.com/wiktorinox"><code>@​wiktorinox</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2801">pypa/virtualenv#2801</a></li> <li>feat: Write CACHEDIR.TAG file by <a href="https://github.com/neilramsay"><code>@​neilramsay</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2803">pypa/virtualenv#2803</a></li> </ul> <h2>New Contributors</h2> 
<ul> <li><a href="https://github.com/wiktorinox"><code>@​wiktorinox</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/virtualenv/pull/2801">pypa/virtualenv#2801</a></li> <li><a href="https://github.com/neilramsay"><code>@​neilramsay</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/virtualenv/pull/2803">pypa/virtualenv#2803</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pypa/virtualenv/compare/20.27.1...20.27.2">https://github.com/pypa/virtualenv/compare/20.27.1...20.27.2</a></p> <h2>20.27.1</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>release 20.27.0 by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2785">pypa/virtualenv#2785</a></li> <li>Upgrade to pip 24.3 by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2790">pypa/virtualenv#2790</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pypa/virtualenv/compare/20.27.0...20.27.1">https://github.com/pypa/virtualenv/compare/20.27.0...20.27.1</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/virtualenv/blob/main/docs/changelog.rst">virtualenv's changelog</a>.</em></p> <blockquote> <h2>v20.28.0 (2024-11-25)</h2> <p>Features - 20.28.0</p> <pre><code>- Write CACHEDIR.TAG file on creation - by "user:`neilramsay`. 
(:issue:`2803`) <h2>v20.27.2 (2024-11-25)</h2> <p>Bugfixes - 20.27.2 </code></pre></p> <ul> <li> <p>Upgrade embedded wheels:</p> <ul> <li>setuptools to <code>75.3.0</code> from <code>75.2.0</code> (:issue:<code>2798</code>)</li> </ul> </li> <li> <p>Upgrade embedded wheels:</p> <ul> <li>wheel to <code>0.45.0</code> from <code>0.44.0</code></li> <li>setuptools to <code>75.5.0</code> (:issue:<code>2800</code>)</li> </ul> </li> <li> <p>no longer forcibly echo off during windows batch activation (:issue:<code>2801</code>)</p> </li> <li> <p>Upgrade embedded wheels:</p> <ul> <li>setuptools to <code>75.6.0</code> from <code>75.5.0</code></li> <li>wheel to <code>0.45.1</code> from <code>0.45.0</code> (:issue:<code>2804</code>)</li> </ul> </li> </ul> <h2>v20.27.1 (2024-10-28)</h2> <p>Bugfixes - 20.27.1</p> <pre><code>- Upgrade embedded wheels: <ul> <li>pip to <code>24.3.1</code> from <code>24.2</code> (:issue:<code>2789</code>) </code></pre></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/virtualenv/commit/bfc04e3616d66edc55a31b9627bc5ef35efdf62a"><code>bfc04e3</code></a> release 20.28.0</li> <li><a href="https://github.com/pypa/virtualenv/commit/2a29a1b09b7502850ac123e9c51573ecd67a567e"><code>2a29a1b</code></a> feat: Write CACHEDIR.TAG file (<a href="https://redirect.github.com/pypa/virtualenv/issues/2805">#2805</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/d619967f7b1cb75a441a5a21c3320a9b4a617a65"><code>d619967</code></a> fix: Update run_with_catch log flushing (<a href="https://redirect.github.com/pypa/virtualenv/issues/2806">#2806</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/f74c00077ae13634d46ac7470108648915c31efe"><code>f74c000</code></a> release 20.27.2</li> <li><a href="https://github.com/pypa/virtualenv/commit/6f70bf50fce097c20d5651e0dbfea21eff6edc5c"><code>6f70bf5</code></a> Revert "feat: Write CACHEDIR.TAG file (<a 
href="https://redirect.github.com/pypa/virtualenv/issues/2803">#2803</a>)"</li> <li><a href="https://github.com/pypa/virtualenv/commit/f5d7cb40c186b861eac6adf7826a6dfbf3c99605"><code>f5d7cb4</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/pypa/virtualenv/issues/2799">#2799</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/be19526bc3e41661c39971e34e19ae111355a183"><code>be19526</code></a> feat: Write CACHEDIR.TAG file (<a href="https://redirect.github.com/pypa/virtualenv/issues/2803">#2803</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/b3e2b6f5b8b0953b9c3d5a383687530442e628ad"><code>b3e2b6f</code></a> No longer forcibly echo off during windows batch activation (<a href="https://redirect.github.com/pypa/virtualenv/issues/2801">#2801</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/fd6c16bc43b891d2f94abdb75a712c9fa9ee1686"><code>fd6c16b</code></a> Bump astral-sh/setup-uv from 3 to 4 (<a href="https://redirect.github.com/pypa/virtualenv/issues/2802">#2802</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/898abfd417a37d5de119eebf8be2f7d3af7b0483"><code>898abfd</code></a> Upgrade setuptools to 75.5 and wheel to 0.45 (<a href="https://redirect.github.com/pypa/virtualenv/issues/2804">#2804</a>)</li> <li>Additional commits viewable in <a href="https://github.com/pypa/virtualenv/compare/20.27.0...20.28.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=virtualenv&package-manager=pip&previous-version=20.27.0&new-version=20.28.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 3e831f20fea..527f433e348 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -267,7 +267,7 @@ uvloop==0.21.0 ; platform_system != "Windows" # -r requirements/lint.in 
valkey==6.0.2 # via -r requirements/lint.in -virtualenv==20.27.0 +virtualenv==20.28.0 # via pre-commit wait-for-it==2.2.2 # via -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index f84e0bdbcc8..3bc68c82273 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -259,7 +259,7 @@ uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpytho # -r requirements/lint.in valkey==6.0.2 # via -r requirements/lint.in -virtualenv==20.27.0 +virtualenv==20.28.0 # via pre-commit wait-for-it==2.2.2 # via -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 152333d5689..c48f6cc1ded 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -104,5 +104,5 @@ uvloop==0.21.0 ; platform_system != "Windows" # via -r requirements/lint.in valkey==6.0.2 # via -r requirements/lint.in -virtualenv==20.27.0 +virtualenv==20.28.0 # via pre-commit From 96fec582fc02d3e17cc3422e4b146b4d449f8655 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 23 Dec 2024 12:36:03 +0000 Subject: [PATCH 1098/1511] Bump rich from 13.9.3 to 13.9.4 (#10209) Bumps [rich](https://github.com/Textualize/rich) from 13.9.3 to 13.9.4. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/Textualize/rich/releases">rich's releases</a>.</em></p> <blockquote> <h2>The Faster is Faster release</h2> <h2>[13.9.4] - 2024-11-01</h2> <h3>Changed</h3> <ul> <li>Optimizations to cell_len which may speed up Rich / Textual output <a href="https://redirect.github.com/Textualize/rich/pull/3546">Textualize/rich#3546</a></li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/Textualize/rich/blob/master/CHANGELOG.md">rich's changelog</a>.</em></p> <blockquote> <h2>[13.9.4] - 2024-11-01</h2> <h3>Changed</h3> <ul> <li>Optimizations to cell_len which may speed up Rich / Textual output <a href="https://redirect.github.com/Textualize/rich/pull/3546">Textualize/rich#3546</a></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/Textualize/rich/commit/43d3b04725ab9731727fb1126e35980c62f32377"><code>43d3b04</code></a> Merge pull request <a href="https://redirect.github.com/Textualize/rich/issues/3548">#3548</a> from Textualize/bump1394</li> <li><a href="https://github.com/Textualize/rich/commit/e440ff23806372ec221fa8f22c57a9d31828de4b"><code>e440ff2</code></a> bump</li> <li><a href="https://github.com/Textualize/rich/commit/12301e3041455cde59f463a8e1522070e16ceb28"><code>12301e3</code></a> Merge pull request <a href="https://redirect.github.com/Textualize/rich/issues/3546">#3546</a> from Textualize/faster-cell-len</li> <li><a href="https://github.com/Textualize/rich/commit/02f3d148e8f7143519272ed6404cc6894dc13ec6"><code>02f3d14</code></a> comment</li> <li><a href="https://github.com/Textualize/rich/commit/aaaef278be38ebadea3d6f47dedd89fd910078ca"><code>aaaef27</code></a> leaner syntax</li> <li><a href="https://github.com/Textualize/rich/commit/6cef0bcb0e584eac1eb6021cc2202ecad70b6b11"><code>6cef0bc</code></a> leaner cell_len</li> <li><a 
href="https://github.com/Textualize/rich/commit/46150cdbf61426c4683c59a0e4f45dca23d38202"><code>46150cd</code></a> sum and map is faster</li> <li><a href="https://github.com/Textualize/rich/commit/9e7f363aebe01542210633dd4027ce777bf31e3c"><code>9e7f363</code></a> use sets</li> <li>See full diff in <a href="https://github.com/Textualize/rich/compare/v13.9.3...v13.9.4">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=rich&package-manager=pip&previous-version=13.9.3&new-version=13.9.4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 527f433e348..1682a3e5c5a 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -198,7 +198,7 @@ requests==2.32.3 # via # cherry-picker # sphinx -rich==13.9.3 +rich==13.9.4 # via pytest-codspeed setuptools-git==1.2 # via -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 3bc68c82273..996b5627a16 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -193,7 +193,7 @@ requests==2.32.3 # via # cherry-picker # sphinx -rich==13.9.3 +rich==13.9.4 # via pytest-codspeed setuptools-git==1.2 # via -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index c48f6cc1ded..951e091a8ac 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -80,7 +80,7 @@ python-on-whales==0.74.0 # via -r requirements/lint.in pyyaml==6.0.2 # via pre-commit -rich==13.9.3 +rich==13.9.4 # via pytest-codspeed 
six==1.17.0 # via python-dateutil diff --git a/requirements/test.txt b/requirements/test.txt index 5b3575e95ca..e099eb15272 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -108,7 +108,7 @@ re-assert==1.1.0 # via -r requirements/test.in regex==2024.9.11 # via re-assert -rich==13.9.3 +rich==13.9.4 # via pytest-codspeed setuptools-git==1.2 # via -r requirements/test.in From e67d8b71199ee68e806c251af7a7de220de0179c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 23 Dec 2024 12:40:18 +0000 Subject: [PATCH 1099/1511] Bump packaging from 24.1 to 24.2 (#10215) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [packaging](https://github.com/pypa/packaging) from 24.1 to 24.2. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pypa/packaging/releases">packaging's releases</a>.</em></p> <blockquote> <h2>24.2</h2> <h2>What's Changed</h2> <ul> <li>The source is auto-formatted with ruff, not black by <a href="https://github.com/DimitriPapadopoulos"><code>@​DimitriPapadopoulos</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/798">pypa/packaging#798</a></li> <li>Bump the github-actions group across 1 directory with 3 updates by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/813">pypa/packaging#813</a></li> <li>Apply ruff rules (RUF) by <a href="https://github.com/DimitriPapadopoulos"><code>@​DimitriPapadopoulos</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/800">pypa/packaging#800</a></li> <li>Fix typo in Version <code>__str__</code> by <a href="https://github.com/aryanpingle"><code>@​aryanpingle</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/817">pypa/packaging#817</a></li> <li>Bump the github-actions group with 3 updates by <a 
href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/819">pypa/packaging#819</a></li> <li>Get rid of duplicate test cases by <a href="https://github.com/DimitriPapadopoulos"><code>@​DimitriPapadopoulos</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/821">pypa/packaging#821</a></li> <li>Fix doc for canonicalize_version and a typo in a docstring by <a href="https://github.com/Laurent-Dx"><code>@​Laurent-Dx</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/801">pypa/packaging#801</a></li> <li>docs: public/base_version comparison by <a href="https://github.com/henryiii"><code>@​henryiii</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/818">pypa/packaging#818</a></li> <li>Apply ruff/bugbear rules (B) by <a href="https://github.com/DimitriPapadopoulos"><code>@​DimitriPapadopoulos</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/787">pypa/packaging#787</a></li> <li>Apply ruff/pyupgrade rules (UP) by <a href="https://github.com/DimitriPapadopoulos"><code>@​DimitriPapadopoulos</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/786">pypa/packaging#786</a></li> <li>Add a changelog entry for dropping Python 3.7 support by <a href="https://github.com/alexwlchan"><code>@​alexwlchan</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/824">pypa/packaging#824</a></li> <li>Patch python_full_version unconditionally by <a href="https://github.com/jaraco"><code>@​jaraco</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/825">pypa/packaging#825</a></li> <li>Refactor canonicalize_version by <a href="https://github.com/jaraco"><code>@​jaraco</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/793">pypa/packaging#793</a></li> <li>Allow creating a SpecifierSet from a list of specifiers by <a href="https://github.com/pfmoore"><code>@​pfmoore</code></a> in 
<a href="https://redirect.github.com/pypa/packaging/pull/777">pypa/packaging#777</a></li> <li>Fix uninformative error message by <a href="https://github.com/abravalheri"><code>@​abravalheri</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/830">pypa/packaging#830</a></li> <li>Fix prerelease detection for <code>></code> and <code><</code> by <a href="https://github.com/notatallshaw"><code>@​notatallshaw</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/794">pypa/packaging#794</a></li> <li>Bump the github-actions group across 1 directory with 4 updates by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/839">pypa/packaging#839</a></li> <li>Add support for PEP 730 iOS tags. by <a href="https://github.com/freakboy3742"><code>@​freakboy3742</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/832">pypa/packaging#832</a></li> <li>Update the changelog to reflect 24.1 changes by <a href="https://github.com/pradyunsg"><code>@​pradyunsg</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/840">pypa/packaging#840</a></li> <li>Mention updating changelog in release process by <a href="https://github.com/pradyunsg"><code>@​pradyunsg</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/841">pypa/packaging#841</a></li> <li>Add a comment as to why <code>Metadata.name</code> isn't normalized by <a href="https://github.com/brettcannon"><code>@​brettcannon</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/842">pypa/packaging#842</a></li> <li>Use !r formatter for error messages with filenames. 
by <a href="https://github.com/Carreau"><code>@​Carreau</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/844">pypa/packaging#844</a></li> <li>PEP 639: Implement License-Expression and License-File by <a href="https://github.com/ewdurbin"><code>@​ewdurbin</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/828">pypa/packaging#828</a></li> <li>Bump the github-actions group with 4 updates by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/852">pypa/packaging#852</a></li> <li>Upgrade to latest mypy by <a href="https://github.com/hauntsaninja"><code>@​hauntsaninja</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/853">pypa/packaging#853</a></li> <li>Extraneous quotes by <a href="https://github.com/ewdurbin"><code>@​ewdurbin</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/848">pypa/packaging#848</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/aryanpingle"><code>@​aryanpingle</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/packaging/pull/817">pypa/packaging#817</a></li> <li><a href="https://github.com/Laurent-Dx"><code>@​Laurent-Dx</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/packaging/pull/801">pypa/packaging#801</a></li> <li><a href="https://github.com/alexwlchan"><code>@​alexwlchan</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/packaging/pull/824">pypa/packaging#824</a></li> <li><a href="https://github.com/jaraco"><code>@​jaraco</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/packaging/pull/825">pypa/packaging#825</a></li> <li><a href="https://github.com/notatallshaw"><code>@​notatallshaw</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/packaging/pull/794">pypa/packaging#794</a></li> 
<li><a href="https://github.com/freakboy3742"><code>@​freakboy3742</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/packaging/pull/832">pypa/packaging#832</a></li> <li><a href="https://github.com/Carreau"><code>@​Carreau</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/packaging/pull/844">pypa/packaging#844</a></li> <li><a href="https://github.com/ewdurbin"><code>@​ewdurbin</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/packaging/pull/828">pypa/packaging#828</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pypa/packaging/compare/24.1...24.2">https://github.com/pypa/packaging/compare/24.1...24.2</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/packaging/blob/main/CHANGELOG.rst">packaging's changelog</a>.</em></p> <blockquote> <p>24.2 - 2024-11-08</p> <pre><code> * PEP 639: Implement License-Expression and License-File (:issue:`828`) * Use ``!r`` formatter for error messages with filenames (:issue:`844`) * Add support for PEP 730 iOS tags (:issue:`832`) * Fix prerelease detection for ``>`` and ``<`` (:issue:`794`) * Fix uninformative error message (:issue:`830`) * Refactor ``canonicalize_version`` (:issue:`793`) * Patch python_full_version unconditionally (:issue:`825`) * Fix doc for ``canonicalize_version`` to mention ``strip_trailing_zero`` and a typo in a docstring (:issue:`801`) * Fix typo in Version ``__str__`` (:issue:`817`) * Support creating a ``SpecifierSet`` from an iterable of ``Specifier`` objects (:issue:`775`) </code></pre> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/packaging/commit/d8e3b31b734926ebbcaff654279f6855a73e052f"><code>d8e3b31</code></a> Bump for release</li> <li><a 
href="https://github.com/pypa/packaging/commit/2de393d910926a0408496ac5583f733c4b9f0f5e"><code>2de393d</code></a> Update changelog for release</li> <li><a href="https://github.com/pypa/packaging/commit/9c66f5c844bf3262f560c1521a0e6837079b16ff"><code>9c66f5c</code></a> Remove extraneous quotes in f-strings by using <code>!r</code> (<a href="https://redirect.github.com/pypa/packaging/issues/848">#848</a>)</li> <li><a href="https://github.com/pypa/packaging/commit/4dc334c86d43f83371b194ca91618ed99e0e49ca"><code>4dc334c</code></a> Upgrade to latest mypy (<a href="https://redirect.github.com/pypa/packaging/issues/853">#853</a>)</li> <li><a href="https://github.com/pypa/packaging/commit/d1a9f938343de11f7322151c1f6de25cbb61718b"><code>d1a9f93</code></a> Bump the github-actions group with 4 updates (<a href="https://redirect.github.com/pypa/packaging/issues/852">#852</a>)</li> <li><a href="https://github.com/pypa/packaging/commit/029f41580098bcf52b69684843bdc7ea37959a7e"><code>029f415</code></a> PEP 639: Implement License-Expression and License-File (<a href="https://redirect.github.com/pypa/packaging/issues/828">#828</a>)</li> <li><a href="https://github.com/pypa/packaging/commit/6c338a8425803476769151953cc5de5548e2befa"><code>6c338a8</code></a> Use !r formatter for error messages with filenames. 
(<a href="https://redirect.github.com/pypa/packaging/issues/844">#844</a>)</li> <li><a href="https://github.com/pypa/packaging/commit/28e7da78f6f73b4856260e24051b35a4517c0149"><code>28e7da7</code></a> Add a comment as to why <code>Metadata.name</code> isn't normalized (<a href="https://redirect.github.com/pypa/packaging/issues/842">#842</a>)</li> <li><a href="https://github.com/pypa/packaging/commit/ce0d79c5ab6d27e856a059fbc24c0b0a7c9d8581"><code>ce0d79c</code></a> Mention updating changelog in release process (<a href="https://redirect.github.com/pypa/packaging/issues/841">#841</a>)</li> <li><a href="https://github.com/pypa/packaging/commit/ac5bdf3605ddcbfa1f672f7cf93a19fd6d3d77ea"><code>ac5bdf3</code></a> Update the changelog to reflect 24.1 changes (<a href="https://redirect.github.com/pypa/packaging/issues/840">#840</a>)</li> <li>Additional commits viewable in <a href="https://github.com/pypa/packaging/compare/24.1...24.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=packaging&package-manager=pip&previous-version=24.1&new-version=24.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 0f048bdd859..a5fb7154fb7 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ 
-30,7 +30,7 @@ multidict==6.1.0 # via # -r requirements/runtime-deps.in # yarl -packaging==24.1 +packaging==24.2 # via gunicorn propcache==0.2.0 # via diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 1682a3e5c5a..2c2adfe0494 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -120,7 +120,7 @@ mypy-extensions==1.0.0 # via mypy nodeenv==1.9.1 # via pre-commit -packaging==24.1 +packaging==24.2 # via # build # gunicorn diff --git a/requirements/dev.txt b/requirements/dev.txt index 996b5627a16..1422b7d5394 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -117,7 +117,7 @@ mypy-extensions==1.0.0 # via mypy nodeenv==1.9.1 # via pre-commit -packaging==24.1 +packaging==24.2 # via # build # gunicorn diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 7d429607f56..3eb6767ff74 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -30,7 +30,7 @@ jinja2==3.1.4 # towncrier markupsafe==2.1.5 # via jinja2 -packaging==24.1 +packaging==24.2 # via sphinx pyenchant==3.2.2 # via sphinxcontrib-spelling diff --git a/requirements/doc.txt b/requirements/doc.txt index 5285514af94..fad7de91b2f 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -30,7 +30,7 @@ jinja2==3.1.4 # towncrier markupsafe==2.1.5 # via jinja2 -packaging==24.1 +packaging==24.2 # via sphinx pygments==2.18.0 # via sphinx diff --git a/requirements/lint.txt b/requirements/lint.txt index 951e091a8ac..53e4cc98a1b 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -47,7 +47,7 @@ mypy-extensions==1.0.0 # via mypy nodeenv==1.9.1 # via pre-commit -packaging==24.1 +packaging==24.2 # via pytest platformdirs==4.3.6 # via virtualenv diff --git a/requirements/test.txt b/requirements/test.txt index e099eb15272..6fe64c68e3b 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -63,7 +63,7 @@ mypy==1.11.2 ; implementation_name == "cpython" # via -r requirements/test.in 
mypy-extensions==1.0.0 # via mypy -packaging==24.1 +packaging==24.2 # via # gunicorn # pytest From 62497e44b556fb881437a2212902ec06406e611d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 23 Dec 2024 14:48:14 +0000 Subject: [PATCH 1100/1511] Bump click from 8.1.7 to 8.1.8 (#10211) Bumps [click](https://github.com/pallets/click) from 8.1.7 to 8.1.8. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pallets/click/releases">click's releases</a>.</em></p> <blockquote> <h2>8.1.8</h2> <p>This is the Click 8.1.8 fix release, which fixes bugs but does not otherwise change behavior and should not result in breaking changes compared to the latest feature release.</p> <p>PyPI: <a href="https://pypi.org/project/click/8.1.8/">https://pypi.org/project/click/8.1.8/</a> Changes: <a href="https://click.palletsprojects.com/en/stable/changes/#version-8-1-8">https://click.palletsprojects.com/en/stable/changes/#version-8-1-8</a> Milestone <a href="https://github.com/pallets/click/milestones/23?closed=1">https://github.com/pallets/click/milestones/23?closed=1</a></p> <ul> <li>Fix an issue with type hints for <code>click.open_file()</code>. <a href="https://redirect.github.com/pallets/click/issues/2717">#2717</a></li> <li>Fix issue where error message for invalid <code>click.Path</code> displays on multiple lines. <a href="https://redirect.github.com/pallets/click/issues/2697">#2697</a></li> <li>Fixed issue that prevented a default value of <code>""</code> from being displayed in the help for an option. <a href="https://redirect.github.com/pallets/click/issues/2500">#2500</a></li> <li>The test runner handles stripping color consistently on Windows. <a href="https://redirect.github.com/pallets/click/issues/2705">#2705</a></li> <li>Show correct value for flag default when using <code>default_map</code>. 
<a href="https://redirect.github.com/pallets/click/issues/2632">#2632</a></li> <li>Fix <code>click.echo(color=...)</code> passing <code>color</code> to coloroma so it can be forced on Windows. <a href="https://redirect.github.com/pallets/click/issues/2606">#2606</a>.</li> <li>More robust bash version check, fixing problem on Windows with git-bash. <a href="https://redirect.github.com/pallets/click/issues/2638">#2638</a></li> <li>Cache the help option generated by the <code>help_option_names</code> setting to respect its eagerness. <a href="https://redirect.github.com/pallets/click/issues/2811">#2811</a></li> <li>Replace uses of <code>os.system</code> with <code>subprocess.Popen</code>. <a href="https://redirect.github.com/pallets/click/issues/1476">#1476</a></li> <li>Exceptions generated during a command will use the context's <code>color</code> setting when being displayed. <a href="https://redirect.github.com/pallets/click/issues/2193">#2193</a></li> <li>Error message when defining option with invalid name is more descriptive. <a href="https://redirect.github.com/pallets/click/issues/2452">#2452</a></li> <li>Refactor code generating default <code>--help</code> option to deduplicate code. <a href="https://redirect.github.com/pallets/click/issues/2563">#2563</a></li> <li>Test <code>CLIRunner</code> resets patched <code>_compat.should_strip_ansi</code>. <a href="https://redirect.github.com/pallets/click/issues/2732">#2732</a></li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pallets/click/blob/main/CHANGES.rst">click's changelog</a>.</em></p> <blockquote> <h2>Version 8.1.8</h2> <p>Unreleased</p> <ul> <li>Fix an issue with type hints for <code>click.open_file()</code>. :issue:<code>2717</code></li> <li>Fix issue where error message for invalid <code>click.Path</code> displays on multiple lines. 
:issue:<code>2697</code></li> <li>Fixed issue that prevented a default value of <code>""</code> from being displayed in the help for an option. :issue:<code>2500</code></li> <li>The test runner handles stripping color consistently on Windows. :issue:<code>2705</code></li> <li>Show correct value for flag default when using <code>default_map</code>. :issue:<code>2632</code></li> <li>Fix <code>click.echo(color=...)</code> passing <code>color</code> to coloroma so it can be forced on Windows. :issue:<code>2606</code>.</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pallets/click/commit/934813e4d421071a1b3db3973c02fe2721359a6e"><code>934813e</code></a> release version 8.1.8</li> <li><a href="https://github.com/pallets/click/commit/c23223b13c847ae472faa258907ffb5c27b504fa"><code>c23223b</code></a> Add links to third-party projects enhancing Click (<a href="https://redirect.github.com/pallets/click/issues/2815">#2815</a>)</li> <li><a href="https://github.com/pallets/click/commit/822d4fd0bcfcd0ab22c9eec550ee2dae2a3d260c"><code>822d4fd</code></a> Add links to third-party projects</li> <li><a href="https://github.com/pallets/click/commit/8e7bed0466fd49acf8bcf1399f54d7dc783fd6a1"><code>8e7bed0</code></a> Break up arguments section (<a href="https://redirect.github.com/pallets/click/issues/2586">#2586</a>)</li> <li><a href="https://github.com/pallets/click/commit/3241541fc89fe9c79908a6099fa2235dd20016e8"><code>3241541</code></a> Remove some typing hints.</li> <li><a href="https://github.com/pallets/click/commit/bed037717d5f39cf875d83df4025e62beebc77f4"><code>bed0377</code></a> remove test pypi</li> <li><a href="https://github.com/pallets/click/commit/653459007a15e4d75187acc5a1e1a08cbd787814"><code>6534590</code></a> update dev dependencies</li> <li><a href="https://github.com/pallets/click/commit/b1e392e69b2a32566550aa41c38875e9cafe2456"><code>b1e392e</code></a> fix typos</li> <li><a 
href="https://github.com/pallets/click/commit/fdc6b020465751d26f9e74a707f2c058b0dd251f"><code>fdc6b02</code></a> Fix missing reset in isolation function (<a href="https://redirect.github.com/pallets/click/issues/2733">#2733</a>)</li> <li><a href="https://github.com/pallets/click/commit/ffd43e9dc3b90bd698088fc7ebac9dbc6a4444b2"><code>ffd43e9</code></a> Fixed missing reset on _compat.should_strip_ansi.</li> <li>Additional commits viewable in <a href="https://github.com/pallets/click/compare/8.1.7...8.1.8">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=click&package-manager=pip&previous-version=8.1.7&new-version=8.1.8)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 2c2adfe0494..aad4f418ea0 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -43,7 +43,7 @@ charset-normalizer==3.4.0 # via requests cherry-picker==2.2.0 # via -r requirements/dev.in -click==8.1.7 +click==8.1.8 # via # cherry-picker # pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index 1422b7d5394..6a7dc4ff2d5 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -43,7 +43,7 @@ charset-normalizer==3.4.0 # via requests cherry-picker==2.2.0 # via -r requirements/dev.in -click==8.1.7 +click==8.1.8 # via # cherry-picker # pip-tools diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 3eb6767ff74..92d02490bc5 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -14,7 +14,7 @@ certifi==2024.8.30 # via requests 
charset-normalizer==3.4.0 # via requests -click==8.1.7 +click==8.1.8 # via towncrier docutils==0.21.2 # via sphinx diff --git a/requirements/doc.txt b/requirements/doc.txt index fad7de91b2f..b9d01374dc0 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -14,7 +14,7 @@ certifi==2024.8.30 # via requests charset-normalizer==3.4.0 # via requests -click==8.1.7 +click==8.1.8 # via towncrier docutils==0.21.2 # via sphinx diff --git a/requirements/lint.txt b/requirements/lint.txt index 53e4cc98a1b..b905c59632a 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -17,7 +17,7 @@ cffi==1.17.1 # pytest-codspeed cfgv==3.4.0 # via pre-commit -click==8.1.7 +click==8.1.8 # via slotscheck cryptography==43.0.3 # via trustme diff --git a/requirements/test.txt b/requirements/test.txt index 6fe64c68e3b..b40119bcb40 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -23,7 +23,7 @@ cffi==1.17.1 # cryptography # pycares # pytest-codspeed -click==8.1.7 +click==8.1.8 # via wait-for-it coverage==7.6.1 # via From 9bd0fb169dbadc08619096a8c6af2c3fba80ef7c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 24 Dec 2024 11:44:38 +0000 Subject: [PATCH 1101/1511] Bump markupsafe from 2.1.5 to 3.0.2 (#10233) Bumps [markupsafe](https://github.com/pallets/markupsafe) from 2.1.5 to 3.0.2. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pallets/markupsafe/releases">markupsafe's releases</a>.</em></p> <blockquote> <h2>3.0.2</h2> <p>This is the MarkupSafe 3.0.2 fix release, which fixes bugs but does not otherwise change behavior and should not result in breaking changes.</p> <p>PyPI: <a href="https://pypi.org/project/MarkupSafe/3.0.2/">https://pypi.org/project/MarkupSafe/3.0.2/</a> Changes: <a href="https://markupsafe.palletsprojects.com/en/stable/changes/#version-3-0-2">https://markupsafe.palletsprojects.com/en/stable/changes/#version-3-0-2</a> Milestone: <a href="https://github.com/pallets/markupsafe/milestone/14?closed=1">https://github.com/pallets/markupsafe/milestone/14?closed=1</a></p> <ul> <li>Fix compatibility when <code>__str__</code> returns a <code>str</code> subclass. <a href="https://redirect.github.com/pallets/markupsafe/issues/472">#472</a></li> <li>Build requires setuptools >= 70.1. <a href="https://redirect.github.com/pallets/markupsafe/issues/475">#475</a></li> </ul> <h2>3.0.1</h2> <p>This is the MarkupSafe 3.0.1 fix release, which fixes bugs but does not otherwise change behavior and should not result in breaking changes.</p> <p>PyPI: <a href="https://pypi.org/project/MarkupSafe/3.0.1/">https://pypi.org/project/MarkupSafe/3.0.1/</a> Changes: <a href="https://markupsafe.palletsprojects.com/en/stable/changes/#version-3-0-1">https://markupsafe.palletsprojects.com/en/stable/changes/#version-3-0-1</a> Milestone: <a href="https://github.com/pallets/markupsafe/milestone/13?closed=1">https://github.com/pallets/markupsafe/milestone/13?closed=1</a></p> <ul> <li>Address compiler warnings that became errors in GCC 14. <a href="https://redirect.github.com/pallets/markupsafe/issues/466">#466</a></li> <li>Fix compatibility with proxy objects. <a href="https://redirect.github.com/pallets/markupsafe/issues/467">#467</a></li> </ul> <h2>3.0.0</h2> <p>This is the MarkupSafe 3.0.0 feature release. 
A feature release may include new features, remove previously deprecated code, add new deprecations, or introduce potentially breaking changes. The 3.0.x branch is now the supported fix branch, the 2.1.x branch will become a tag marking the end of support for that branch. We encourage everyone to upgrade, and to use a tool such as <a href="https://pypi.org/project/pip-tools/">pip-tools</a> to pin all dependencies and control upgrades. Test with warnings treated as errors to be able to adapt to deprecation warnings early.</p> <p>PyPI: <a href="https://pypi.org/project/MarkupSafe/3.0.0/">https://pypi.org/project/MarkupSafe/3.0.0/</a> Changes: <a href="https://markupsafe.palletsprojects.com/en/stable/changes/#version-3-0-0">https://markupsafe.palletsprojects.com/en/stable/changes/#version-3-0-0</a> Milestone: <a href="https://github.com/pallets/markupsafe/milestone/10?closed=1">https://github.com/pallets/markupsafe/milestone/10?closed=1</a></p> <ul> <li>Support Python 3.13 and its experimental free-threaded build. <a href="https://redirect.github.com/pallets/markupsafe/issues/461">#461</a></li> <li>Drop support for Python 3.7 and 3.8.</li> <li>Use modern packaging metadata with <code>pyproject.toml</code> instead of <code>setup.cfg</code>. <a href="https://redirect.github.com/pallets/markupsafe/issues/348">#348</a></li> <li>Change <code>distutils</code> imports to <code>setuptools</code>. <a href="https://redirect.github.com/pallets/markupsafe/issues/399">#399</a></li> <li>Use deferred evaluation of annotations. <a href="https://redirect.github.com/pallets/markupsafe/issues/400">#400</a></li> <li>Update signatures for <code>Markup</code> methods to match <code>str</code> signatures. Use positional-only arguments. 
<a href="https://redirect.github.com/pallets/markupsafe/issues/400">#400</a></li> <li>Some <code>str</code> methods on <code>Markup</code> no longer escape their argument: <code>strip</code>, <code>lstrip</code>, <code>rstrip</code>, <code>removeprefix</code>, <code>removesuffix</code>, <code>partition</code>, and <code>rpartition</code>; <code>replace</code> only escapes its <code>new</code> argument. These methods are conceptually linked to search methods such as <code>in</code>, <code>find</code>, and <code>index</code>, which already do not escape their argument. <a href="https://redirect.github.com/pallets/markupsafe/issues/401">#401</a></li> <li>The <code>__version__</code> attribute is deprecated. Use feature detection, or <code>importlib.metadata.version("markupsafe")</code>, instead. <a href="https://redirect.github.com/pallets/markupsafe/issues/402">#402</a></li> <li>Speed up escaping plain strings by 40%. <a href="https://redirect.github.com/pallets/markupsafe/issues/434">#434</a></li> <li>Simplify speedups implementation. <a href="https://redirect.github.com/pallets/markupsafe/issues/437">#437</a></li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pallets/markupsafe/blob/main/CHANGES.rst">markupsafe's changelog</a>.</em></p> <blockquote> <h2>Version 3.0.2</h2> <p>Released 2024-10-18</p> <ul> <li>Fix compatibility when <code>__str__</code> returns a <code>str</code> subclass. :issue:<code>472</code></li> <li>Build requires setuptools >= 70.1. :issue:<code>475</code></li> </ul> <h2>Version 3.0.1</h2> <p>Released 2024-10-08</p> <ul> <li>Address compiler warnings that became errors in GCC 14. :issue:<code>466</code></li> <li>Fix compatibility with proxy objects. :issue:<code>467</code></li> </ul> <h2>Version 3.0.0</h2> <p>Released 2024-10-07</p> <ul> <li>Support Python 3.13 and its experimental free-threaded build. 
:pr:<code>461</code></li> <li>Drop support for Python 3.7 and 3.8.</li> <li>Use modern packaging metadata with <code>pyproject.toml</code> instead of <code>setup.cfg</code>. :pr:<code>348</code></li> <li>Change <code>distutils</code> imports to <code>setuptools</code>. :pr:<code>399</code></li> <li>Use deferred evaluation of annotations. :pr:<code>400</code></li> <li>Update signatures for <code>Markup</code> methods to match <code>str</code> signatures. Use positional-only arguments. :pr:<code>400</code></li> <li>Some <code>str</code> methods on <code>Markup</code> no longer escape their argument: <code>strip</code>, <code>lstrip</code>, <code>rstrip</code>, <code>removeprefix</code>, <code>removesuffix</code>, <code>partition</code>, and <code>rpartition</code>; <code>replace</code> only escapes its <code>new</code> argument. These methods are conceptually linked to search methods such as <code>in</code>, <code>find</code>, and <code>index</code>, which already do not escape their argument. :issue:<code>401</code></li> <li>The <code>__version__</code> attribute is deprecated. Use feature detection, or <code>importlib.metadata.version("markupsafe")</code>, instead. :pr:<code>402</code></li> <li>Speed up escaping plain strings by 40%. :pr:<code>434</code></li> <li>Simplify speedups implementation. 
:pr:<code>437</code></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pallets/markupsafe/commit/28ace20b140d15c083e1cbc163ee6b7778ba098c"><code>28ace20</code></a> release version 3.0.2</li> <li><a href="https://github.com/pallets/markupsafe/commit/6b51fd8f7386983b7038ad973557367cbd48579a"><code>6b51fd8</code></a> build requires at least setuptools 70.1 (<a href="https://redirect.github.com/pallets/markupsafe/issues/478">#478</a>)</li> <li><a href="https://github.com/pallets/markupsafe/commit/99dda9fd708432bd07d02327b2668661aa3cdaa0"><code>99dda9f</code></a> build requires at least setuptools 70.1</li> <li><a href="https://github.com/pallets/markupsafe/commit/3d8fd8cc006124a49ce2f4268b4d1739e301583e"><code>3d8fd8c</code></a> fix version</li> <li><a href="https://github.com/pallets/markupsafe/commit/1933c4be9c2c88613f7660840cde27a1bb7567e0"><code>1933c4b</code></a> fix version</li> <li><a href="https://github.com/pallets/markupsafe/commit/e85aff4d878aa458d5c1e879bf475d8483647f71"><code>e85aff4</code></a> relax speedups str check (<a href="https://redirect.github.com/pallets/markupsafe/issues/477">#477</a>)</li> <li><a href="https://github.com/pallets/markupsafe/commit/8cb1691ca038ca39942e088b956f5b94d8f636bf"><code>8cb1691</code></a> relax speedups str check</li> <li><a href="https://github.com/pallets/markupsafe/commit/4dafb7c36f1f654f1edd85228d346252b0065d45"><code>4dafb7c</code></a> start version 3.1.0</li> <li><a href="https://github.com/pallets/markupsafe/commit/9c44ecf45141f691d373a66ce664c43b5a6cc761"><code>9c44ecf</code></a> update docs build</li> <li><a href="https://github.com/pallets/markupsafe/commit/275c76905617c3f0e34de14e8794fcf4dfb0f937"><code>275c769</code></a> Merge branch '2.1.x' into 3.0.x</li> <li>Additional commits viewable in <a href="https://github.com/pallets/markupsafe/compare/2.1.5...3.0.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=markupsafe&package-manager=pip&previous-version=2.1.5&new-version=3.0.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index aad4f418ea0..05e83bdea58 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -103,7 +103,7 @@ jinja2==3.1.4 # towncrier markdown-it-py==3.0.0 # via rich -markupsafe==2.1.5 +markupsafe==3.0.2 # via jinja2 mdurl==0.1.2 # via markdown-it-py diff --git a/requirements/dev.txt b/requirements/dev.txt index 6a7dc4ff2d5..8b18cc82a60 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -101,7 +101,7 @@ jinja2==3.1.4 # towncrier markdown-it-py==3.0.0 # via rich -markupsafe==2.1.5 +markupsafe==3.0.2 # via jinja2 mdurl==0.1.2 # via markdown-it-py diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 92d02490bc5..b8b26068f7e 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -28,7 +28,7 @@ jinja2==3.1.4 # via # sphinx # towncrier -markupsafe==2.1.5 +markupsafe==3.0.2 # via jinja2 packaging==24.2 # 
via sphinx diff --git a/requirements/doc.txt b/requirements/doc.txt index b9d01374dc0..6b0973acbf1 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -28,7 +28,7 @@ jinja2==3.1.4 # via # sphinx # towncrier -markupsafe==2.1.5 +markupsafe==3.0.2 # via jinja2 packaging==24.2 # via sphinx From 6f6fde20df504eb845788028dfc16f57bd2510a0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 24 Dec 2024 11:53:20 +0000 Subject: [PATCH 1102/1511] Bump pycares from 4.4.0 to 4.5.0 (#10234) Bumps [pycares](https://github.com/saghul/pycares) from 4.4.0 to 4.5.0. <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/saghul/pycares/commit/cde3d471035a2e867b85ef64f80eed727a93d4c2"><code>cde3d47</code></a> Fixup CI</li> <li><a href="https://github.com/saghul/pycares/commit/6885e0e428c7119d4539ba7ff9ef73ef4a858d9d"><code>6885e0e</code></a> Fix building sdist</li> <li><a href="https://github.com/saghul/pycares/commit/f770a07b24f9cc1c9af9579d0c8e8484fbddbef1"><code>f770a07</code></a> Test building release wheels on PRs</li> <li><a href="https://github.com/saghul/pycares/commit/1a197b256020d14ee63102406aead7b68c117ef9"><code>1a197b2</code></a> chore(ci): fix upload & add more platforms to cibuildwheel (<a href="https://redirect.github.com/saghul/pycares/issues/204">#204</a>)</li> <li><a href="https://github.com/saghul/pycares/commit/70443e0446304e98c8cadaefa98d0d9b0286be54"><code>70443e0</code></a> Bump version to 4.5.0</li> <li><a href="https://github.com/saghul/pycares/commit/da178fd6c84c296f2a06439021cfcdcec0b93e58"><code>da178fd</code></a> Add 3.13 support, remove 3.8 (<a href="https://redirect.github.com/saghul/pycares/issues/202">#202</a>)</li> <li><a href="https://github.com/saghul/pycares/commit/57a82a0039ad2607092abe6aff0f06be17a400f2"><code>57a82a0</code></a> build(deps): bump actions/download-artifact in /.github/workflows</li> <li><a 
href="https://github.com/saghul/pycares/commit/8964404b6a9731d88c61998949474799556f29d9"><code>8964404</code></a> Drop distutils</li> <li><a href="https://github.com/saghul/pycares/commit/de2ed40596f543f989bbcea30632be751133c110"><code>de2ed40</code></a> Do not define HAVE_GETSERVBYPORT_R for platforms (<a href="https://redirect.github.com/saghul/pycares/issues/195">#195</a>)</li> <li><a href="https://github.com/saghul/pycares/commit/b005f58d5e39b166427cb4546dbcc26c61ec40d3"><code>b005f58</code></a> Update test_idna_encoding_query_a with new errno to align to new c-ares versi...</li> <li>Additional commits viewable in <a href="https://github.com/saghul/pycares/compare/v4.4.0...v4.5.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pycares&package-manager=pip&previous-version=4.4.0&new-version=4.5.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index a5fb7154fb7..d9aa1f719f8 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -36,7 +36,7 @@ propcache==0.2.0 # via # -r requirements/runtime-deps.in # yarl -pycares==4.4.0 +pycares==4.5.0 # via aiodns pycparser==2.22 # via cffi diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 05e83bdea58..d81e361d557 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -140,7 +140,7 @@ propcache==0.2.0 # yarl proxy-py==2.4.9 # via -r requirements/test.in -pycares==4.4.0 +pycares==4.5.0 # via aiodns pycparser==2.22 # via cffi diff --git a/requirements/dev.txt b/requirements/dev.txt index 8b18cc82a60..bd3ce480021 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -137,7 +137,7 @@ propcache==0.2.0 # yarl proxy-py==2.4.9 # via -r requirements/test.in -pycares==4.4.0 
+pycares==4.5.0 # via aiodns pycparser==2.22 # via cffi diff --git a/requirements/lint.txt b/requirements/lint.txt index b905c59632a..fc026f67f00 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -55,7 +55,7 @@ pluggy==1.5.0 # via pytest pre-commit==3.5.0 # via -r requirements/lint.in -pycares==4.4.0 +pycares==4.5.0 # via aiodns pycparser==2.22 # via cffi diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 2bf55e750fe..5ded4ca04e2 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -32,7 +32,7 @@ propcache==0.2.0 # via # -r requirements/runtime-deps.in # yarl -pycares==4.4.0 +pycares==4.5.0 # via aiodns pycparser==2.22 # via cffi diff --git a/requirements/test.txt b/requirements/test.txt index b40119bcb40..6edf71aaf8c 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -75,7 +75,7 @@ propcache==0.2.0 # yarl proxy-py==2.4.9 # via -r requirements/test.in -pycares==4.4.0 +pycares==4.5.0 # via aiodns pycparser==2.22 # via cffi From 3af4152881e670aa607a72e0b5f3c9a5992289ff Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 24 Dec 2024 11:53:29 +0000 Subject: [PATCH 1103/1511] Bump certifi from 2024.8.30 to 2024.12.14 (#10235) Bumps [certifi](https://github.com/certifi/python-certifi) from 2024.8.30 to 2024.12.14. 
<details> <summary>Commits</summary> <ul> <li><a href="https://github.com/certifi/python-certifi/commit/4ba39005afa1958ee24af51a11b64299fba61025"><code>4ba3900</code></a> 2024.12.14 (<a href="https://redirect.github.com/certifi/python-certifi/issues/329">#329</a>)</li> <li><a href="https://github.com/certifi/python-certifi/commit/9164660735d61e7eee69e7ff28dec5200eddf20f"><code>9164660</code></a> Bump pypa/gh-action-pypi-publish from 1.12.2 to 1.12.3 (<a href="https://redirect.github.com/certifi/python-certifi/issues/331">#331</a>)</li> <li><a href="https://github.com/certifi/python-certifi/commit/3dc36518666bb84a2feeaa45d60a231af494c35b"><code>3dc3651</code></a> Bump pypa/gh-action-pypi-publish from 1.11.0 to 1.12.2 (<a href="https://redirect.github.com/certifi/python-certifi/issues/328">#328</a>)</li> <li><a href="https://github.com/certifi/python-certifi/commit/c5bf18dcd30be7e743268c2d0ce484e539b589c0"><code>c5bf18d</code></a> Bump pypa/gh-action-pypi-publish from 1.10.3 to 1.11.0 (<a href="https://redirect.github.com/certifi/python-certifi/issues/327">#327</a>)</li> <li><a href="https://github.com/certifi/python-certifi/commit/b9083917686e810b56e305cb45364af482b63099"><code>b908391</code></a> Bump actions/setup-python from 5.2.0 to 5.3.0 (<a href="https://redirect.github.com/certifi/python-certifi/issues/326">#326</a>)</li> <li><a href="https://github.com/certifi/python-certifi/commit/bc26b41e0db0cfe4d05098246534826e49a97e5e"><code>bc26b41</code></a> Bump actions/checkout from 4.2.1 to 4.2.2 (<a href="https://redirect.github.com/certifi/python-certifi/issues/325">#325</a>)</li> <li><a href="https://github.com/certifi/python-certifi/commit/57afc22de7f41e779b4541f5d16ddcab276f1f1d"><code>57afc22</code></a> Bump actions/upload-artifact from 4.4.1 to 4.4.3 (<a href="https://redirect.github.com/certifi/python-certifi/issues/323">#323</a>)</li> <li><a 
href="https://github.com/certifi/python-certifi/commit/a495091b0dc2d34d1fd2bba5c3de1e8aea1eace2"><code>a495091</code></a> test against 3.13 final</li> <li><a href="https://github.com/certifi/python-certifi/commit/62f8144934352592141a5443a6b321d7d8706b33"><code>62f8144</code></a> Added 3.13 classifier (<a href="https://redirect.github.com/certifi/python-certifi/issues/322">#322</a>)</li> <li><a href="https://github.com/certifi/python-certifi/commit/94d23a0d8627d87f534c2e31ede19fe42ed39917"><code>94d23a0</code></a> Bump pypa/gh-action-pypi-publish from 1.10.2 to 1.10.3 (<a href="https://redirect.github.com/certifi/python-certifi/issues/321">#321</a>)</li> <li>Additional commits viewable in <a href="https://github.com/certifi/python-certifi/compare/2024.08.30...2024.12.14">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=certifi&package-manager=pip&previous-version=2024.8.30&new-version=2024.12.14)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index d81e361d557..b2a1ffd9a50 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -30,7 +30,7 @@ brotli==1.1.0 ; 
platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in build==1.2.2.post1 # via pip-tools -certifi==2024.8.30 +certifi==2024.12.14 # via requests cffi==1.17.1 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index bd3ce480021..ed7b6e3d591 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -30,7 +30,7 @@ brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in build==1.2.2.post1 # via pip-tools -certifi==2024.8.30 +certifi==2024.12.14 # via requests cffi==1.17.1 # via diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index b8b26068f7e..ad53f6dacd8 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -10,7 +10,7 @@ alabaster==1.0.0 # via sphinx babel==2.16.0 # via sphinx -certifi==2024.8.30 +certifi==2024.12.14 # via requests charset-normalizer==3.4.0 # via requests diff --git a/requirements/doc.txt b/requirements/doc.txt index 6b0973acbf1..1276552b375 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -10,7 +10,7 @@ alabaster==1.0.0 # via sphinx babel==2.16.0 # via sphinx -certifi==2024.8.30 +certifi==2024.12.14 # via requests charset-normalizer==3.4.0 # via requests From 6e6118237c1335c4ac191b4d691f14aa443d2c72 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 24 Dec 2024 12:10:55 +0000 Subject: [PATCH 1104/1511] Bump setuptools from 75.2.0 to 75.6.0 (#10236) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [setuptools](https://github.com/pypa/setuptools) from 75.2.0 to 75.6.0. 
<details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/setuptools/blob/main/NEWS.rst">setuptools's changelog</a>.</em></p> <blockquote> <h1>v75.6.0</h1> <h2>Features</h2> <ul> <li>Preserve original <code>PKG-INFO</code> into <code>METADATA</code> when creating wheel (instead of calling <code>wheel.metadata.pkginfo_to_metadata</code>). This helps to be more compliant with the flow specified in PEP 517. (<a href="https://redirect.github.com/pypa/setuptools/issues/4701">#4701</a>)</li> <li>Changed the <code>WindowsSdkVersion</code>, <code>FrameworkVersion32</code> and <code>FrameworkVersion64</code> properties of <code>setuptools.msvc.PlatformInfo</code> to return an empty <code>tuple</code> instead of <code>None</code> as a fallthrough case -- by :user:<code>Avasam</code> (<a href="https://redirect.github.com/pypa/setuptools/issues/4754">#4754</a>)</li> </ul> <h1>v75.5.0</h1> <h2>Features</h2> <ul> <li>Removed support for <code>SETUPTOOLS_DANGEROUSLY_SKIP_PYPROJECT_VALIDATION</code>, as it is deemed prone to errors. (<a href="https://redirect.github.com/pypa/setuptools/issues/4746">#4746</a>)</li> </ul> <h1>v75.4.0</h1> <h2>Features</h2> <ul> <li> <p>Added support for the environment variable <code>SETUPTOOLS_DANGEROUSLY_SKIP_PYPROJECT_VALIDATION=true</code>, allowing users to bypass the validation of <code>pyproject.toml</code>. This option should be used only as a last resort when resolving dependency issues, as it may lead to improper functioning. Users who enable this setting are responsible for ensuring that <code>pyproject.toml</code> complies with setuptools requirements. (<a href="https://redirect.github.com/pypa/setuptools/issues/4611">#4611</a>)</p> <p>.. attention:: This environment variable was removed in a later version of <code>setuptools</code>.</p> </li> <li> <p>Require Python 3.9 or later. 
(<a href="https://redirect.github.com/pypa/setuptools/issues/4718">#4718</a>)</p> </li> <li> <p>Remove dependency on <code>importlib_resources</code> and the vendored copy of the library. Instead, <code>setuptools</code> consistently rely on stdlib's <code>importlib.resources</code> (available on Python 3.9+). (<a href="https://redirect.github.com/pypa/setuptools/issues/4718">#4718</a>)</p> </li> <li> <p>Setuptools' <code>bdist_wheel</code> implementation no longer produces wheels with the <code>m</code> SOABI flag (pymalloc-related). This flag was removed on Python 3.8+ (see :obj:<code>sys.abiflags</code>). (<a href="https://redirect.github.com/pypa/setuptools/issues/4718">#4718</a>)</p> </li> <li> <p>Updated vendored packaging version to 24.2. (<a href="https://redirect.github.com/pypa/setuptools/issues/4740">#4740</a>)</p> </li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/setuptools/commit/bf2ced2a61833915a307c73405da99b6408154c7"><code>bf2ced2</code></a> Bump version: 75.5.0 → 75.6.0</li> <li><a href="https://github.com/pypa/setuptools/commit/8685c80e3e77baf84d77e12a314cf96ce74006f5"><code>8685c80</code></a> Empty tuple instead of None for PlatformInfo version properties fallthrough (...</li> <li><a href="https://github.com/pypa/setuptools/commit/50d671b1e4ca090affe57d3a95b2ad7fa43c0e27"><code>50d671b</code></a> Rename news fragment</li> <li><a href="https://github.com/pypa/setuptools/commit/2b471c275ff4e822dd7d41acf4fcaaf1bae41741"><code>2b471c2</code></a> Changed the <code>WindowsSdkVersion</code>, <code>FrameworkVersion32</code> and ``FrameworkVers...</li> <li><a href="https://github.com/pypa/setuptools/commit/2c77cd2e77007040fbc7bdcfc99bc4147c79cf7f"><code>2c77cd2</code></a> Runtime typing fixes for typeshed return type merge (<a href="https://redirect.github.com/pypa/setuptools/issues/4753">#4753</a></li> <li><a 
href="https://github.com/pypa/setuptools/commit/9a4c8d484ad0010b5a0d9a97e645a50b4d657d13"><code>9a4c8d4</code></a> Runtime typing fixes for typeshed return type merge</li> <li><a href="https://github.com/pypa/setuptools/commit/e622859e278e1751175ded6f8f41ea3de06e4855"><code>e622859</code></a> Preserve original <code>PKG-INFO</code> contents when creating wheel (instead of calling...</li> <li><a href="https://github.com/pypa/setuptools/commit/0b5b4174380d5d2fc8781bc3a459d8d5e2e64859"><code>0b5b417</code></a> Mark tests that may depend on external network</li> <li><a href="https://github.com/pypa/setuptools/commit/a4fa01db264b374b13d65cd64e2cd06ecf1ab9b5"><code>a4fa01d</code></a> Add news fragment</li> <li><a href="https://github.com/pypa/setuptools/commit/089aca9754d716b5459ab53a2b0c58adf9dd672d"><code>089aca9</code></a> Ignore coverage in test code</li> <li>Additional commits viewable in <a href="https://github.com/pypa/setuptools/compare/v75.2.0...v75.6.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=setuptools&package-manager=pip&previous-version=75.2.0&new-version=75.6.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index b2a1ffd9a50..318b8a7ce6c 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -279,7 +279,7 @@ yarl==1.18.3 # The following packages are 
considered to be unsafe in a requirements file: pip==24.3.1 # via pip-tools -setuptools==75.2.0 +setuptools==75.6.0 # via # incremental # pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index ed7b6e3d591..75581b5c06c 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -271,7 +271,7 @@ yarl==1.18.3 # The following packages are considered to be unsafe in a requirements file: pip==24.3.1 # via pip-tools -setuptools==75.2.0 +setuptools==75.6.0 # via # incremental # pip-tools diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index ad53f6dacd8..f27a01a77bf 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -74,5 +74,5 @@ urllib3==2.2.3 # via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==75.2.0 +setuptools==75.6.0 # via incremental diff --git a/requirements/doc.txt b/requirements/doc.txt index 1276552b375..45a7eafee78 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -69,5 +69,5 @@ urllib3==2.2.3 # via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==75.2.0 +setuptools==75.6.0 # via incremental From 5e3ddf14c0e86f13181809f5585a0f2efa5849eb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 24 Dec 2024 12:11:07 +0000 Subject: [PATCH 1105/1511] Bump aiohappyeyeballs from 2.4.3 to 2.4.4 (#10237) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [aiohappyeyeballs](https://github.com/aio-libs/aiohappyeyeballs) from 2.4.3 to 2.4.4. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/aiohappyeyeballs/releases">aiohappyeyeballs's releases</a>.</em></p> <blockquote> <h1>v2.4.4 (2024-11-30)</h1> <h2>Fix</h2> <ul> <li>fix: handle OSError on failure to close socket instead of raising IndexError (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/114">#114</a>)</li> </ul> <p>Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]<a href="https://github.com/users"><code>@​users</code></a>.noreply.github.com> Co-authored-by: J. Nick Koston <<a href="mailto:nick@koston.org">nick@koston.org</a>> (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/c542f684d329fed04093caa2b31d8f7f6e0e0949"><code>c542f68</code></a>)</p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/aiohappyeyeballs/blob/main/CHANGELOG.md">aiohappyeyeballs's changelog</a>.</em></p> <blockquote> <h2>v2.4.4 (2024-11-30)</h2> <h3>Fix</h3> <ul> <li>Handle oserror on failure to close socket instead of raising indexerror (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/114">#114</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/c542f684d329fed04093caa2b31d8f7f6e0e0949"><code>c542f68</code></a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/3c4f2a6892d62170d5c14896dc65714e5938a494"><code>3c4f2a6</code></a> 2.4.4</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/c542f684d329fed04093caa2b31d8f7f6e0e0949"><code>c542f68</code></a> fix: handle OSError on failure to close socket instead of raising IndexError ...</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/fd90f564d5789b213922e5dd0994d8cdde196c4b"><code>fd90f56</code></a> chore(pre-commit.ci): pre-commit autoupdate (<a 
href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/116">#116</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/065380744672a1eabdc7387ae36d69842c720345"><code>0653807</code></a> chore: bump codecov-action to 5.0.3 (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/115">#115</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/90e01edddd638928568990e5a23764142c6e9c2e"><code>90e01ed</code></a> chore(pre-commit.ci): pre-commit autoupdate (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/113">#113</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/31825f2a3cc6c885dd08d66375f0fd37ae51ddd0"><code>31825f2</code></a> chore(pre-commit.ci): pre-commit autoupdate (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/111">#111</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/4c23bcad40398ee614be03721e8c3d6e5a02d07e"><code>4c23bca</code></a> chore: add missing FUNDING.yml (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/110">#110</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/b5dfff592eef2defd3fb38df8884f3c8f4a60a76"><code>b5dfff5</code></a> chore(pre-commit.ci): pre-commit autoupdate (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/108">#108</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/5a3b4cb87174db386b4bd431617603da9fc3d74e"><code>5a3b4cb</code></a> chore: fix docs (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/106">#106</a>)</li> <li>See full diff in <a href="https://github.com/aio-libs/aiohappyeyeballs/compare/v2.4.3...v2.4.4">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=aiohappyeyeballs&package-manager=pip&previous-version=2.4.3&new-version=2.4.4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index d9aa1f719f8..e7b278cdcdf 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.4.3 +aiohappyeyeballs==2.4.4 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 318b8a7ce6c..91644fc6703 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via # -r requirements/lint.in # -r requirements/runtime-deps.in -aiohappyeyeballs==2.4.3 +aiohappyeyeballs==2.4.4 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.7 # via -r requirements/doc.in diff --git a/requirements/dev.txt 
b/requirements/dev.txt index 75581b5c06c..9600bc5e013 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via # -r requirements/lint.in # -r requirements/runtime-deps.in -aiohappyeyeballs==2.4.3 +aiohappyeyeballs==2.4.4 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.7 # via -r requirements/doc.in diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 5ded4ca04e2..ca182b86a16 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.4.3 +aiohappyeyeballs==2.4.4 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 6edf71aaf8c..25ffb19dc0a 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.4.3 +aiohappyeyeballs==2.4.4 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in From 83a51b8829f50ea719c47f017379d7bdca486774 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 24 Dec 2024 12:11:22 +0000 Subject: [PATCH 1106/1511] Bump trustme from 1.1.0 to 1.2.0 (#10239) Bumps [trustme](https://github.com/python-trio/trustme) from 1.1.0 to 1.2.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/python-trio/trustme/releases">trustme's releases</a>.</em></p> <blockquote> <h2>1.2.0</h2> <h2>Features</h2> <ul> <li>Add support for Python 3.13. 
(<a href="https://redirect.github.com/python-trio/trustme/issues/664">#664</a>)</li> <li>Allow setting of cert's notBefore attribute (<a href="https://redirect.github.com/python-trio/trustme/issues/628">#628</a>)</li> </ul> <h2>Bugfixes</h2> <ul> <li>Add the Authority Key Identifier extension to child CA certificates. (<a href="https://redirect.github.com/python-trio/trustme/issues/642">#642</a>)</li> </ul> <h2>Deprecations and Removals</h2> <ul> <li>Remove support for Python 3.8 and PyPy 3.9. (<a href="https://redirect.github.com/python-trio/trustme/issues/664">#664</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/python-trio/trustme/commit/40e0fca1a1d5f8369cb03cec72212f2074b6ab63"><code>40e0fca</code></a> Release 1.2.0</li> <li><a href="https://github.com/python-trio/trustme/commit/83ffe5138cc8a97137635e9e73f47a6efda78aa0"><code>83ffe51</code></a> Switch from setup.py to pyproject.toml (<a href="https://redirect.github.com/python-trio/trustme/issues/665">#665</a>)</li> <li><a href="https://github.com/python-trio/trustme/commit/e3c3bda6241b4331996a6befbe8db17267db3d09"><code>e3c3bda</code></a> Fix coverage needs</li> <li><a href="https://github.com/python-trio/trustme/commit/a19f69970e07ff9b7292fa510b08d85dbd843220"><code>a19f699</code></a> Support Python 3.13, drop 3.8 and PyPy 3.9</li> <li><a href="https://github.com/python-trio/trustme/commit/843d25d07ac61a7e7855b38f7263b94544d3c516"><code>843d25d</code></a> Bump the dependencies group with 7 updates (<a href="https://redirect.github.com/python-trio/trustme/issues/662">#662</a>)</li> <li><a href="https://github.com/python-trio/trustme/commit/c7e64a69368debcd2b43bdc0a98df1e7d46a09a1"><code>c7e64a6</code></a> Bump GitHub Actions, fix CI and coverage (<a href="https://redirect.github.com/python-trio/trustme/issues/663">#663</a>)</li> <li><a 
href="https://github.com/python-trio/trustme/commit/7b7f43596a039ef27dcf3e26e45ed138e482acfc"><code>7b7f435</code></a> Bump the dependencies group with 7 updates (<a href="https://redirect.github.com/python-trio/trustme/issues/660">#660</a>)</li> <li><a href="https://github.com/python-trio/trustme/commit/621d131e703e187b74bd87d69521b4bc4d185e55"><code>621d131</code></a> Bump the dependencies group with 3 updates (<a href="https://redirect.github.com/python-trio/trustme/issues/659">#659</a>)</li> <li><a href="https://github.com/python-trio/trustme/commit/a4a82f3f8deb69ee747bed339a6bcdcf6e82dfbf"><code>a4a82f3</code></a> Bump the dependencies group with 14 updates (<a href="https://redirect.github.com/python-trio/trustme/issues/658">#658</a>)</li> <li><a href="https://github.com/python-trio/trustme/commit/586f7759d5c27beb44da60615a71848eb2a5a490"><code>586f775</code></a> Bump cryptography from 41.0.7 to 42.0.4 (<a href="https://redirect.github.com/python-trio/trustme/issues/638">#638</a>)</li> <li>Additional commits viewable in <a href="https://github.com/python-trio/trustme/compare/v1.1.0...v1.2.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=trustme&package-manager=pip&previous-version=1.1.0&new-version=1.2.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 91644fc6703..33bb95ba874 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -245,7 +245,7 @@ towncrier==23.11.0 # via # -r requirements/doc.in 
# sphinxcontrib-towncrier -trustme==1.1.0 ; platform_machine != "i686" +trustme==1.2.0 ; platform_machine != "i686" # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 9600bc5e013..ffc2aee9f88 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -237,7 +237,7 @@ towncrier==23.11.0 # via # -r requirements/doc.in # sphinxcontrib-towncrier -trustme==1.1.0 ; platform_machine != "i686" +trustme==1.2.0 ; platform_machine != "i686" # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index fc026f67f00..b8c637c5f7e 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -91,7 +91,7 @@ tomli==2.2.1 # mypy # pytest # slotscheck -trustme==1.1.0 +trustme==1.2.0 # via -r requirements/lint.in typing-extensions==4.12.2 # via diff --git a/requirements/test.txt b/requirements/test.txt index 25ffb19dc0a..62a5fee4da3 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -119,7 +119,7 @@ tomli==2.2.1 # coverage # mypy # pytest -trustme==1.1.0 ; platform_machine != "i686" +trustme==1.2.0 ; platform_machine != "i686" # via -r requirements/test.in typing-extensions==4.12.2 # via From 850f5a470cf6fcc988f0173598fd28428cc91f23 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 24 Dec 2024 12:11:26 +0000 Subject: [PATCH 1107/1511] Bump coverage from 7.6.1 to 7.6.9 (#10240) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.6.1 to 7.6.9. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst">coverage's changelog</a>.</em></p> <blockquote> <h2>Version 7.6.9 — 2024-12-06</h2> <ul> <li>Fix: <code>Tomas Uribe fixed <pull 1901_></code>_ a performance problem in the XML report. 
Large code bases should produce XML reports much faster now.</li> </ul> <p>.. _pull 1901: <a href="https://redirect.github.com/nedbat/coveragepy/pull/1901">nedbat/coveragepy#1901</a></p> <p>.. _changes_7-6-8:</p> <h2>Version 7.6.8 — 2024-11-23</h2> <ul> <li> <p>Fix: the LCOV report code assumed that a branch line that took no branches meant that the entire line was unexecuted. This isn't true in a few cases: the line might always raise an exception, or might have been optimized away. Fixes <code>issue 1896</code>_.</p> </li> <li> <p>Fix: similarly, the HTML report will now explain that a line that jumps to none of its expected destinations must have always raised an exception. Previously, it would say something nonsensical like, "line 4 didn't jump to line 5 because line 4 was never true, and it didn't jump to line 7 because line 4 was always true." This was also shown in <code>issue 1896</code>_.</p> </li> </ul> <p>.. _issue 1896: <a href="https://redirect.github.com/nedbat/coveragepy/issues/1896">nedbat/coveragepy#1896</a></p> <p>.. _changes_7-6-7:</p> <h2>Version 7.6.7 — 2024-11-15</h2> <ul> <li>Fix: ugh, the other assert from 7.6.5 can also be encountered in the wild, so it's been restored to a conditional. Sorry for the churn.</li> </ul> <p>.. _changes_7-6-6:</p> <h2>Version 7.6.6 — 2024-11-15</h2> <ul> <li>One of the new asserts from 7.6.5 caused problems in real projects, as reported in <code>issue 1891</code>_. The assert has been removed.</li> </ul> <p>.. _issue 1891: <a href="https://redirect.github.com/nedbat/coveragepy/issues/1891">nedbat/coveragepy#1891</a></p> <p>.. _changes_7-6-5:</p> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/nedbat/coveragepy/commit/daac76dde1681d968f37095cacc551b464888753"><code>daac76d</code></a> docs: sample HTML for 7.6.9</li> <li><a href="https://github.com/nedbat/coveragepy/commit/f7d7e42705083b8c124fcc8be64da555095adfc7"><code>f7d7e42</code></a> docs: prep for 7.6.9</li> <li><a href="https://github.com/nedbat/coveragepy/commit/8fe3b2bad5de5e750c104ad2ddb251a0500dd146"><code>8fe3b2b</code></a> docs: thanks, Tomas Uribe for <a href="https://redirect.github.com/nedbat/coveragepy/issues/1901">#1901</a></li> <li><a href="https://github.com/nedbat/coveragepy/commit/346cfc03885571fd448f8218a8b9f12836c2fcb2"><code>346cfc0</code></a> fix: use set instead of list for arcs_executed, fixes poor performance (<a href="https://redirect.github.com/nedbat/coveragepy/issues/1901">#1901</a>)</li> <li><a href="https://github.com/nedbat/coveragepy/commit/484678e8d7c3b07e455e4c5bbce6f4a5bd03a9b4"><code>484678e</code></a> fix: COVERAGE_ONE_CORE should mean ctrace if possible</li> <li><a href="https://github.com/nedbat/coveragepy/commit/37451ed089cb4c5d25e2fe6c13485407d01be176"><code>37451ed</code></a> chore: make upgrade doc_upgrade</li> <li><a href="https://github.com/nedbat/coveragepy/commit/bd23445b181387b9c646d6dbf096b624bbc5953b"><code>bd23445</code></a> chore: bump the action-dependencies group with 2 updates (<a href="https://redirect.github.com/nedbat/coveragepy/issues/1899">#1899</a>)</li> <li><a href="https://github.com/nedbat/coveragepy/commit/01cf50c7d9131af2de9aea1e3573bdff1dc48110"><code>01cf50c</code></a> test: add a test to satisfy a condition in results.py</li> <li><a href="https://github.com/nedbat/coveragepy/commit/74d3c50af1563b6cbd9ab1ef580267dc55982922"><code>74d3c50</code></a> chore: make upgrade</li> <li><a href="https://github.com/nedbat/coveragepy/commit/f3b269cc5d5f1683aba34f57c7d5b991c5dd3743"><code>f3b269c</code></a> build: only comment once on an issue 
mentioned twice</li> <li>Additional commits viewable in <a href="https://github.com/nedbat/coveragepy/compare/7.6.1...7.6.9">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=coverage&package-manager=pip&previous-version=7.6.1&new-version=7.6.9)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 33bb95ba874..fe5dad41761 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -50,7 +50,7 @@ click==8.1.8 # slotscheck # towncrier # wait-for-it -coverage==7.6.1 +coverage==7.6.9 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/dev.txt b/requirements/dev.txt index ffc2aee9f88..775041cb817 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -50,7 +50,7 @@ click==8.1.8 # slotscheck # towncrier # wait-for-it -coverage==7.6.1 +coverage==7.6.9 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/test.txt b/requirements/test.txt index 62a5fee4da3..17471eae051 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -25,7 +25,7 @@ cffi==1.17.1 # pytest-codspeed click==8.1.8 # via wait-for-it -coverage==7.6.1 +coverage==7.6.9 # via # -r requirements/test.in # pytest-cov From 7c2e2e73432f57439775575da63415a3d9a00c7f Mon Sep 17 00:00:00 
2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 24 Dec 2024 12:30:56 +0000 Subject: [PATCH 1108/1511] Bump pytest-codspeed from 3.0.0 to 3.1.0 (#10242) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pytest-codspeed](https://github.com/CodSpeedHQ/pytest-codspeed) from 3.0.0 to 3.1.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/CodSpeedHQ/pytest-codspeed/releases">pytest-codspeed's releases</a>.</em></p> <blockquote> <h2>v3.1.0</h2> <h2>What's changed</h2> <h3><!-- raw HTML omitted -->🚀 Features</h3> <ul> <li>Build the native extension during packaging to avoid runtime build issues by <a href="https://github.com/art049"><code>@​art049</code></a> in <a href="https://redirect.github.com/CodSpeedHQ/pytest-codspeed/pull/57">CodSpeedHQ/pytest-codspeed#57</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/CodSpeedHQ/pytest-codspeed/compare/v3.0.0...v3.1.0">https://github.com/CodSpeedHQ/pytest-codspeed/compare/v3.0.0...v3.1.0</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/CodSpeedHQ/pytest-codspeed/blob/master/CHANGELOG.md">pytest-codspeed's changelog</a>.</em></p> <blockquote> <h2>[3.1.0] - 2024-12-09</h2> <h3><!-- raw HTML omitted -->🏗️ Refactor</h3> <ul> <li>Remove the scripted semver generation by <a href="https://github.com/art049"><code>@​art049</code></a></li> </ul> <h3><!-- raw HTML omitted -->⚙️ Internals</h3> <ul> <li>Fix typo in cibuildwheel config by <a href="https://github.com/art049"><code>@​art049</code></a> in <a href="https://redirect.github.com/CodSpeedHQ/pytest-codspeed/pull/57">#57</a></li> </ul> <h2>[3.1.0-beta] - 2024-12-06</h2> <h3><!-- raw HTML omitted -->🚀 Features</h3> <ul> <li>Check buildability and fallback when build doesn't work by <a href="https://github.com/art049"><code>@​art049</code></a></li> 
<li>Compile the callgrind wrapper at build time by <a href="https://github.com/art049"><code>@​art049</code></a></li> </ul> <h3><!-- raw HTML omitted -->🐛 Bug Fixes</h3> <ul> <li>Allow build on arm64 by <a href="https://github.com/art049"><code>@​art049</code></a></li> </ul> <h3><!-- raw HTML omitted -->⚙️ Internals</h3> <ul> <li>Build wheels with cibuildwheel by <a href="https://github.com/art049"><code>@​art049</code></a></li> <li>Allow forcing integrated tests by <a href="https://github.com/art049"><code>@​art049</code></a></li> <li>Fix release script by <a href="https://github.com/art049"><code>@​art049</code></a></li> <li>Use bumpver to manage versions by <a href="https://github.com/art049"><code>@​art049</code></a></li> <li>Add a changelog by <a href="https://github.com/art049"><code>@​art049</code></a></li> <li>Force native extension build in CI by <a href="https://github.com/art049"><code>@​art049</code></a></li> <li>Updated matrix release workflow by <a href="https://github.com/art049"><code>@​art049</code></a></li> <li>Use a common python version in the codspeed job by <a href="https://github.com/art049"><code>@​art049</code></a></li> <li>Fix the codspeed workflow by <a href="https://github.com/art049"><code>@​art049</code></a></li> <li>Use uv in CI by <a href="https://github.com/art049"><code>@​art049</code></a></li> <li>Commit uv lock file by <a href="https://github.com/art049"><code>@​art049</code></a></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/CodSpeedHQ/pytest-codspeed/commit/3b1a30c6e408a8c9a69af47cae56ec44a5a84ad5"><code>3b1a30c</code></a> Release v3.1.0 🚀</li> <li><a href="https://github.com/CodSpeedHQ/pytest-codspeed/commit/ad1d41190cafe61588449500901b990f72ba1d01"><code>ad1d411</code></a> refactor: remove the scripted semver generation</li> <li><a href="https://github.com/CodSpeedHQ/pytest-codspeed/commit/c4dfe748e9b008f31049750667118556320f66c6"><code>c4dfe74</code></a> chore: 
fix typo in cibuildwheel config</li> <li><a href="https://github.com/CodSpeedHQ/pytest-codspeed/commit/58bd8de9e7ed4c786b294100d09370dd770510d2"><code>58bd8de</code></a> Release v3.1.0-beta 🚀</li> <li><a href="https://github.com/CodSpeedHQ/pytest-codspeed/commit/804efe93f96d92cca26718e636687a0ce582a552"><code>804efe9</code></a> chore(ci): build wheels with cibuildwheel</li> <li><a href="https://github.com/CodSpeedHQ/pytest-codspeed/commit/c99efafc8e8ab9c0ca1ef07081389e5ac5121adf"><code>c99efaf</code></a> chore: allow forcing integrated tests</li> <li><a href="https://github.com/CodSpeedHQ/pytest-codspeed/commit/e7ef8537bca9ea5d72fbcc2003536721b4b8faef"><code>e7ef853</code></a> chore: fix release script</li> <li><a href="https://github.com/CodSpeedHQ/pytest-codspeed/commit/fab73b437748a4afdcd1e2ea3d89862a88acee12"><code>fab73b4</code></a> chore: use bumpver to manage versions</li> <li><a href="https://github.com/CodSpeedHQ/pytest-codspeed/commit/5076be076b53063e37f60cb32e5c836ca0a3c75f"><code>5076be0</code></a> chore: add a changelog</li> <li><a href="https://github.com/CodSpeedHQ/pytest-codspeed/commit/59989b0e8e009622ebd5b0ca205762847cdbfaa3"><code>59989b0</code></a> fix: allow build on arm64</li> <li>Additional commits viewable in <a href="https://github.com/CodSpeedHQ/pytest-codspeed/compare/v3.0.0...v3.1.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pytest-codspeed&package-manager=pip&previous-version=3.0.0&new-version=3.1.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 6 ++---- requirements/dev.txt | 6 ++---- requirements/lint.txt | 6 ++---- requirements/test.txt | 4 +--- 4 files changed, 7 insertions(+), 15 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index fe5dad41761..d5eaded92dc 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -69,9 +69,7 @@ exceptiongroup==1.2.2 execnet==2.1.1 
# via pytest-xdist filelock==3.16.1 - # via - # pytest-codspeed - # virtualenv + # via virtualenv freezegun==1.5.1 # via # -r requirements/lint.in @@ -170,7 +168,7 @@ pytest==8.3.3 # pytest-cov # pytest-mock # pytest-xdist -pytest-codspeed==3.0.0 +pytest-codspeed==3.1.0 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 775041cb817..6fe52a5095d 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -67,9 +67,7 @@ exceptiongroup==1.2.2 execnet==2.1.1 # via pytest-xdist filelock==3.16.1 - # via - # pytest-codspeed - # virtualenv + # via virtualenv freezegun==1.5.1 # via # -r requirements/lint.in @@ -165,7 +163,7 @@ pytest==8.3.3 # pytest-cov # pytest-mock # pytest-xdist -pytest-codspeed==3.0.0 +pytest-codspeed==3.1.0 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index b8c637c5f7e..2e8d0692029 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -26,9 +26,7 @@ distlib==0.3.9 exceptiongroup==1.2.2 # via pytest filelock==3.16.1 - # via - # pytest-codspeed - # virtualenv + # via virtualenv freezegun==1.5.1 # via -r requirements/lint.in identify==2.6.1 @@ -70,7 +68,7 @@ pytest==8.3.3 # -r requirements/lint.in # pytest-codspeed # pytest-mock -pytest-codspeed==3.0.0 +pytest-codspeed==3.1.0 # via -r requirements/lint.in pytest-mock==3.14.0 # via -r requirements/lint.in diff --git a/requirements/test.txt b/requirements/test.txt index 17471eae051..f6888348ade 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -35,8 +35,6 @@ exceptiongroup==1.2.2 # via pytest execnet==2.1.1 # via pytest-xdist -filelock==3.16.1 - # via pytest-codspeed freezegun==1.5.1 # via -r requirements/test.in frozenlist==1.5.0 @@ -92,7 +90,7 @@ pytest==8.3.3 # pytest-cov # pytest-mock # pytest-xdist -pytest-codspeed==3.0.0 +pytest-codspeed==3.1.0 # via -r requirements/test.in pytest-cov==5.0.0 # via -r requirements/test.in From 
a3b50dd0a36d091875b9815ec873401bf3c05b98 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 24 Dec 2024 13:50:14 +0100 Subject: [PATCH 1109/1511] Bump urllib3 from 2.2.3 to 2.3.0 (#10241) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [urllib3](https://github.com/urllib3/urllib3) from 2.2.3 to 2.3.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/urllib3/urllib3/releases">urllib3's releases</a>.</em></p> <blockquote> <h2>2.3.0</h2> <h2>🚀 urllib3 is fundraising for HTTP/2 support</h2> <p><a href="https://sethmlarson.dev/urllib3-is-fundraising-for-http2-support">urllib3 is raising ~$40,000 USD</a> to release HTTP/2 support and ensure long-term sustainable maintenance of the project after a sharp decline in financial support for 2023. If your company or organization uses Python and would benefit from HTTP/2 support in Requests, pip, cloud SDKs, and thousands of other projects <a href="https://opencollective.com/urllib3">please consider contributing financially</a> to ensure HTTP/2 support is developed sustainably and maintained for the long-haul.</p> <p>Thank you for your support.</p> <h2>Features</h2> <ul> <li>Added <code>HTTPResponse.shutdown()</code> to stop any ongoing or future reads for a specific response. It calls <code>shutdown(SHUT_RD)</code> on the underlying socket. This feature was <a href="https://opencollective.com/urllib3/contributions/815307">sponsored by LaunchDarkly</a>. (<a href="https://redirect.github.com/urllib3/urllib3/issues/2868">urllib3/urllib3#2868</a>)</li> <li>Added support for JavaScript Promise Integration on Emscripten. This enables more efficient WebAssembly requests and streaming, and makes it possible to use in Node.js if you launch it as node <code>--experimental-wasm-stack-switching</code>. 
(<a href="https://redirect.github.com/urllib3/urllib3/issues/3400">urllib3/urllib3#3400</a>)</li> <li>Added the <code>proxy_is_tunneling</code> property to <code>HTTPConnection</code> and <code>HTTPSConnection</code>. (<a href="https://redirect.github.com/urllib3/urllib3/issues/3285">urllib3/urllib3#3285</a>)</li> <li>Added pickling support to <code>NewConnectionError</code> and <code>NameResolutionError</code>. (<a href="https://redirect.github.com/urllib3/urllib3/issues/3480">urllib3/urllib3#3480</a>)</li> </ul> <h2>Bugfixes</h2> <ul> <li>Fixed an issue in debug logs where the HTTP version was rendering as "HTTP/11" instead of "HTTP/1.1". (<a href="https://redirect.github.com/urllib3/urllib3/issues/3489">urllib3/urllib3#3489</a>)</li> </ul> <h2>Deprecations and Removals</h2> <ul> <li>Removed support for Python 3.8. (<a href="https://redirect.github.com/urllib3/urllib3/issues/3492">urllib3/urllib3#3492</a>)</li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/urllib3/urllib3/compare/2.2.3...2.3.0">https://github.com/urllib3/urllib3/compare/2.2.3...2.3.0</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/urllib3/urllib3/blob/main/CHANGES.rst">urllib3's changelog</a>.</em></p> <blockquote> <h1>2.3.0 (2024-12-22)</h1> <h2>Features</h2> <ul> <li>Added <code>HTTPResponse.shutdown()</code> to stop any ongoing or future reads for a specific response. It calls <code>shutdown(SHUT_RD)</code> on the underlying socket. This feature was <code>sponsored by LaunchDarkly <https://opencollective.com/urllib3/contributions/815307></code><strong>. (<code>[#2868](https://github.com/urllib3/urllib3/issues/2868) <https://github.com/urllib3/urllib3/issues/2868></code></strong>)</li> <li>Added support for JavaScript Promise Integration on Emscripten. 
This enables more efficient WebAssembly requests and streaming, and makes it possible to use in Node.js if you launch it as <code>node --experimental-wasm-stack-switching</code>. (<code>[#3400](https://github.com/urllib3/urllib3/issues/3400) <https://github.com/urllib3/urllib3/issues/3400></code>__)</li> <li>Added the <code>proxy_is_tunneling</code> property to <code>HTTPConnection</code> and <code>HTTPSConnection</code>. (<code>[#3285](https://github.com/urllib3/urllib3/issues/3285) <https://github.com/urllib3/urllib3/issues/3285></code>__)</li> <li>Added pickling support to <code>NewConnectionError</code> and <code>NameResolutionError</code>. (<code>[#3480](https://github.com/urllib3/urllib3/issues/3480) <https://github.com/urllib3/urllib3/issues/3480></code>__)</li> </ul> <h2>Bugfixes</h2> <ul> <li>Fixed an issue in debug logs where the HTTP version was rendering as "HTTP/11" instead of "HTTP/1.1". (<code>[#3489](https://github.com/urllib3/urllib3/issues/3489) <https://github.com/urllib3/urllib3/issues/3489></code>__)</li> </ul> <h2>Deprecations and Removals</h2> <ul> <li>Removed support for Python 3.8. 
(<code>[#3492](https://github.com/urllib3/urllib3/issues/3492) <https://github.com/urllib3/urllib3/issues/3492></code>__)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/urllib3/urllib3/commit/2f68c5363ef632d73dd4d9300289d7ce5ff275b4"><code>2f68c53</code></a> Release 2.3.0</li> <li><a href="https://github.com/urllib3/urllib3/commit/f7bcf6986fa9c43fc7884b648f66688db593b491"><code>f7bcf69</code></a> Add HTTPResponse.shutdown() to stop blocking reads (<a href="https://redirect.github.com/urllib3/urllib3/issues/3527">#3527</a>)</li> <li><a href="https://github.com/urllib3/urllib3/commit/e94224931feddf9e12bb25452bf0d0c21da8a7e0"><code>e942249</code></a> Update .readthedocs.yml addressing a deprecation (<a href="https://redirect.github.com/urllib3/urllib3/issues/3534">#3534</a>)</li> <li><a href="https://github.com/urllib3/urllib3/commit/905549d64a948abd4b6962edecb8dd5569166275"><code>905549d</code></a> Upgrade Python pre-commit tools (<a href="https://redirect.github.com/urllib3/urllib3/issues/3529">#3529</a>)</li> <li><a href="https://github.com/urllib3/urllib3/commit/716d8340b89f7d8ec549579d14e3c0a7e5f859a5"><code>716d834</code></a> Fix PyPI publish with Core metadata 2.4 (<a href="https://redirect.github.com/urllib3/urllib3/issues/3531">#3531</a>)</li> <li><a href="https://github.com/urllib3/urllib3/commit/7ab935c6ddd546c7d57b03c0269685c61c8e60c6"><code>7ab935c</code></a> Address zizmor issues</li> <li><a href="https://github.com/urllib3/urllib3/commit/181357ed2aecf9c523f2664c05f176cde9692994"><code>181357e</code></a> Bump Quart to fix CI (<a href="https://redirect.github.com/urllib3/urllib3/issues/3524">#3524</a>)</li> <li><a href="https://github.com/urllib3/urllib3/commit/0e7e0df0586573d51c78076d4871050783bec7c8"><code>0e7e0df</code></a> Start testing with Python 3.14</li> <li><a href="https://github.com/urllib3/urllib3/commit/d67d09bfd04ecdae1280a563d06b32949befaf71"><code>d67d09b</code></a> Bump mypy 
version</li> <li><a href="https://github.com/urllib3/urllib3/commit/20032ec38a09680bcfb6d97b4c43b276af43cc64"><code>20032ec</code></a> Drop unneeded dependency pins and a warning filter</li> <li>Additional commits viewable in <a href="https://github.com/urllib3/urllib3/compare/2.2.3...2.3.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=urllib3&package-manager=pip&previous-version=2.2.3&new-version=2.3.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index d5eaded92dc..350f004871d 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -257,7 +257,7 @@ typing-extensions==4.12.2 # rich uritemplate==4.1.1 # via gidgethub -urllib3==2.2.3 +urllib3==2.3.0 # via requests uvloop==0.21.0 ; platform_system != "Windows" # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 6fe52a5095d..2f1f1d925d9 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -249,7 +249,7 @@ typing-extensions==4.12.2 # rich uritemplate==4.1.1 # via gidgethub -urllib3==2.2.3 +urllib3==2.3.0 # via requests uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpython" # via diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index f27a01a77bf..4b9688946a1 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -70,7 +70,7 @@ towncrier==23.11.0 # via # 
-r requirements/doc.in # sphinxcontrib-towncrier -urllib3==2.2.3 +urllib3==2.3.0 # via requests # The following packages are considered to be unsafe in a requirements file: diff --git a/requirements/doc.txt b/requirements/doc.txt index 45a7eafee78..54b9969364c 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -65,7 +65,7 @@ towncrier==23.11.0 # via # -r requirements/doc.in # sphinxcontrib-towncrier -urllib3==2.2.3 +urllib3==2.3.0 # via requests # The following packages are considered to be unsafe in a requirements file: From 7e0eac2acf5fa5ad9eb6a0486db6f570a3872fe6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 25 Dec 2024 12:11:27 +0000 Subject: [PATCH 1110/1511] Bump regex from 2024.9.11 to 2024.11.6 (#10253) Bumps [regex](https://github.com/mrabarnett/mrab-regex) from 2024.9.11 to 2024.11.6. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/mrabarnett/mrab-regex/blob/hg/changelog.txt">regex's changelog</a>.</em></p> <blockquote> <p>Version: 2024.11.7</p> <pre><code>Updated pyproject.toml and setup.py according to PEP 517. </code></pre> <p>Version: 2024.11.6</p> <pre><code>Git issue 546: Partial match not working in some instances with non-greedy capture </code></pre> <p>Version: 2024.9.14</p> <pre><code>Reverted to actions/download-artifact@v3 and actions/upload-artifact@v3 in main.yml because GitHub Actions failed when using them. </code></pre> <p>Version: 2024.9.13</p> <pre><code>Updated to actions/upload-artifact@v4 in main.yml. </code></pre> <p>Version: 2024.9.12</p> <pre><code>Updated to actions/download-artifact@v4 in main.yml. </code></pre> <p>Version: 2024.9.11</p> <pre><code>Updated to Unicode 16.0.0. 
</code></pre> <p>Version: 2024.7.24</p> <pre><code>Git issue 539: Bug: Partial matching fails on a simple example </code></pre> <p>Version: 2024.6.22</p> <pre><code>Git issue 535: Regex fails Unicode 15.1 GraphemeBreakTest due to missing new GB9c rule implementation </code></pre> <p>Version: 2024.5.15</p> <pre><code>Git issue 530: hangs with fuzzy and optionals <p>It's not hanging, it'll finish eventually. It's just an example of catastrophic backtracking.</p> <p>The error printed when Ctrl+C is pressed does show a bug, though, which is now fixed.<br /> </code></pre></p> <p>Version: 2024.5.10</p> <pre><code>Updated for Python 3.13. <p><time.h> now needs to be included explicitly because Python.h no longer includes it.<br /> </code></pre></p> <p>Version: 2024.4.28</p> <pre><code>Git issue 527: `VERBOSE`/`X` flag breaks `\N` escapes </code></pre> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/mrabarnett/mrab-regex/commit/930983aa68ffc133ec086ef16cabdbb9c0c491ea"><code>930983a</code></a> Git issue 546: Partial match not working in some instances with non-greedy ca...</li> <li><a href="https://github.com/mrabarnett/mrab-regex/commit/64834c729b8e117c1f9a471d9e1e51f32b69a121"><code>64834c7</code></a> Reverted to actions/download-artifact@v3 and actions/upload-artifact@v3 in ma...</li> <li><a href="https://github.com/mrabarnett/mrab-regex/commit/1bcd42e8381feaf3040f4b4a6a10dab912e6b59c"><code>1bcd42e</code></a> Updated to actions/upload-artifact@v4 in main.yml.</li> <li><a href="https://github.com/mrabarnett/mrab-regex/commit/f46728542b4fea608022016ab560d9abc8ad7f7f"><code>f467285</code></a> Updated to actions/download-artifact@v4 in main.yml.</li> <li>See full diff in <a href="https://github.com/mrabarnett/mrab-regex/compare/2024.9.11...2024.11.6">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=regex&package-manager=pip&previous-version=2024.9.11&new-version=2024.11.6)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 350f004871d..b72843d6ced 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -190,7 +190,7 @@ pyyaml==6.0.2 # via pre-commit re-assert==1.1.0 # via -r requirements/test.in -regex==2024.9.11 +regex==2024.11.6 # via re-assert requests==2.32.3 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 2f1f1d925d9..448026c45a1 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -185,7 +185,7 @@ pyyaml==6.0.2 # via pre-commit re-assert==1.1.0 # via -r requirements/test.in -regex==2024.9.11 +regex==2024.11.6 # via re-assert requests==2.32.3 # via diff --git a/requirements/test.txt b/requirements/test.txt index f6888348ade..d0c09fc0ea1 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -104,7 +104,7 @@ python-on-whales==0.74.0 # via -r requirements/test.in re-assert==1.1.0 # via -r requirements/test.in -regex==2024.9.11 +regex==2024.11.6 # via re-assert 
rich==13.9.4 # via pytest-codspeed From 4d910c422a522d09a7b7fcac1d87e4a38ef60495 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 25 Dec 2024 12:13:02 +0000 Subject: [PATCH 1111/1511] Bump identify from 2.6.1 to 2.6.3 (#10254) Bumps [identify](https://github.com/pre-commit/identify) from 2.6.1 to 2.6.3. <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pre-commit/identify/commit/f075954372758b34ef542c2e895000ea660a47c9"><code>f075954</code></a> v2.6.3</li> <li><a href="https://github.com/pre-commit/identify/commit/149074eead092ed888c8eb36abce28bfefce44aa"><code>149074e</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/identify/issues/488">#488</a> from nicknovitski/direnv</li> <li><a href="https://github.com/pre-commit/identify/commit/880ac27232c7642bcf57cf2dfe89216bd3964a2a"><code>880ac27</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/identify/issues/487">#487</a> from vlotorev/main</li> <li><a href="https://github.com/pre-commit/identify/commit/8dcf6f396e21928b7bf450d7e9f1f4f8c2269948"><code>8dcf6f3</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/identify/issues/486">#486</a> from axtimhaus/patch-1</li> <li><a href="https://github.com/pre-commit/identify/commit/d2f995a8604761b174b98f63b7564bee7f692c27"><code>d2f995a</code></a> feat: identify direnv files as Bash</li> <li><a href="https://github.com/pre-commit/identify/commit/7116aa1cf4fd64e90c81848af1b84d948abd6767"><code>7116aa1</code></a> Add bitbake extensions and files</li> <li><a href="https://github.com/pre-commit/identify/commit/13e61053e8ba7481117d8216f554fec792b36a17"><code>13e6105</code></a> Add C# Script and F# Source and Script filetypes</li> <li><a href="https://github.com/pre-commit/identify/commit/6fd6f6bea8fedb15e54f1477a49325ea82b73911"><code>6fd6f6b</code></a> v2.6.2</li> <li><a 
href="https://github.com/pre-commit/identify/commit/6607f84704f5628126301899b819ec1700f82dd9"><code>6607f84</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/identify/issues/484">#484</a> from zupo/patch-1</li> <li><a href="https://github.com/pre-commit/identify/commit/4c4ccfa931f2bbd12e311e31fabf5960ca74475d"><code>4c4ccfa</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/identify/issues/485">#485</a> from pre-commit/pre-commit-ci-update-config</li> <li>Additional commits viewable in <a href="https://github.com/pre-commit/identify/compare/v2.6.1...v2.6.3">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=identify&package-manager=pip&previous-version=2.6.1&new-version=2.6.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index b72843d6ced..187512f01cd 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -82,7 +82,7 @@ gidgethub==5.3.0 # via cherry-picker gunicorn==23.0.0 # via -r requirements/base.in -identify==2.6.1 +identify==2.6.3 # via pre-commit idna==3.3 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 448026c45a1..993c6a3008a 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -80,7 +80,7 @@ gidgethub==5.3.0 # via cherry-picker gunicorn==23.0.0 # via -r requirements/base.in -identify==2.6.1 +identify==2.6.3 # via pre-commit idna==3.4 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index 2e8d0692029..0d518445a09 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -29,7 +29,7 @@ filelock==3.16.1 # via virtualenv freezegun==1.5.1 # via -r requirements/lint.in -identify==2.6.1 +identify==2.6.3 # via pre-commit idna==3.7 # via trustme From 
13b7fa5edc05bcd6ed423869f55563b72f48516e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 25 Dec 2024 12:17:03 +0000 Subject: [PATCH 1112/1511] Bump charset-normalizer from 3.4.0 to 3.4.1 (#10258) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [charset-normalizer](https://github.com/jawah/charset_normalizer) from 3.4.0 to 3.4.1. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/jawah/charset_normalizer/releases">charset-normalizer's releases</a>.</em></p> <blockquote> <h2>Version 3.4.1</h2> <h2>🚀 We're still raising awareness around HTTP/2, and HTTP/3!</h2> <p>Did you know that Internet Explorer 11 shipped with an optional HTTP/2 support back in 2013? also libcurl did ship it in 2014[...] Using Requests today is the rough equivalent of using EOL Windows 8! We promptly invite Python developers to look at the first drop-in replacement for Requests, <a href="https://github.com/jawah/niquests">namely Niquests</a>. Ship with native WebSocket, SSE, Happy Eyeballs, DNS over HTTPS, and so on[...] All of this while remaining compatible with all Requests prior plug-ins / add-ons.</p> <p>It leverages charset-normalizer in a better way! 
Check it out, you will gain up to being 3X faster and get a real/respectable support with it.</p> <h2><a href="https://github.com/Ousret/charset_normalizer/compare/3.4.0...3.4.1">3.4.1</a> (2024-12-24)</h2> <h3>Changed</h3> <ul> <li>Project metadata are now stored using <code>pyproject.toml</code> instead of <code>setup.cfg</code> using setuptools as the build backend.</li> <li>Enforce annotation delayed loading for a simpler and consistent types in the project.</li> <li>Optional mypyc compilation upgraded to version 1.14 for Python >= 3.8</li> </ul> <h3>Added</h3> <ul> <li>pre-commit configuration.</li> <li>noxfile.</li> </ul> <h3>Removed</h3> <ul> <li><code>build-requirements.txt</code> as per using <code>pyproject.toml</code> native build configuration.</li> <li><code>bin/integration.py</code> and <code>bin/serve.py</code> in favor of downstream integration test (see noxfile).</li> <li><code>setup.cfg</code> in favor of <code>pyproject.toml</code> metadata configuration.</li> <li>Unused <code>utils.range_scan</code> function.</li> </ul> <h3>Fixed</h3> <ul> <li>Converting content to Unicode bytes may insert <code>utf_8</code> instead of preferred <code>utf-8</code>. 
(<a href="https://redirect.github.com/jawah/charset_normalizer/issues/572">#572</a>)</li> <li>Deprecation warning "'count' is passed as positional argument" when converting to Unicode bytes on Python 3.13+</li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/jawah/charset_normalizer/blob/master/CHANGELOG.md">charset-normalizer's changelog</a>.</em></p> <blockquote> <h2><a href="https://github.com/Ousret/charset_normalizer/compare/3.4.0...3.4.1">3.4.1</a> (2024-12-24)</h2> <h3>Changed</h3> <ul> <li>Project metadata are now stored using <code>pyproject.toml</code> instead of <code>setup.cfg</code> using setuptools as the build backend.</li> <li>Enforce annotation delayed loading for a simpler and consistent types in the project.</li> <li>Optional mypyc compilation upgraded to version 1.14 for Python >= 3.8</li> </ul> <h3>Added</h3> <ul> <li>pre-commit configuration.</li> <li>noxfile.</li> </ul> <h3>Removed</h3> <ul> <li><code>build-requirements.txt</code> as per using <code>pyproject.toml</code> native build configuration.</li> <li><code>bin/integration.py</code> and <code>bin/serve.py</code> in favor of downstream integration test (see noxfile).</li> <li><code>setup.cfg</code> in favor of <code>pyproject.toml</code> metadata configuration.</li> <li>Unused <code>utils.range_scan</code> function.</li> </ul> <h3>Fixed</h3> <ul> <li>Converting content to Unicode bytes may insert <code>utf_8</code> instead of preferred <code>utf-8</code>. 
(<a href="https://redirect.github.com/jawah/charset_normalizer/issues/572">#572</a>)</li> <li>Deprecation warning "'count' is passed as positional argument" when converting to Unicode bytes on Python 3.13+</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/jawah/charset_normalizer/commit/ffdf7f5f08beb0ceb92dc0637e97382ba27cecfa"><code>ffdf7f5</code></a> :wrench: fix long description content-type inferred as rst instead of md</li> <li><a href="https://github.com/jawah/charset_normalizer/commit/c7197b7b425835dd7abf028f45e6b533060886e3"><code>c7197b7</code></a> :pencil: fix changelog entries (<a href="https://redirect.github.com/jawah/charset_normalizer/issues/582">#582</a>)</li> <li><a href="https://github.com/jawah/charset_normalizer/commit/c390e1f231473f2766dd860dc70a1ee1ae5609e6"><code>c390e1f</code></a> Merge pull request <a href="https://redirect.github.com/jawah/charset_normalizer/issues/581">#581</a> from jawah/refresh-part-2</li> <li><a href="https://github.com/jawah/charset_normalizer/commit/f9d6b8cf32c36cbeefcd42f585bf57bfc39cee11"><code>f9d6b8c</code></a> :lock: add CODEOWNERS</li> <li><a href="https://github.com/jawah/charset_normalizer/commit/7ce1ef1de3148d18eb6a01448c9a15bf5324a9cf"><code>7ce1ef1</code></a> :wrench: use ubuntu-22.04 for cibuildwheel in continuous deployment workflow</li> <li><a href="https://github.com/jawah/charset_normalizer/commit/deed20577ba5358bb9624c17e6c8aa6ab26f6e08"><code>deed205</code></a> :wrench: update LICENSE copyright</li> <li><a href="https://github.com/jawah/charset_normalizer/commit/f11f5710799db58947a6fb61c20dbb75e57e3b5d"><code>f11f571</code></a> :wrench: include noxfile in sdist</li> <li><a href="https://github.com/jawah/charset_normalizer/commit/1ec7c0632f15324afd769208553bf603be5f917e"><code>1ec7c06</code></a> :wrench: update changelog</li> <li><a 
href="https://github.com/jawah/charset_normalizer/commit/14b4649fa24ee0d58e351c106011fb1bace4a9bc"><code>14b4649</code></a> :bug: output(...) replace declarative mark using non iana compliant encoding ...</li> <li><a href="https://github.com/jawah/charset_normalizer/commit/1b06bc0407dc0f47e9629cbc802977711d0ffc7b"><code>1b06bc0</code></a> Merge branch 'refresh-part-2' of github.com:jawah/charset_normalizer into ref...</li> <li>Additional commits viewable in <a href="https://github.com/jawah/charset_normalizer/compare/3.4.0...3.4.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=charset-normalizer&package-manager=pip&previous-version=3.4.0&new-version=3.4.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 187512f01cd..b328bbe7e6e 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -39,7 +39,7 @@ cffi==1.17.1 # pytest-codspeed cfgv==3.4.0 # via pre-commit -charset-normalizer==3.4.0 +charset-normalizer==3.4.1 # via requests cherry-picker==2.2.0 # via -r requirements/dev.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 993c6a3008a..5342971233f 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -39,7 +39,7 @@ cffi==1.17.1 # pytest-codspeed cfgv==3.4.0 # via pre-commit -charset-normalizer==3.4.0 +charset-normalizer==3.4.1 # via requests cherry-picker==2.2.0 # via -r requirements/dev.in diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 4b9688946a1..719b9b7e305 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -12,7 +12,7 @@ babel==2.16.0 # via sphinx 
certifi==2024.12.14 # via requests -charset-normalizer==3.4.0 +charset-normalizer==3.4.1 # via requests click==8.1.8 # via towncrier diff --git a/requirements/doc.txt b/requirements/doc.txt index 54b9969364c..4816b4ef1ee 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -12,7 +12,7 @@ babel==2.16.0 # via sphinx certifi==2024.12.14 # via requests -charset-normalizer==3.4.0 +charset-normalizer==3.4.1 # via requests click==8.1.8 # via towncrier From 64cc96ead9414096cd2eb8a1d65380df5a6f8fb3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 25 Dec 2024 12:25:45 +0000 Subject: [PATCH 1113/1511] Bump jinja2 from 3.1.4 to 3.1.5 (#10260) Bumps [jinja2](https://github.com/pallets/jinja) from 3.1.4 to 3.1.5. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pallets/jinja/releases">jinja2's releases</a>.</em></p> <blockquote> <h2>3.1.5</h2> <p>This is the Jinja 3.1.5 security fix release, which fixes security issues and bugs but does not otherwise change behavior and should not result in breaking changes compared to the latest feature release.</p> <p>PyPI: <a href="https://pypi.org/project/Jinja2/3.1.5/">https://pypi.org/project/Jinja2/3.1.5/</a> Changes: <a href="https://jinja.palletsprojects.com/changes/#version-3-1-5">https://jinja.palletsprojects.com/changes/#version-3-1-5</a> Milestone: <a href="https://github.com/pallets/jinja/milestone/16?closed=1">https://github.com/pallets/jinja/milestone/16?closed=1</a></p> <ul> <li>The sandboxed environment handles indirect calls to <code>str.format</code>, such as by passing a stored reference to a filter that calls its argument. <a href="https://github.com/pallets/jinja/security/advisories/GHSA-q2x7-8rv6-6q7h">GHSA-q2x7-8rv6-6q7h</a></li> <li>Escape template name before formatting it into error messages, to avoid issues with names that contain f-string syntax. 
<a href="https://redirect.github.com/pallets/jinja/issues/1792">#1792</a>, <a href="https://github.com/pallets/jinja/security/advisories/GHSA-gmj6-6f8f-6699">GHSA-gmj6-6f8f-6699</a></li> <li>Sandbox does not allow <code>clear</code> and <code>pop</code> on known mutable sequence types. <a href="https://redirect.github.com/pallets/jinja/issues/2032">#2032</a></li> <li>Calling sync <code>render</code> for an async template uses <code>asyncio.run</code>. <a href="https://redirect.github.com/pallets/jinja/issues/1952">#1952</a></li> <li>Avoid unclosed <code>auto_aiter</code> warnings. <a href="https://redirect.github.com/pallets/jinja/issues/1960">#1960</a></li> <li>Return an <code>aclose</code>-able <code>AsyncGenerator</code> from <code>Template.generate_async</code>. <a href="https://redirect.github.com/pallets/jinja/issues/1960">#1960</a></li> <li>Avoid leaving <code>root_render_func()</code> unclosed in <code>Template.generate_async</code>. <a href="https://redirect.github.com/pallets/jinja/issues/1960">#1960</a></li> <li>Avoid leaving async generators unclosed in blocks, includes and extends. <a href="https://redirect.github.com/pallets/jinja/issues/1960">#1960</a></li> <li>The runtime uses the correct <code>concat</code> function for the current environment when calling block references. <a href="https://redirect.github.com/pallets/jinja/issues/1701">#1701</a></li> <li>Make <code>|unique</code> async-aware, allowing it to be used after another async-aware filter. <a href="https://redirect.github.com/pallets/jinja/issues/1781">#1781</a></li> <li><code>|int</code> filter handles <code>OverflowError</code> from scientific notation. <a href="https://redirect.github.com/pallets/jinja/issues/1921">#1921</a></li> <li>Make compiling deterministic for tuple unpacking in a <code>{% set ... %}</code> call. 
<a href="https://redirect.github.com/pallets/jinja/issues/2021">#2021</a></li> <li>Fix dunder protocol (<code>copy</code>/<code>pickle</code>/etc) interaction with <code>Undefined</code> objects. <a href="https://redirect.github.com/pallets/jinja/issues/2025">#2025</a></li> <li>Fix <code>copy</code>/<code>pickle</code> support for the internal <code>missing</code> object. <a href="https://redirect.github.com/pallets/jinja/issues/2027">#2027</a></li> <li><code>Environment.overlay(enable_async)</code> is applied correctly. <a href="https://redirect.github.com/pallets/jinja/issues/2061">#2061</a></li> <li>The error message from <code>FileSystemLoader</code> includes the paths that were searched. <a href="https://redirect.github.com/pallets/jinja/issues/1661">#1661</a></li> <li><code>PackageLoader</code> shows a clearer error message when the package does not contain the templates directory. <a href="https://redirect.github.com/pallets/jinja/issues/1705">#1705</a></li> <li>Improve annotations for methods returning copies. <a href="https://redirect.github.com/pallets/jinja/issues/1880">#1880</a></li> <li><code>urlize</code> does not add <code>mailto:</code> to values like <code>@a@b</code>. <a href="https://redirect.github.com/pallets/jinja/issues/1870">#1870</a></li> <li>Tests decorated with <code>@pass_context</code> can be used with the <code>|select</code> filter. <a href="https://redirect.github.com/pallets/jinja/issues/1624">#1624</a></li> <li>Using <code>set</code> for multiple assignment (<code>a, b = 1, 2</code>) does not fail when the target is a namespace attribute. <a href="https://redirect.github.com/pallets/jinja/issues/1413">#1413</a></li> <li>Using <code>set</code> in all branches of <code>{% if %}{% elif %}{% else %}</code> blocks does not cause the variable to be considered initially undefined. 
<a href="https://redirect.github.com/pallets/jinja/issues/1253">#1253</a></li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pallets/jinja/blob/main/CHANGES.rst">jinja2's changelog</a>.</em></p> <blockquote> <h2>Version 3.1.5</h2> <p>Released 2024-12-21</p> <ul> <li>The sandboxed environment handles indirect calls to <code>str.format</code>, such as by passing a stored reference to a filter that calls its argument. :ghsa:<code>q2x7-8rv6-6q7h</code></li> <li>Escape template name before formatting it into error messages, to avoid issues with names that contain f-string syntax. :issue:<code>1792</code>, :ghsa:<code>gmj6-6f8f-6699</code></li> <li>Sandbox does not allow <code>clear</code> and <code>pop</code> on known mutable sequence types. :issue:<code>2032</code></li> <li>Calling sync <code>render</code> for an async template uses <code>asyncio.run</code>. :pr:<code>1952</code></li> <li>Avoid unclosed <code>auto_aiter</code> warnings. :pr:<code>1960</code></li> <li>Return an <code>aclose</code>-able <code>AsyncGenerator</code> from <code>Template.generate_async</code>. :pr:<code>1960</code></li> <li>Avoid leaving <code>root_render_func()</code> unclosed in <code>Template.generate_async</code>. :pr:<code>1960</code></li> <li>Avoid leaving async generators unclosed in blocks, includes and extends. :pr:<code>1960</code></li> <li>The runtime uses the correct <code>concat</code> function for the current environment when calling block references. :issue:<code>1701</code></li> <li>Make <code>|unique</code> async-aware, allowing it to be used after another async-aware filter. :issue:<code>1781</code></li> <li><code>|int</code> filter handles <code>OverflowError</code> from scientific notation. :issue:<code>1921</code></li> <li>Make compiling deterministic for tuple unpacking in a <code>{% set ... %}</code> call. 
:issue:<code>2021</code></li> <li>Fix dunder protocol (<code>copy</code>/<code>pickle</code>/etc) interaction with <code>Undefined</code> objects. :issue:<code>2025</code></li> <li>Fix <code>copy</code>/<code>pickle</code> support for the internal <code>missing</code> object. :issue:<code>2027</code></li> <li><code>Environment.overlay(enable_async)</code> is applied correctly. :pr:<code>2061</code></li> <li>The error message from <code>FileSystemLoader</code> includes the paths that were searched. :issue:<code>1661</code></li> <li><code>PackageLoader</code> shows a clearer error message when the package does not contain the templates directory. :issue:<code>1705</code></li> <li>Improve annotations for methods returning copies. :pr:<code>1880</code></li> <li><code>urlize</code> does not add <code>mailto:</code> to values like <code>@a@b</code>. :pr:<code>1870</code></li> <li>Tests decorated with <code>@pass_context`` can be used with the ``|select`` filter. :issue:</code>1624`</li> <li>Using <code>set</code> for multiple assignment (<code>a, b = 1, 2</code>) does not fail when the target is a namespace attribute. :issue:<code>1413</code></li> <li>Using <code>set</code> in all branches of <code>{% if %}{% elif %}{% else %}</code> blocks does not cause the variable to be considered initially undefined. 
:issue:<code>1253</code></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pallets/jinja/commit/877f6e51be8e1765b06d911cfaa9033775f051d1"><code>877f6e5</code></a> release version 3.1.5</li> <li><a href="https://github.com/pallets/jinja/commit/8d588592653b052f957b720e1fc93196e06f207f"><code>8d58859</code></a> remove test pypi</li> <li><a href="https://github.com/pallets/jinja/commit/eda8fe86fd716dfce24910294e9f1fc81fbc740c"><code>eda8fe8</code></a> update dev dependencies</li> <li><a href="https://github.com/pallets/jinja/commit/c8fdce1e0333f1122b244b03a48535fdd7b03d91"><code>c8fdce1</code></a> Fix bug involving calling set on a template parameter within all branches of ...</li> <li><a href="https://github.com/pallets/jinja/commit/66587ce989e5a478e0bb165371fa2b9d42b7040f"><code>66587ce</code></a> Fix bug where set would sometimes fail within if</li> <li><a href="https://github.com/pallets/jinja/commit/fbc3a696c729d177340cc089531de7e2e5b6f065"><code>fbc3a69</code></a> Add support for namespaces in tuple parsing (<a href="https://redirect.github.com/pallets/jinja/issues/1664">#1664</a>)</li> <li><a href="https://github.com/pallets/jinja/commit/b8f4831d41e6a7cb5c40d42f074ffd92d2daccfc"><code>b8f4831</code></a> more comments about nsref assignment</li> <li><a href="https://github.com/pallets/jinja/commit/ee832194cd9f55f75e5a51359b709d535efe957f"><code>ee83219</code></a> Add support for namespaces in tuple assignment</li> <li><a href="https://github.com/pallets/jinja/commit/1d55cddbb28e433779511f28f13a2d8c4ec45826"><code>1d55cdd</code></a> Triple quotes in docs (<a href="https://redirect.github.com/pallets/jinja/issues/2064">#2064</a>)</li> <li><a href="https://github.com/pallets/jinja/commit/8a8eafc6b992ba177f1d3dd483f8465f18a11116"><code>8a8eafc</code></a> edit block assignment section</li> <li>Additional commits viewable in <a href="https://github.com/pallets/jinja/compare/3.1.4...3.1.5">compare view</a></li> 
</ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=jinja2&package-manager=pip&previous-version=3.1.4&new-version=3.1.5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index b328bbe7e6e..6ff8ea359cc 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -95,7 +95,7 @@ incremental==24.7.2 # via towncrier iniconfig==2.0.0 # via pytest -jinja2==3.1.4 +jinja2==3.1.5 # via # sphinx # towncrier diff --git a/requirements/dev.txt b/requirements/dev.txt index 5342971233f..9577e0925d1 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -93,7 +93,7 @@ incremental==24.7.2 # via towncrier iniconfig==2.0.0 # via pytest -jinja2==3.1.4 +jinja2==3.1.5 # via # sphinx # towncrier diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 719b9b7e305..9292e082fa0 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -24,7 +24,7 @@ imagesize==1.4.1 # via sphinx incremental==24.7.2 # via towncrier -jinja2==3.1.4 +jinja2==3.1.5 # via # sphinx # towncrier diff --git a/requirements/doc.txt 
b/requirements/doc.txt index 4816b4ef1ee..625c45c3d66 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -24,7 +24,7 @@ imagesize==1.4.1 # via sphinx incremental==24.7.2 # via towncrier -jinja2==3.1.4 +jinja2==3.1.5 # via # sphinx # towncrier From 40b7a9cc0da156b20ee7ffa22e39b3b3d714fecf Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 25 Dec 2024 12:25:49 +0000 Subject: [PATCH 1114/1511] Bump aiosignal from 1.3.1 to 1.3.2 (#10259) Bumps [aiosignal](https://github.com/aio-libs/aiosignal) from 1.3.1 to 1.3.2. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/aiosignal/releases">aiosignal's releases</a>.</em></p> <blockquote> <h2>1.3.2</h2> <h2>Deprecations and Removals</h2> <ul> <li> <p>Dropped Python 3.7 support. (<a href="https://redirect.github.com/aio-libs/aiosignal/issues/413">#413</a>)</p> </li> <li> <p>Dropped Python 3.8 support. (<a href="https://redirect.github.com/aio-libs/aiosignal/issues/645">#645</a>)</p> </li> </ul> <h2>Misc</h2> <ul> <li>(<a href="https://redirect.github.com/aio-libs/aiosignal/issues/362">#362</a>)</li> </ul> <hr /> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/aiosignal/blob/master/CHANGES.rst">aiosignal's changelog</a>.</em></p> <blockquote> <h1>1.3.2 (2024-12-13)</h1> <h2>Deprecations and Removals</h2> <ul> <li> <p>Dropped Python 3.7 support. <code>[#413](https://github.com/aio-libs/aiosignal/issues/413) <https://github.com/aio-libs/aiosignal/issues/413></code>_</p> </li> <li> <p>Dropped Python 3.8 support. 
<code>[#645](https://github.com/aio-libs/aiosignal/issues/645) <https://github.com/aio-libs/aiosignal/issues/645></code>_</p> </li> </ul> <h2>Misc</h2> <ul> <li><code>[#362](https://github.com/aio-libs/aiosignal/issues/362) <https://github.com/aio-libs/aiosignal/issues/362></code>_</li> </ul> <hr /> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/aiosignal/commit/f90f6d41891d9982b90ef2d2eddd9b3e41a63526"><code>f90f6d4</code></a> Fix deployment (<a href="https://redirect.github.com/aio-libs/aiosignal/issues/649">#649</a>)</li> <li><a href="https://github.com/aio-libs/aiosignal/commit/74015c6edb46a3b43eeb97e663357e2b3e3d66de"><code>74015c6</code></a> Release 1.3.2 (<a href="https://redirect.github.com/aio-libs/aiosignal/issues/648">#648</a>)</li> <li><a href="https://github.com/aio-libs/aiosignal/commit/76a4814cb6d5e8e6426425001a53fafd8571ae39"><code>76a4814</code></a> Setup deployment</li> <li><a href="https://github.com/aio-libs/aiosignal/commit/29d792ebbd464bb2e061cd46a53cfee850a4cd4e"><code>29d792e</code></a> Build(deps): Bump sphinx from 7.1.2 to 8.1.3 (<a href="https://redirect.github.com/aio-libs/aiosignal/issues/624">#624</a>)</li> <li><a href="https://github.com/aio-libs/aiosignal/commit/0172922351ddf5fb24c6a008b50957d695de484c"><code>0172922</code></a> Tune CI (<a href="https://redirect.github.com/aio-libs/aiosignal/issues/647">#647</a>)</li> <li><a href="https://github.com/aio-libs/aiosignal/commit/65cfb0d61e1fd06e43f81f4b8bdf4b83239b79b0"><code>65cfb0d</code></a> Build(deps): Bump pytest-cov from 5.0.0 to 6.0.0 (<a href="https://redirect.github.com/aio-libs/aiosignal/issues/635">#635</a>)</li> <li><a href="https://github.com/aio-libs/aiosignal/commit/d28799e74c830fded6c108bc221019651511ec59"><code>d28799e</code></a> Build(deps-dev): Bump cherry-picker from 2.2.0 to 2.4.0 (<a href="https://redirect.github.com/aio-libs/aiosignal/issues/638">#638</a>)</li> <li><a 
href="https://github.com/aio-libs/aiosignal/commit/6c3af5c0b626782d310f5564a17e9af08603557e"><code>6c3af5c</code></a> Update pre-commit hooks (<a href="https://redirect.github.com/aio-libs/aiosignal/issues/646">#646</a>)</li> <li><a href="https://github.com/aio-libs/aiosignal/commit/6a463593d7e756fbec14518fd7e6fa6d9d3a3a3b"><code>6a46359</code></a> Build(deps): Bump pytest-asyncio from 0.24.0 to 0.25.0 (<a href="https://redirect.github.com/aio-libs/aiosignal/issues/644">#644</a>)</li> <li><a href="https://github.com/aio-libs/aiosignal/commit/aece0ec6b9e416a4479ee46a2a3fd0225db92901"><code>aece0ec</code></a> Build(deps): Bump pre-commit from 3.5.0 to 4.0.1 (<a href="https://redirect.github.com/aio-libs/aiosignal/issues/621">#621</a>)</li> <li>Additional commits viewable in <a href="https://github.com/aio-libs/aiosignal/compare/v1.3.1...v1.3.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=aiosignal&package-manager=pip&previous-version=1.3.1&new-version=1.3.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index e7b278cdcdf..cea8fc91659 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or 
sys_platform == "darwin" # via -r requirements/runtime-deps.in aiohappyeyeballs==2.4.4 # via -r requirements/runtime-deps.in -aiosignal==1.3.1 +aiosignal==1.3.2 # via -r requirements/runtime-deps.in async-timeout==4.0.3 ; python_version < "3.11" # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 6ff8ea359cc..7a962f1520f 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -12,7 +12,7 @@ aiohappyeyeballs==2.4.4 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.7 # via -r requirements/doc.in -aiosignal==1.3.1 +aiosignal==1.3.2 # via -r requirements/runtime-deps.in alabaster==1.0.0 # via sphinx diff --git a/requirements/dev.txt b/requirements/dev.txt index 9577e0925d1..c9ef1602ba5 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -12,7 +12,7 @@ aiohappyeyeballs==2.4.4 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.7 # via -r requirements/doc.in -aiosignal==1.3.1 +aiosignal==1.3.2 # via -r requirements/runtime-deps.in alabaster==1.0.0 # via sphinx diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index ca182b86a16..10d6c122729 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in aiohappyeyeballs==2.4.4 # via -r requirements/runtime-deps.in -aiosignal==1.3.1 +aiosignal==1.3.2 # via -r requirements/runtime-deps.in async-timeout==4.0.3 ; python_version < "3.11" # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index d0c09fc0ea1..e8c77c05114 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in aiohappyeyeballs==2.4.4 # via -r requirements/runtime-deps.in -aiosignal==1.3.1 +aiosignal==1.3.2 # via -r 
requirements/runtime-deps.in annotated-types==0.7.0 # via pydantic From 8f22e58f250641da940a27fc4b861fc39dadb100 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 25 Dec 2024 12:25:51 +0000 Subject: [PATCH 1115/1511] Bump attrs from 24.2.0 to 24.3.0 (#10261) Bumps [attrs](https://github.com/sponsors/hynek) from 24.2.0 to 24.3.0. <details> <summary>Commits</summary> <ul> <li>See full diff in <a href="https://github.com/sponsors/hynek/commits">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=attrs&package-manager=pip&previous-version=24.2.0&new-version=24.3.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index cea8fc91659..ce4157c5917 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -12,7 +12,7 @@ aiosignal==1.3.2 # via -r requirements/runtime-deps.in async-timeout==4.0.3 ; python_version < "3.11" # via -r requirements/runtime-deps.in -attrs==24.2.0 +attrs==24.3.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 7a962f1520f..b58c0a13df8 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -22,7 +22,7 @@ async-timeout==4.0.3 ; python_version < "3.11" # via # -r requirements/runtime-deps.in # valkey -attrs==24.2.0 +attrs==24.3.0 # via -r requirements/runtime-deps.in babel==2.16.0 # via sphinx diff --git a/requirements/dev.txt b/requirements/dev.txt index 
c9ef1602ba5..086a5464aa6 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -22,7 +22,7 @@ async-timeout==4.0.3 ; python_version < "3.11" # via # -r requirements/runtime-deps.in # valkey -attrs==24.2.0 +attrs==24.3.0 # via -r requirements/runtime-deps.in babel==2.16.0 # via sphinx diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 10d6c122729..a790e54b1ed 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -12,7 +12,7 @@ aiosignal==1.3.2 # via -r requirements/runtime-deps.in async-timeout==4.0.3 ; python_version < "3.11" # via -r requirements/runtime-deps.in -attrs==24.2.0 +attrs==24.3.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index e8c77c05114..6adaab1671b 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -14,7 +14,7 @@ annotated-types==0.7.0 # via pydantic async-timeout==4.0.3 ; python_version < "3.11" # via -r requirements/runtime-deps.in -attrs==24.2.0 +attrs==24.3.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in From 1772493da140d5cdcb0463e180be8654d03c68d6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 25 Dec 2024 12:40:54 +0000 Subject: [PATCH 1116/1511] Bump cherry-picker from 2.2.0 to 2.4.0 (#10256) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [cherry-picker](https://github.com/python/cherry-picker) from 2.2.0 to 2.4.0. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/python/cherry-picker/releases">cherry-picker's releases</a>.</em></p> <blockquote> <h2>cherry-picker-v2.4.0</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>Add support for Python 3.14 by <a href="https://github.com/hugovk"><code>@​hugovk</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/145">python/cherry-picker#145</a></li> <li>Allow passing a base branch that doesn't have version info by <a href="https://github.com/Jackenmen"><code>@​Jackenmen</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/70">python/cherry-picker#70</a></li> <li>Generate <code>__version__</code> at build to avoid slow <code>importlib.metadata</code> import by <a href="https://github.com/hugovk"><code>@​hugovk</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/141">python/cherry-picker#141</a></li> <li>PyPI metatada: re-add description by <a href="https://github.com/hugovk"><code>@​hugovk</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/140">python/cherry-picker#140</a></li> <li>Remove outdated README info by <a href="https://github.com/hugovk"><code>@​hugovk</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/143">python/cherry-picker#143</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/python/cherry-picker/compare/cherry-picker-v2.3.0...cherry-picker-v2.4.0">https://github.com/python/cherry-picker/compare/cherry-picker-v2.3.0...cherry-picker-v2.4.0</a></p> <h2>cherry-picker v2.3.0</h2> <h2>What's Changed</h2> <ul> <li>Add Python 3.13 to the testing matrix by <a href="https://github.com/ezio-melotti"><code>@​ezio-melotti</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/127">python/cherry-picker#127</a> <ul> <li>Add Trove classifier for Python 3.13 by <a 
href="https://github.com/hugovk"><code>@​hugovk</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/134">python/cherry-picker#134</a></li> </ul> </li> <li>Drop support for Python 3.8 by <a href="https://github.com/hugovk"><code>@​hugovk</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/133">python/cherry-picker#133</a> <ul> <li>Update minimum Python version to 3.9 in the configuration by <a href="https://github.com/OlenaYefymenko"><code>@​OlenaYefymenko</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/137">python/cherry-picker#137</a></li> </ul> </li> <li>Resolve usernames when the remote ends with a trailing slash by <a href="https://github.com/AA-Turner"><code>@​AA-Turner</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/110">python/cherry-picker#110</a></li> <li>Optimize <code>validate_sha()</code> with <code>--max-count=1</code> by <a href="https://github.com/aloisklink"><code>@​aloisklink</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/111">python/cherry-picker#111</a></li> <li>Make # replacing more strict by <a href="https://github.com/serhiy-storchaka"><code>@​serhiy-storchaka</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/115">python/cherry-picker#115</a></li> <li>Remove multiple commit prefixes by <a href="https://github.com/serhiy-storchaka"><code>@​serhiy-storchaka</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/118">python/cherry-picker#118</a></li> <li>Handle whitespace when calculating usernames by <a href="https://github.com/AA-Turner"><code>@​AA-Turner</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/132">python/cherry-picker#132</a></li> <li>Generate digital attestations for PyPI (PEP 740) by <a href="https://github.com/hugovk"><code>@​hugovk</code></a> in <a 
href="https://redirect.github.com/python/cherry-picker/pull/135">python/cherry-picker#135</a></li> <li>Publish to PyPI using Trusted Publishers by <a href="https://github.com/hugovk"><code>@​hugovk</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/94">python/cherry-picker#94</a></li> <li>Fetch tags so hatch-vcs can set the version number by <a href="https://github.com/hugovk"><code>@​hugovk</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/95">python/cherry-picker#95</a></li> <li>Add release checklist by <a href="https://github.com/hugovk"><code>@​hugovk</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/98">python/cherry-picker#98</a> <ul> <li>Fix Markdown links by <a href="https://github.com/hugovk"><code>@​hugovk</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/101">python/cherry-picker#101</a></li> </ul> </li> <li>Move changelog to own file by <a href="https://github.com/hugovk"><code>@​hugovk</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/104">python/cherry-picker#104</a></li> <li>Add GHA ecosystem to <code>dependabot.yml</code>. 
by <a href="https://github.com/ezio-melotti"><code>@​ezio-melotti</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/103">python/cherry-picker#103</a></li> <li>Fix CI: ignore CVE-2023-5752 by <a href="https://github.com/hugovk"><code>@​hugovk</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/102">python/cherry-picker#102</a> <ul> <li>Revert <a href="https://redirect.github.com/python/cherry-picker/issues/102">#102</a> after upstream fix by <a href="https://github.com/ezio-melotti"><code>@​ezio-melotti</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/126">python/cherry-picker#126</a></li> </ul> </li> <li>Lint on GitHub Actions via pre-commit by <a href="https://github.com/hugovk"><code>@​hugovk</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/93">python/cherry-picker#93</a></li> <li>Convert README to Markdown by <a href="https://github.com/hugovk"><code>@​hugovk</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/125">python/cherry-picker#125</a></li> <li>Ignore Jinja2 CVE warning in <code>safety</code> dep by <a href="https://github.com/ezio-melotti"><code>@​ezio-melotti</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/129">python/cherry-picker#129</a></li> <li>Remove <code>setuptools</code> installation for <code>safety</code> 3 by <a href="https://github.com/ezio-melotti"><code>@​ezio-melotti</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/128">python/cherry-picker#128</a></li> <li>Update actions/download-artifact to v4 in deploy.yml. 
by <a href="https://github.com/ezio-melotti"><code>@​ezio-melotti</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/131">python/cherry-picker#131</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/ezio-melotti"><code>@​ezio-melotti</code></a> made their first contribution in <a href="https://redirect.github.com/python/cherry-picker/pull/103">python/cherry-picker#103</a></li> <li><a href="https://github.com/AA-Turner"><code>@​AA-Turner</code></a> made their first contribution in <a href="https://redirect.github.com/python/cherry-picker/pull/110">python/cherry-picker#110</a></li> <li><a href="https://github.com/aloisklink"><code>@​aloisklink</code></a> made their first contribution in <a href="https://redirect.github.com/python/cherry-picker/pull/111">python/cherry-picker#111</a></li> <li><a href="https://github.com/serhiy-storchaka"><code>@​serhiy-storchaka</code></a> made their first contribution in <a href="https://redirect.github.com/python/cherry-picker/pull/115">python/cherry-picker#115</a></li> <li><a href="https://github.com/OlenaYefymenko"><code>@​OlenaYefymenko</code></a> made their first contribution in <a href="https://redirect.github.com/python/cherry-picker/pull/137">python/cherry-picker#137</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/python/cherry-picker/compare/cherry-picker-v2.0.0...cherry-picker-v2.3.0">https://github.com/python/cherry-picker/compare/cherry-picker-v2.0.0...cherry-picker-v2.3.0</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/python/cherry-picker/blob/main/CHANGELOG.md">cherry-picker's changelog</a>.</em></p> <blockquote> <h2>2.4.0</h2> <ul> <li>Add support for Python 3.14 (<a href="https://redirect.github.com/python/cherry-picker/pull/145">PR 145</a></li> <li>Allow passing a base branch that doesn't have version info (<a 
href="https://redirect.github.com/python/cherry-picker/pull/70">PR 70</a> <ul> <li>This makes cherry-picker useful for projects other than CPython that don't have versioned branch names.</li> </ul> </li> </ul> <h2>2.3.0</h2> <ul> <li>Add support for Python 3.13 (<a href="https://redirect.github.com/python/cherry-picker/pull/127">PR 127</a>, <a href="https://redirect.github.com/python/cherry-picker/pull/134">PR 134</a>)</li> <li>Drop support for EOL Python 3.8 (<a href="https://redirect.github.com/python/cherry-picker/pull/133">PR 133</a>, <a href="https://redirect.github.com/python/cherry-picker/pull/137">PR 137</a>)</li> <li>Resolve usernames when the remote ends with a trailing slash (<a href="https://redirect.github.com/python/cherry-picker/pull/110">PR 110</a>)</li> <li>Optimize <code>validate_sha()</code> with <code>--max-count=1</code> (<a href="https://redirect.github.com/python/cherry-picker/pull/111">PR 111</a>)</li> <li>Make # replacing more strict (<a href="https://redirect.github.com/python/cherry-picker/pull/115">PR 115</a>)</li> <li>Remove multiple commit prefixes (<a href="https://redirect.github.com/python/cherry-picker/pull/118">PR 118</a>)</li> <li>Handle whitespace when calculating usernames (<a href="https://redirect.github.com/python/cherry-picker/pull/132">PR 132</a>)</li> <li>Publish to PyPI using Trusted Publishers (<a href="https://redirect.github.com/python/cherry-picker/pull/94">PR 94</a>)</li> <li>Generate digital attestations for PyPI (<a href="https://peps.python.org/pep-0740/">PEP 740</a>) (<a href="https://redirect.github.com/python/cherry-picker/pull/135">PR 135</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/python/cherry-picker/commit/34d27ebfaec3cdacd8668f63bdfebb43bcf58d65"><code>34d27eb</code></a> Update changelog for 2.4.0 (<a href="https://redirect.github.com/python/cherry-picker/issues/146">#146</a>)</li> <li><a 
href="https://github.com/python/cherry-picker/commit/c81925c40294caef500dcab322777eb6e736055f"><code>c81925c</code></a> Add support for Python 3.14 (<a href="https://redirect.github.com/python/cherry-picker/issues/145">#145</a>)</li> <li><a href="https://github.com/python/cherry-picker/commit/940787a84e15a622e56ca61a3b6c7c81a86e6e17"><code>940787a</code></a> Allow passing a base branch that doesn't have version info (<a href="https://redirect.github.com/python/cherry-picker/issues/70">#70</a>)</li> <li><a href="https://github.com/python/cherry-picker/commit/a1552fb35a073b0789bf4f0efb1714f302953c3a"><code>a1552fb</code></a> Remove outdated README info (<a href="https://redirect.github.com/python/cherry-picker/issues/143">#143</a>)</li> <li><a href="https://github.com/python/cherry-picker/commit/7721133ef148db8a5d6787fd1920a80587d2607b"><code>7721133</code></a> PyPI metatada: re-add description (<a href="https://redirect.github.com/python/cherry-picker/issues/140">#140</a>)</li> <li><a href="https://github.com/python/cherry-picker/commit/a7d1d0de9d5bcf1461aaae45c28ea8ffa6dfc949"><code>a7d1d0d</code></a> Generate <code>__version__</code> at build to avoid slow <code>importlib.metadata</code> import (<a href="https://redirect.github.com/python/cherry-picker/issues/141">#141</a>)</li> <li><a href="https://github.com/python/cherry-picker/commit/71490d0ded27a1dabfacc11ad763fa636e470819"><code>71490d0</code></a> Exclude bots from generated release notes (<a href="https://redirect.github.com/python/cherry-picker/issues/139">#139</a>)</li> <li><a href="https://github.com/python/cherry-picker/commit/52565cb447f0416a0cc51522a5bea64ec396e323"><code>52565cb</code></a> Update changelog for 2.3.0 (<a href="https://redirect.github.com/python/cherry-picker/issues/138">#138</a>)</li> <li><a href="https://github.com/python/cherry-picker/commit/7fdaa0846ec6fca37f7778109778dbdb3b66d68c"><code>7fdaa08</code></a> Update minimum Python version to 3.9 in the configuration (<a 
href="https://redirect.github.com/python/cherry-picker/issues/137">#137</a>)</li> <li><a href="https://github.com/python/cherry-picker/commit/e2540afb076226e667b832c7c4b5d4669e0fd1af"><code>e2540af</code></a> Generate digital attestations for PyPI (PEP 740) (<a href="https://redirect.github.com/python/cherry-picker/issues/135">#135</a>)</li> <li>Additional commits viewable in <a href="https://github.com/python/cherry-picker/compare/cherry-picker-v2.2.0...cherry-picker-v2.4.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=cherry-picker&package-manager=pip&previous-version=2.2.0&new-version=2.4.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index b58c0a13df8..ab6808c3b3a 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -41,7 +41,7 @@ cfgv==3.4.0 # via pre-commit charset-normalizer==3.4.1 # via requests -cherry-picker==2.2.0 +cherry-picker==2.4.0 # via -r requirements/dev.in click==8.1.8 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 086a5464aa6..a68749a7247 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -41,7 +41,7 @@ cfgv==3.4.0 # via pre-commit charset-normalizer==3.4.1 # via requests -cherry-picker==2.2.0 +cherry-picker==2.4.0 # via -r requirements/dev.in click==8.1.8 # via From 08dc3eaa760264badff5accf37ea32b2b8395f1a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 26 Dec 2024 11:07:54 +0000 Subject: [PATCH 1117/1511] Bump pydantic from 2.9.2 to 2.10.4 (#10262) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 
Bumps [pydantic](https://github.com/pydantic/pydantic) from 2.9.2 to 2.10.4. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pydantic/pydantic/releases">pydantic's releases</a>.</em></p> <blockquote> <h2>v2.10.4 2024-12-18</h2> <h2>What's Changed</h2> <h3>Packaging</h3> <ul> <li>Bump <code>pydantic-core</code> to v2.27.2 by <a href="https://github.com/davidhewitt"><code>@​davidhewitt</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11138">#11138</a></li> </ul> <h3>Fixes</h3> <ul> <li>Fix for comparison of <code>AnyUrl</code> objects by <a href="https://github.com/alexprabhat99"><code>@​alexprabhat99</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11082">#11082</a></li> <li>Properly fetch PEP 695 type params for functions, do not fetch annotations from signature by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11093">#11093</a></li> <li>Include JSON Schema input core schema in function schemas by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11085">#11085</a></li> <li>Add <code>len</code> to <code>_BaseUrl</code> to avoid TypeError by <a href="https://github.com/Kharianne"><code>@​Kharianne</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11111">#11111</a></li> <li>Make sure the type reference is removed from the seen references by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11143">#11143</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/alexprabhat99"><code>@​alexprabhat99</code></a> made their first contribution in <a href="https://redirect.github.com/pydantic/pydantic/pull/11082">#11082</a></li> <li><a href="https://github.com/Kharianne"><code>@​Kharianne</code></a> made their first 
contribution in <a href="https://redirect.github.com/pydantic/pydantic/pull/11111">#11111</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pydantic/pydantic/compare/v2.10.3...v2.10.4">https://github.com/pydantic/pydantic/compare/v2.10.3...v2.10.4</a></p> <h2>v2.10.3 2024-12-03</h2> <h2>What's Changed</h2> <h3>Fixes</h3> <ul> <li>Set fields when <code>defer_build</code> is set on Pydantic dataclasses by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10984">#10984</a></li> <li>Do not resolve the JSON Schema reference for <code>dict</code> core schema keys by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10989">#10989</a></li> <li>Use the globals of the function when evaluating the return type for <code>PlainSerializer</code> and <code>WrapSerializer</code> functions by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11008">#11008</a></li> <li>Fix host required enforcement for urls to be compatible with v2.9 behavior by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11027">#11027</a></li> <li>Add a <code>default_factory_takes_validated_data</code> property to <code>FieldInfo</code> by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11034">#11034</a></li> <li>Fix url json schema in <code>serialization</code> mode by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11035">#11035</a></li> </ul> <p><strong>Full Changelog</strong>: <a 
href="https://github.com/pydantic/pydantic/compare/v2.10.2...v2.10.3">https://github.com/pydantic/pydantic/compare/v2.10.2...v2.10.3</a></p> <h2>v2.10.2 2024-11-26</h2> <h2>What's Changed</h2> <h3>Fixes</h3> <ul> <li>Only evaluate <code>FieldInfo</code> annotations if required during schema building by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10769">#10769</a></li> <li>Do not evaluate annotations for private fields by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10962">#10962</a></li> <li>Support serialization as any for <code>Secret</code> types and <code>Url</code> types by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10947">#10947</a></li> <li>Fix type hint of <code>Field.default</code> to be compatible with Python 3.8 and 3.9 by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10972">#10972</a></li> <li>Add hashing support for URL types by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10975">#10975</a></li> <li>Hide <code>BaseModel.__replace__</code> definition from type checkers by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10979">10979</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pydantic/pydantic/compare/v2.10.1...v2.10.2">https://github.com/pydantic/pydantic/compare/v2.10.1...v2.10.2</a></p> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pydantic/pydantic/blob/main/HISTORY.md">pydantic's changelog</a>.</em></p> <blockquote> <h2>v2.10.4 (2024-12-18)</h2> <p><a href="https://github.com/pydantic/pydantic/releases/tag/v2.10.4">GitHub release</a></p> <h3>What's Changed</h3> <h4>Packaging</h4> <ul> <li>Bump <code>pydantic-core</code> to v2.27.2 by <a href="https://github.com/davidhewitt"><code>@​davidhewitt</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11138">#11138</a></li> </ul> <h4>Fixes</h4> <ul> <li>Fix for comparison of <code>AnyUrl</code> objects by <a href="https://github.com/alexprabhat99"><code>@​alexprabhat99</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11082">#11082</a></li> <li>Properly fetch PEP 695 type params for functions, do not fetch annotations from signature by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11093">#11093</a></li> <li>Include JSON Schema input core schema in function schemas by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11085">#11085</a></li> <li>Add <code>len</code> to <code>_BaseUrl</code> to avoid TypeError by <a href="https://github.com/Kharianne"><code>@​Kharianne</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11111">#11111</a></li> <li>Make sure the type reference is removed from the seen references by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11143">#11143</a></li> </ul> <h3>New Contributors</h3> <ul> <li><a href="https://github.com/FyZzyss"><code>@​FyZzyss</code></a> made their first contribution in <a href="https://redirect.github.com/pydantic/pydantic/pull/10789">#10789</a></li> <li><a 
href="https://github.com/tamird"><code>@​tamird</code></a> made their first contribution in <a href="https://redirect.github.com/pydantic/pydantic/pull/10948">#10948</a></li> <li><a href="https://github.com/felixxm"><code>@​felixxm</code></a> made their first contribution in <a href="https://redirect.github.com/pydantic/pydantic/pull/11077">#11077</a></li> <li><a href="https://github.com/alexprabhat99"><code>@​alexprabhat99</code></a> made their first contribution in <a href="https://redirect.github.com/pydantic/pydantic/pull/11082">#11082</a></li> <li><a href="https://github.com/Kharianne"><code>@​Kharianne</code></a> made their first contribution in <a href="https://redirect.github.com/pydantic/pydantic/pull/11111">#11111</a></li> </ul> <h2>v2.10.3 (2024-12-03)</h2> <p><a href="https://github.com/pydantic/pydantic/releases/tag/v2.10.3">GitHub release</a></p> <h3>What's Changed</h3> <h4>Fixes</h4> <ul> <li>Set fields when <code>defer_build</code> is set on Pydantic dataclasses by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10984">#10984</a></li> <li>Do not resolve the JSON Schema reference for <code>dict</code> core schema keys by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10989">#10989</a></li> <li>Use the globals of the function when evaluating the return type for <code>PlainSerializer</code> and <code>WrapSerializer</code> functions by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11008">#11008</a></li> <li>Fix host required enforcement for urls to be compatible with v2.9 behavior by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11027">#11027</a></li> <li>Add a <code>default_factory_takes_validated_data</code> property to 
<code>FieldInfo</code> by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11034">#11034</a></li> <li>Fix url json schema in <code>serialization</code> mode by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11035">#11035</a></li> </ul> <h2>v2.10.2 (2024-11-25)</h2> <p><a href="https://github.com/pydantic/pydantic/releases/tag/v2.10.2">GitHub release</a></p> <h3>What's Changed</h3> <h4>Fixes</h4> <ul> <li>Only evaluate FieldInfo annotations if required during schema building by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10769">#10769</a></li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pydantic/pydantic/commit/5bd3a6507b749fcd4833173fba88b3690ff77170"><code>5bd3a65</code></a> fix history.md</li> <li><a href="https://github.com/pydantic/pydantic/commit/46f094569a071a99b313ec21b36568ceb1615635"><code>46f0945</code></a> Prepare for v2.10.4 (<a href="https://redirect.github.com/pydantic/pydantic/issues/11144">#11144</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/ea69e695f27fc8d93934bd07b262189dd7987dd9"><code>ea69e69</code></a> Make sure the type reference is removed from the seen references (<a href="https://redirect.github.com/pydantic/pydantic/issues/11145">#11145</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/a07c31e4a49bd3a01485ed0aabf55c5e0ac83ca7"><code>a07c31e</code></a> Include JSON Schema input core schema in function schemas (<a href="https://redirect.github.com/pydantic/pydantic/issues/11142">#11142</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/9166d551639c464d0a2ad772a7137e9218eda847"><code>9166d55</code></a> Update <code>WithJsonSchema</code> 
documentation, add usage documentation for `json_sche...</li> <li><a href="https://github.com/pydantic/pydantic/commit/572f57de01e6dd75673b4ad8e43afd7d56f70a0f"><code>572f57d</code></a> Rewrite validators documentation (<a href="https://redirect.github.com/pydantic/pydantic/issues/11060">#11060</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/9faa8d9cbd3258b9b0f01bd39da3552b03ed8c81"><code>9faa8d9</code></a> Fix for comaparison of AnyUrl objects (<a href="https://redirect.github.com/pydantic/pydantic/issues/11082">#11082</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/6fd6a03ab836025cfe62f988b6e9e797726f2a05"><code>6fd6a03</code></a> Fix package description limit bound (<a href="https://redirect.github.com/pydantic/pydantic/issues/11140">#11140</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/7814fde8a3d1c3e3c5e1357f2b24c06a0a715fee"><code>7814fde</code></a> Add <code>len</code> to <code>_BaseUrl</code> to avoid TypeError (<a href="https://redirect.github.com/pydantic/pydantic/issues/11111">#11111</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/e036881a772922de9ed952e722f84c46a9da693b"><code>e036881</code></a> Bump <code>pydantic-core</code> to 2.27.2 (<a href="https://redirect.github.com/pydantic/pydantic/issues/11138">#11138</a>)</li> <li>Additional commits viewable in <a href="https://github.com/pydantic/pydantic/compare/v2.9.2...v2.10.4">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pydantic&package-manager=pip&previous-version=2.9.2&new-version=2.10.4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 4 ++-- requirements/dev.txt | 4 ++-- requirements/lint.txt | 4 ++-- requirements/test.txt | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index ab6808c3b3a..5bc55be769e 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -142,9 +142,9 @@ pycares==4.5.0 # via aiodns 
pycparser==2.22 # via cffi -pydantic==2.9.2 +pydantic==2.10.4 # via python-on-whales -pydantic-core==2.23.4 +pydantic-core==2.27.2 # via pydantic pyenchant==3.2.2 # via sphinxcontrib-spelling diff --git a/requirements/dev.txt b/requirements/dev.txt index a68749a7247..c280c173d61 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -139,9 +139,9 @@ pycares==4.5.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.9.2 +pydantic==2.10.4 # via python-on-whales -pydantic-core==2.23.4 +pydantic-core==2.27.2 # via pydantic pygments==2.18.0 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index 0d518445a09..c5050b9df82 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -57,9 +57,9 @@ pycares==4.5.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.9.2 +pydantic==2.10.4 # via python-on-whales -pydantic-core==2.23.4 +pydantic-core==2.27.2 # via pydantic pygments==2.18.0 # via rich diff --git a/requirements/test.txt b/requirements/test.txt index 6adaab1671b..300fc869dfa 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -77,9 +77,9 @@ pycares==4.5.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.9.2 +pydantic==2.10.4 # via python-on-whales -pydantic-core==2.23.4 +pydantic-core==2.27.2 # via pydantic pygments==2.18.0 # via rich From 6c323e11746f336dad3e4de575a9ee213e4e7bb0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 26 Dec 2024 11:20:18 +0000 Subject: [PATCH 1118/1511] Bump wait-for-it from 2.2.2 to 2.3.0 (#10263) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [wait-for-it](https://github.com/hartwork/wait-for-it) from 2.2.2 to 2.3.0. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/hartwork/wait-for-it/releases">wait-for-it's releases</a>.</em></p> <blockquote> <h2>2.3.0</h2> <h2>What's Changed</h2> <ul> <li>Fix Sphinx build + cover by CI by <a href="https://github.com/hartwork"><code>@​hartwork</code></a> in <a href="https://redirect.github.com/hartwork/wait-for-it/pull/1">hartwork/wait-for-it#1</a></li> <li><code>docs/source/index.md</code>: Get terminal output back in sync by <a href="https://github.com/hartwork"><code>@​hartwork</code></a> in <a href="https://redirect.github.com/hartwork/wait-for-it/pull/2">hartwork/wait-for-it#2</a></li> <li>Apply new project home by <a href="https://github.com/hartwork"><code>@​hartwork</code></a> in <a href="https://redirect.github.com/hartwork/wait-for-it/pull/3">hartwork/wait-for-it#3</a></li> <li><code>.readthedocs.yml</code>: First try at fixing the Read the Docs build by <a href="https://github.com/hartwork"><code>@​hartwork</code></a> in <a href="https://redirect.github.com/hartwork/wait-for-it/pull/4">hartwork/wait-for-it#4</a></li> <li>Drop end-of-life Python 3.8 and add Python 3.13 by <a href="https://github.com/hartwork"><code>@​hartwork</code></a> in <a href="https://redirect.github.com/hartwork/wait-for-it/pull/5">hartwork/wait-for-it#5</a></li> <li>Prepare release 2.3.0 by <a href="https://github.com/hartwork"><code>@​hartwork</code></a> in <a href="https://redirect.github.com/hartwork/wait-for-it/pull/6">hartwork/wait-for-it#6</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/hartwork/wait-for-it/compare/v2.2.2...v2.3.0">https://github.com/hartwork/wait-for-it/compare/v2.2.2...v2.3.0</a></p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/hartwork/wait-for-it/commit/6ba15adecd9d597024d72e8bfdb866741bd9cc40"><code>6ba15ad</code></a> Merge pull request <a 
href="https://redirect.github.com/hartwork/wait-for-it/issues/6">#6</a> from hartwork/prepare-release</li> <li><a href="https://github.com/hartwork/wait-for-it/commit/902a160ccaa5ec3502f939352f7f7c36e266b500"><code>902a160</code></a> Bump version to 2.3.0</li> <li><a href="https://github.com/hartwork/wait-for-it/commit/b04be12d5573bbc83a113b6da8401dee290a897a"><code>b04be12</code></a> Merge pull request <a href="https://redirect.github.com/hartwork/wait-for-it/issues/5">#5</a> from hartwork/drop-python-3-8-and-add-3-13</li> <li><a href="https://github.com/hartwork/wait-for-it/commit/42d90ee5ae3a37d7058dbd4045469e77f97efe5c"><code>42d90ee</code></a> Drop end-of-life Python 3.8 and add Python 3.13</li> <li><a href="https://github.com/hartwork/wait-for-it/commit/3c95b9ae5e4ca5c44cb074a63cce3de71489d628"><code>3c95b9a</code></a> Merge pull request <a href="https://redirect.github.com/hartwork/wait-for-it/issues/4">#4</a> from hartwork/try-fixing-readthedocs-yml</li> <li><a href="https://github.com/hartwork/wait-for-it/commit/2fa50ba351d2e90a97d23fbe04966213fc5d5193"><code>2fa50ba</code></a> .readthedocs.yml: First try at fixing the Read the Docs build</li> <li><a href="https://github.com/hartwork/wait-for-it/commit/cbffb592fd8dc24b5eef05d77b7a3ef3b266a008"><code>cbffb59</code></a> Merge pull request <a href="https://redirect.github.com/hartwork/wait-for-it/issues/3">#3</a> from hartwork/new-project-home</li> <li><a href="https://github.com/hartwork/wait-for-it/commit/ff7a582b2abc168c242dbf3aafae5ac1a7d7f067"><code>ff7a582</code></a> Apply new project home</li> <li><a href="https://github.com/hartwork/wait-for-it/commit/cea7d83c3a98f9b1432add5bcb33d40640638bbb"><code>cea7d83</code></a> setup.py: Extend author info</li> <li><a href="https://github.com/hartwork/wait-for-it/commit/c3bb7c59d6227d3b0ea0cfbea1771ccbf966ddaa"><code>c3bb7c5</code></a> Stop GitHub Dependabot from requesting explicit review</li> <li>Additional commits viewable in <a 
href="https://github.com/hartwork/wait-for-it/compare/v2.2.2...v2.3.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=wait-for-it&package-manager=pip&previous-version=2.2.2&new-version=2.3.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 5bc55be769e..775a06a74e7 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -267,7 +267,7 @@ valkey==6.0.2 # via -r requirements/lint.in virtualenv==20.28.0 # via pre-commit -wait-for-it==2.2.2 +wait-for-it==2.3.0 # via -r requirements/test.in wheel==0.45.1 # via pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index c280c173d61..1045a452088 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -259,7 +259,7 @@ valkey==6.0.2 # via -r requirements/lint.in virtualenv==20.28.0 # via pre-commit -wait-for-it==2.2.2 +wait-for-it==2.3.0 # via -r requirements/test.in wheel==0.45.1 # via pip-tools diff --git a/requirements/test.txt b/requirements/test.txt index 300fc869dfa..85d6b183fa3 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -129,7 +129,7 @@ typing-extensions==4.12.2 # rich uvloop==0.21.0 ; platform_system != "Windows" and implementation_name 
== "cpython" # via -r requirements/base.in -wait-for-it==2.2.2 +wait-for-it==2.3.0 # via -r requirements/test.in yarl==1.18.3 # via -r requirements/runtime-deps.in From 38224e03c50feeeadc1d6ab5a923b4cf26ecb038 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 26 Dec 2024 11:20:32 +0000 Subject: [PATCH 1119/1511] Bump pre-commit from 3.5.0 to 4.0.1 (#10265) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pre-commit](https://github.com/pre-commit/pre-commit) from 3.5.0 to 4.0.1. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pre-commit/pre-commit/releases">pre-commit's releases</a>.</em></p> <blockquote> <h2>pre-commit v4.0.1</h2> <h3>Fixes</h3> <ul> <li>Fix <code>pre-commit migrate-config</code> for unquoted deprecated stages names with purelib <code>pyyaml</code>. <ul> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3324">#3324</a> PR by <a href="https://github.com/asottile"><code>@​asottile</code></a>.</li> <li><a href="https://redirect.github.com/pre-commit-ci/issues/issues/234">pre-commit-ci/issues#234</a> issue by <a href="https://github.com/lorenzwalthert"><code>@​lorenzwalthert</code></a>.</li> </ul> </li> </ul> <h2>pre-commit v4.0.0</h2> <h3>Features</h3> <ul> <li>Improve <code>pre-commit migrate-config</code> to handle more yaml formats. <ul> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3301">#3301</a> PR by <a href="https://github.com/asottile"><code>@​asottile</code></a>.</li> </ul> </li> <li>Handle <code>stages</code> deprecation in <code>pre-commit migrate-config</code>. 
<ul> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3302">#3302</a> PR by <a href="https://github.com/asottile"><code>@​asottile</code></a>.</li> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/2732">#2732</a> issue by <a href="https://github.com/asottile"><code>@​asottile</code></a>.</li> </ul> </li> <li>Upgrade <code>ruby-build</code>. <ul> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3199">#3199</a> PR by <a href="https://github.com/ThisGuyCodes"><code>@​ThisGuyCodes</code></a>.</li> </ul> </li> <li>Add "sensible regex" warnings to <code>repo: meta</code>. <ul> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3311">#3311</a> PR by <a href="https://github.com/asottile"><code>@​asottile</code></a>.</li> </ul> </li> <li>Add warnings for deprecated <code>stages</code> (<code>commit</code> -> <code>pre-commit</code>, <code>push</code> -> <code>pre-push</code>, <code>merge-commit</code> -> <code>pre-merge-commit</code>). <ul> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3312">#3312</a> PR by <a href="https://github.com/asottile"><code>@​asottile</code></a>.</li> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3313">#3313</a> PR by <a href="https://github.com/asottile"><code>@​asottile</code></a>.</li> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3315">#3315</a> PR by <a href="https://github.com/asottile"><code>@​asottile</code></a>.</li> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/2732">#2732</a> issue by <a href="https://github.com/asottile"><code>@​asottile</code></a>.</li> </ul> </li> </ul> <h3>Migrating</h3> <ul> <li><code>language: python_venv</code> has been removed -- use <code>language: python</code> instead. 
<ul> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3320">#3320</a> PR by <a href="https://github.com/asottile"><code>@​asottile</code></a>.</li> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/2734">#2734</a> issue by <a href="https://github.com/asottile"><code>@​asottile</code></a>.</li> </ul> </li> </ul> <h2>pre-commit v3.8.0</h2> <h3>Features</h3> <ul> <li>Implement health checks for <code>language: r</code> so environments are recreated if the system version of R changes. <ul> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3206">#3206</a> issue by <a href="https://github.com/lorenzwalthert"><code>@​lorenzwalthert</code></a>.</li> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3265">#3265</a> PR by <a href="https://github.com/lorenzwalthert"><code>@​lorenzwalthert</code></a>.</li> </ul> </li> </ul> <h2>pre-commit v3.7.1</h2> <h3>Fixes</h3> <ul> <li>Fix <code>language: rust</code> default language version check when <code>rust-toolchain.toml</code> is present. <ul> <li>issue by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a>.</li> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3201">#3201</a> PR by <a href="https://github.com/asottile"><code>@​asottile</code></a>.</li> </ul> </li> </ul> <h2>pre-commit v3.7.0</h2> <h3>Features</h3> <ul> <li>Use a tty for <code>docker</code> and <code>docker_image</code> hooks when <code>--color</code> is specified. <ul> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3122">#3122</a> PR by <a href="https://github.com/glehmann"><code>@​glehmann</code></a>.</li> </ul> </li> </ul> <h3>Fixes</h3> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pre-commit/pre-commit/blob/main/CHANGELOG.md">pre-commit's changelog</a>.</em></p> <blockquote> <h1>4.0.1 - 2024-10-08</h1> <h3>Fixes</h3> <ul> <li>Fix <code>pre-commit migrate-config</code> for unquoted deprecated stages names with purelib <code>pyyaml</code>. <ul> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3324">#3324</a> PR by <a href="https://github.com/asottile"><code>@​asottile</code></a>.</li> <li><a href="https://redirect.github.com/pre-commit-ci/issues/issues/234">pre-commit-ci/issues#234</a> issue by <a href="https://github.com/lorenzwalthert"><code>@​lorenzwalthert</code></a>.</li> </ul> </li> </ul> <h1>4.0.0 - 2024-10-05</h1> <h3>Features</h3> <ul> <li>Improve <code>pre-commit migrate-config</code> to handle more yaml formats. <ul> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3301">#3301</a> PR by <a href="https://github.com/asottile"><code>@​asottile</code></a>.</li> </ul> </li> <li>Handle <code>stages</code> deprecation in <code>pre-commit migrate-config</code>. <ul> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3302">#3302</a> PR by <a href="https://github.com/asottile"><code>@​asottile</code></a>.</li> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/2732">#2732</a> issue by <a href="https://github.com/asottile"><code>@​asottile</code></a>.</li> </ul> </li> <li>Upgrade <code>ruby-build</code>. <ul> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3199">#3199</a> PR by <a href="https://github.com/ThisGuyCodes"><code>@​ThisGuyCodes</code></a>.</li> </ul> </li> <li>Add "sensible regex" warnings to <code>repo: meta</code>. 
<ul> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3311">#3311</a> PR by <a href="https://github.com/asottile"><code>@​asottile</code></a>.</li> </ul> </li> <li>Add warnings for deprecated <code>stages</code> (<code>commit</code> -> <code>pre-commit</code>, <code>push</code> -> <code>pre-push</code>, <code>merge-commit</code> -> <code>pre-merge-commit</code>). <ul> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3312">#3312</a> PR by <a href="https://github.com/asottile"><code>@​asottile</code></a>.</li> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3313">#3313</a> PR by <a href="https://github.com/asottile"><code>@​asottile</code></a>.</li> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3315">#3315</a> PR by <a href="https://github.com/asottile"><code>@​asottile</code></a>.</li> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/2732">#2732</a> issue by <a href="https://github.com/asottile"><code>@​asottile</code></a>.</li> </ul> </li> </ul> <h3>Migrating</h3> <ul> <li><code>language: python_venv</code> has been removed -- use <code>language: python</code> instead. <ul> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3320">#3320</a> PR by <a href="https://github.com/asottile"><code>@​asottile</code></a>.</li> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/2734">#2734</a> issue by <a href="https://github.com/asottile"><code>@​asottile</code></a>.</li> </ul> </li> </ul> <h1>3.8.0 - 2024-07-28</h1> <h3>Features</h3> <ul> <li>Implement health checks for <code>language: r</code> so environments are recreated if the system version of R changes. 
<ul> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3206">#3206</a> issue by <a href="https://github.com/lorenzwalthert"><code>@​lorenzwalthert</code></a>.</li> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3265">#3265</a> PR by <a href="https://github.com/lorenzwalthert"><code>@​lorenzwalthert</code></a>.</li> </ul> </li> </ul> <h1>3.7.1 - 2024-05-10</h1> <h3>Fixes</h3> <ul> <li>Fix <code>language: rust</code> default language version check when <code>rust-toolchain.toml</code> is present. <ul> <li>issue by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a>.</li> </ul> </li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pre-commit/pre-commit/commit/cc4a52241565440ce200666799eef70626457488"><code>cc4a522</code></a> v4.0.1</li> <li><a href="https://github.com/pre-commit/pre-commit/commit/772d7d45d38b45a52355d8da708c068a0f242b00"><code>772d7d4</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/pre-commit/issues/3324">#3324</a> from pre-commit/migrate-config-purelib</li> <li><a href="https://github.com/pre-commit/pre-commit/commit/222c62bc5d2907efbd6052c5fb89c4c027400044"><code>222c62b</code></a> fix migrate-config for purelib yaml</li> <li><a href="https://github.com/pre-commit/pre-commit/commit/3d5548b487c4133181998a0a99148682625af8d1"><code>3d5548b</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/pre-commit/issues/3323">#3323</a> from pre-commit/pre-commit-ci-update-config</li> <li><a href="https://github.com/pre-commit/pre-commit/commit/4235a877f3ac4998b41e9cce8a709ac13de159b5"><code>4235a87</code></a> [pre-commit.ci] pre-commit autoupdate</li> <li><a href="https://github.com/pre-commit/pre-commit/commit/dbccd57db0e9cf993ea909e929eea97f6e4389ea"><code>dbccd57</code></a> v4.0.0</li> <li><a 
href="https://github.com/pre-commit/pre-commit/commit/d07e52901ccd9ceecb0b8c17627b2803a974669b"><code>d07e529</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/pre-commit/issues/3320">#3320</a> from pre-commit/remove-python-venv</li> <li><a href="https://github.com/pre-commit/pre-commit/commit/801b956304e2ad2738bdb76d9c65ed52e967bb57"><code>801b956</code></a> remove deprecated python_venv alias</li> <li><a href="https://github.com/pre-commit/pre-commit/commit/a2f7b80e8952594985e2719d0d12cb194a5e855e"><code>a2f7b80</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/pre-commit/issues/3315">#3315</a> from pre-commit/warn-deprecated-stage-names-on-init</li> <li><a href="https://github.com/pre-commit/pre-commit/commit/d31722386e57a98d8d7d6d74228d255b9a9ffaf3"><code>d317223</code></a> add warning for deprecates stages for remote repos on init</li> <li>Additional commits viewable in <a href="https://github.com/pre-commit/pre-commit/compare/v3.5.0...v4.0.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pre-commit&package-manager=pip&previous-version=3.5.0&new-version=4.0.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 775a06a74e7..5cc48450a37 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -130,7 +130,7 @@ platformdirs==4.3.6 # via virtualenv pluggy==1.5.0 # via pytest 
-pre-commit==3.5.0 +pre-commit==4.0.1 # via -r requirements/lint.in propcache==0.2.0 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 1045a452088..65f9b06f5a8 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -127,7 +127,7 @@ platformdirs==4.3.6 # via virtualenv pluggy==1.5.0 # via pytest -pre-commit==3.5.0 +pre-commit==4.0.1 # via -r requirements/lint.in propcache==0.2.0 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index c5050b9df82..aae57572972 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -51,7 +51,7 @@ platformdirs==4.3.6 # via virtualenv pluggy==1.5.0 # via pytest -pre-commit==3.5.0 +pre-commit==4.0.1 # via -r requirements/lint.in pycares==4.5.0 # via aiodns From b7dcc373cddc64a58ba2cb974e6a2bf4ae9b06f5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 26 Dec 2024 11:29:32 +0000 Subject: [PATCH 1120/1511] Bump cryptography from 43.0.3 to 44.0.0 (#10266) Bumps [cryptography](https://github.com/pyca/cryptography) from 43.0.3 to 44.0.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst">cryptography's changelog</a>.</em></p> <blockquote> <p>44.0.0 - 2024-11-27</p> <pre><code> * **BACKWARDS INCOMPATIBLE:** Dropped support for LibreSSL < 3.9. * Deprecated Python 3.7 support. Python 3.7 is no longer supported by the Python core team. Support for Python 3.7 will be removed in a future ``cryptography`` release. * Updated Windows, macOS, and Linux wheels to be compiled with OpenSSL 3.4.0. * macOS wheels are now built against the macOS 10.13 SDK. Users on older versions of macOS should upgrade, or they will need to build ``cryptography`` themselves. * Enforce the :rfc:`5280` requirement that extended key usage extensions must not be empty. * Added support for timestamp extraction to the :class:`~cryptography.fernet.MultiFernet` class. 
* Relax the Authority Key Identifier requirements on root CA certificates during X.509 verification to allow fields permitted by :rfc:`5280` but forbidden by the CA/Browser BRs. * Added support for :class:`~cryptography.hazmat.primitives.kdf.argon2.Argon2id` when using OpenSSL 3.2.0+. * Added support for the :class:`~cryptography.x509.Admissions` certificate extension. * Added basic support for PKCS7 decryption (including S/MIME 3.2) via :func:`~cryptography.hazmat.primitives.serialization.pkcs7.pkcs7_decrypt_der`, :func:`~cryptography.hazmat.primitives.serialization.pkcs7.pkcs7_decrypt_pem`, and :func:`~cryptography.hazmat.primitives.serialization.pkcs7.pkcs7_decrypt_smime`. <p>.. _v43-0-3:<br /> </code></pre></p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pyca/cryptography/commit/f299a48153650f2dd87716343f2daa7cd39a1f59"><code>f299a48</code></a> remove deprecated call (<a href="https://redirect.github.com/pyca/cryptography/issues/12052">#12052</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/439eb0594a9ffb7c9adedb2490998d83914d141e"><code>439eb05</code></a> Bump version for 44.0.0 (<a href="https://redirect.github.com/pyca/cryptography/issues/12051">#12051</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/2c5ad4d8dcec1b8f833198bc2f3b4634c4fd9d78"><code>2c5ad4d</code></a> chore(deps): bump maturin from 1.7.4 to 1.7.5 in /.github/requirements (<a href="https://redirect.github.com/pyca/cryptography/issues/12050">#12050</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/d23968adddd79aa8508d7c1f985da09383b3808f"><code>d23968a</code></a> chore(deps): bump libc from 0.2.165 to 0.2.166 (<a href="https://redirect.github.com/pyca/cryptography/issues/12049">#12049</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/133c0e02edf2f172318eb27d8f50525ed64c9ec3"><code>133c0e0</code></a> Bump x509-limbo and/or wycheproof in CI (<a 
href="https://redirect.github.com/pyca/cryptography/issues/12047">#12047</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/f2259d7aa0d134c839ebe298baa8b63de9ead804"><code>f2259d7</code></a> Bump BoringSSL and/or OpenSSL in CI (<a href="https://redirect.github.com/pyca/cryptography/issues/12046">#12046</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/e201c870b89fd2606d67230a97e50c3badb07907"><code>e201c87</code></a> fixed metadata in changelog (<a href="https://redirect.github.com/pyca/cryptography/issues/12044">#12044</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/c6104cc3669585941dc1d2b9c6507621c53d242f"><code>c6104cc</code></a> Prohibit Python 3.9.0, 3.9.1 -- they have a bug that causes errors (<a href="https://redirect.github.com/pyca/cryptography/issues/12045">#12045</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/d6cac753c2fcf8e0ca52ee7038a7d729ad5d763a"><code>d6cac75</code></a> Add support for decrypting S/MIME messages (<a href="https://redirect.github.com/pyca/cryptography/issues/11555">#11555</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/b8e5bfd4d7b35ba8d18b8052266e2cdae4963970"><code>b8e5bfd</code></a> chore(deps): bump libc from 0.2.164 to 0.2.165 (<a href="https://redirect.github.com/pyca/cryptography/issues/12042">#12042</a>)</li> <li>Additional commits viewable in <a href="https://github.com/pyca/cryptography/compare/43.0.3...44.0.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=cryptography&package-manager=pip&previous-version=43.0.3&new-version=44.0.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. 
You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 5cc48450a37..59c1dfed71c 100644 --- a/requirements/constraints.txt +++ 
b/requirements/constraints.txt @@ -54,7 +54,7 @@ coverage==7.6.9 # via # -r requirements/test.in # pytest-cov -cryptography==43.0.3 +cryptography==44.0.0 # via # pyjwt # trustme diff --git a/requirements/dev.txt b/requirements/dev.txt index 65f9b06f5a8..90d7563238e 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -54,7 +54,7 @@ coverage==7.6.9 # via # -r requirements/test.in # pytest-cov -cryptography==43.0.3 +cryptography==44.0.0 # via # pyjwt # trustme diff --git a/requirements/lint.txt b/requirements/lint.txt index aae57572972..bbdfd1f3970 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -19,7 +19,7 @@ cfgv==3.4.0 # via pre-commit click==8.1.8 # via slotscheck -cryptography==43.0.3 +cryptography==44.0.0 # via trustme distlib==0.3.9 # via virtualenv diff --git a/requirements/test.txt b/requirements/test.txt index 85d6b183fa3..8df87829116 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -29,7 +29,7 @@ coverage==7.6.9 # via # -r requirements/test.in # pytest-cov -cryptography==43.0.3 +cryptography==44.0.0 # via trustme exceptiongroup==1.2.2 # via pytest From 4790595f20c55da5714330d91e48540b4f0cf72c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 26 Dec 2024 11:30:34 +0000 Subject: [PATCH 1121/1511] Bump pytest from 8.3.3 to 8.3.4 (#10267) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [//]: # (dependabot-start) ⚠️ **Dependabot is rebasing this PR** ⚠️ Rebasing might not happen immediately, so don't worry if this takes some time. Note: if you make any changes to this PR yourself, they will take precedence over the rebase. --- [//]: # (dependabot-end) Bumps [pytest](https://github.com/pytest-dev/pytest) from 8.3.3 to 8.3.4. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pytest-dev/pytest/releases">pytest's releases</a>.</em></p> <blockquote> <h2>8.3.4</h2> <h1>pytest 8.3.4 (2024-12-01)</h1> <h2>Bug fixes</h2> <ul> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/12592">#12592</a>: Fixed <code>KeyError</code>{.interpreted-text role="class"} crash when using <code>--import-mode=importlib</code> in a directory layout where a directory contains a child directory with the same name.</p> </li> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/12818">#12818</a>: Assertion rewriting now preserves the source ranges of the original instructions, making it play well with tools that deal with the <code>AST</code>, like <a href="https://github.com/alexmojaki/executing">executing</a>.</p> </li> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/12849">#12849</a>: ANSI escape codes for colored output now handled correctly in <code>pytest.fail</code>{.interpreted-text role="func"} with [pytrace=False]{.title-ref}.</p> </li> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/9353">#9353</a>: <code>pytest.approx</code>{.interpreted-text role="func"} now uses strict equality when given booleans.</p> </li> </ul> <h2>Improved documentation</h2> <ul> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/10558">#10558</a>: Fix ambiguous docstring of <code>pytest.Config.getoption</code>{.interpreted-text role="func"}.</p> </li> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/10829">#10829</a>: Improve documentation on the current handling of the <code>--basetemp</code> option and its lack of retention functionality (<code>temporary directory location and retention</code>{.interpreted-text role="ref"}).</p> </li> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/12866">#12866</a>: Improved cross-references concerning the 
<code>recwarn</code>{.interpreted-text role="fixture"} fixture.</p> </li> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/12966">#12966</a>: Clarify <code>filterwarnings</code>{.interpreted-text role="ref"} docs on filter precedence/order when using multiple <code>@pytest.mark.filterwarnings <pytest.mark.filterwarnings ref></code>{.interpreted-text role="ref"} marks.</p> </li> </ul> <h2>Contributor-facing changes</h2> <ul> <li><a href="https://redirect.github.com/pytest-dev/pytest/issues/12497">#12497</a>: Fixed two failing pdb-related tests on Python 3.13.</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pytest-dev/pytest/commit/53f8b4e634c5066c4f797a87b20060edbb086240"><code>53f8b4e</code></a> Update pypa/gh-action-pypi-publish to v1.12.2</li> <li><a href="https://github.com/pytest-dev/pytest/commit/98dff36c9dc0a44881e9e90daf381f9079adf4cc"><code>98dff36</code></a> Prepare release version 8.3.4</li> <li><a href="https://github.com/pytest-dev/pytest/commit/1b474e221d5ced2c8c73924a0087e6e24ab6cd61"><code>1b474e2</code></a> approx: use exact comparison for bool (<a href="https://redirect.github.com/pytest-dev/pytest/issues/13013">#13013</a>)</li> <li><a href="https://github.com/pytest-dev/pytest/commit/b541721529feba7fcd0d069fa2437a817f340eba"><code>b541721</code></a> docs: Fix wrong statement about sys.modules with importlib import mode (<a href="https://redirect.github.com/pytest-dev/pytest/issues/1298">#1298</a>...</li> <li><a href="https://github.com/pytest-dev/pytest/commit/16cb87b65036300d74472cd55eebca8fc3f8e703"><code>16cb87b</code></a> pytest.fail: fix ANSI escape codes for colored output (<a href="https://redirect.github.com/pytest-dev/pytest/issues/12959">#12959</a>) (<a href="https://redirect.github.com/pytest-dev/pytest/issues/12990">#12990</a>)</li> <li><a href="https://github.com/pytest-dev/pytest/commit/be6bc812b02454b2915755dd76ce74b877aeafad"><code>be6bc81</code></a> 
Issue <a href="https://redirect.github.com/pytest-dev/pytest/issues/12966">#12966</a> Clarify filterwarnings docs on precedence when using multiple ma...</li> <li><a href="https://github.com/pytest-dev/pytest/commit/7aeb72bbc67bd1b8271eee57caa0a4e9b07038fc"><code>7aeb72b</code></a> Improve docs on basetemp and retention (<a href="https://redirect.github.com/pytest-dev/pytest/issues/12912">#12912</a>) (<a href="https://redirect.github.com/pytest-dev/pytest/issues/12928">#12928</a>)</li> <li><a href="https://github.com/pytest-dev/pytest/commit/c8758414cfd1646f273842e8f9292b2c15dcfcfb"><code>c875841</code></a> Merge pull request <a href="https://redirect.github.com/pytest-dev/pytest/issues/12917">#12917</a> from pytest-dev/patchback/backports/8.3.x/ded1f44e5...</li> <li><a href="https://github.com/pytest-dev/pytest/commit/6502816d977fcdbd65a3f4d8a63c0ce7c1f25649"><code>6502816</code></a> Merge pull request <a href="https://redirect.github.com/pytest-dev/pytest/issues/12913">#12913</a> from jakkdl/dontfailonbadpath</li> <li><a href="https://github.com/pytest-dev/pytest/commit/52135b033fb949efbec6aed9dd9000275bb199fd"><code>52135b0</code></a> Merge pull request <a href="https://redirect.github.com/pytest-dev/pytest/issues/12885">#12885</a> from The-Compiler/pdb-py311 (<a href="https://redirect.github.com/pytest-dev/pytest/issues/12887">#12887</a>)</li> <li>Additional commits viewable in <a href="https://github.com/pytest-dev/pytest/compare/8.3.3...8.3.4">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pytest&package-manager=pip&previous-version=8.3.3&new-version=8.3.4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. 
You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 59c1dfed71c..602cca60c60 100644 --- a/requirements/constraints.txt +++ 
b/requirements/constraints.txt @@ -160,7 +160,7 @@ pyproject-hooks==1.2.0 # via # build # pip-tools -pytest==8.3.3 +pytest==8.3.4 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 90d7563238e..b3035cb7858 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -155,7 +155,7 @@ pyproject-hooks==1.2.0 # via # build # pip-tools -pytest==8.3.3 +pytest==8.3.4 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index bbdfd1f3970..c7ea73277db 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -63,7 +63,7 @@ pydantic-core==2.27.2 # via pydantic pygments==2.18.0 # via rich -pytest==8.3.3 +pytest==8.3.4 # via # -r requirements/lint.in # pytest-codspeed diff --git a/requirements/test.txt b/requirements/test.txt index 8df87829116..4d1f840cc83 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -83,7 +83,7 @@ pydantic-core==2.27.2 # via pydantic pygments==2.18.0 # via rich -pytest==8.3.3 +pytest==8.3.4 # via # -r requirements/test.in # pytest-codspeed From fcdc28bb4a32843ce616c0906628a570366f5af0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 26 Dec 2024 11:35:03 +0000 Subject: [PATCH 1122/1511] Bump async-timeout from 4.0.3 to 5.0.1 (#10268) Bumps [async-timeout](https://github.com/aio-libs/async-timeout) from 4.0.3 to 5.0.1. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/async-timeout/releases">async-timeout's releases</a>.</em></p> <blockquote> <h2>5.0.1</h2> <h2>Misc</h2> <ul> <li><code>[#423](https://github.com/aio-libs/async-timeout/issues/423) <https://github.com/aio-libs/async-timeout/issues/423></code>_</li> </ul> <h2>5.0.0</h2> <h2>Features</h2> <ul> <li>Make <code>asyncio_timeout</code> fully compatible with the standard <code>asyncio.Timeout</code> but keep backward compatibility with existing <code>asyncio_timeout.Timeout</code> API. (<code>[#422](https://github.com/aio-libs/async-timeout/issues/422) <https://github.com/aio-libs/async-timeout/issues/422></code>_)</li> </ul> <h2>Improved Documentation</h2> <ul> <li>On the <code>CHANGES/README.rst <https://github.com/aio-libs/async-timeout/tree/master/CHANGES/README.rst></code>_ page, a link to the <code>Towncrier philosophy</code> has been fixed. (<code>[#388](https://github.com/aio-libs/async-timeout/issues/388) <https://github.com/aio-libs/async-timeout/issues/388></code>_)</li> </ul> <h2>Deprecations and Removals</h2> <ul> <li>Drop deprecated sync context manager support, use <code>async with timeout(...): ...</code> instead. 
(<code>[#421](https://github.com/aio-libs/async-timeout/issues/421) <https://github.com/aio-libs/async-timeout/issues/421></code>_)</li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/async-timeout/blob/master/CHANGES.rst">async-timeout's changelog</a>.</em></p> <blockquote> <h1>5.0.1 (2024-11-06)</h1> <h2>Misc</h2> <ul> <li><code>[#423](https://github.com/aio-libs/async-timeout/issues/423) <https://github.com/aio-libs/async-timeout/issues/423></code>_</li> </ul> <h1>5.0.0 (2024-10-31)</h1> <h2>Features</h2> <ul> <li>Make <code>asyncio_timeout</code> fully compatible with the standard <code>asyncio.Timeout</code> but keep backward compatibility with existing <code>asyncio_timeout.Timeout</code> API. (<code>[#422](https://github.com/aio-libs/async-timeout/issues/422) <https://github.com/aio-libs/async-timeout/issues/422></code>_)</li> </ul> <h2>Improved Documentation</h2> <ul> <li>On the <code>CHANGES/README.rst <https://github.com/aio-libs/async-timeout/tree/master/CHANGES/README.rst></code>_ page, a link to the <code>Towncrier philosophy</code> has been fixed. (<code>[#388](https://github.com/aio-libs/async-timeout/issues/388) <https://github.com/aio-libs/async-timeout/issues/388></code>_)</li> </ul> <h2>Deprecations and Removals</h2> <ul> <li>Drop deprecated sync context manager support, use <code>async with timeout(...): ...</code> instead. 
(<code>[#421](https://github.com/aio-libs/async-timeout/issues/421) <https://github.com/aio-libs/async-timeout/issues/421></code>_)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/async-timeout/commit/cc52d410e733b302873e6e52db9a9b6578d053a4"><code>cc52d41</code></a> Bump to 5.0.1</li> <li><a href="https://github.com/aio-libs/async-timeout/commit/1af7d7165d598af03e51952a679c690690dc7f8e"><code>1af7d71</code></a> Bump to 5.0.1</li> <li><a href="https://github.com/aio-libs/async-timeout/commit/3bf26d96e3d1830a0e72ddab9c6a2c87e1f4380a"><code>3bf26d9</code></a> Fix <a href="https://redirect.github.com/aio-libs/async-timeout/issues/423">#423</a>: add requirements.txt to sdist package (<a href="https://redirect.github.com/aio-libs/async-timeout/issues/425">#425</a>)</li> <li><a href="https://github.com/aio-libs/async-timeout/commit/380e8ba5bf77e3cd184df8a5f51dc929d93d8553"><code>380e8ba</code></a> Mention condition import of asyncio.timeout / async_timeout.timeout in README...</li> <li><a href="https://github.com/aio-libs/async-timeout/commit/8a232d0f7fc1af88c8d921500d78bbefc2bc545c"><code>8a232d0</code></a> Update README</li> <li><a href="https://github.com/aio-libs/async-timeout/commit/3d3e3e45420ac6ea0ef77c9e32388e252e12ac4d"><code>3d3e3e4</code></a> Bump to 5.0.0</li> <li><a href="https://github.com/aio-libs/async-timeout/commit/d5c21f3dee7eeb06823af77284ad2a899611b28e"><code>d5c21f3</code></a> Mimic asyncio.Timeout behavior (<a href="https://redirect.github.com/aio-libs/async-timeout/issues/422">#422</a>)</li> <li><a href="https://github.com/aio-libs/async-timeout/commit/a1111c2597b9ce8d34b823c1b993689339ea9fd6"><code>a1111c2</code></a> Drop deprecated sync context manager support (<a href="https://redirect.github.com/aio-libs/async-timeout/issues/421">#421</a>)</li> <li><a href="https://github.com/aio-libs/async-timeout/commit/00fd343ce6ba2ccc291c0f6e09fea8998cbfe3a1"><code>00fd343</code></a> 
Imports cleanup</li> <li><a href="https://github.com/aio-libs/async-timeout/commit/7b61b33ae0ecee0e0bb0e09f2e5787c647d4ffb8"><code>7b61b33</code></a> Update CI config file</li> <li>Additional commits viewable in <a href="https://github.com/aio-libs/async-timeout/compare/v4.0.3...v5.0.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=async-timeout&package-manager=pip&previous-version=4.0.3&new-version=5.0.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index ce4157c5917..6d24a73a24f 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -10,7 +10,7 @@ aiohappyeyeballs==2.4.4 # via -r requirements/runtime-deps.in aiosignal==1.3.2 # via -r requirements/runtime-deps.in -async-timeout==4.0.3 ; python_version < "3.11" +async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in attrs==24.3.0 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 602cca60c60..03c8469587b 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -18,7 +18,7 @@ alabaster==1.0.0 # via sphinx annotated-types==0.7.0 # via pydantic -async-timeout==4.0.3 ; python_version < "3.11" +async-timeout==5.0.1 ; python_version < "3.11" # via # -r requirements/runtime-deps.in # valkey diff --git 
a/requirements/dev.txt b/requirements/dev.txt index b3035cb7858..86e91f468e6 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -18,7 +18,7 @@ alabaster==1.0.0 # via sphinx annotated-types==0.7.0 # via pydantic -async-timeout==4.0.3 ; python_version < "3.11" +async-timeout==5.0.1 ; python_version < "3.11" # via # -r requirements/runtime-deps.in # valkey diff --git a/requirements/lint.txt b/requirements/lint.txt index c7ea73277db..bfd9361e831 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 # via -r requirements/lint.in annotated-types==0.7.0 # via pydantic -async-timeout==4.0.3 +async-timeout==5.0.1 # via valkey cffi==1.17.1 # via diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index a790e54b1ed..a10f2328e88 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -10,7 +10,7 @@ aiohappyeyeballs==2.4.4 # via -r requirements/runtime-deps.in aiosignal==1.3.2 # via -r requirements/runtime-deps.in -async-timeout==4.0.3 ; python_version < "3.11" +async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in attrs==24.3.0 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 4d1f840cc83..e9d5d06a3fb 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -12,7 +12,7 @@ aiosignal==1.3.2 # via -r requirements/runtime-deps.in annotated-types==0.7.0 # via pydantic -async-timeout==4.0.3 ; python_version < "3.11" +async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in attrs==24.3.0 # via -r requirements/runtime-deps.in From c9bec0a576d8dc2afd4e74bcb9ed45bdef411c45 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 26 Dec 2024 11:46:44 +0000 Subject: [PATCH 1123/1511] Bump pytest-cov from 5.0.0 to 6.0.0 (#10269) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 
Bumps [pytest-cov](https://github.com/pytest-dev/pytest-cov) from 5.0.0 to 6.0.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pytest-dev/pytest-cov/blob/master/CHANGELOG.rst">pytest-cov's changelog</a>.</em></p> <blockquote> <h2>6.0.0 (2024-10-29)</h2> <ul> <li>Updated various documentation inaccuracies, especially on subprocess handling.</li> <li>Changed fail under checks to use the precision set in the coverage configuration. Now it will perform the check just like <code>coverage report</code> would.</li> <li>Added a <code>--cov-precision</code> cli option that can override the value set in your coverage configuration.</li> <li>Dropped support for now EOL Python 3.8.</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pytest-dev/pytest-cov/commit/95404375a0e436178e012e20b8865c23c54c8a50"><code>9540437</code></a> Bump version: 5.0.0 → 6.0.0</li> <li><a href="https://github.com/pytest-dev/pytest-cov/commit/9f8175467afc67db9001fb364ad1f2dfe79b51f1"><code>9f81754</code></a> Further trim down envs and drop Python 3.8.</li> <li><a href="https://github.com/pytest-dev/pytest-cov/commit/b12b5ec65da4c66bbc0c35918957f9f875f465c3"><code>b12b5ec</code></a> Update conf.</li> <li><a href="https://github.com/pytest-dev/pytest-cov/commit/23f4b27b432a54fcc3b6df7363f0e73e568233fb"><code>23f4b27</code></a> Update changelog.</li> <li><a href="https://github.com/pytest-dev/pytest-cov/commit/291a04f49566054bb19644aba27c3799ac8b7f42"><code>291a04f</code></a> Bump test deps and trim config.</li> <li><a href="https://github.com/pytest-dev/pytest-cov/commit/08f1101455ba293dda388fdb3b61e62fd95a827d"><code>08f1101</code></a> Add <code>--cov-precision</code> option. 
Close <a href="https://redirect.github.com/pytest-dev/pytest-cov/issues/655">#655</a>.</li> <li><a href="https://github.com/pytest-dev/pytest-cov/commit/76fe2a7e48e5c9c53644994c5ba5a421c84286f5"><code>76fe2a7</code></a> Move the warnings/errors in a place that doesn't import anything.</li> <li><a href="https://github.com/pytest-dev/pytest-cov/commit/a9ea7b71711479d4c5ccc5e21e2eb1694b259cb0"><code>a9ea7b7</code></a> Implement error/warning for the bad dynamic_context being set in config.</li> <li><a href="https://github.com/pytest-dev/pytest-cov/commit/c299e01b6422284a6a7f7322e6b9bf8f44aa3c25"><code>c299e01</code></a> Add explicit suffixing to make it easier to see the identify the sources/usag...</li> <li><a href="https://github.com/pytest-dev/pytest-cov/commit/c87e54643ef696ed2b0e2b9a4209581da8467fcb"><code>c87e546</code></a> Add reproducer for weird xdist dynamic_context interaction. Ref <a href="https://redirect.github.com/pytest-dev/pytest-cov/issues/604">#604</a>.</li> <li>Additional commits viewable in <a href="https://github.com/pytest-dev/pytest-cov/compare/v5.0.0...v6.0.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pytest-cov&package-manager=pip&previous-version=5.0.0&new-version=6.0.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 03c8469587b..d7f7429fb03 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -172,7 +172,7 @@ pytest-codspeed==3.1.0 # via # -r requirements/lint.in # -r 
requirements/test.in -pytest-cov==5.0.0 +pytest-cov==6.0.0 # via -r requirements/test.in pytest-mock==3.14.0 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 86e91f468e6..b9f77016b9e 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -167,7 +167,7 @@ pytest-codspeed==3.1.0 # via # -r requirements/lint.in # -r requirements/test.in -pytest-cov==5.0.0 +pytest-cov==6.0.0 # via -r requirements/test.in pytest-mock==3.14.0 # via diff --git a/requirements/test.txt b/requirements/test.txt index e9d5d06a3fb..07870950c38 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -92,7 +92,7 @@ pytest==8.3.4 # pytest-xdist pytest-codspeed==3.1.0 # via -r requirements/test.in -pytest-cov==5.0.0 +pytest-cov==6.0.0 # via -r requirements/test.in pytest-mock==3.14.0 # via -r requirements/test.in From b48fcb9c53eb93d967d0a901cdbca8f71eb8c323 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 26 Dec 2024 11:50:50 +0000 Subject: [PATCH 1124/1511] Bump propcache from 0.2.0 to 0.2.1 (#10264) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [propcache](https://github.com/aio-libs/propcache) from 0.2.0 to 0.2.1. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/propcache/releases">propcache's releases</a>.</em></p> <blockquote> <h2>0.2.1</h2> <h2>Bug fixes</h2> <ul> <li> <p>Stopped implicitly allowing the use of Cython pre-release versions when building the distribution package -- by :user:<code>ajsanchezsanz</code> and :user:<code>markgreene74</code>.</p> <p><em>Related commits on GitHub:</em> :commit:<code>64df0a6</code>.</p> </li> <li> <p>Fixed <code>wrapped</code> and <code>func</code> not being accessible in the Cython versions of :func:<code>propcache.api.cached_property</code> and :func:<code>propcache.api.under_cached_property</code> decorators -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/propcache/issues/72">#72</a>.</p> </li> </ul> <h2>Removals and backward incompatible breaking changes</h2> <ul> <li> <p>Removed support for Python 3.8 as it has reached end of life -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/propcache/issues/57">#57</a>.</p> </li> </ul> <h2>Packaging updates and notes for downstreams</h2> <ul> <li> <p>Stopped implicitly allowing the use of Cython pre-release versions when building the distribution package -- by :user:<code>ajsanchezsanz</code> and :user:<code>markgreene74</code>.</p> <p><em>Related commits on GitHub:</em> :commit:<code>64df0a6</code>.</p> </li> </ul> <hr /> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/propcache/blob/master/CHANGES.rst">propcache's changelog</a>.</em></p> <blockquote> <h1>0.2.1</h1> <p><em>(2024-12-01)</em></p> <h2>Bug fixes</h2> <ul> <li> <p>Stopped implicitly allowing the use of Cython pre-release versions when building the distribution package -- by :user:<code>ajsanchezsanz</code> and 
:user:<code>markgreene74</code>.</p> <p><em>Related commits on GitHub:</em> :commit:<code>64df0a6</code>.</p> </li> <li> <p>Fixed <code>wrapped</code> and <code>func</code> not being accessible in the Cython versions of :func:<code>propcache.api.cached_property</code> and :func:<code>propcache.api.under_cached_property</code> decorators -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>72</code>.</p> </li> </ul> <h2>Removals and backward incompatible breaking changes</h2> <ul> <li> <p>Removed support for Python 3.8 as it has reached end of life -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>57</code>.</p> </li> </ul> <h2>Packaging updates and notes for downstreams</h2> <ul> <li> <p>Stopped implicitly allowing the use of Cython pre-release versions when building the distribution package -- by :user:<code>ajsanchezsanz</code> and :user:<code>markgreene74</code>.</p> <p><em>Related commits on GitHub:</em> :commit:<code>64df0a6</code>.</p> </li> </ul> <hr /> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/propcache/commit/24fd58a10551ec8e8db66d8612e10455774ca3aa"><code>24fd58a</code></a> Split wheel builds to disable manylinux armv7l wheels (<a href="https://redirect.github.com/aio-libs/propcache/issues/76">#76</a>)</li> <li><a href="https://github.com/aio-libs/propcache/commit/08091df5566c2620f032a272be1942165edca10c"><code>08091df</code></a> Fix wheel builds when cffi needs to be built from source (<a href="https://redirect.github.com/aio-libs/propcache/issues/75">#75</a>)</li> <li><a href="https://github.com/aio-libs/propcache/commit/b0a554f56be64b7f32a523c4a5425cf85182a244"><code>b0a554f</code></a> Release 0.2.1 (<a href="https://redirect.github.com/aio-libs/propcache/issues/74">#74</a>)</li> <li><a 
href="https://github.com/aio-libs/propcache/commit/4c943884c05cd44a31aa0a31a2dea962c59baebf"><code>4c94388</code></a> Fix codecov upload (<a href="https://redirect.github.com/aio-libs/propcache/issues/73">#73</a>)</li> <li><a href="https://github.com/aio-libs/propcache/commit/2514c3f033bb9e00441390e0d402295d0ca3c7df"><code>2514c3f</code></a> Ensure wrapped function is accessible in Cython versions (<a href="https://redirect.github.com/aio-libs/propcache/issues/72">#72</a>)</li> <li><a href="https://github.com/aio-libs/propcache/commit/cf8aab9a32760d39b7a011e8fe1b2146cc2e31b1"><code>cf8aab9</code></a> Bump pypa/cibuildwheel from 2.21.3 to 2.22.0 (<a href="https://redirect.github.com/aio-libs/propcache/issues/71">#71</a>)</li> <li><a href="https://github.com/aio-libs/propcache/commit/ef6af70cb5ebd1d5a0a739b6c6c388c0fae0fed2"><code>ef6af70</code></a> Cleanup mypy config for codspeed (<a href="https://redirect.github.com/aio-libs/propcache/issues/67">#67</a>)</li> <li><a href="https://github.com/aio-libs/propcache/commit/d6980a766f5a7c6d0f018ee67831c895abf64b90"><code>d6980a7</code></a> 🧪 Exclude <code>requirements/</code> from pytest discovery</li> <li><a href="https://github.com/aio-libs/propcache/commit/57eb5ae409c1b9f3324ddd4d3c5ba7fc062a61e3"><code>57eb5ae</code></a> 🧪 Fail test jobs on Codecov upload problems</li> <li><a href="https://github.com/aio-libs/propcache/commit/6222b019ae850878b4dca0d3e5f072b9b6949416"><code>6222b01</code></a> 🧪 Declare a packaging flag @ codecov</li> <li>Additional commits viewable in <a href="https://github.com/aio-libs/propcache/compare/v0.2.0...v0.2.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=propcache&package-manager=pip&previous-version=0.2.0&new-version=0.2.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will 
resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt 
index 6d24a73a24f..990179d8b17 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -32,7 +32,7 @@ multidict==6.1.0 # yarl packaging==24.2 # via gunicorn -propcache==0.2.0 +propcache==0.2.1 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/constraints.txt b/requirements/constraints.txt index d7f7429fb03..615f7c84ef5 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -132,7 +132,7 @@ pluggy==1.5.0 # via pytest pre-commit==4.0.1 # via -r requirements/lint.in -propcache==0.2.0 +propcache==0.2.1 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/dev.txt b/requirements/dev.txt index b9f77016b9e..c11f5443ae2 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -129,7 +129,7 @@ pluggy==1.5.0 # via pytest pre-commit==4.0.1 # via -r requirements/lint.in -propcache==0.2.0 +propcache==0.2.1 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index a10f2328e88..7fb7ffb3589 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -28,7 +28,7 @@ multidict==6.1.0 # via # -r requirements/runtime-deps.in # yarl -propcache==0.2.0 +propcache==0.2.1 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/test.txt b/requirements/test.txt index 07870950c38..eac7f7f1e39 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -67,7 +67,7 @@ packaging==24.2 # pytest pluggy==1.5.0 # via pytest -propcache==0.2.0 +propcache==0.2.1 # via # -r requirements/runtime-deps.in # yarl From 622d3669deb1b235f722b6a1c1d14ae1914e7dce Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 27 Dec 2024 10:37:50 +0000 Subject: [PATCH 1125/1511] Bump sphinxcontrib-spelling from 8.0.0 to 8.0.1 (#10272) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps 
[sphinxcontrib-spelling](https://github.com/sphinx-contrib/spelling) from 8.0.0 to 8.0.1. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/sphinx-contrib/spelling/releases">sphinxcontrib-spelling's releases</a>.</em></p> <blockquote> <h2>8.0.1</h2> <h2>What's Changed</h2> <ul> <li>Fix spelling:word-list example by <a href="https://github.com/adamchainz"><code>@​adamchainz</code></a> in <a href="https://redirect.github.com/sphinx-contrib/spelling/pull/217">sphinx-contrib/spelling#217</a></li> <li>build(deps): bump actions/setup-python from 2 to 4 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/sphinx-contrib/spelling/pull/211">sphinx-contrib/spelling#211</a></li> <li>add docs for setting up enchant library by <a href="https://github.com/dhellmann"><code>@​dhellmann</code></a> in <a href="https://redirect.github.com/sphinx-contrib/spelling/pull/219">sphinx-contrib/spelling#219</a></li> <li>add virtualenvwrapper hooks by <a href="https://github.com/dhellmann"><code>@​dhellmann</code></a> in <a href="https://redirect.github.com/sphinx-contrib/spelling/pull/220">sphinx-contrib/spelling#220</a></li> <li>build(deps): bump actions/checkout from 3 to 4 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/sphinx-contrib/spelling/pull/221">sphinx-contrib/spelling#221</a></li> <li>build(deps): bump actions/setup-python from 4 to 5 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/sphinx-contrib/spelling/pull/222">sphinx-contrib/spelling#222</a></li> <li>CI fixes by <a href="https://github.com/dhellmann"><code>@​dhellmann</code></a> in <a href="https://redirect.github.com/sphinx-contrib/spelling/pull/230">sphinx-contrib/spelling#230</a></li> <li>gracefully handle if git is not installed by <a href="https://github.com/blmaier"><code>@​blmaier</code></a> 
in <a href="https://redirect.github.com/sphinx-contrib/spelling/pull/229">sphinx-contrib/spelling#229</a></li> <li>use json api to retrieve names from pypi.org by <a href="https://github.com/dhellmann"><code>@​dhellmann</code></a> in <a href="https://redirect.github.com/sphinx-contrib/spelling/pull/231">sphinx-contrib/spelling#231</a></li> <li>re-enable use of pypi filter in our doc build by <a href="https://github.com/dhellmann"><code>@​dhellmann</code></a> in <a href="https://redirect.github.com/sphinx-contrib/spelling/pull/232">sphinx-contrib/spelling#232</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/dependabot"><code>@​dependabot</code></a> made their first contribution in <a href="https://redirect.github.com/sphinx-contrib/spelling/pull/211">sphinx-contrib/spelling#211</a></li> <li><a href="https://github.com/blmaier"><code>@​blmaier</code></a> made their first contribution in <a href="https://redirect.github.com/sphinx-contrib/spelling/pull/229">sphinx-contrib/spelling#229</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/sphinx-contrib/spelling/compare/8.0.0...8.0.1">https://github.com/sphinx-contrib/spelling/compare/8.0.0...8.0.1</a></p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/sphinx-contrib/spelling/commit/58821a69efc45047a66a409e1065cd0e264ad0c3"><code>58821a6</code></a> Merge pull request <a href="https://redirect.github.com/sphinx-contrib/spelling/issues/232">#232</a> from dhellmann/enable-pypi-names-docs</li> <li><a href="https://github.com/sphinx-contrib/spelling/commit/9b088c3a506bc3d458c5a6efa853560293f5f4db"><code>9b088c3</code></a> re-enable use of pypi filter in our doc build</li> <li><a href="https://github.com/sphinx-contrib/spelling/commit/d41eceb2e64274b8cd914c5ca18c59f0fd7791f4"><code>d41eceb</code></a> Merge pull request <a href="https://redirect.github.com/sphinx-contrib/spelling/issues/231">#231</a> from 
dhellmann/pypi-filter-factory-json</li> <li><a href="https://github.com/sphinx-contrib/spelling/commit/2eafb07f499afcab52fee7464e34ce68b36710f6"><code>2eafb07</code></a> use json api to retrieve names from pypi.org</li> <li><a href="https://github.com/sphinx-contrib/spelling/commit/65a9b841ce4e3a0b1461e2afcb607d89898debd5"><code>65a9b84</code></a> Merge pull request <a href="https://redirect.github.com/sphinx-contrib/spelling/issues/229">#229</a> from blmaier/fix-git-missing</li> <li><a href="https://github.com/sphinx-contrib/spelling/commit/7b10eea4faaeaaf743ed7d713c3ea8f7690e8194"><code>7b10eea</code></a> update history</li> <li><a href="https://github.com/sphinx-contrib/spelling/commit/22016934b308f7cb088ee0ce10d1cdcc3dc4724b"><code>2201693</code></a> gracefully handle if git is not installed</li> <li><a href="https://github.com/sphinx-contrib/spelling/commit/13ac4cc37ec767c4aae26b45a0f138bc937e9057"><code>13ac4cc</code></a> Merge pull request <a href="https://redirect.github.com/sphinx-contrib/spelling/issues/230">#230</a> from dhellmann/ci-fixes-2024</li> <li><a href="https://github.com/sphinx-contrib/spelling/commit/b86d87a9ee05f1121af7bc15b43133b555c3d554"><code>b86d87a</code></a> fix docstring test</li> <li><a href="https://github.com/sphinx-contrib/spelling/commit/596dfc2521c925ee5d0cdbd9e2fb8df2c3f38f4c"><code>596dfc2</code></a> switch linter to ruff</li> <li>Additional commits viewable in <a href="https://github.com/sphinx-contrib/spelling/compare/8.0.0...8.0.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=sphinxcontrib-spelling&package-manager=pip&previous-version=8.0.0&new-version=8.0.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. 
You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 3 ++- requirements/doc-spelling.txt | 6 ++++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 615f7c84ef5..c6669e2b7b0 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -196,6 +196,7 @@ requests==2.32.3 # 
via # cherry-picker # sphinx + # sphinxcontrib-spelling rich==13.9.4 # via pytest-codspeed setuptools-git==1.2 @@ -223,7 +224,7 @@ sphinxcontrib-qthelp==2.0.0 # via sphinx sphinxcontrib-serializinghtml==2.0.0 # via sphinx -sphinxcontrib-spelling==8.0.0 ; platform_system != "Windows" +sphinxcontrib-spelling==8.0.1 ; platform_system != "Windows" # via -r requirements/doc-spelling.in sphinxcontrib-towncrier==0.4.0a0 # via -r requirements/doc.in diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 9292e082fa0..d6793b46c9c 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -37,7 +37,9 @@ pyenchant==3.2.2 pygments==2.18.0 # via sphinx requests==2.32.3 - # via sphinx + # via + # sphinx + # sphinxcontrib-spelling snowballstemmer==2.2.0 # via sphinx sphinx==8.1.3 @@ -57,7 +59,7 @@ sphinxcontrib-qthelp==2.0.0 # via sphinx sphinxcontrib-serializinghtml==2.0.0 # via sphinx -sphinxcontrib-spelling==8.0.0 ; platform_system != "Windows" +sphinxcontrib-spelling==8.0.1 ; platform_system != "Windows" # via -r requirements/doc-spelling.in sphinxcontrib-towncrier==0.4.0a0 # via -r requirements/doc.in From 1b9fada3501360196b2951dd128fc33d243638e8 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 27 Dec 2024 17:02:31 +0000 Subject: [PATCH 1126/1511] [PR #10270/2a55afee backport][3.11] fix: include carriage return in multipartform.decode() (#10274) **This is a backport of PR #10270 as merged into master (2a55afeed7e769877d9f6f1eb7aab7ff595e1313).** Co-authored-by: James Ward <james@notjam.es> --- CHANGES/10270.bugfix.rst | 2 ++ CONTRIBUTORS.txt | 1 + aiohttp/multipart.py | 2 +- tests/test_web_response.py | 2 +- 4 files changed, 5 insertions(+), 2 deletions(-) create mode 100644 CHANGES/10270.bugfix.rst diff --git a/CHANGES/10270.bugfix.rst b/CHANGES/10270.bugfix.rst new file mode 100644 index 00000000000..e3252464dc8 --- /dev/null +++ b/CHANGES/10270.bugfix.rst @@ -0,0 
+1,2 @@ +``MultipartForm.decode()`` must follow RFC1341 7.2.1 with a ``CRLF`` after the boundary +-- by :user:`imnotjames`. diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 589784b29cb..fb1b87ccc9d 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -167,6 +167,7 @@ Jaesung Lee Jake Davis Jakob Ackermann Jakub Wilk +James Ward Jan Buchar Jan Gosmann Jarno Elonen diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index e0bcce07449..bd4d8ae1ddf 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -979,7 +979,7 @@ def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: return "".join( "--" + self.boundary - + "\n" + + "\r\n" + part._binary_headers.decode(encoding, errors) + part.decode() for part, _e, _te in self._parts diff --git a/tests/test_web_response.py b/tests/test_web_response.py index f4acf23f61b..0591426c57b 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -1201,7 +1201,7 @@ def read(self, size: int = -1) -> bytes: (BodyPartReader("x", CIMultiDictProxy(CIMultiDict()), mock.Mock()), None), ( mpwriter, - "--x\nContent-Type: text/plain; charset=utf-8\r\nContent-Length: 4\r\n\r\ntest", + "--x\r\nContent-Type: text/plain; charset=utf-8\r\nContent-Length: 4\r\n\r\ntest", ), ), ) From b222fb921b87d0178a4e6537bf6ea082d31c19bc Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 27 Dec 2024 17:14:49 +0000 Subject: [PATCH 1127/1511] [PR #10270/2a55afee backport][3.12] fix: include carriage return in multipartform.decode() (#10275) **This is a backport of PR #10270 as merged into master (2a55afeed7e769877d9f6f1eb7aab7ff595e1313).** Co-authored-by: James Ward <james@notjam.es> --- CHANGES/10270.bugfix.rst | 2 ++ CONTRIBUTORS.txt | 1 + aiohttp/multipart.py | 2 +- tests/test_web_response.py | 2 +- 4 files changed, 5 insertions(+), 2 deletions(-) create mode 100644 CHANGES/10270.bugfix.rst diff --git a/CHANGES/10270.bugfix.rst 
b/CHANGES/10270.bugfix.rst new file mode 100644 index 00000000000..e3252464dc8 --- /dev/null +++ b/CHANGES/10270.bugfix.rst @@ -0,0 +1,2 @@ +``MultipartForm.decode()`` must follow RFC1341 7.2.1 with a ``CRLF`` after the boundary +-- by :user:`imnotjames`. diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 930815d8b62..7eb48579097 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -168,6 +168,7 @@ Jaesung Lee Jake Davis Jakob Ackermann Jakub Wilk +James Ward Jan Buchar Jan Gosmann Jarno Elonen diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index e0bcce07449..bd4d8ae1ddf 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -979,7 +979,7 @@ def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: return "".join( "--" + self.boundary - + "\n" + + "\r\n" + part._binary_headers.decode(encoding, errors) + part.decode() for part, _e, _te in self._parts diff --git a/tests/test_web_response.py b/tests/test_web_response.py index 1e65f7364b6..6eb52d480ff 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -1209,7 +1209,7 @@ def read(self, size: int = -1) -> bytes: (BodyPartReader("x", CIMultiDictProxy(CIMultiDict()), mock.Mock()), None), ( mpwriter, - "--x\nContent-Type: text/plain; charset=utf-8\r\nContent-Length: 4\r\n\r\ntest", + "--x\r\nContent-Type: text/plain; charset=utf-8\r\nContent-Length: 4\r\n\r\ntest", ), ), ) From 8fe93ba82d12cd9bd7e4410809d35f6713462832 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 27 Dec 2024 17:29:33 +0000 Subject: [PATCH 1128/1511] Bump coverage from 7.6.9 to 7.6.10 (#10273) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.6.9 to 7.6.10. 
<details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst">coverage's changelog</a>.</em></p> <blockquote> <h2>Version 7.6.10 — 2024-12-26</h2> <ul> <li> <p>Fix: some descriptions of missing branches in HTML and LCOV reports were incorrect when multi-line statements were involved (<code>issue 1874</code>_ and <code>issue 1875</code>_). These are now fixed.</p> </li> <li> <p>Fix: Python 3.14 <code>defers evaluation of annotations <pep649_></code>_ by moving them into separate code objects. That code is rarely executed, so coverage.py would mark them as missing, as reported in <code>issue 1908</code>_. Now they are ignored by coverage automatically.</p> </li> <li> <p>Fixed an obscure and mysterious problem on PyPy 3.10 seemingly involving mocks, imports, and trace functions: <code>issue 1902</code>_. To be honest, I don't understand the problem or the solution, but <code>git bisect</code> helped find it, and now it's fixed.</p> </li> <li> <p>Docs: re-wrote the :ref:<code>subprocess</code> page to put multiprocessing first and to highlight the correct use of :class:<code>multiprocessing.Pool <python:multiprocessing.pool.Pool></code>.</p> </li> </ul> <p>.. _issue 1874: <a href="https://redirect.github.com/nedbat/coveragepy/issues/1874">nedbat/coveragepy#1874</a> .. _issue 1875: <a href="https://redirect.github.com/nedbat/coveragepy/issues/1875">nedbat/coveragepy#1875</a> .. _issue 1902: <a href="https://redirect.github.com/nedbat/coveragepy/issues/1902">nedbat/coveragepy#1902</a> .. _issue 1908: <a href="https://redirect.github.com/nedbat/coveragepy/issues/1908">nedbat/coveragepy#1908</a> .. _pep649: <a href="https://docs.python.org/3.14/whatsnew/3.14.html#pep-649-deferred-evaluation-of-annotations">https://docs.python.org/3.14/whatsnew/3.14.html#pep-649-deferred-evaluation-of-annotations</a></p> <p>.. 
_changes_7-6-9:</p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/nedbat/coveragepy/commit/f0dcf65f47120d9f74f6777134d3b8e92515ce6f"><code>f0dcf65</code></a> docs: sample HTML for 7.6.10</li> <li><a href="https://github.com/nedbat/coveragepy/commit/0f26f35863781bb5736301e5b9c6ce8ea7d5d0f7"><code>0f26f35</code></a> docs: prep for 7.6.10</li> <li><a href="https://github.com/nedbat/coveragepy/commit/81c5e4311171f1db78b75a126608763a3964e98d"><code>81c5e43</code></a> docs: rewrite the subprocess page</li> <li><a href="https://github.com/nedbat/coveragepy/commit/878410caf5a026773769fe68a43705dcc48ddfc3"><code>878410c</code></a> chore: make doc_upgrade</li> <li><a href="https://github.com/nedbat/coveragepy/commit/f1d320d08b0180e58ec4fbbb4b5a93575e7d1b74"><code>f1d320d</code></a> chore: make upgrade</li> <li><a href="https://github.com/nedbat/coveragepy/commit/67f1440e0a384000e337ab54bd9cc01804aec201"><code>67f1440</code></a> debug: this condition is never true. really?</li> <li><a href="https://github.com/nedbat/coveragepy/commit/c85eaba206e1bf98302e0997e32c079e2c231f4b"><code>c85eaba</code></a> fix: multi-line statements no longer confuse branch target descriptions. 
<a href="https://redirect.github.com/nedbat/coveragepy/issues/187">#187</a>...</li> <li><a href="https://github.com/nedbat/coveragepy/commit/73e58fa9dd361b6496f2aa42ede490118ec0429c"><code>73e58fa</code></a> refactor: clarify the code that fixes with-statement exits</li> <li><a href="https://github.com/nedbat/coveragepy/commit/e16c9cc8c86edea415695ebd34ccad43b1b84a69"><code>e16c9cc</code></a> typo: backslask</li> <li><a href="https://github.com/nedbat/coveragepy/commit/865fd7fca4ed2e05f91f74c4c8809ca78adae229"><code>865fd7f</code></a> chore: bump the action-dependencies group with 4 updates (<a href="https://redirect.github.com/nedbat/coveragepy/issues/1909">#1909</a>)</li> <li>Additional commits viewable in <a href="https://github.com/nedbat/coveragepy/compare/7.6.9...7.6.10">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=coverage&package-manager=pip&previous-version=7.6.9&new-version=7.6.10)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Sam Bull <git@sambull.org> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index c6669e2b7b0..d913a2c65f4 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -50,7 +50,7 @@ click==8.1.8 # slotscheck # towncrier 
# wait-for-it -coverage==7.6.9 +coverage==7.6.10 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/dev.txt b/requirements/dev.txt index c11f5443ae2..7cc628723b4 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -50,7 +50,7 @@ click==8.1.8 # slotscheck # towncrier # wait-for-it -coverage==7.6.9 +coverage==7.6.10 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/test.txt b/requirements/test.txt index eac7f7f1e39..11fc4409769 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -25,7 +25,7 @@ cffi==1.17.1 # pytest-codspeed click==8.1.8 # via wait-for-it -coverage==7.6.9 +coverage==7.6.10 # via # -r requirements/test.in # pytest-cov From 7fea1913f0bd114c661580ac7f54641a6886c5bf Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 31 Dec 2024 17:37:50 +0000 Subject: [PATCH 1129/1511] [PR #10283/e5d6016c backport][3.12] Clarify purpose of parameters `allow_redirects`, `max_redirects` for `ClientSession.request` (and related functions) (#10285) **This is a backport of PR #10283 as merged into master (e5d6016c8054a9b31f777e01bd400aa26bd6730d).** Co-authored-by: asrelo <47759736+asrelo@users.noreply.github.com> --- docs/client_reference.rst | 39 ++++++++++++++++++++++++++++----------- 1 file changed, 28 insertions(+), 11 deletions(-) diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 7e7cdf12184..e18785ea796 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -452,11 +452,16 @@ The client session supports the context manager protocol for self closing. :param aiohttp.BasicAuth auth: an object that represents HTTP Basic Authorization (optional) - :param bool allow_redirects: If set to ``False``, do not follow redirects. - ``True`` by default (optional). + :param bool allow_redirects: Whether to process redirects or not. 
+ When ``True``, redirects are followed (up to ``max_redirects`` times) + and logged into :attr:`ClientResponse.history` and ``trace_configs``. + When ``False``, the original response is returned. + ``True`` by default (optional). :param int max_redirects: Maximum number of redirects to follow. - ``10`` by default. + :exc:`TooManyRedirects` is raised if the number is exceeded. + Ignored when ``allow_redirects=False``. + ``10`` by default. :param bool compress: Set to ``True`` if request has to be compressed with deflate encoding. If `compress` can not be combined @@ -558,8 +563,11 @@ The client session supports the context manager protocol for self closing. :param url: Request URL, :class:`str` or :class:`~yarl.URL` - :param bool allow_redirects: If set to ``False``, do not follow redirects. - ``True`` by default (optional). + :param bool allow_redirects: Whether to process redirects or not. + When ``True``, redirects are followed and logged into + :attr:`ClientResponse.history`. + When ``False``, the original response is returned. + ``True`` by default (optional). :return ClientResponse: a :class:`client response <ClientResponse>` object. @@ -627,8 +635,11 @@ The client session supports the context manager protocol for self closing. :param url: Request URL, :class:`str` or :class:`~yarl.URL` - :param bool allow_redirects: If set to ``False``, do not follow redirects. - ``False`` by default (optional). + :param bool allow_redirects: Whether to process redirects or not. + When ``True``, redirects are followed and logged into + :attr:`ClientResponse.history`. + When ``False``, the original response is returned. + ``False`` by default (optional). :return ClientResponse: a :class:`client response <ClientResponse>` object. @@ -645,8 +656,11 @@ The client session supports the context manager protocol for self closing. :param url: Request URL, :class:`str` or :class:`~yarl.URL` - :param bool allow_redirects: If set to ``False``, do not follow redirects. 
- ``True`` by default (optional). + :param bool allow_redirects: Whether to process redirects or not. + When ``True``, redirects are followed and logged into + :attr:`ClientResponse.history`. + When ``False``, the original response is returned. + ``True`` by default (optional). :return ClientResponse: a :class:`client response <ClientResponse>` object. @@ -878,8 +892,11 @@ certification chaining. :param aiohttp.BasicAuth auth: an object that represents HTTP Basic Authorization (optional) - :param bool allow_redirects: If set to ``False``, do not follow redirects. - ``True`` by default (optional). + :param bool allow_redirects: Whether to process redirects or not. + When ``True``, redirects are followed (up to ``max_redirects`` times) + and logged into :attr:`ClientResponse.history` and ``trace_configs``. + When ``False``, the original response is returned. + ``True`` by default (optional). :param aiohttp.protocol.HttpVersion version: Request HTTP version (optional) From b103bbca6b753e5b5357367cbaed10c7eade5a44 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 31 Dec 2024 17:37:59 +0000 Subject: [PATCH 1130/1511] [PR #10283/e5d6016c backport][3.11] Clarify purpose of parameters `allow_redirects`, `max_redirects` for `ClientSession.request` (and related functions) (#10284) **This is a backport of PR #10283 as merged into master (e5d6016c8054a9b31f777e01bd400aa26bd6730d).** Co-authored-by: asrelo <47759736+asrelo@users.noreply.github.com> --- docs/client_reference.rst | 39 ++++++++++++++++++++++++++++----------- 1 file changed, 28 insertions(+), 11 deletions(-) diff --git a/docs/client_reference.rst b/docs/client_reference.rst index c9031de5383..19e221a27ca 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -448,11 +448,16 @@ The client session supports the context manager protocol for self closing. 
:param aiohttp.BasicAuth auth: an object that represents HTTP Basic Authorization (optional) - :param bool allow_redirects: If set to ``False``, do not follow redirects. - ``True`` by default (optional). + :param bool allow_redirects: Whether to process redirects or not. + When ``True``, redirects are followed (up to ``max_redirects`` times) + and logged into :attr:`ClientResponse.history` and ``trace_configs``. + When ``False``, the original response is returned. + ``True`` by default (optional). :param int max_redirects: Maximum number of redirects to follow. - ``10`` by default. + :exc:`TooManyRedirects` is raised if the number is exceeded. + Ignored when ``allow_redirects=False``. + ``10`` by default. :param bool compress: Set to ``True`` if request has to be compressed with deflate encoding. If `compress` can not be combined @@ -554,8 +559,11 @@ The client session supports the context manager protocol for self closing. :param url: Request URL, :class:`str` or :class:`~yarl.URL` - :param bool allow_redirects: If set to ``False``, do not follow redirects. - ``True`` by default (optional). + :param bool allow_redirects: Whether to process redirects or not. + When ``True``, redirects are followed and logged into + :attr:`ClientResponse.history`. + When ``False``, the original response is returned. + ``True`` by default (optional). :return ClientResponse: a :class:`client response <ClientResponse>` object. @@ -623,8 +631,11 @@ The client session supports the context manager protocol for self closing. :param url: Request URL, :class:`str` or :class:`~yarl.URL` - :param bool allow_redirects: If set to ``False``, do not follow redirects. - ``False`` by default (optional). + :param bool allow_redirects: Whether to process redirects or not. + When ``True``, redirects are followed and logged into + :attr:`ClientResponse.history`. + When ``False``, the original response is returned. + ``False`` by default (optional). 
:return ClientResponse: a :class:`client response <ClientResponse>` object. @@ -641,8 +652,11 @@ The client session supports the context manager protocol for self closing. :param url: Request URL, :class:`str` or :class:`~yarl.URL` - :param bool allow_redirects: If set to ``False``, do not follow redirects. - ``True`` by default (optional). + :param bool allow_redirects: Whether to process redirects or not. + When ``True``, redirects are followed and logged into + :attr:`ClientResponse.history`. + When ``False``, the original response is returned. + ``True`` by default (optional). :return ClientResponse: a :class:`client response <ClientResponse>` object. @@ -874,8 +888,11 @@ certification chaining. :param aiohttp.BasicAuth auth: an object that represents HTTP Basic Authorization (optional) - :param bool allow_redirects: If set to ``False``, do not follow redirects. - ``True`` by default (optional). + :param bool allow_redirects: Whether to process redirects or not. + When ``True``, redirects are followed (up to ``max_redirects`` times) + and logged into :attr:`ClientResponse.history` and ``trace_configs``. + When ``False``, the original response is returned. + ``True`` by default (optional). :param aiohttp.protocol.HttpVersion version: Request HTTP version (optional) From d8f38531007360e73af11a84984a022793010f0d Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 1 Jan 2025 14:05:11 +0100 Subject: [PATCH 1131/1511] [PR #10281/d54b3e27 backport][3.12] Update workflow to use upload-artifact v4 and download-artifact v4 (#10289) **This is a backport of PR #10281 as merged into master (d54b3e2712bb40644752f35787f701fef792f969).** <!-- Thank you for your contribution! --> ## What do these changes do? This updates the ci/cd workflow to use the upload-artifact v4 and download-artifact v4 github actions. The currently used upload-artifact and download-artifact will no longer work at the end of next month. 
The changes are needed since v4 no longer has mutable artifacts, which was used to collect wheels from different architectures. Fix #8588, Fix #8589, Fix #9009, Fix #10189, Fix #10191 ## Are there changes in behavior for the user? No, the ci/cd workflow has been tested although without publishing. ## Related issue number <!-- Are there any issues opened that will be resolved by merging this change? --> <!-- Remember to prefix with 'Fixes' if it should close the issue (e.g. 'Fixes #123'). --> ## Checklist - [x] I think the code is well written - [ ] Unit tests for the changes exist - [ ] Documentation reflects the changes - [ ] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. - [x] Add a new news fragment into the `CHANGES/` folder * name it `<issue_or_pr_num>.<type>.rst` (e.g. `588.bugfix.rst`) * if you don't have an issue number, change it to the pull request number after creating the PR * `.bugfix`: A bug fix for something the maintainers deemed an improper undesired behavior that got corrected to match pre-agreed expectations. * `.feature`: A new behavior, public APIs. That sort of stuff. * `.deprecation`: A declaration of future API removals and breaking changes in behavior. * `.breaking`: When something public is removed in a breaking way. Could be deprecated in an earlier release. * `.doc`: Notable updates to the documentation structure or build process. * `.packaging`: Notes for downstreams about unobvious side effects and tooling. Changes in the test invocation considerations and runtime assumptions. * `.contrib`: Stuff that affects the contributor experience. e.g. Running tests, building the docs, setting up the development environment. * `.misc`: Changes that are hard to assign to any of the above categories. 
* Make sure to use full sentences with correct case and punctuation, for example: ```rst Fixed issue with non-ascii contents in doctest text files -- by :user:`contributor-gh-handle`. ``` Use the past tense or the present tense a non-imperative mood, referring to what's changed compared to the last released version of this project. Co-authored-by: Simon Lamon <32477463+silamon@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 28 +++++++++++++++++----------- CHANGES/10281.contrib.rst | 1 + 2 files changed, 18 insertions(+), 11 deletions(-) create mode 100644 CHANGES/10281.contrib.rst diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index d5e119b779d..93d1ddded65 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -114,7 +114,7 @@ jobs: run: | make generate-llhttp - name: Upload llhttp generated files - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: llhttp path: vendor/llhttp/build @@ -177,7 +177,7 @@ jobs: python -m pip install -r requirements/test.in -c requirements/test.txt - name: Restore llhttp generated files if: ${{ matrix.no-extensions == '' }} - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: llhttp path: vendor/llhttp/build/ @@ -264,7 +264,7 @@ jobs: run: | python -m pip install -r requirements/test.in -c requirements/test.txt - name: Restore llhttp generated files - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: llhttp path: vendor/llhttp/build/ @@ -325,7 +325,7 @@ jobs: python -m pip install -r requirements/cython.in -c requirements/cython.txt - name: Restore llhttp generated files - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: llhttp path: vendor/llhttp/build/ @@ -336,9 +336,9 @@ jobs: run: | python -m build --sdist - name: Upload artifacts - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: - name: dist + name: dist-sdist path: 
dist build-wheels: @@ -388,7 +388,7 @@ jobs: python -m pip install -r requirements/cython.in -c requirements/cython.txt - name: Restore llhttp generated files - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: llhttp path: vendor/llhttp/build/ @@ -399,9 +399,15 @@ jobs: uses: pypa/cibuildwheel@v2.22.0 env: CIBW_ARCHS_MACOS: x86_64 arm64 universal2 - - uses: actions/upload-artifact@v3 + - name: Upload wheels + uses: actions/upload-artifact@v4 with: - name: dist + name: >- + dist-${{ matrix.os }}-${{ + matrix.qemu + && matrix.qemu + || 'native' + }} path: ./wheelhouse/*.whl deploy: @@ -426,10 +432,10 @@ jobs: run: | echo "${{ secrets.GITHUB_TOKEN }}" | gh auth login --with-token - name: Download distributions - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: - name: dist path: dist + pattern: dist-* - name: Collected dists run: | tree dist diff --git a/CHANGES/10281.contrib.rst b/CHANGES/10281.contrib.rst new file mode 100644 index 00000000000..b50b4d2f955 --- /dev/null +++ b/CHANGES/10281.contrib.rst @@ -0,0 +1 @@ +The CI/CD workflow has been updated to use `upload-artifact` v4 and `download-artifact` v4 GitHub Actions -- by :user:`silamon`. From 0c32bc88674f8d3814f0419eba14c0039d14d63e Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 1 Jan 2025 14:05:30 +0100 Subject: [PATCH 1132/1511] [PR #10281/d54b3e27 backport][3.11] Update workflow to use upload-artifact v4 and download-artifact v4 (#10288) **This is a backport of PR #10281 as merged into master (d54b3e2712bb40644752f35787f701fef792f969).** <!-- Thank you for your contribution! --> ## What do these changes do? This updates the ci/cd workflow to use the upload-artifact v4 and download-artifact v4 github actions. The currently used upload-artifact and download-artifact will no longer work at the end of next month. 
The changes are needed since v4 no longer has mutable artifacts, which was used to collect wheels from different architectures. Fix #8588, Fix #8589, Fix #9009, Fix #10189, Fix #10191 ## Are there changes in behavior for the user? No, the ci/cd workflow has been tested although without publishing. ## Related issue number <!-- Are there any issues opened that will be resolved by merging this change? --> <!-- Remember to prefix with 'Fixes' if it should close the issue (e.g. 'Fixes #123'). --> ## Checklist - [x] I think the code is well written - [ ] Unit tests for the changes exist - [ ] Documentation reflects the changes - [ ] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. - [x] Add a new news fragment into the `CHANGES/` folder * name it `<issue_or_pr_num>.<type>.rst` (e.g. `588.bugfix.rst`) * if you don't have an issue number, change it to the pull request number after creating the PR * `.bugfix`: A bug fix for something the maintainers deemed an improper undesired behavior that got corrected to match pre-agreed expectations. * `.feature`: A new behavior, public APIs. That sort of stuff. * `.deprecation`: A declaration of future API removals and breaking changes in behavior. * `.breaking`: When something public is removed in a breaking way. Could be deprecated in an earlier release. * `.doc`: Notable updates to the documentation structure or build process. * `.packaging`: Notes for downstreams about unobvious side effects and tooling. Changes in the test invocation considerations and runtime assumptions. * `.contrib`: Stuff that affects the contributor experience. e.g. Running tests, building the docs, setting up the development environment. * `.misc`: Changes that are hard to assign to any of the above categories. 
* Make sure to use full sentences with correct case and punctuation, for example: ```rst Fixed issue with non-ascii contents in doctest text files -- by :user:`contributor-gh-handle`. ``` Use the past tense or the present tense a non-imperative mood, referring to what's changed compared to the last released version of this project. Co-authored-by: Simon Lamon <32477463+silamon@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 28 +++++++++++++++++----------- CHANGES/10281.contrib.rst | 1 + 2 files changed, 18 insertions(+), 11 deletions(-) create mode 100644 CHANGES/10281.contrib.rst diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index d5e119b779d..93d1ddded65 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -114,7 +114,7 @@ jobs: run: | make generate-llhttp - name: Upload llhttp generated files - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: llhttp path: vendor/llhttp/build @@ -177,7 +177,7 @@ jobs: python -m pip install -r requirements/test.in -c requirements/test.txt - name: Restore llhttp generated files if: ${{ matrix.no-extensions == '' }} - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: llhttp path: vendor/llhttp/build/ @@ -264,7 +264,7 @@ jobs: run: | python -m pip install -r requirements/test.in -c requirements/test.txt - name: Restore llhttp generated files - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: llhttp path: vendor/llhttp/build/ @@ -325,7 +325,7 @@ jobs: python -m pip install -r requirements/cython.in -c requirements/cython.txt - name: Restore llhttp generated files - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: llhttp path: vendor/llhttp/build/ @@ -336,9 +336,9 @@ jobs: run: | python -m build --sdist - name: Upload artifacts - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: - name: dist + name: dist-sdist path: 
dist build-wheels: @@ -388,7 +388,7 @@ jobs: python -m pip install -r requirements/cython.in -c requirements/cython.txt - name: Restore llhttp generated files - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: llhttp path: vendor/llhttp/build/ @@ -399,9 +399,15 @@ jobs: uses: pypa/cibuildwheel@v2.22.0 env: CIBW_ARCHS_MACOS: x86_64 arm64 universal2 - - uses: actions/upload-artifact@v3 + - name: Upload wheels + uses: actions/upload-artifact@v4 with: - name: dist + name: >- + dist-${{ matrix.os }}-${{ + matrix.qemu + && matrix.qemu + || 'native' + }} path: ./wheelhouse/*.whl deploy: @@ -426,10 +432,10 @@ jobs: run: | echo "${{ secrets.GITHUB_TOKEN }}" | gh auth login --with-token - name: Download distributions - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: - name: dist path: dist + pattern: dist-* - name: Collected dists run: | tree dist diff --git a/CHANGES/10281.contrib.rst b/CHANGES/10281.contrib.rst new file mode 100644 index 00000000000..b50b4d2f955 --- /dev/null +++ b/CHANGES/10281.contrib.rst @@ -0,0 +1 @@ +The CI/CD workflow has been updated to use `upload-artifact` v4 and `download-artifact` v4 GitHub Actions -- by :user:`silamon`. From a286cc937714bbe65ad17bc7c4364bb1f115ccad Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 1 Jan 2025 16:12:22 +0000 Subject: [PATCH 1133/1511] Bump identify from 2.6.3 to 2.6.4 (#10278) Bumps [identify](https://github.com/pre-commit/identify) from 2.6.3 to 2.6.4. 
<details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pre-commit/identify/commit/5adfe0958847d3d48752678d6b5c843c438faddc"><code>5adfe09</code></a> v2.6.4</li> <li><a href="https://github.com/pre-commit/identify/commit/5025d0ae7d646266f14acba7bccaaf8a25fd1385"><code>5025d0a</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/identify/issues/494">#494</a> from dbast/starlark</li> <li><a href="https://github.com/pre-commit/identify/commit/dac18a9191b6087c3d28dcbd83172317f5ae1f60"><code>dac18a9</code></a> Add Tiltfile support</li> <li><a href="https://github.com/pre-commit/identify/commit/267690cbc84c5e44e2b9ad5b27fd9d366eb1a436"><code>267690c</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/identify/issues/497">#497</a> from pre-commit/pre-commit-ci-update-config</li> <li><a href="https://github.com/pre-commit/identify/commit/bca404e03c50a3583e4d4a9399043972f7fab4a9"><code>bca404e</code></a> [pre-commit.ci] pre-commit autoupdate</li> <li><a href="https://github.com/pre-commit/identify/commit/ce50079bd11a3777c86056205f4219e7fe3be0b4"><code>ce50079</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/identify/issues/492">#492</a> from qexat/add_gleam</li> <li><a href="https://github.com/pre-commit/identify/commit/ea174b5f24b75585fb4c2bc4b87817e8d541c7bb"><code>ea174b5</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/identify/issues/493">#493</a> from qexat/add_bzip3</li> <li><a href="https://github.com/pre-commit/identify/commit/b4ace9baf7e808b5d02e10911079879418079531"><code>b4ace9b</code></a> feat: identify bz3 files as bzip3 archives</li> <li><a href="https://github.com/pre-commit/identify/commit/e43a17c9df1a128c3852672e4ca71f77d93343db"><code>e43a17c</code></a> feat: identify gleam files</li> <li>See full diff in <a href="https://github.com/pre-commit/identify/compare/v2.6.3...v2.6.4">compare view</a></li> </ul> </details> <br /> 
[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=identify&package-manager=pip&previous-version=2.6.3&new-version=2.6.4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Sam Bull <git@sambull.org> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index d913a2c65f4..f260d298f46 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -82,7 +82,7 @@ gidgethub==5.3.0 # via cherry-picker gunicorn==23.0.0 # via -r requirements/base.in -identify==2.6.3 +identify==2.6.4 # via pre-commit idna==3.3 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 7cc628723b4..fcfb8b87d8b 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -80,7 +80,7 @@ gidgethub==5.3.0 # via cherry-picker gunicorn==23.0.0 # via -r requirements/base.in -identify==2.6.3 +identify==2.6.4 # via pre-commit idna==3.4 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index bfd9361e831..b1aacb161f2 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -29,7 +29,7 @@ filelock==3.16.1 # via virtualenv freezegun==1.5.1 # via -r requirements/lint.in -identify==2.6.3 +identify==2.6.4 # via 
pre-commit idna==3.7 # via trustme From 5cb5d968c8022a9165527a21d8db99859935b2d8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 2 Jan 2025 10:55:11 +0000 Subject: [PATCH 1134/1511] Bump trustme from 1.2.0 to 1.2.1 (#10291) Bumps [trustme](https://github.com/python-trio/trustme) from 1.2.0 to 1.2.1. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/python-trio/trustme/releases">trustme's releases</a>.</em></p> <blockquote> <h2>1.2.1</h2> <h2>Bugfixes</h2> <ul> <li>Update from deprecated pyOpenSSL APIs to non-deprecated cryptography APIs. (<a href="https://redirect.github.com/python-trio/trustme/issues/670">python-trio/trustme#670</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/python-trio/trustme/commit/6d39f51de72ce21bbb1d3842f8ed795b39e4cc9b"><code>6d39f51</code></a> Bump version to 1.2.1</li> <li><a href="https://github.com/python-trio/trustme/commit/4290066994d423bbf51cbf45a69300dac27361f0"><code>4290066</code></a> Polish off <a href="https://redirect.github.com/python-trio/trustme/issues/670">#670</a></li> <li><a href="https://github.com/python-trio/trustme/commit/a49a64b69d95e05d4fc1a14eff92cc056c61d164"><code>a49a64b</code></a> Switch to nox (<a href="https://redirect.github.com/python-trio/trustme/issues/668">#668</a>)</li> <li><a href="https://github.com/python-trio/trustme/commit/e3dc9046c40859c8b9664bf52b3610ff806e9722"><code>e3dc904</code></a> Use cryptography to load the pyOpenSSL certificates (<a href="https://redirect.github.com/python-trio/trustme/issues/670">#670</a>)</li> <li><a href="https://github.com/python-trio/trustme/commit/00a906ddeb470ef9a282c959e3483eda4fd8ae23"><code>00a906d</code></a> Bump the dependencies group with 10 updates (<a href="https://redirect.github.com/python-trio/trustme/issues/669">#669</a>)</li> <li><a 
href="https://github.com/python-trio/trustme/commit/49b59dd72f99d48c4d2a15fb7ec6419a7c857687"><code>49b59dd</code></a> Fix 1.2.0 release notes (<a href="https://redirect.github.com/python-trio/trustme/issues/667">#667</a>)</li> <li><a href="https://github.com/python-trio/trustme/commit/dd1778ba4440d68b8bc241c9f28c689a182271d8"><code>dd1778b</code></a> Bump version to 1.2.0+dev</li> <li>See full diff in <a href="https://github.com/python-trio/trustme/compare/v1.2.0...v1.2.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=trustme&package-manager=pip&previous-version=1.2.0&new-version=1.2.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index f260d298f46..23ddeca1404 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -244,7 +244,7 @@ towncrier==23.11.0 # via # -r requirements/doc.in # sphinxcontrib-towncrier -trustme==1.2.0 ; platform_machine != "i686" +trustme==1.2.1 ; platform_machine != "i686" # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index fcfb8b87d8b..6288bd3c3fe 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -235,7 +235,7 @@ towncrier==23.11.0 # via # -r requirements/doc.in # sphinxcontrib-towncrier -trustme==1.2.0 ; platform_machine != "i686" +trustme==1.2.1 ; platform_machine != "i686" # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index b1aacb161f2..8fb469df46e 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -89,7 
+89,7 @@ tomli==2.2.1 # mypy # pytest # slotscheck -trustme==1.2.0 +trustme==1.2.1 # via -r requirements/lint.in typing-extensions==4.12.2 # via diff --git a/requirements/test.txt b/requirements/test.txt index 11fc4409769..f100eccd210 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -117,7 +117,7 @@ tomli==2.2.1 # coverage # mypy # pytest -trustme==1.2.0 ; platform_machine != "i686" +trustme==1.2.1 ; platform_machine != "i686" # via -r requirements/test.in typing-extensions==4.12.2 # via From bd9a636e8eb44e36679425abf27cd712a3aff88b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 3 Jan 2025 10:46:01 +0000 Subject: [PATCH 1135/1511] Bump virtualenv from 20.28.0 to 20.28.1 (#10292) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [virtualenv](https://github.com/pypa/virtualenv) from 20.28.0 to 20.28.1. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pypa/virtualenv/releases">virtualenv's releases</a>.</em></p> <blockquote> <h2>20.28.1</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>release 20.28.0 by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2807">pypa/virtualenv#2807</a></li> <li>Skip tcsh tests on broken tcsh versions by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2817">pypa/virtualenv#2817</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pypa/virtualenv/compare/20.28.0...20.28.1">https://github.com/pypa/virtualenv/compare/20.28.0...20.28.1</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/virtualenv/blob/main/docs/changelog.rst">virtualenv's changelog</a>.</em></p> <blockquote> <h2>v20.28.1 
(2025-01-02)</h2> <p>Bugfixes - 20.28.1</p> <pre><code>- Skip tcsh tests on broken tcsh versions - by :user:`gaborbernat`. (:issue:`2814`) </code></pre> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/virtualenv/commit/25f561510d9500d4fd6c19b58d9a79e97f8687f7"><code>25f5615</code></a> release 20.28.1</li> <li><a href="https://github.com/pypa/virtualenv/commit/eb8c7c3bc1ddde4e86d8da1f11e6df0dc615ddc8"><code>eb8c7c3</code></a> Skip tcsh tests on broken tcsh versions (<a href="https://redirect.github.com/pypa/virtualenv/issues/2817">#2817</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/baf2da4bad9d5f5d91ac130589024d0a40062d8b"><code>baf2da4</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/pypa/virtualenv/issues/2810">#2810</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/2f5569cd7796ab3f7080d5d27bf769fd466b2e0d"><code>2f5569c</code></a> Bump pypa/gh-action-pypi-publish from 1.12.2 to 1.12.3 (<a href="https://redirect.github.com/pypa/virtualenv/issues/2813">#2813</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/9098223724f00e3eba47fb5fc35a0c880a0b6972"><code>9098223</code></a> Merge pull request <a href="https://redirect.github.com/pypa/virtualenv/issues/2807">#2807</a> from pypa/release-20.28.0</li> <li>See full diff in <a href="https://github.com/pypa/virtualenv/compare/20.28.0...20.28.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=virtualenv&package-manager=pip&previous-version=20.28.0&new-version=20.28.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 23ddeca1404..bd8f825e079 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -266,7 +266,7 @@ uvloop==0.21.0 ; platform_system != "Windows" # -r requirements/lint.in 
valkey==6.0.2 # via -r requirements/lint.in -virtualenv==20.28.0 +virtualenv==20.28.1 # via pre-commit wait-for-it==2.3.0 # via -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 6288bd3c3fe..794aa9618c3 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -257,7 +257,7 @@ uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpytho # -r requirements/lint.in valkey==6.0.2 # via -r requirements/lint.in -virtualenv==20.28.0 +virtualenv==20.28.1 # via pre-commit wait-for-it==2.3.0 # via -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 8fb469df46e..47f9ee6388d 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -102,5 +102,5 @@ uvloop==0.21.0 ; platform_system != "Windows" # via -r requirements/lint.in valkey==6.0.2 # via -r requirements/lint.in -virtualenv==20.28.0 +virtualenv==20.28.1 # via pre-commit From 1146697f1849f3d853f06284159cb426e19898a4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 6 Jan 2025 11:17:25 +0000 Subject: [PATCH 1136/1511] Bump setuptools from 75.6.0 to 75.7.0 (#10297) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [setuptools](https://github.com/pypa/setuptools) from 75.6.0 to 75.7.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/setuptools/blob/main/NEWS.rst">setuptools's changelog</a>.</em></p> <blockquote> <h1>v75.7.0</h1> <h2>Features</h2> <ul> <li><code>pypa/distutils#310</code><a href="https://redirect.github.com/pypa/setuptools/issues/4478">#4478</a>)</li> <li>Synced with pypa/distutils@ff11eed0c including bugfix for duplicate CFLAGS and adaption to support Python 3.13 is_abs in the C compiler (<a href="https://redirect.github.com/pypa/setuptools/issues/4669">#4669</a>). 
(<a href="https://redirect.github.com/pypa/setuptools/issues/4790">#4790</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/setuptools/commit/4e82e8b75c8bd8cba1232a107dc171b4fd2c588c"><code>4e82e8b</code></a> Bump version: 75.6.0 → 75.7.0</li> <li><a href="https://github.com/pypa/setuptools/commit/50b15dbedc3a5665717b9204cd85291152f5b314"><code>50b15db</code></a> Merge pull request <a href="https://redirect.github.com/pypa/setuptools/issues/4787">#4787</a> from Avasam/simplify-typed-assignements</li> <li><a href="https://github.com/pypa/setuptools/commit/86d8c992fc201fae91d135019e5abada183548e2"><code>86d8c99</code></a> Merge pull request <a href="https://redirect.github.com/pypa/setuptools/issues/4790">#4790</a> from pypa/feature/distutils-ff11eed0c</li> <li><a href="https://github.com/pypa/setuptools/commit/c384f184d20e8232a1ce73f88d151b9808b66949"><code>c384f18</code></a> Py_GIL_Disabled was handled previously.</li> <li><a href="https://github.com/pypa/setuptools/commit/fbe834af40de70c181cd2ff5395f8e3488177874"><code>fbe834a</code></a> Merge branch 'main' into feature/distutils-ff11eed0c</li> <li><a href="https://github.com/pypa/setuptools/commit/5ed9d93e77aa3e2c70d8cea1bfeb15549932169f"><code>5ed9d93</code></a> Add news fragment.</li> <li><a href="https://github.com/pypa/setuptools/commit/e94ce0d2aa2d38b67e5c7ee84ca53fde9d4b5b28"><code>e94ce0d</code></a> Merge with pypa/distutils.</li> <li><a href="https://github.com/pypa/setuptools/commit/ff11eed0c36b35bd68615a8ebf36763b7c8a6f28"><code>ff11eed</code></a> Merge pull request <a href="https://redirect.github.com/pypa/distutils/issues/325">pypa/distutils#325</a> from thesamesam/cflags</li> <li><a href="https://github.com/pypa/setuptools/commit/ebfebc096000e16f9d640660047eaec891a98529"><code>ebfebc0</code></a> Merge pull request <a href="https://redirect.github.com/pypa/distutils/issues/302">pypa/distutils#302</a> from 
adang1345/fix-python313-abs</li> <li><a href="https://github.com/pypa/setuptools/commit/36ce8b329524088cfa53b9a4bffcce3a8d233539"><code>36ce8b3</code></a> Refactor for simplicity.</li> <li>Additional commits viewable in <a href="https://github.com/pypa/setuptools/compare/v75.6.0...v75.7.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=setuptools&package-manager=pip&previous-version=75.6.0&new-version=75.7.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index bd8f825e079..5b201089fbe 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -278,7 +278,7 @@ yarl==1.18.3 # The following packages are considered to be unsafe in a requirements file: pip==24.3.1 # via pip-tools -setuptools==75.6.0 +setuptools==75.7.0 # via # incremental # pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index 794aa9618c3..4ec810be26e 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -269,7 +269,7 @@ yarl==1.18.3 # The following packages are considered to be unsafe in a requirements file: pip==24.3.1 # via pip-tools -setuptools==75.6.0 +setuptools==75.7.0 # via # incremental # pip-tools diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index d6793b46c9c..8b0d8032c7c 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -76,5 +76,5 @@ urllib3==2.3.0 # 
via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==75.6.0 +setuptools==75.7.0 # via incremental diff --git a/requirements/doc.txt b/requirements/doc.txt index 625c45c3d66..7163dda8abd 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -69,5 +69,5 @@ urllib3==2.3.0 # via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==75.6.0 +setuptools==75.7.0 # via incremental From bee424c97b1981a997feac414393d44bee83439b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 6 Jan 2025 11:19:52 +0000 Subject: [PATCH 1137/1511] Bump pygments from 2.18.0 to 2.19.0 (#10298) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pygments](https://github.com/pygments/pygments) from 2.18.0 to 2.19.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pygments/pygments/releases">pygments's releases</a>.</em></p> <blockquote> <h2>2.19.0</h2> <ul> <li> <p>New lexers:</p> <ul> <li>CodeQL (<a href="https://redirect.github.com/pygments/pygments/issues/2819">#2819</a>)</li> <li>Debian Sources (<a href="https://redirect.github.com/pygments/pygments/issues/2788">#2788</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2747">#2747</a>)</li> <li>Gleam (<a href="https://redirect.github.com/pygments/pygments/issues/2662">#2662</a>)</li> <li>GoogleSQL (<a href="https://redirect.github.com/pygments/pygments/issues/2820">#2820</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2814">#2814</a>)</li> <li>JSON5 (<a href="https://redirect.github.com/pygments/pygments/issues/2734">#2734</a>, <a href="https://redirect.github.com/pygments/pygments/issues/1880">#1880</a>)</li> <li>Maple (<a href="https://redirect.github.com/pygments/pygments/issues/2763">#2763</a>, <a 
href="https://redirect.github.com/pygments/pygments/issues/2548">#2548</a>)</li> <li>NumbaIR (<a href="https://redirect.github.com/pygments/pygments/issues/2433">#2433</a>)</li> <li>PDDL (<a href="https://redirect.github.com/pygments/pygments/issues/2799">#2799</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2616">#2616</a>)</li> <li>Rego (<a href="https://redirect.github.com/pygments/pygments/issues/2794">#2794</a>)</li> <li>TableGen (<a href="https://redirect.github.com/pygments/pygments/issues/2751">#2751</a>)</li> <li>Vue.js (<a href="https://redirect.github.com/pygments/pygments/issues/2832">#2832</a>)</li> </ul> </li> <li> <p>Updated lexers:</p> <ul> <li>BQN: Various improvements (<a href="https://redirect.github.com/pygments/pygments/issues/2789">#2789</a>)</li> <li>C#: Fix number highlighting (<a href="https://redirect.github.com/pygments/pygments/issues/986">#986</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2727">#2727</a>), add <code>file</code> keyword (<a href="https://redirect.github.com/pygments/pygments/issues/2726">#2726</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2805">#2805</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2806">#2806</a>), add various other keywords (<a href="https://redirect.github.com/pygments/pygments/issues/2745">#2745</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2770">#2770</a>)</li> <li>CSS: Add <code>revert</code> (<a href="https://redirect.github.com/pygments/pygments/issues/2766">#2766</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2775">#2775</a>)</li> <li>Debian control: Add <code>Change-By</code> field (<a href="https://redirect.github.com/pygments/pygments/issues/2757">#2757</a>)</li> <li>Elip: Improve punctuation handling (<a href="https://redirect.github.com/pygments/pygments/issues/2651">#2651</a>)</li> <li>Igor: Add <code>int</code> (<a 
href="https://redirect.github.com/pygments/pygments/issues/2801">#2801</a>)</li> <li>Ini: Fix quoted strings with embedded comment characters (<a href="https://redirect.github.com/pygments/pygments/issues/2767">#2767</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2720">#2720</a>)</li> <li>Java: Support functions returning types containing a question mark (<a href="https://redirect.github.com/pygments/pygments/issues/2737">#2737</a>)</li> <li>JavaScript: Support private identiiers (<a href="https://redirect.github.com/pygments/pygments/issues/2729">#2729</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2671">#2671</a>)</li> <li>LLVM: Add <code>splat</code>, improve floating-point number parsing (<a href="https://redirect.github.com/pygments/pygments/issues/2755">#2755</a>)</li> <li>Lua: Improve variable detection, add built-in functions (<a href="https://redirect.github.com/pygments/pygments/issues/2829">#2829</a>)</li> <li>Macaulay2: Update to 1.24.11 (<a href="https://redirect.github.com/pygments/pygments/issues/2800">#2800</a>)</li> <li>PostgreSQL: Add more <code>EXPLAIN</code> keywords (<a href="https://redirect.github.com/pygments/pygments/issues/2785">#2785</a>), handle <code>/</code> (<a href="https://redirect.github.com/pygments/pygments/issues/2774">#2774</a>)</li> <li>S-Lexer: Fix keywords (<a href="https://redirect.github.com/pygments/pygments/issues/2082">#2082</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2750">#2750</a>)</li> <li>TransactSQL: Fix single-line comments (<a href="https://redirect.github.com/pygments/pygments/issues/2717">#2717</a>)</li> <li>Turtle: Fix triple quoted strings (<a href="https://redirect.github.com/pygments/pygments/issues/2744">#2744</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2758">#2758</a>)</li> <li>Typst: Various improvements (<a href="https://redirect.github.com/pygments/pygments/issues/2724">#2724</a>)</li> <li>Various: Add <code>^</code> 
as an operator to Matlab, Octave and Scilab (<a href="https://redirect.github.com/pygments/pygments/issues/2798">#2798</a>)</li> <li>Vyper: Add <code>staticcall</code> and <code>extcall</code> (<a href="https://redirect.github.com/pygments/pygments/issues/2719">#2719</a>)</li> </ul> </li> </ul> <ul> <li>Mark file extensions for <code>HTML/XML+Evoque</code> as aliases (<a href="https://redirect.github.com/pygments/pygments/issues/2743">#2743</a>)</li> <li>Add a color for <code>Operator.Word</code> to the <code>rrt</code> style (<a href="https://redirect.github.com/pygments/pygments/issues/2709">#2709</a>)</li> <li>Fix broken link in the documentation (<a href="https://redirect.github.com/pygments/pygments/issues/2803">#2803</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2804">#2804</a>)</li> <li>Drop executable bit where not needed (<a href="https://redirect.github.com/pygments/pygments/issues/2781">#2781</a>)</li> <li>Reduce Mojo priority relative to Python in ``analyze_text´` (<a href="https://redirect.github.com/pygments/pygments/issues/2771">#2771</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2772">#2772</a>)</li> <li>Fix documentation builds (<a href="https://redirect.github.com/pygments/pygments/issues/2712">#2712</a>)</li> <li>Match example file names to the lexer's name (<a href="https://redirect.github.com/pygments/pygments/issues/2713">#2713</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2715">#2715</a>)</li> <li>Ensure lexer metadata is present (<a href="https://redirect.github.com/pygments/pygments/issues/2714">#2714</a>)</li> <li>Search more directories on macOS for fonts (<a href="https://redirect.github.com/pygments/pygments/issues/2809">#2809</a>)</li> <li>Improve test robustness (<a href="https://redirect.github.com/pygments/pygments/issues/2812">#2812</a>)</li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a 
href="https://github.com/pygments/pygments/blob/master/CHANGES">pygments's changelog</a>.</em></p> <blockquote> <h2>Version 2.19.0</h2> <p>(released January 5th, 2025)</p> <ul> <li> <p>New lexers:</p> <ul> <li>CodeQL (<a href="https://redirect.github.com/pygments/pygments/issues/2819">#2819</a>)</li> <li>Debian Sources (<a href="https://redirect.github.com/pygments/pygments/issues/2788">#2788</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2747">#2747</a>)</li> <li>Gleam (<a href="https://redirect.github.com/pygments/pygments/issues/2662">#2662</a>)</li> <li>GoogleSQL (<a href="https://redirect.github.com/pygments/pygments/issues/2820">#2820</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2814">#2814</a>)</li> <li>JSON5 (<a href="https://redirect.github.com/pygments/pygments/issues/2734">#2734</a>, <a href="https://redirect.github.com/pygments/pygments/issues/1880">#1880</a>)</li> <li>Maple (<a href="https://redirect.github.com/pygments/pygments/issues/2763">#2763</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2548">#2548</a>)</li> <li>NumbaIR (<a href="https://redirect.github.com/pygments/pygments/issues/2433">#2433</a>)</li> <li>PDDL (<a href="https://redirect.github.com/pygments/pygments/issues/2799">#2799</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2616">#2616</a>)</li> <li>Rego (<a href="https://redirect.github.com/pygments/pygments/issues/2794">#2794</a>)</li> <li>TableGen (<a href="https://redirect.github.com/pygments/pygments/issues/2751">#2751</a>)</li> <li>Vue.js (<a href="https://redirect.github.com/pygments/pygments/issues/2832">#2832</a>)</li> </ul> </li> <li> <p>Updated lexers:</p> <ul> <li>BQN: Various improvements (<a href="https://redirect.github.com/pygments/pygments/issues/2789">#2789</a>)</li> <li>C#: Fix number highlighting (<a href="https://redirect.github.com/pygments/pygments/issues/986">#986</a>, <a 
href="https://redirect.github.com/pygments/pygments/issues/2727">#2727</a>), add <code>file</code> keyword (<a href="https://redirect.github.com/pygments/pygments/issues/2726">#2726</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2805">#2805</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2806">#2806</a>), add various other keywords (<a href="https://redirect.github.com/pygments/pygments/issues/2745">#2745</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2770">#2770</a>)</li> <li>CSS: Add <code>revert</code> (<a href="https://redirect.github.com/pygments/pygments/issues/2766">#2766</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2775">#2775</a>)</li> <li>Debian control: Add <code>Change-By</code> field (<a href="https://redirect.github.com/pygments/pygments/issues/2757">#2757</a>)</li> <li>Elip: Improve punctuation handling (<a href="https://redirect.github.com/pygments/pygments/issues/2651">#2651</a>)</li> <li>Igor: Add <code>int</code> (<a href="https://redirect.github.com/pygments/pygments/issues/2801">#2801</a>)</li> <li>Ini: Fix quoted strings with embedded comment characters (<a href="https://redirect.github.com/pygments/pygments/issues/2767">#2767</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2720">#2720</a>)</li> <li>Java: Support functions returning types containing a question mark (<a href="https://redirect.github.com/pygments/pygments/issues/2737">#2737</a>)</li> <li>JavaScript: Support private identiiers (<a href="https://redirect.github.com/pygments/pygments/issues/2729">#2729</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2671">#2671</a>)</li> <li>LLVM: Add <code>splat</code>, improve floating-point number parsing (<a href="https://redirect.github.com/pygments/pygments/issues/2755">#2755</a>)</li> <li>Lua: Improve variable detection, add built-in functions (<a 
href="https://redirect.github.com/pygments/pygments/issues/2829">#2829</a>)</li> <li>Macaulay2: Update to 1.24.11 (<a href="https://redirect.github.com/pygments/pygments/issues/2800">#2800</a>)</li> <li>PostgreSQL: Add more <code>EXPLAIN</code> keywords (<a href="https://redirect.github.com/pygments/pygments/issues/2785">#2785</a>), handle <code>/</code> (<a href="https://redirect.github.com/pygments/pygments/issues/2774">#2774</a>)</li> <li>S-Lexer: Fix keywords (<a href="https://redirect.github.com/pygments/pygments/issues/2082">#2082</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2750">#2750</a>)</li> <li>TransactSQL: Fix single-line comments (<a href="https://redirect.github.com/pygments/pygments/issues/2717">#2717</a>)</li> <li>Turtle: Fix triple quoted strings (<a href="https://redirect.github.com/pygments/pygments/issues/2744">#2744</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2758">#2758</a>)</li> <li>Typst: Various improvements (<a href="https://redirect.github.com/pygments/pygments/issues/2724">#2724</a>)</li> <li>Various: Add <code>^</code> as an operator to Matlab, Octave and Scilab (<a href="https://redirect.github.com/pygments/pygments/issues/2798">#2798</a>)</li> <li>Vyper: Add <code>staticcall</code> and <code>extcall</code> (<a href="https://redirect.github.com/pygments/pygments/issues/2719">#2719</a>)</li> </ul> </li> </ul> <ul> <li>Mark file extensions for <code>HTML/XML+Evoque</code> as aliases (<a href="https://redirect.github.com/pygments/pygments/issues/2743">#2743</a>)</li> <li>Add a color for <code>Operator.Word</code> to the <code>rrt</code> style (<a href="https://redirect.github.com/pygments/pygments/issues/2709">#2709</a>)</li> <li>Fix broken link in the documentation (<a href="https://redirect.github.com/pygments/pygments/issues/2803">#2803</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2804">#2804</a>)</li> <li>Drop executable bit where not needed (<a 
href="https://redirect.github.com/pygments/pygments/issues/2781">#2781</a>)</li> <li>Reduce Mojo priority relative to Python in ``analyze_text`` (<a href="https://redirect.github.com/pygments/pygments/issues/2771">#2771</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2772">#2772</a>)</li> <li>Fix documentation builds (<a href="https://redirect.github.com/pygments/pygments/issues/2712">#2712</a>)</li> <li>Match example file names to the lexer's name (<a href="https://redirect.github.com/pygments/pygments/issues/2713">#2713</a>, <a href="https://redirect.github.com/pygments/pygments/issues/2715">#2715</a>)</li> <li>Ensure lexer metadata is present (<a href="https://redirect.github.com/pygments/pygments/issues/2714">#2714</a>)</li> <li>Search more directories on macOS for fonts (<a href="https://redirect.github.com/pygments/pygments/issues/2809">#2809</a>)</li> <li>Improve test robustness (<a href="https://redirect.github.com/pygments/pygments/issues/2812">#2812</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pygments/pygments/commit/62dff316900a548aabf741d662b79edc39ed78b5"><code>62dff31</code></a> Prepare 2.19 release.</li> <li><a href="https://github.com/pygments/pygments/commit/0ee47fb292be8142f359c5ae94472a508a69ba6d"><code>0ee47fb</code></a> It's 2025.</li> <li><a href="https://github.com/pygments/pygments/commit/3f79c5d8d0c686e58a199aff479e42b3caeb7370"><code>3f79c5d</code></a> Check fixes.</li> <li><a href="https://github.com/pygments/pygments/commit/ed139881e3f4e39cd4db076c87e4b118d91ac705"><code>ed13988</code></a> Small fixes for Maple lexer.</li> <li><a href="https://github.com/pygments/pygments/commit/a522163c58feb0d15d44dddc123dcdd495bea572"><code>a522163</code></a> Merge pull request <a href="https://redirect.github.com/pygments/pygments/issues/2763">#2763</a> from Randl/master</li> <li><a 
href="https://github.com/pygments/pygments/commit/69216f7914b2fb5648e8ba9aabb953f22efb18d6"><code>69216f7</code></a> Small fixes for Numba IR.</li> <li><a href="https://github.com/pygments/pygments/commit/08b8a96e69490497959b91ade089f1fa80768b99"><code>08b8a96</code></a> Merge pull request <a href="https://redirect.github.com/pygments/pygments/issues/2433">#2433</a> from Matt711/numbair</li> <li><a href="https://github.com/pygments/pygments/commit/9ac75ea5a04cc6405c5055e87a419b2d8827af87"><code>9ac75ea</code></a> Small fixups for CodeQL.</li> <li><a href="https://github.com/pygments/pygments/commit/ef4f5e9a343a7d6e095d604e9843a29ac9f1ea3a"><code>ef4f5e9</code></a> Merge pull request <a href="https://redirect.github.com/pygments/pygments/issues/2819">#2819</a> from DarkaMaul/dm/add-codeql-lexer</li> <li><a href="https://github.com/pygments/pygments/commit/2dc2c3fc292f77dc9b71c4d59d489a82f4d43473"><code>2dc2c3f</code></a> Merge branch 'master' into dm/add-codeql-lexer</li> <li>Additional commits viewable in <a href="https://github.com/pygments/pygments/compare/2.18.0...2.19.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pygments&package-manager=pip&previous-version=2.18.0&new-version=2.19.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 5b201089fbe..72ee160e7d6 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ 
-148,7 +148,7 @@ pydantic-core==2.27.2 # via pydantic pyenchant==3.2.2 # via sphinxcontrib-spelling -pygments==2.18.0 +pygments==2.19.0 # via # rich # sphinx diff --git a/requirements/dev.txt b/requirements/dev.txt index 4ec810be26e..54c0722ec1f 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -143,7 +143,7 @@ pydantic==2.10.4 # via python-on-whales pydantic-core==2.27.2 # via pydantic -pygments==2.18.0 +pygments==2.19.0 # via # rich # sphinx diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 8b0d8032c7c..9d979436ded 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -34,7 +34,7 @@ packaging==24.2 # via sphinx pyenchant==3.2.2 # via sphinxcontrib-spelling -pygments==2.18.0 +pygments==2.19.0 # via sphinx requests==2.32.3 # via diff --git a/requirements/doc.txt b/requirements/doc.txt index 7163dda8abd..281cd202d6e 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -32,7 +32,7 @@ markupsafe==3.0.2 # via jinja2 packaging==24.2 # via sphinx -pygments==2.18.0 +pygments==2.19.0 # via sphinx requests==2.32.3 # via sphinx diff --git a/requirements/lint.txt b/requirements/lint.txt index 47f9ee6388d..0f9e999eae4 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -61,7 +61,7 @@ pydantic==2.10.4 # via python-on-whales pydantic-core==2.27.2 # via pydantic -pygments==2.18.0 +pygments==2.19.0 # via rich pytest==8.3.4 # via diff --git a/requirements/test.txt b/requirements/test.txt index f100eccd210..4a187bbdc28 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -81,7 +81,7 @@ pydantic==2.10.4 # via python-on-whales pydantic-core==2.27.2 # via pydantic -pygments==2.18.0 +pygments==2.19.0 # via rich pytest==8.3.4 # via From d5dd9d049a47cfba92746973aa862b13a09668b7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 6 Jan 2025 11:26:36 +0000 Subject: [PATCH 1138/1511] Bump identify from 2.6.4 to 2.6.5 
(#10299) Bumps [identify](https://github.com/pre-commit/identify) from 2.6.4 to 2.6.5. <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pre-commit/identify/commit/1eadcb782d64c204597ec6b2142982a89ebda7ed"><code>1eadcb7</code></a> v2.6.5</li> <li><a href="https://github.com/pre-commit/identify/commit/2bb89b8d7e1e4971709db4261403c1c9605629de"><code>2bb89b8</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/identify/issues/498">#498</a> from MatejKafka/main</li> <li><a href="https://github.com/pre-commit/identify/commit/3fa1e74cedb42eeeaee6088578c78a412891f269"><code>3fa1e74</code></a> feat: identify .psm1 and .psd1 files as powershell</li> <li>See full diff in <a href="https://github.com/pre-commit/identify/compare/v2.6.4...v2.6.5">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=identify&package-manager=pip&previous-version=2.6.4&new-version=2.6.5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 72ee160e7d6..f5ef3c0b380 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -82,7 +82,7 @@ gidgethub==5.3.0 # via cherry-picker gunicorn==23.0.0 # via -r 
requirements/base.in -identify==2.6.4 +identify==2.6.5 # via pre-commit idna==3.3 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 54c0722ec1f..922f5caed8b 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -80,7 +80,7 @@ gidgethub==5.3.0 # via cherry-picker gunicorn==23.0.0 # via -r requirements/base.in -identify==2.6.4 +identify==2.6.5 # via pre-commit idna==3.4 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index 0f9e999eae4..70360ce5c30 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -29,7 +29,7 @@ filelock==3.16.1 # via virtualenv freezegun==1.5.1 # via -r requirements/lint.in -identify==2.6.4 +identify==2.6.5 # via pre-commit idna==3.7 # via trustme From 52e4ea6785579dee2e1b397a4ce7e844f88b8526 Mon Sep 17 00:00:00 2001 From: Cycloctane <Cycloctane@outlook.com> Date: Tue, 7 Jan 2025 03:20:02 +0800 Subject: [PATCH 1139/1511] [PR #10300/3d06cc1][3.12] Use kwargs in aiohttp.client.request (#10302) (cherry picked from commit 3d06cc14ad82ce4c207449b593955c23bd5e4d88) --- CHANGES/10300.feature.rst | 2 + aiohttp/client.py | 178 ++++++++++++++------------------ docs/spelling_wordlist.txt | 2 + tests/test_client_functional.py | 16 +++ 4 files changed, 96 insertions(+), 102 deletions(-) create mode 100644 CHANGES/10300.feature.rst diff --git a/CHANGES/10300.feature.rst b/CHANGES/10300.feature.rst new file mode 100644 index 00000000000..3632c3d41a7 --- /dev/null +++ b/CHANGES/10300.feature.rst @@ -0,0 +1,2 @@ +Update :py:func:`~aiohttp.request` to make it accept ``_RequestOptions`` kwargs. +-- by :user:`Cycloctane`. 
diff --git a/aiohttp/client.py b/aiohttp/client.py index fbf691e89d1..2d5a9a4cdce 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -1469,106 +1469,80 @@ async def __aexit__( await self._session.close() -def request( - method: str, - url: StrOrURL, - *, - params: Query = None, - data: Any = None, - json: Any = None, - headers: Optional[LooseHeaders] = None, - skip_auto_headers: Optional[Iterable[str]] = None, - auth: Optional[BasicAuth] = None, - allow_redirects: bool = True, - max_redirects: int = 10, - compress: Optional[str] = None, - chunked: Optional[bool] = None, - expect100: bool = False, - raise_for_status: Optional[bool] = None, - read_until_eof: bool = True, - proxy: Optional[StrOrURL] = None, - proxy_auth: Optional[BasicAuth] = None, - timeout: Union[ClientTimeout, object] = sentinel, - cookies: Optional[LooseCookies] = None, - version: HttpVersion = http.HttpVersion11, - connector: Optional[BaseConnector] = None, - read_bufsize: Optional[int] = None, - loop: Optional[asyncio.AbstractEventLoop] = None, - max_line_size: int = 8190, - max_field_size: int = 8190, -) -> _SessionRequestContextManager: - """Constructs and sends a request. - - Returns response object. - method - HTTP method - url - request url - params - (optional) Dictionary or bytes to be sent in the query - string of the new request - data - (optional) Dictionary, bytes, or file-like object to - send in the body of the request - json - (optional) Any json compatible python object - headers - (optional) Dictionary of HTTP Headers to send with - the request - cookies - (optional) Dict object to send with the request - auth - (optional) BasicAuth named tuple represent HTTP Basic Auth - auth - aiohttp.helpers.BasicAuth - allow_redirects - (optional) If set to False, do not follow - redirects - version - Request HTTP version. - compress - Set to True if request has to be compressed - with deflate encoding. - chunked - Set to chunk size for chunked transfer encoding. 
- expect100 - Expect 100-continue response from server. - connector - BaseConnector sub-class instance to support - connection pooling. - read_until_eof - Read response until eof if response - does not have Content-Length header. - loop - Optional event loop. - timeout - Optional ClientTimeout settings structure, 5min - total timeout by default. - Usage:: - >>> import aiohttp - >>> resp = await aiohttp.request('GET', 'http://python.org/') - >>> resp - <ClientResponse(python.org/) [200]> - >>> data = await resp.read() - """ - connector_owner = False - if connector is None: - connector_owner = True - connector = TCPConnector(loop=loop, force_close=True) - - session = ClientSession( - loop=loop, - cookies=cookies, - version=version, - timeout=timeout, - connector=connector, - connector_owner=connector_owner, - ) +if sys.version_info >= (3, 11) and TYPE_CHECKING: - return _SessionRequestContextManager( - session._request( - method, - url, - params=params, - data=data, - json=json, - headers=headers, - skip_auto_headers=skip_auto_headers, - auth=auth, - allow_redirects=allow_redirects, - max_redirects=max_redirects, - compress=compress, - chunked=chunked, - expect100=expect100, - raise_for_status=raise_for_status, - read_until_eof=read_until_eof, - proxy=proxy, - proxy_auth=proxy_auth, - read_bufsize=read_bufsize, - max_line_size=max_line_size, - max_field_size=max_field_size, - ), - session, - ) + def request( + method: str, + url: StrOrURL, + *, + version: HttpVersion = http.HttpVersion11, + connector: Optional[BaseConnector] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + **kwargs: Unpack[_RequestOptions], + ) -> _SessionRequestContextManager: ... + +else: + + def request( + method: str, + url: StrOrURL, + *, + version: HttpVersion = http.HttpVersion11, + connector: Optional[BaseConnector] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + **kwargs: Any, + ) -> _SessionRequestContextManager: + """Constructs and sends a request. 
+ + Returns response object. + method - HTTP method + url - request url + params - (optional) Dictionary or bytes to be sent in the query + string of the new request + data - (optional) Dictionary, bytes, or file-like object to + send in the body of the request + json - (optional) Any json compatible python object + headers - (optional) Dictionary of HTTP Headers to send with + the request + cookies - (optional) Dict object to send with the request + auth - (optional) BasicAuth named tuple represent HTTP Basic Auth + auth - aiohttp.helpers.BasicAuth + allow_redirects - (optional) If set to False, do not follow + redirects + version - Request HTTP version. + compress - Set to True if request has to be compressed + with deflate encoding. + chunked - Set to chunk size for chunked transfer encoding. + expect100 - Expect 100-continue response from server. + connector - BaseConnector sub-class instance to support + connection pooling. + read_until_eof - Read response until eof if response + does not have Content-Length header. + loop - Optional event loop. + timeout - Optional ClientTimeout settings structure, 5min + total timeout by default. + Usage:: + >>> import aiohttp + >>> async with aiohttp.request('GET', 'http://python.org/') as resp: + ... print(resp) + ... 
data = await resp.read() + <ClientResponse(https://www.python.org/) [200 OK]> + """ + connector_owner = False + if connector is None: + connector_owner = True + connector = TCPConnector(loop=loop, force_close=True) + + session = ClientSession( + loop=loop, + cookies=kwargs.pop("cookies", None), + version=version, + timeout=kwargs.pop("timeout", sentinel), + connector=connector, + connector_owner=connector_owner, + ) + + return _SessionRequestContextManager( + session._request(method, url, **kwargs), + session, + ) diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index c4e10b44987..3e41af824e4 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -13,6 +13,7 @@ app app’s apps arg +args Arsenic async asyncio @@ -169,6 +170,7 @@ keepaliving kib KiB kwarg +kwargs latin lifecycle linux diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 05af9ae25ad..ba75e8e93c6 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -3382,6 +3382,22 @@ async def handler(request: web.Request) -> web.Response: await server.close() +async def test_aiohttp_request_ssl( + aiohttp_server: AiohttpServer, + ssl_ctx: ssl.SSLContext, + client_ssl_ctx: ssl.SSLContext, +) -> None: + async def handler(request: web.Request) -> web.Response: + return web.Response() + + app = web.Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app, ssl=ssl_ctx) + + async with aiohttp.request("GET", server.make_url("/"), ssl=client_ssl_ctx) as resp: + assert resp.status == 200 + + async def test_yield_from_in_session_request(aiohttp_client: AiohttpClient) -> None: # a test for backward compatibility with yield from syntax async def handler(request): From 3d3a088447434bb9b048d5e74d6da07ef416da09 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 7 Jan 2025 11:10:40 +0000 Subject: [PATCH 1140/1511] [PR #10301/77f25a0a 
backport][3.12] Update docs for aiohttp.request (#10308) **This is a backport of PR #10301 as merged into master (77f25a0a4da3cc20f4c5b6d12be9d273ea02f1df).** Co-authored-by: Cycloctane <Cycloctane@outlook.com> --- docs/client_reference.rst | 125 +++++++++++++++++++++++++++++--------- 1 file changed, 97 insertions(+), 28 deletions(-) diff --git a/docs/client_reference.rst b/docs/client_reference.rst index e18785ea796..013c43a13e4 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -517,7 +517,7 @@ The client session supports the context manager protocol for self closing. .. versionadded:: 3.0 :param str server_hostname: Sets or overrides the host name that the - target server’s certificate will be matched against. + target server's certificate will be matched against. See :py:meth:`asyncio.loop.create_connection` for more information. @@ -854,14 +854,21 @@ certification chaining. .. function:: request(method, url, *, params=None, data=None, \ json=None,\ - headers=None, cookies=None, auth=None, \ + cookies=None, headers=None, skip_auto_headers=None, auth=None, \ allow_redirects=True, max_redirects=10, \ - encoding='utf-8', \ - version=HttpVersion(major=1, minor=1), \ - compress=None, chunked=None, expect100=False, raise_for_status=False, \ + compress=False, chunked=None, expect100=False, raise_for_status=None, \ + read_until_eof=True, \ + proxy=None, proxy_auth=None, \ + timeout=sentinel, ssl=True, \ + server_hostname=None, \ + proxy_headers=None, \ + trace_request_ctx=None, \ read_bufsize=None, \ - connector=None, loop=None,\ - read_until_eof=True, timeout=sentinel) + auto_decompress=None, \ + max_line_size=None, \ + max_field_size=None, \ + version=aiohttp.HttpVersion11, \ + connector=None) :async: Asynchronous context manager for performing an asynchronous HTTP @@ -874,8 +881,20 @@ certification chaining. be encoded with :class:`~yarl.URL` (see :class:`~yarl.URL` to skip encoding). 
- :param dict params: Parameters to be sent in the query - string of the new request (optional) + :param params: Mapping, iterable of tuple of *key*/*value* pairs or + string to be sent as parameters in the query + string of the new request. Ignored for subsequent + redirected requests (optional) + + Allowed values are: + + - :class:`collections.abc.Mapping` e.g. :class:`dict`, + :class:`multidict.MultiDict` or + :class:`multidict.MultiDictProxy` + - :class:`collections.abc.Iterable` e.g. :class:`tuple` or + :class:`list` + - :class:`str` with preferably url-encoded content + (**Warning:** content will not be encoded by *aiohttp*) :param data: The data to send in the body of the request. This can be a :class:`FormData` object or anything that can be passed into @@ -885,28 +904,46 @@ certification chaining. :param json: Any json compatible python object (optional). *json* and *data* parameters could not be used at the same time. + :param dict cookies: HTTP Cookies to send with the request (optional) + :param dict headers: HTTP Headers to send with the request (optional) - :param dict cookies: Cookies to send with the request (optional) + :param skip_auto_headers: set of headers for which autogeneration + should be skipped. + + *aiohttp* autogenerates headers like ``User-Agent`` or + ``Content-Type`` if these headers are not explicitly + passed. Using ``skip_auto_headers`` parameter allows to skip + that generation. + + Iterable of :class:`str` or :class:`~multidict.istr` + (optional) :param aiohttp.BasicAuth auth: an object that represents HTTP Basic Authorization (optional) :param bool allow_redirects: Whether to process redirects or not. - When ``True``, redirects are followed (up to ``max_redirects`` times) - and logged into :attr:`ClientResponse.history` and ``trace_configs``. - When ``False``, the original response is returned. - ``True`` by default (optional). 
+ When ``True``, redirects are followed (up to ``max_redirects`` times) + and logged into :attr:`ClientResponse.history` and ``trace_configs``. + When ``False``, the original response is returned. + ``True`` by default (optional). - :param aiohttp.protocol.HttpVersion version: Request HTTP version (optional) + :param int max_redirects: Maximum number of redirects to follow. + :exc:`TooManyRedirects` is raised if the number is exceeded. + Ignored when ``allow_redirects=False``. + ``10`` by default. :param bool compress: Set to ``True`` if request has to be compressed - with deflate encoding. - ``False`` instructs aiohttp to not compress data. + with deflate encoding. If `compress` can not be combined + with a *Content-Encoding* and *Content-Length* headers. ``None`` by default (optional). :param int chunked: Enables chunked transfer encoding. - ``None`` by default (optional). + It is up to the developer + to decide how to chunk data streams. If chunking is enabled, aiohttp + encodes the provided chunks in the "Transfer-encoding: chunked" format. + If *chunked* is set, then the *Transfer-encoding* and *content-length* + headers are disallowed. ``None`` by default (optional). :param bool expect100: Expect 100-continue response from server. ``False`` by default (optional). @@ -920,28 +957,60 @@ certification chaining. .. versionadded:: 3.4 - :param aiohttp.BaseConnector connector: BaseConnector sub-class - instance to support connection pooling. - :param bool read_until_eof: Read response until EOF if response does not have Content-Length header. ``True`` by default (optional). + :param proxy: Proxy URL, :class:`str` or :class:`~yarl.URL` (optional) + + :param aiohttp.BasicAuth proxy_auth: an object that represents proxy HTTP + Basic Authorization (optional) + + :param timeout: a :class:`ClientTimeout` settings structure, 300 seconds (5min) + total timeout, 30 seconds socket connect timeout by default. + + :param ssl: SSL validation mode. 
``True`` for default SSL check + (:func:`ssl.create_default_context` is used), + ``False`` for skip SSL certificate validation, + :class:`aiohttp.Fingerprint` for fingerprint + validation, :class:`ssl.SSLContext` for custom SSL + certificate validation. + + Supersedes *verify_ssl*, *ssl_context* and + *fingerprint* parameters. + + :param str server_hostname: Sets or overrides the host name that the + target server's certificate will be matched against. + + See :py:meth:`asyncio.loop.create_connection` + for more information. + + :param collections.abc.Mapping proxy_headers: HTTP headers to send to the proxy + if the parameter proxy has been provided. + + :param trace_request_ctx: Object used to give as a kw param for each new + :class:`TraceConfig` object instantiated, + used to give information to the + tracers that is only available at request time. + :param int read_bufsize: Size of the read buffer (:attr:`ClientResponse.content`). ``None`` by default, it means that the session global value is used. .. versionadded:: 3.7 - :param timeout: a :class:`ClientTimeout` settings structure, 300 seconds (5min) - total timeout, 30 seconds socket connect timeout by default. + :param bool auto_decompress: Automatically decompress response body. + May be used to enable/disable auto decompression on a per-request basis. - :param loop: :ref:`event loop<asyncio-event-loop>` - used for processing HTTP requests. - If param is ``None``, :func:`asyncio.get_event_loop` - is used for getting default event loop. + :param int max_line_size: Maximum allowed size of lines in responses. - .. deprecated:: 2.0 + :param int max_field_size: Maximum allowed size of header fields in responses. + + :param aiohttp.protocol.HttpVersion version: Request HTTP version, + ``HTTP 1.1`` by default. (optional) + + :param aiohttp.BaseConnector connector: BaseConnector sub-class + instance to support connection pooling. (optional) :return ClientResponse: a :class:`client response <ClientResponse>` object. 
From c42472fac83026562d9f41183918f214528ebce2 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 7 Jan 2025 11:11:02 +0000 Subject: [PATCH 1141/1511] =?UTF-8?q?[PR=20#10304/b6ffb1d1=20backport][3.1?= =?UTF-8?q?2]=20Add=20aiohttp-openmetrics=20to=20list=20of=20third=20party?= =?UTF-8?q?=20aiohttp-related=20python=E2=80=A6=20(#10306)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **This is a backport of PR #10304 as merged into master (b6ffb1d1a3b710c600f06c9d21cf62f75d324767).** Co-authored-by: Jelmer Vernooij <jelmer@jelmer.uk> --- CHANGES/10304.doc.rst | 1 + docs/third_party.rst | 3 +++ 2 files changed, 4 insertions(+) create mode 100644 CHANGES/10304.doc.rst diff --git a/CHANGES/10304.doc.rst b/CHANGES/10304.doc.rst new file mode 100644 index 00000000000..cedac3ef881 --- /dev/null +++ b/CHANGES/10304.doc.rst @@ -0,0 +1 @@ +Added ``aiohttp-openmetrics`` to list of third-party libraries -- by :user:`jelmer`. diff --git a/docs/third_party.rst b/docs/third_party.rst index e8095c7f09d..145a505a5de 100644 --- a/docs/third_party.rst +++ b/docs/third_party.rst @@ -305,3 +305,6 @@ ask to raise the status. - `aiohttp-asgi-connector <https://github.com/thearchitector/aiohttp-asgi-connector>`_ An aiohttp connector for using a ``ClientSession`` to interface directly with separate ASGI applications. + +- `aiohttp-openmetrics <https://github.com/jelmer/aiohttp-openmetrics>`_ + An aiohttp middleware for exposing Prometheus metrics. From 64730512593d58b389f8e203f7966da72fe13a92 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 7 Jan 2025 11:26:08 +0000 Subject: [PATCH 1142/1511] Bump pygments from 2.19.0 to 2.19.1 (#10310) Bumps [pygments](https://github.com/pygments/pygments) from 2.19.0 to 2.19.1. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pygments/pygments/releases">pygments's releases</a>.</em></p> <blockquote> <h2>2.19.1</h2> <ul> <li> <p>Updated lexers:</p> <ul> <li>Ini: Fix quoted string regression introduced in 2.19.0</li> <li>Lua: Fix a regression introduced in 2.19.0</li> </ul> </li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pygments/pygments/blob/master/CHANGES">pygments's changelog</a>.</em></p> <blockquote> <h2>Version 2.19.1</h2> <p>(released January 6th, 2025)</p> <ul> <li> <p>Updated lexers:</p> <ul> <li>Ini: Fix quoted string regression introduced in 2.19.0</li> <li>Lua: Fix a regression introduced in 2.19.0</li> </ul> </li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pygments/pygments/commit/b583de4794e94b4dc4c2da03a7c29f462482293e"><code>b583de4</code></a> Prepare 2.19.1 release.</li> <li><a href="https://github.com/pygments/pygments/commit/c13f3f11b8594decd01a8867e17a1a078ba2defd"><code>c13f3f1</code></a> Prepare 2.19.1 release.</li> <li><a href="https://github.com/pygments/pygments/commit/cdbcd43227ea838909abada93533013a99b564c7"><code>cdbcd43</code></a> Add regression test for .ini issue, update CHANGES.</li> <li><a href="https://github.com/pygments/pygments/commit/5792a21f452cd85c40c5a45bd6d69abb4d3a6d3e"><code>5792a21</code></a> Update CHANGES.</li> <li><a href="https://github.com/pygments/pygments/commit/a9858663ed85219ed7475f5877b22b9cb49f660f"><code>a985866</code></a> Merge pull request <a href="https://redirect.github.com/pygments/pygments/issues/2835">#2835</a> from kartben/fix_ini_double_quotes</li> <li><a href="https://github.com/pygments/pygments/commit/5822fd1fda4ded57651b6e95664da2c37bd0ed58"><code>5822fd1</code></a> Fix Lua regressions.</li> <li><a 
href="https://github.com/pygments/pygments/commit/673d8243c1c66363c83ae5467b4cecf13b506f3c"><code>673d824</code></a> Fix quoted string handling</li> <li><a href="https://github.com/pygments/pygments/commit/43bf86fb86f0a3a4bacedc65eace650947ecee51"><code>43bf86f</code></a> Improve example regex in the docs.</li> <li><a href="https://github.com/pygments/pygments/commit/452ac621811e946c8276c298dc318343d63371e4"><code>452ac62</code></a> Move on past 2.19.0.</li> <li>See full diff in <a href="https://github.com/pygments/pygments/compare/2.19.0...2.19.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pygments&package-manager=pip&previous-version=2.19.0&new-version=2.19.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index f5ef3c0b380..c762661470a 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -148,7 +148,7 @@ pydantic-core==2.27.2 # via pydantic pyenchant==3.2.2 # via sphinxcontrib-spelling -pygments==2.19.0 +pygments==2.19.1 # via # rich # sphinx diff --git a/requirements/dev.txt b/requirements/dev.txt index 922f5caed8b..75cf289cad1 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -143,7 +143,7 @@ pydantic==2.10.4 # via python-on-whales pydantic-core==2.27.2 # via pydantic -pygments==2.19.0 +pygments==2.19.1 # via # rich # sphinx diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 9d979436ded..01941d6a212 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -34,7 +34,7 @@ packaging==24.2 # via sphinx pyenchant==3.2.2 # via 
sphinxcontrib-spelling -pygments==2.19.0 +pygments==2.19.1 # via sphinx requests==2.32.3 # via diff --git a/requirements/doc.txt b/requirements/doc.txt index 281cd202d6e..918dee7658d 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -32,7 +32,7 @@ markupsafe==3.0.2 # via jinja2 packaging==24.2 # via sphinx -pygments==2.19.0 +pygments==2.19.1 # via sphinx requests==2.32.3 # via sphinx diff --git a/requirements/lint.txt b/requirements/lint.txt index 70360ce5c30..affc479af2c 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -61,7 +61,7 @@ pydantic==2.10.4 # via python-on-whales pydantic-core==2.27.2 # via pydantic -pygments==2.19.0 +pygments==2.19.1 # via rich pytest==8.3.4 # via diff --git a/requirements/test.txt b/requirements/test.txt index 4a187bbdc28..d4c088096ce 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -81,7 +81,7 @@ pydantic==2.10.4 # via python-on-whales pydantic-core==2.27.2 # via pydantic -pygments==2.19.0 +pygments==2.19.1 # via rich pytest==8.3.4 # via From 8bd21e22815e9d349731a5458e8f13c317bfe852 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 8 Jan 2025 10:48:24 +0000 Subject: [PATCH 1143/1511] Bump pytest-codspeed from 3.1.0 to 3.1.1 (#10312) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pytest-codspeed](https://github.com/CodSpeedHQ/pytest-codspeed) from 3.1.0 to 3.1.1. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/CodSpeedHQ/pytest-codspeed/releases">pytest-codspeed's releases</a>.</em></p> <blockquote> <h2>v3.1.1</h2> <h2>What's Changed</h2> <h3><!-- raw HTML omitted -->⚙️ Internals</h3> <ul> <li>Add a py3-none-any fallback wheel by <a href="https://github.com/art049"><code>@​art049</code></a> in <a href="https://redirect.github.com/CodSpeedHQ/pytest-codspeed/pull/61">CodSpeedHQ/pytest-codspeed#61</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/CodSpeedHQ/pytest-codspeed/compare/v3.1.0...v3.1.1">https://github.com/CodSpeedHQ/pytest-codspeed/compare/v3.1.0...v3.1.1</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/CodSpeedHQ/pytest-codspeed/blob/master/CHANGELOG.md">pytest-codspeed's changelog</a>.</em></p> <blockquote> <h2>[3.1.1] - 2025-01-07</h2> <h3><!-- raw HTML omitted -->⚙️ Internals</h3> <ul> <li>Fix tag num with bumpver by <a href="https://github.com/art049"><code>@​art049</code></a> in <a href="https://redirect.github.com/CodSpeedHQ/pytest-codspeed/pull/61">#61</a></li> <li>Update uv lock before release by <a href="https://github.com/art049"><code>@​art049</code></a></li> <li>Add a py3-none-any fallback wheel by <a href="https://github.com/art049"><code>@​art049</code></a></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/CodSpeedHQ/pytest-codspeed/commit/11de031ee1defbe67cdba634d0170489e99617cb"><code>11de031</code></a> Release v3.1.1 🚀</li> <li><a href="https://github.com/CodSpeedHQ/pytest-codspeed/commit/9294a19070fca9bccfa187643a03b94bc1abe654"><code>9294a19</code></a> chore: fix tag num with bumpver</li> <li><a href="https://github.com/CodSpeedHQ/pytest-codspeed/commit/fa3d6e8be34092c0713141f091a9e02f0dbd4671"><code>fa3d6e8</code></a> chore: update uv lock before release</li> <li><a 
href="https://github.com/CodSpeedHQ/pytest-codspeed/commit/64900a815c61176123f07d9dab971502a449675b"><code>64900a8</code></a> ci: add a py3-none-any fallback wheel</li> <li>See full diff in <a href="https://github.com/CodSpeedHQ/pytest-codspeed/compare/v3.1.0...v3.1.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pytest-codspeed&package-manager=pip&previous-version=3.1.0&new-version=3.1.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index c762661470a..b9506be1126 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -168,7 +168,7 @@ pytest==8.3.4 # pytest-cov # pytest-mock # pytest-xdist -pytest-codspeed==3.1.0 +pytest-codspeed==3.1.1 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 75cf289cad1..951be055777 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -163,7 +163,7 @@ pytest==8.3.4 # pytest-cov # pytest-mock # pytest-xdist -pytest-codspeed==3.1.0 +pytest-codspeed==3.1.1 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index affc479af2c..e1a78ab3f46 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -68,7 +68,7 @@ pytest==8.3.4 # -r requirements/lint.in # pytest-codspeed # pytest-mock -pytest-codspeed==3.1.0 
+pytest-codspeed==3.1.1 # via -r requirements/lint.in pytest-mock==3.14.0 # via -r requirements/lint.in diff --git a/requirements/test.txt b/requirements/test.txt index d4c088096ce..da5b81f617b 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -90,7 +90,7 @@ pytest==8.3.4 # pytest-cov # pytest-mock # pytest-xdist -pytest-codspeed==3.1.0 +pytest-codspeed==3.1.1 # via -r requirements/test.in pytest-cov==6.0.0 # via -r requirements/test.in From 49be3829dd74e7a62ece705315813d62877c176f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 10 Jan 2025 11:16:07 +0000 Subject: [PATCH 1144/1511] Bump pytest-codspeed from 3.1.1 to 3.1.2 (#10318) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pytest-codspeed](https://github.com/CodSpeedHQ/pytest-codspeed) from 3.1.1 to 3.1.2. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/CodSpeedHQ/pytest-codspeed/releases">pytest-codspeed's releases</a>.</em></p> <blockquote> <h2>v3.1.2</h2> <h2>What's Changed</h2> <h3><!-- raw HTML omitted -->🐛 Bug Fixes</h3> <ul> <li>fix: update package_data to include header and source files for valgrind wrapper by <a href="https://github.com/art049"><code>@​art049</code></a> in <a href="https://redirect.github.com/CodSpeedHQ/pytest-codspeed/pull/64">CodSpeedHQ/pytest-codspeed#64</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/CodSpeedHQ/pytest-codspeed/compare/v3.1.1...v3.1.2">https://github.com/CodSpeedHQ/pytest-codspeed/compare/v3.1.1...v3.1.2</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/CodSpeedHQ/pytest-codspeed/blob/master/CHANGELOG.md">pytest-codspeed's changelog</a>.</em></p> <blockquote> <h2>[3.1.2] - 2025-01-09</h2> <h3><!-- raw HTML omitted -->🐛 Bug Fixes</h3> <ul> <li>Update package_data to include header and source files 
for valgrind wrapper by <a href="https://github.com/art049"><code>@​art049</code></a> in <a href="https://redirect.github.com/CodSpeedHQ/pytest-codspeed/pull/64">#64</a></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/CodSpeedHQ/pytest-codspeed/commit/da9270dbed54be9ea55d601e774e855ce0978480"><code>da9270d</code></a> Release v3.1.2 🚀</li> <li><a href="https://github.com/CodSpeedHQ/pytest-codspeed/commit/2cdee6921205377611ca85de0a9f74e341d3978e"><code>2cdee69</code></a> fix: update package_data to include header and source files for valgrind wrapper</li> <li>See full diff in <a href="https://github.com/CodSpeedHQ/pytest-codspeed/compare/v3.1.1...v3.1.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pytest-codspeed&package-manager=pip&previous-version=3.1.1&new-version=3.1.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index b9506be1126..4664c6a392a 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -168,7 +168,7 @@ pytest==8.3.4 # pytest-cov # pytest-mock # pytest-xdist -pytest-codspeed==3.1.1 +pytest-codspeed==3.1.2 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 951be055777..def1d779cf4 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -163,7 +163,7 @@ pytest==8.3.4 # pytest-cov # pytest-mock # pytest-xdist -pytest-codspeed==3.1.1 +pytest-codspeed==3.1.2 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index e1a78ab3f46..4652d40247c 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -68,7 +68,7 @@ pytest==8.3.4 # -r requirements/lint.in # pytest-codspeed # pytest-mock -pytest-codspeed==3.1.1 
+pytest-codspeed==3.1.2 # via -r requirements/lint.in pytest-mock==3.14.0 # via -r requirements/lint.in diff --git a/requirements/test.txt b/requirements/test.txt index da5b81f617b..00beee69f1e 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -90,7 +90,7 @@ pytest==8.3.4 # pytest-cov # pytest-mock # pytest-xdist -pytest-codspeed==3.1.1 +pytest-codspeed==3.1.2 # via -r requirements/test.in pytest-cov==6.0.0 # via -r requirements/test.in From 6ef83f516c8a13918a5f673ec171cb5bfba04319 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 13 Jan 2025 03:28:25 +0100 Subject: [PATCH 1145/1511] [PR #10301/77f25a0a backport][3.11] Update docs for aiohttp.request (#10307) **This is a backport of PR #10301 as merged into master (77f25a0a4da3cc20f4c5b6d12be9d273ea02f1df).** ## What do these changes do? Noticed that the content of `aiohttp.request` reference in documentation needs to be changed after #10300 . This pr update docs for that. ## Are there changes in behavior for the user? ## Is it a substantial burden for the maintainers to support this? ## Related issue number ## Checklist - [ ] I think the code is well written - [ ] Unit tests for the changes exist - [x] Documentation reflects the changes - [ ] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. - [ ] Add a new news fragment into the `CHANGES/` folder Co-authored-by: Cycloctane <Cycloctane@outlook.com> --- docs/client_reference.rst | 125 +++++++++++++++++++++++++++++--------- 1 file changed, 97 insertions(+), 28 deletions(-) diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 19e221a27ca..26537161971 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -513,7 +513,7 @@ The client session supports the context manager protocol for self closing. .. 
versionadded:: 3.0 :param str server_hostname: Sets or overrides the host name that the - target server’s certificate will be matched against. + target server's certificate will be matched against. See :py:meth:`asyncio.loop.create_connection` for more information. @@ -850,14 +850,21 @@ certification chaining. .. function:: request(method, url, *, params=None, data=None, \ json=None,\ - headers=None, cookies=None, auth=None, \ + cookies=None, headers=None, skip_auto_headers=None, auth=None, \ allow_redirects=True, max_redirects=10, \ - encoding='utf-8', \ - version=HttpVersion(major=1, minor=1), \ - compress=None, chunked=None, expect100=False, raise_for_status=False, \ + compress=False, chunked=None, expect100=False, raise_for_status=None, \ + read_until_eof=True, \ + proxy=None, proxy_auth=None, \ + timeout=sentinel, ssl=True, \ + server_hostname=None, \ + proxy_headers=None, \ + trace_request_ctx=None, \ read_bufsize=None, \ - connector=None, loop=None,\ - read_until_eof=True, timeout=sentinel) + auto_decompress=None, \ + max_line_size=None, \ + max_field_size=None, \ + version=aiohttp.HttpVersion11, \ + connector=None) :async: Asynchronous context manager for performing an asynchronous HTTP @@ -870,8 +877,20 @@ certification chaining. be encoded with :class:`~yarl.URL` (see :class:`~yarl.URL` to skip encoding). - :param dict params: Parameters to be sent in the query - string of the new request (optional) + :param params: Mapping, iterable of tuple of *key*/*value* pairs or + string to be sent as parameters in the query + string of the new request. Ignored for subsequent + redirected requests (optional) + + Allowed values are: + + - :class:`collections.abc.Mapping` e.g. :class:`dict`, + :class:`multidict.MultiDict` or + :class:`multidict.MultiDictProxy` + - :class:`collections.abc.Iterable` e.g. 
:class:`tuple` or + :class:`list` + - :class:`str` with preferably url-encoded content + (**Warning:** content will not be encoded by *aiohttp*) :param data: The data to send in the body of the request. This can be a :class:`FormData` object or anything that can be passed into @@ -881,28 +900,46 @@ certification chaining. :param json: Any json compatible python object (optional). *json* and *data* parameters could not be used at the same time. + :param dict cookies: HTTP Cookies to send with the request (optional) + :param dict headers: HTTP Headers to send with the request (optional) - :param dict cookies: Cookies to send with the request (optional) + :param skip_auto_headers: set of headers for which autogeneration + should be skipped. + + *aiohttp* autogenerates headers like ``User-Agent`` or + ``Content-Type`` if these headers are not explicitly + passed. Using ``skip_auto_headers`` parameter allows to skip + that generation. + + Iterable of :class:`str` or :class:`~multidict.istr` + (optional) :param aiohttp.BasicAuth auth: an object that represents HTTP Basic Authorization (optional) :param bool allow_redirects: Whether to process redirects or not. - When ``True``, redirects are followed (up to ``max_redirects`` times) - and logged into :attr:`ClientResponse.history` and ``trace_configs``. - When ``False``, the original response is returned. - ``True`` by default (optional). + When ``True``, redirects are followed (up to ``max_redirects`` times) + and logged into :attr:`ClientResponse.history` and ``trace_configs``. + When ``False``, the original response is returned. + ``True`` by default (optional). - :param aiohttp.protocol.HttpVersion version: Request HTTP version (optional) + :param int max_redirects: Maximum number of redirects to follow. + :exc:`TooManyRedirects` is raised if the number is exceeded. + Ignored when ``allow_redirects=False``. + ``10`` by default. 
:param bool compress: Set to ``True`` if request has to be compressed - with deflate encoding. - ``False`` instructs aiohttp to not compress data. + with deflate encoding. If `compress` can not be combined + with a *Content-Encoding* and *Content-Length* headers. ``None`` by default (optional). :param int chunked: Enables chunked transfer encoding. - ``None`` by default (optional). + It is up to the developer + to decide how to chunk data streams. If chunking is enabled, aiohttp + encodes the provided chunks in the "Transfer-encoding: chunked" format. + If *chunked* is set, then the *Transfer-encoding* and *content-length* + headers are disallowed. ``None`` by default (optional). :param bool expect100: Expect 100-continue response from server. ``False`` by default (optional). @@ -916,28 +953,60 @@ certification chaining. .. versionadded:: 3.4 - :param aiohttp.BaseConnector connector: BaseConnector sub-class - instance to support connection pooling. - :param bool read_until_eof: Read response until EOF if response does not have Content-Length header. ``True`` by default (optional). + :param proxy: Proxy URL, :class:`str` or :class:`~yarl.URL` (optional) + + :param aiohttp.BasicAuth proxy_auth: an object that represents proxy HTTP + Basic Authorization (optional) + + :param timeout: a :class:`ClientTimeout` settings structure, 300 seconds (5min) + total timeout, 30 seconds socket connect timeout by default. + + :param ssl: SSL validation mode. ``True`` for default SSL check + (:func:`ssl.create_default_context` is used), + ``False`` for skip SSL certificate validation, + :class:`aiohttp.Fingerprint` for fingerprint + validation, :class:`ssl.SSLContext` for custom SSL + certificate validation. + + Supersedes *verify_ssl*, *ssl_context* and + *fingerprint* parameters. + + :param str server_hostname: Sets or overrides the host name that the + target server's certificate will be matched against. + + See :py:meth:`asyncio.loop.create_connection` + for more information. 
+ + :param collections.abc.Mapping proxy_headers: HTTP headers to send to the proxy + if the parameter proxy has been provided. + + :param trace_request_ctx: Object used to give as a kw param for each new + :class:`TraceConfig` object instantiated, + used to give information to the + tracers that is only available at request time. + :param int read_bufsize: Size of the read buffer (:attr:`ClientResponse.content`). ``None`` by default, it means that the session global value is used. .. versionadded:: 3.7 - :param timeout: a :class:`ClientTimeout` settings structure, 300 seconds (5min) - total timeout, 30 seconds socket connect timeout by default. + :param bool auto_decompress: Automatically decompress response body. + May be used to enable/disable auto decompression on a per-request basis. - :param loop: :ref:`event loop<asyncio-event-loop>` - used for processing HTTP requests. - If param is ``None``, :func:`asyncio.get_event_loop` - is used for getting default event loop. + :param int max_line_size: Maximum allowed size of lines in responses. - .. deprecated:: 2.0 + :param int max_field_size: Maximum allowed size of header fields in responses. + + :param aiohttp.protocol.HttpVersion version: Request HTTP version, + ``HTTP 1.1`` by default. (optional) + + :param aiohttp.BaseConnector connector: BaseConnector sub-class + instance to support connection pooling. (optional) :return ClientResponse: a :class:`client response <ClientResponse>` object. From 5f34c3bdd83b25b8b72d1b6b9a25546e9ee596f7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 16 Jan 2025 11:02:35 +0000 Subject: [PATCH 1146/1511] Bump virtualenv from 20.28.1 to 20.29.0 (#10328) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [virtualenv](https://github.com/pypa/virtualenv) from 20.28.1 to 20.29.0. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pypa/virtualenv/releases">virtualenv's releases</a>.</em></p> <blockquote> <h2>20.29.0</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>release 20.28.1 by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2818">pypa/virtualenv#2818</a></li> <li>Makes --python command-line flag take precedence over env var by <a href="https://github.com/filiplajszczak"><code>@​filiplajszczak</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2821">pypa/virtualenv#2821</a></li> <li>Add free-threaded Python support by <a href="https://github.com/robsdedude"><code>@​robsdedude</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2809">pypa/virtualenv#2809</a></li> <li>Upgrade embeded setuptools by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2823">pypa/virtualenv#2823</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/filiplajszczak"><code>@​filiplajszczak</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/virtualenv/pull/2821">pypa/virtualenv#2821</a></li> <li><a href="https://github.com/robsdedude"><code>@​robsdedude</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/virtualenv/pull/2809">pypa/virtualenv#2809</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pypa/virtualenv/compare/20.28.1...20.29.0">https://github.com/pypa/virtualenv/compare/20.28.1...20.29.0</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/virtualenv/blob/main/docs/changelog.rst">virtualenv's changelog</a>.</em></p> <blockquote> <h2>v20.29.0 (2025-01-15)</h2> <p>Features - 20.29.0</p> <pre><code>- Add 
support for selecting free-threaded Python interpreters, e.g., `python3.13t`. (:issue:`2809`) <p>Bugfixes - 20.29.0 </code></pre></p> <ul> <li> <p>Upgrade embedded wheels:</p> <ul> <li>setuptools to <code>75.8.0</code> from <code>75.6.0</code> (:issue:<code>2823</code>)</li> </ul> </li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/virtualenv/commit/0f6dc41fe3bb598dc485e045dcfe94dfc39e5f99"><code>0f6dc41</code></a> release 20.29.0</li> <li><a href="https://github.com/pypa/virtualenv/commit/ea5e5cbbd994639fc2935cd29b43a1717f9b8cef"><code>ea5e5cb</code></a> Upgrade embeded setuptools (<a href="https://redirect.github.com/pypa/virtualenv/issues/2823">#2823</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/b00d59c9308fc0feaf3ea8e62bb2b6223c48c01e"><code>b00d59c</code></a> Add free-threaded Python support (<a href="https://redirect.github.com/pypa/virtualenv/issues/2809">#2809</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/bc7a91a5ad0c3d2db52d0976f08c791604cba9c7"><code>bc7a91a</code></a> Merge pull request <a href="https://redirect.github.com/pypa/virtualenv/issues/2821">#2821</a> from filiplajszczak/cli-precedence-2285</li> <li><a href="https://github.com/pypa/virtualenv/commit/11995e7348c1813dd210bde7b4d7d277dce5bfbe"><code>11995e7</code></a> Merge pull request <a href="https://redirect.github.com/pypa/virtualenv/issues/2819">#2819</a> from pypa/pre-commit-ci-update-config</li> <li><a href="https://github.com/pypa/virtualenv/commit/dc016868118749f097f9647c02d6657919a4e34a"><code>dc01686</code></a> [pre-commit.ci] pre-commit autoupdate</li> <li><a href="https://github.com/pypa/virtualenv/commit/7f450c3e1d9f814ddbe0692363f3916eb6ef96d8"><code>7f450c3</code></a> Merge pull request <a href="https://redirect.github.com/pypa/virtualenv/issues/2818">#2818</a> from pypa/release-20.28.1</li> <li>See full diff in <a 
href="https://github.com/pypa/virtualenv/compare/20.28.1...20.29.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=virtualenv&package-manager=pip&previous-version=20.28.1&new-version=20.29.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 4664c6a392a..bcc0597c34c 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -266,7 +266,7 @@ uvloop==0.21.0 ; platform_system != "Windows" # -r requirements/lint.in valkey==6.0.2 # via -r requirements/lint.in -virtualenv==20.28.1 +virtualenv==20.29.0 # via pre-commit wait-for-it==2.3.0 # via -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index def1d779cf4..ae4a5e91e29 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -257,7 +257,7 @@ uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpytho # -r requirements/lint.in valkey==6.0.2 # via -r requirements/lint.in -virtualenv==20.28.1 +virtualenv==20.29.0 # via pre-commit wait-for-it==2.3.0 # via -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 4652d40247c..8e330c63da2 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ 
-102,5 +102,5 @@ uvloop==0.21.0 ; platform_system != "Windows" # via -r requirements/lint.in valkey==6.0.2 # via -r requirements/lint.in -virtualenv==20.28.1 +virtualenv==20.29.0 # via pre-commit From de81d3b8224d0184e5d3f274bebf7f01eda1ee2b Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 20 Jan 2025 19:11:05 +0000 Subject: [PATCH 1147/1511] [PR #10339/089e7de2 backport][3.12] Add sphinx configuration for readthedocs (#10341) Co-authored-by: J. Nick Koston <nick@koston.org> --- .readthedocs.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.readthedocs.yml b/.readthedocs.yml index b3edaf4b8ea..b7d8a9236f6 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -5,6 +5,10 @@ --- version: 2 +sphinx: + # Path to your Sphinx configuration file. + configuration: docs/conf.py + submodules: include: all exclude: [] From b088a2cbca34da38079b8088764b9d1ea9a6a514 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 20 Jan 2025 10:04:43 -1000 Subject: [PATCH 1148/1511] [PR #10342/755299d0 backport][3.11] Increase allowed import time for Python 3.12+ (#10343) --- tests/test_imports.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_imports.py b/tests/test_imports.py index 5a2bb76b03c..b3f545ad900 100644 --- a/tests/test_imports.py +++ b/tests/test_imports.py @@ -38,7 +38,7 @@ def test_web___all__(pytester: pytest.Pytester) -> None: # and even slower under pytest-xdist, especially in CI _XDIST_WORKER_COUNT * 100 * (1 if _IS_CI_ENV else 1.53) if _IS_XDIST_RUN - else 265 + else 295 ), } _TARGET_TIMINGS_BY_PYTHON_VERSION["3.13"] = _TARGET_TIMINGS_BY_PYTHON_VERSION["3.12"] From 016cbaee87a90f00c9a3689457037ca98b4e2ece Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 20 Jan 2025 20:19:45 +0000 Subject: [PATCH 1149/1511] [PR #10342/755299d0 backport][3.12] Increase 
allowed import time for Python 3.12+ (#10344) --- tests/test_imports.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_imports.py b/tests/test_imports.py index 5a2bb76b03c..b3f545ad900 100644 --- a/tests/test_imports.py +++ b/tests/test_imports.py @@ -38,7 +38,7 @@ def test_web___all__(pytester: pytest.Pytester) -> None: # and even slower under pytest-xdist, especially in CI _XDIST_WORKER_COUNT * 100 * (1 if _IS_CI_ENV else 1.53) if _IS_XDIST_RUN - else 265 + else 295 ), } _TARGET_TIMINGS_BY_PYTHON_VERSION["3.13"] = _TARGET_TIMINGS_BY_PYTHON_VERSION["3.12"] From 6c3f6f0dcc12937c793d5fa252e2dadffa036338 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 20 Jan 2025 20:29:07 +0000 Subject: [PATCH 1150/1511] [PR #10339/089e7de2 backport][3.11] Add sphinx configuration for readthedocs (#10340) --- .readthedocs.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.readthedocs.yml b/.readthedocs.yml index b3edaf4b8ea..b7d8a9236f6 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -5,6 +5,10 @@ --- version: 2 +sphinx: + # Path to your Sphinx configuration file. 
+ configuration: docs/conf.py + submodules: include: all exclude: [] From 5a62ec94a1df7b4fa21fb98892bdd70e0668ff6e Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 20 Jan 2025 20:36:28 +0000 Subject: [PATCH 1151/1511] [PR #10332/a0e64bb4 backport][3.12] Log the remote that generates request errors (#10338) --- CHANGES/10332.feature.rst | 1 + aiohttp/web_protocol.py | 8 ++++++-- tests/test_web_server.py | 36 +++++++++++++++++++++++++++--------- 3 files changed, 34 insertions(+), 11 deletions(-) create mode 100644 CHANGES/10332.feature.rst diff --git a/CHANGES/10332.feature.rst b/CHANGES/10332.feature.rst new file mode 100644 index 00000000000..e5c84adf50d --- /dev/null +++ b/CHANGES/10332.feature.rst @@ -0,0 +1 @@ +Improved logging of HTTP protocol errors to include the remote address -- by :user:`bdraco`. diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index 3306b86bded..32f503474a9 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -694,9 +694,13 @@ def handle_error( # or encrypted traffic to an HTTP port. This is expected # to happen when connected to the public internet so we log # it at the debug level as to not fill logs with noise. 
- self.logger.debug("Error handling request", exc_info=exc) + self.logger.debug( + "Error handling request from %s", request.remote, exc_info=exc + ) else: - self.log_exception("Error handling request", exc_info=exc) + self.log_exception( + "Error handling request from %s", request.remote, exc_info=exc + ) # some data already got sent, connection is broken if request.writer.output_size > 0: diff --git a/tests/test_web_server.py b/tests/test_web_server.py index 7b9b87a374a..910f074e90f 100644 --- a/tests/test_web_server.py +++ b/tests/test_web_server.py @@ -56,7 +56,9 @@ async def handler(request): assert txt.startswith("500 Internal Server Error") assert "Traceback" not in txt - logger.exception.assert_called_with("Error handling request", exc_info=exc) + logger.exception.assert_called_with( + "Error handling request from %s", cli.host, exc_info=exc + ) async def test_raw_server_logs_invalid_method_with_loop_debug( @@ -85,7 +87,9 @@ async def handler(request: web.BaseRequest) -> NoReturn: # on the first request since the client may # be probing for TLS/SSL support which is # expected to fail - logger.debug.assert_called_with("Error handling request", exc_info=exc) + logger.debug.assert_called_with( + "Error handling request from %s", cli.host, exc_info=exc + ) logger.debug.reset_mock() # Now make another connection to the server @@ -99,7 +103,9 @@ async def handler(request: web.BaseRequest) -> NoReturn: # on the first request since the client may # be probing for TLS/SSL support which is # expected to fail - logger.debug.assert_called_with("Error handling request", exc_info=exc) + logger.debug.assert_called_with( + "Error handling request from %s", cli.host, exc_info=exc + ) async def test_raw_server_logs_invalid_method_without_loop_debug( @@ -128,7 +134,9 @@ async def handler(request: web.BaseRequest) -> NoReturn: # on the first request since the client may # be probing for TLS/SSL support which is # expected to fail - logger.debug.assert_called_with("Error 
handling request", exc_info=exc) + logger.debug.assert_called_with( + "Error handling request from %s", cli.host, exc_info=exc + ) async def test_raw_server_logs_invalid_method_second_request( @@ -159,7 +167,9 @@ async def handler(request: web.BaseRequest) -> web.Response: # BadHttpMethod should be logged as an exception # if its not the first request since we know # that the client already was speaking HTTP - logger.exception.assert_called_with("Error handling request", exc_info=exc) + logger.exception.assert_called_with( + "Error handling request from %s", cli.host, exc_info=exc + ) async def test_raw_server_logs_bad_status_line_as_exception( @@ -184,7 +194,9 @@ async def handler(request: web.BaseRequest) -> NoReturn: txt = await resp.text() assert "Traceback (most recent call last):\n" not in txt - logger.exception.assert_called_with("Error handling request", exc_info=exc) + logger.exception.assert_called_with( + "Error handling request from %s", cli.host, exc_info=exc + ) async def test_raw_server_handler_timeout( @@ -254,7 +266,9 @@ async def handler(request): txt = await resp.text() assert "Traceback (most recent call last):\n" in txt - logger.exception.assert_called_with("Error handling request", exc_info=exc) + logger.exception.assert_called_with( + "Error handling request from %s", cli.host, exc_info=exc + ) async def test_raw_server_html_exception(aiohttp_raw_server, aiohttp_client): @@ -278,7 +292,9 @@ async def handler(request): "</body></html>\n" ) - logger.exception.assert_called_with("Error handling request", exc_info=exc) + logger.exception.assert_called_with( + "Error handling request from %s", cli.host, exc_info=exc + ) async def test_raw_server_html_exception_debug(aiohttp_raw_server, aiohttp_client): @@ -302,7 +318,9 @@ async def handler(request): "<pre>Traceback (most recent call last):\n" ) - logger.exception.assert_called_with("Error handling request", exc_info=exc) + logger.exception.assert_called_with( + "Error handling request from %s", 
cli.host, exc_info=exc + ) async def test_handler_cancellation(unused_port_socket: socket.socket) -> None: From 68e426c1684e59d54c57b707418f7b1e0587ad4b Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 20 Jan 2025 11:04:50 -1000 Subject: [PATCH 1152/1511] [PR #10332/a0e64bb4 backport][3.11] Log the remote that generates request errors (#10337) --- CHANGES/10332.feature.rst | 1 + aiohttp/web_protocol.py | 8 ++++++-- tests/test_web_server.py | 36 +++++++++++++++++++++++++++--------- 3 files changed, 34 insertions(+), 11 deletions(-) create mode 100644 CHANGES/10332.feature.rst diff --git a/CHANGES/10332.feature.rst b/CHANGES/10332.feature.rst new file mode 100644 index 00000000000..e5c84adf50d --- /dev/null +++ b/CHANGES/10332.feature.rst @@ -0,0 +1 @@ +Improved logging of HTTP protocol errors to include the remote address -- by :user:`bdraco`. diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index 3306b86bded..32f503474a9 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -694,9 +694,13 @@ def handle_error( # or encrypted traffic to an HTTP port. This is expected # to happen when connected to the public internet so we log # it at the debug level as to not fill logs with noise. 
- self.logger.debug("Error handling request", exc_info=exc) + self.logger.debug( + "Error handling request from %s", request.remote, exc_info=exc + ) else: - self.log_exception("Error handling request", exc_info=exc) + self.log_exception( + "Error handling request from %s", request.remote, exc_info=exc + ) # some data already got sent, connection is broken if request.writer.output_size > 0: diff --git a/tests/test_web_server.py b/tests/test_web_server.py index 7b9b87a374a..910f074e90f 100644 --- a/tests/test_web_server.py +++ b/tests/test_web_server.py @@ -56,7 +56,9 @@ async def handler(request): assert txt.startswith("500 Internal Server Error") assert "Traceback" not in txt - logger.exception.assert_called_with("Error handling request", exc_info=exc) + logger.exception.assert_called_with( + "Error handling request from %s", cli.host, exc_info=exc + ) async def test_raw_server_logs_invalid_method_with_loop_debug( @@ -85,7 +87,9 @@ async def handler(request: web.BaseRequest) -> NoReturn: # on the first request since the client may # be probing for TLS/SSL support which is # expected to fail - logger.debug.assert_called_with("Error handling request", exc_info=exc) + logger.debug.assert_called_with( + "Error handling request from %s", cli.host, exc_info=exc + ) logger.debug.reset_mock() # Now make another connection to the server @@ -99,7 +103,9 @@ async def handler(request: web.BaseRequest) -> NoReturn: # on the first request since the client may # be probing for TLS/SSL support which is # expected to fail - logger.debug.assert_called_with("Error handling request", exc_info=exc) + logger.debug.assert_called_with( + "Error handling request from %s", cli.host, exc_info=exc + ) async def test_raw_server_logs_invalid_method_without_loop_debug( @@ -128,7 +134,9 @@ async def handler(request: web.BaseRequest) -> NoReturn: # on the first request since the client may # be probing for TLS/SSL support which is # expected to fail - logger.debug.assert_called_with("Error 
handling request", exc_info=exc) + logger.debug.assert_called_with( + "Error handling request from %s", cli.host, exc_info=exc + ) async def test_raw_server_logs_invalid_method_second_request( @@ -159,7 +167,9 @@ async def handler(request: web.BaseRequest) -> web.Response: # BadHttpMethod should be logged as an exception # if its not the first request since we know # that the client already was speaking HTTP - logger.exception.assert_called_with("Error handling request", exc_info=exc) + logger.exception.assert_called_with( + "Error handling request from %s", cli.host, exc_info=exc + ) async def test_raw_server_logs_bad_status_line_as_exception( @@ -184,7 +194,9 @@ async def handler(request: web.BaseRequest) -> NoReturn: txt = await resp.text() assert "Traceback (most recent call last):\n" not in txt - logger.exception.assert_called_with("Error handling request", exc_info=exc) + logger.exception.assert_called_with( + "Error handling request from %s", cli.host, exc_info=exc + ) async def test_raw_server_handler_timeout( @@ -254,7 +266,9 @@ async def handler(request): txt = await resp.text() assert "Traceback (most recent call last):\n" in txt - logger.exception.assert_called_with("Error handling request", exc_info=exc) + logger.exception.assert_called_with( + "Error handling request from %s", cli.host, exc_info=exc + ) async def test_raw_server_html_exception(aiohttp_raw_server, aiohttp_client): @@ -278,7 +292,9 @@ async def handler(request): "</body></html>\n" ) - logger.exception.assert_called_with("Error handling request", exc_info=exc) + logger.exception.assert_called_with( + "Error handling request from %s", cli.host, exc_info=exc + ) async def test_raw_server_html_exception_debug(aiohttp_raw_server, aiohttp_client): @@ -302,7 +318,9 @@ async def handler(request): "<pre>Traceback (most recent call last):\n" ) - logger.exception.assert_called_with("Error handling request", exc_info=exc) + logger.exception.assert_called_with( + "Error handling request from %s", 
cli.host, exc_info=exc + ) async def test_handler_cancellation(unused_port_socket: socket.socket) -> None: From d3c52656eabfdb6f7c88e821a704256db4d2b256 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 20 Jan 2025 21:25:34 +0000 Subject: [PATCH 1153/1511] Bump cherry-picker from 2.4.0 to 2.5.0 (#10336) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [cherry-picker](https://github.com/python/cherry-picker) from 2.4.0 to 2.5.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/python/cherry-picker/releases">cherry-picker's releases</a>.</em></p> <blockquote> <h2>cherry-picker-v2.5.0</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>Add draft config option to Create Pull Request by <a href="https://github.com/gopidesupavan"><code>@​gopidesupavan</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/151">python/cherry-picker#151</a></li> <li>Better error message when cherry_picker is called in wrong state by <a href="https://github.com/serhiy-storchaka"><code>@​serhiy-storchaka</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/119">python/cherry-picker#119</a></li> <li>Bubble up error message by <a href="https://github.com/dpr-0"><code>@​dpr-0</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/112">python/cherry-picker#112</a></li> <li>Acknowledge network issues on GitHub by <a href="https://github.com/ambv"><code>@​ambv</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/153">python/cherry-picker#153</a></li> <li>Ignore uv.lock file by <a href="https://github.com/potiuk"><code>@​potiuk</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/149">python/cherry-picker#149</a></li> <li>Fix mypy pre-commit settings by <a href="https://github.com/potiuk"><code>@​potiuk</code></a> in <a 
href="https://redirect.github.com/python/cherry-picker/pull/148">python/cherry-picker#148</a></li> <li>Update CI config by <a href="https://github.com/hugovk"><code>@​hugovk</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/144">python/cherry-picker#144</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/potiuk"><code>@​potiuk</code></a> made their first contribution in <a href="https://redirect.github.com/python/cherry-picker/pull/149">python/cherry-picker#149</a></li> <li><a href="https://github.com/dpr-0"><code>@​dpr-0</code></a> made their first contribution in <a href="https://redirect.github.com/python/cherry-picker/pull/112">python/cherry-picker#112</a></li> <li><a href="https://github.com/gopidesupavan"><code>@​gopidesupavan</code></a> made their first contribution in <a href="https://redirect.github.com/python/cherry-picker/pull/151">python/cherry-picker#151</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/python/cherry-picker/compare/cherry-picker-v2.4.0...cherry-picker-v2.5.0">https://github.com/python/cherry-picker/compare/cherry-picker-v2.4.0...cherry-picker-v2.5.0</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/python/cherry-picker/blob/main/CHANGELOG.md">cherry-picker's changelog</a>.</em></p> <blockquote> <h2>2.5.0</h2> <ul> <li>Add draft config option to Create Pull Request by <a href="https://github.com/gopidesupavan"><code>@​gopidesupavan</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/151">python/cherry-picker#151</a></li> <li>Better error message when cherry_picker is called in wrong state by <a href="https://github.com/serhiy-storchaka"><code>@​serhiy-storchaka</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/119">python/cherry-picker#119</a></li> <li>Bubble up error message by <a 
href="https://github.com/dpr-0"><code>@​dpr-0</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/112">python/cherry-picker#112</a></li> <li>Acknowledge network issues on GitHub by <a href="https://github.com/ambv"><code>@​ambv</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/153">python/cherry-picker#153</a></li> <li>Ignore uv.lock file by <a href="https://github.com/potiuk"><code>@​potiuk</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/149">python/cherry-picker#149</a></li> <li>Fix mypy pre-commit settings by <a href="https://github.com/potiuk"><code>@​potiuk</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/148">python/cherry-picker#148</a></li> <li>Update CI config by <a href="https://github.com/hugovk"><code>@​hugovk</code></a> in <a href="https://redirect.github.com/python/cherry-picker/pull/144">python/cherry-picker#144</a></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/python/cherry-picker/commit/24e6a44a1b8730bb5786db6ce519eeb37271f371"><code>24e6a44</code></a> Update changelog for 2.5.0 (<a href="https://redirect.github.com/python/cherry-picker/issues/154">#154</a>)</li> <li><a href="https://github.com/python/cherry-picker/commit/a0d1a14a371cdb94f7cd1cdc303c361a60f66ed3"><code>a0d1a14</code></a> Acknowledge network issues on GitHub (<a href="https://redirect.github.com/python/cherry-picker/issues/153">#153</a>)</li> <li><a href="https://github.com/python/cherry-picker/commit/962ba5cf8d30ad154832434b561d5c8eed251352"><code>962ba5c</code></a> add draft config option to create pull request (<a href="https://redirect.github.com/python/cherry-picker/issues/151">#151</a>)</li> <li><a href="https://github.com/python/cherry-picker/commit/014b2aaa9c45c7ead65f56759eb5539b34bac45f"><code>014b2aa</code></a> Better error message when cherry_picker is called in wrong state (<a 
href="https://redirect.github.com/python/cherry-picker/issues/119">#119</a>)</li> <li><a href="https://github.com/python/cherry-picker/commit/0a5c565e28529acecbece73f71573b6283c6678b"><code>0a5c565</code></a> Bubble up error message (<a href="https://redirect.github.com/python/cherry-picker/issues/112">#112</a>)</li> <li><a href="https://github.com/python/cherry-picker/commit/a2d729374eec70f34d182d216bfc3250268c5d5a"><code>a2d7293</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/python/cherry-picker/issues/152">#152</a>)</li> <li><a href="https://github.com/python/cherry-picker/commit/b8f7d16ad21ad6d53c976214051556c5414283b0"><code>b8f7d16</code></a> Bump codecov/codecov-action from 4 to 5 (<a href="https://redirect.github.com/python/cherry-picker/issues/150">#150</a>)</li> <li><a href="https://github.com/python/cherry-picker/commit/957f2a694625d1384295c2a5b05dc28a9e2af432"><code>957f2a6</code></a> Update CI config (<a href="https://redirect.github.com/python/cherry-picker/issues/144">#144</a>)</li> <li><a href="https://github.com/python/cherry-picker/commit/452a9b2f6e440e6ebe234507b6fbdea99838b0b6"><code>452a9b2</code></a> Fix mypy pre-commit settings (<a href="https://redirect.github.com/python/cherry-picker/issues/148">#148</a>)</li> <li><a href="https://github.com/python/cherry-picker/commit/a4cac745d6ee71d12da0b439c5d093cb987be294"><code>a4cac74</code></a> Ignore uv.lock file (<a href="https://redirect.github.com/python/cherry-picker/issues/149">#149</a>)</li> <li>See full diff in <a href="https://github.com/python/cherry-picker/compare/cherry-picker-v2.4.0...cherry-picker-v2.5.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=cherry-picker&package-manager=pip&previous-version=2.4.0&new-version=2.5.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 6 +++++- requirements/dev.txt | 6 +++++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index bcc0597c34c..3a4b94c75e7 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -41,7 +41,7 @@ cfgv==3.4.0 # via pre-commit charset-normalizer==3.4.1 # via requests -cherry-picker==2.4.0 +cherry-picker==2.5.0 # via -r requirements/dev.in click==8.1.8 # via @@ -228,6 +228,10 @@ sphinxcontrib-spelling==8.0.1 ; platform_system != "Windows" # via -r requirements/doc-spelling.in sphinxcontrib-towncrier==0.4.0a0 # via -r requirements/doc.in +stamina==24.3.0 + # via cherry-picker +tenacity==9.0.0 + # via stamina tomli==2.2.1 # via # build diff --git a/requirements/dev.txt b/requirements/dev.txt index ae4a5e91e29..e0a34857e36 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -41,7 +41,7 @@ cfgv==3.4.0 # via pre-commit charset-normalizer==3.4.1 # via requests -cherry-picker==2.4.0 +cherry-picker==2.5.0 # via -r requirements/dev.in click==8.1.8 # via @@ -219,6 +219,10 @@ sphinxcontrib-serializinghtml==2.0.0 
# via sphinx sphinxcontrib-towncrier==0.4.0a0 # via -r requirements/doc.in +stamina==24.3.0 + # via cherry-picker +tenacity==9.0.0 + # via stamina tomli==2.2.1 # via # build From 976e0a86ef710e1a1289d965668ceb486444593d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 20 Jan 2025 21:26:59 +0000 Subject: [PATCH 1154/1511] Bump setuptools from 75.7.0 to 75.8.0 (#10320) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [setuptools](https://github.com/pypa/setuptools) from 75.7.0 to 75.8.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/setuptools/blob/main/NEWS.rst">setuptools's changelog</a>.</em></p> <blockquote> <h1>v75.8.0</h1> <h2>Features</h2> <ul> <li>Implemented <code>Dynamic</code> field for core metadata (as introduced in PEP 643). The existing implementation is currently experimental and the exact approach may change in future releases. 
(<a href="https://redirect.github.com/pypa/setuptools/issues/4698">#4698</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/setuptools/commit/5c9d9809dec1b20e2a9da6b4a06355fd6f87a190"><code>5c9d980</code></a> Bump version: 75.7.0 → 75.8.0</li> <li><a href="https://github.com/pypa/setuptools/commit/72c422261b40f2b95a8be6605cc7dd93cec81794"><code>72c4222</code></a> Avoid using Any in function</li> <li><a href="https://github.com/pypa/setuptools/commit/1c61d4799438677c7cfaaccf281312bfb1aee9b3"><code>1c61d47</code></a> Add news fragments for PEP 643</li> <li><a href="https://github.com/pypa/setuptools/commit/f285d01e2661b01e4947a4dca7704790b65f2967"><code>f285d01</code></a> Implement PEP 643 (<code>Dynamic</code> field for core metadata) (<a href="https://redirect.github.com/pypa/setuptools/issues/4698">#4698</a>)</li> <li><a href="https://github.com/pypa/setuptools/commit/a50f6e2e1e8b4610adde709079bec17ad0944197"><code>a50f6e2</code></a> Fix _static.Dict.<strong>ior</strong> for Python 3.8</li> <li><a href="https://github.com/pypa/setuptools/commit/b055895fa337a6e03a29c2ea6493b6b778d2ba46"><code>b055895</code></a> Add extra tests for static/dynamic metadata</li> <li><a href="https://github.com/pypa/setuptools/commit/770b4fc8f6248d862629028f5ee4218975f9516b"><code>770b4fc</code></a> Remove test workaround for unmarked static values from pyproject.toml</li> <li><a href="https://github.com/pypa/setuptools/commit/8b22d73be5e23a9611398d81aedc5164115940ce"><code>8b22d73</code></a> Mark values from pyproject.toml as static</li> <li><a href="https://github.com/pypa/setuptools/commit/f699fd842e3ddedbe937ee33b0bd6ad28e735664"><code>f699fd8</code></a> Fix spelling error</li> <li><a href="https://github.com/pypa/setuptools/commit/8b4c8a3c95f43d771d0fa6e4ebceea3436bc70f7"><code>8b4c8a3</code></a> Add tests for static 'attr' directive</li> <li>Additional commits viewable in <a 
href="https://github.com/pypa/setuptools/compare/v75.7.0...v75.8.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=setuptools&package-manager=pip&previous-version=75.7.0&new-version=75.8.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 3a4b94c75e7..53c49012e5c 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -282,7 +282,7 @@ yarl==1.18.3 # The following packages are considered to be unsafe in a requirements file: pip==24.3.1 # via pip-tools -setuptools==75.7.0 +setuptools==75.8.0 # via # incremental # pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index e0a34857e36..c6461895361 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -273,7 +273,7 @@ yarl==1.18.3 # The following packages are considered to be unsafe in a requirements file: pip==24.3.1 # via pip-tools -setuptools==75.7.0 +setuptools==75.8.0 # via # incremental # pip-tools diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 01941d6a212..8aad3bd4045 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -76,5 +76,5 @@ urllib3==2.3.0 # 
via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==75.7.0 +setuptools==75.8.0 # via incremental diff --git a/requirements/doc.txt b/requirements/doc.txt index 918dee7658d..d94908fb12e 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -69,5 +69,5 @@ urllib3==2.3.0 # via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==75.7.0 +setuptools==75.8.0 # via incremental From d6f0bbc7154351429420841a39f9c3cb5e0e767a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 20 Jan 2025 21:27:01 +0000 Subject: [PATCH 1155/1511] Bump python-on-whales from 0.74.0 to 0.75.1 (#10324) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [python-on-whales](https://github.com/gabrieldemarmiesse/python-on-whales) from 0.74.0 to 0.75.1. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/gabrieldemarmiesse/python-on-whales/releases">python-on-whales's releases</a>.</em></p> <blockquote> <h2>v0.75.1</h2> <h2>What's Changed</h2> <ul> <li>Work around setuptools license file metadata issue by <a href="https://github.com/rcwbr"><code>@​rcwbr</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/666">gabrieldemarmiesse/python-on-whales#666</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/gabrieldemarmiesse/python-on-whales/compare/v0.75.0...v0.75.1">https://github.com/gabrieldemarmiesse/python-on-whales/compare/v0.75.0...v0.75.1</a></p> <h2>v0.75.0</h2> <h2>What's Changed</h2> <ul> <li>Add anchor to link being displayed when using CLI by <a href="https://github.com/gabrieldemarmiesse"><code>@​gabrieldemarmiesse</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/654">gabrieldemarmiesse/python-on-whales#654</a></li> <li>Support bake remote 
definitions by <a href="https://github.com/rcwbr"><code>@​rcwbr</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/656">gabrieldemarmiesse/python-on-whales#656</a></li> <li>Add entrypoint option to docker.compose.run() (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/659">#659</a>) by <a href="https://github.com/www84"><code>@​www84</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/661">gabrieldemarmiesse/python-on-whales#661</a></li> <li>Add volumes option to docker.compose.run() (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/662">#662</a>) by <a href="https://github.com/www84"><code>@​www84</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/663">gabrieldemarmiesse/python-on-whales#663</a></li> <li>Add envs option to docker.compose.run() (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/664">#664</a>) by <a href="https://github.com/www84"><code>@​www84</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/665">gabrieldemarmiesse/python-on-whales#665</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/rcwbr"><code>@​rcwbr</code></a> made their first contribution in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/656">gabrieldemarmiesse/python-on-whales#656</a></li> <li><a href="https://github.com/www84"><code>@​www84</code></a> made their first contribution in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/661">gabrieldemarmiesse/python-on-whales#661</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/gabrieldemarmiesse/python-on-whales/compare/v0.74.0...v0.75.0">https://github.com/gabrieldemarmiesse/python-on-whales/compare/v0.74.0...v0.75.0</a></p> </blockquote> </details> <details> 
<summary>Commits</summary> <ul> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/9621e92614cc60f17203f0628440d570d060cded"><code>9621e92</code></a> bump version to 0.75.1</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/c3b82f3ebbe59037b97476479f47b66d52b734d9"><code>c3b82f3</code></a> Work around setuptools license file metadata issue (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/666">#666</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/aa5586b2fd9539711625735cb77678b9e71eda81"><code>aa5586b</code></a> Bump version to 0.75.0</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/63120611d2506eccd386d7eee3df2cc36d682b53"><code>6312061</code></a> Add envs option to docker.compose.run() (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/664">#664</a>) (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/665">#665</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/406eea4c7dfa457d608bed7cf0d4758f16c530ba"><code>406eea4</code></a> Add volumes option to docker.compose.run() (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/663">#663</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/86804eea508b7a38c9ce76d07baf8b36a95a9ff4"><code>86804ee</code></a> Add entrypoint option to docker.compose.run() (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/659">#659</a>) (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/661">#661</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/07fe32a17239b1626a9acbc5d61dea0d6cfe4188"><code>07fe32a</code></a> Update comment in python-publish.yml</li> <li><a 
href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/b2ff8654488378d913ce900b4e526b4b4d3ed6a7"><code>b2ff865</code></a> Support bake remote definitions (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/656">#656</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/a2f95b07dac7084f4bf1e30207fcbf2823e36df9"><code>a2f95b0</code></a> Add anchor to link being displayed when using CLI (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/654">#654</a>)</li> <li>See full diff in <a href="https://github.com/gabrieldemarmiesse/python-on-whales/compare/v0.74.0...v0.75.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=python-on-whales&package-manager=pip&previous-version=0.74.0&new-version=0.75.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 53c49012e5c..cd83de715ab 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -182,7 +182,7 @@ pytest-xdist==3.6.1 # via -r requirements/test.in python-dateutil==2.9.0.post0 # via freezegun -python-on-whales==0.74.0 +python-on-whales==0.75.1 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index c6461895361..5e7dd758f75 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -177,7 +177,7 @@ pytest-xdist==3.6.1 # via -r requirements/test.in python-dateutil==2.9.0.post0 # via freezegun -python-on-whales==0.74.0 +python-on-whales==0.75.1 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 8e330c63da2..47b42f0425f 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -74,7 +74,7 @@ pytest-mock==3.14.0 # via -r 
requirements/lint.in python-dateutil==2.9.0.post0 # via freezegun -python-on-whales==0.74.0 +python-on-whales==0.75.1 # via -r requirements/lint.in pyyaml==6.0.2 # via pre-commit diff --git a/requirements/test.txt b/requirements/test.txt index 00beee69f1e..4d101628f08 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -100,7 +100,7 @@ pytest-xdist==3.6.1 # via -r requirements/test.in python-dateutil==2.9.0.post0 # via freezegun -python-on-whales==0.74.0 +python-on-whales==0.75.1 # via -r requirements/test.in re-assert==1.1.0 # via -r requirements/test.in From c73c1db081a352bbcbe1300c7e7e9c0820ebf180 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 20 Jan 2025 21:27:08 +0000 Subject: [PATCH 1156/1511] Bump virtualenv from 20.29.0 to 20.29.1 (#10335) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [virtualenv](https://github.com/pypa/virtualenv) from 20.29.0 to 20.29.1. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pypa/virtualenv/releases">virtualenv's releases</a>.</em></p> <blockquote> <h2>20.29.1</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>release 20.29.0 by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2824">pypa/virtualenv#2824</a></li> <li>Simplify Solution to --python command-line flag precedence by <a href="https://github.com/DK96-OS"><code>@​DK96-OS</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2826">pypa/virtualenv#2826</a></li> <li>Change PyInfo cache versioning mechanism by <a href="https://github.com/robsdedude"><code>@​robsdedude</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2827">pypa/virtualenv#2827</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/DK96-OS"><code>@​DK96-OS</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/virtualenv/pull/2826">pypa/virtualenv#2826</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pypa/virtualenv/compare/20.29.0...20.29.1">https://github.com/pypa/virtualenv/compare/20.29.0...20.29.1</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/virtualenv/blob/main/docs/changelog.rst">virtualenv's changelog</a>.</em></p> <blockquote> <h2>v20.29.1 (2025-01-17)</h2> <p>Bugfixes - 20.29.1</p> <pre><code>- Fix PyInfo cache incompatbility warnings - by :user:`robsdedude`. 
(:issue:`2827`) </code></pre> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/virtualenv/commit/e07c0c370e136b16d249aaee30fb4c293041bdcf"><code>e07c0c3</code></a> release 20.29.1</li> <li><a href="https://github.com/pypa/virtualenv/commit/61bee9fddf2a17d656a6c43645c4544b2eed7df3"><code>61bee9f</code></a> Add changelog for PyInfo fix</li> <li><a href="https://github.com/pypa/virtualenv/commit/fd0dc85b3b81f0c554c8fc3ecc8e62ee28b38f75"><code>fd0dc85</code></a> Change PyInfo cache versioning mechanism (<a href="https://redirect.github.com/pypa/virtualenv/issues/2827">#2827</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/56ef466dccdd97271a977736c26e81a47ef8ec56"><code>56ef466</code></a> Merge pull request <a href="https://redirect.github.com/pypa/virtualenv/issues/2826">#2826</a> from DK96-OS/cli-precedence-2285</li> <li><a href="https://github.com/pypa/virtualenv/commit/caf03a268e03a3453aa4058ec764b46370f0d543"><code>caf03a2</code></a> Merge pull request <a href="https://redirect.github.com/pypa/virtualenv/issues/2824">#2824</a> from pypa/release-20.29.0</li> <li>See full diff in <a href="https://github.com/pypa/virtualenv/compare/20.29.0...20.29.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=virtualenv&package-manager=pip&previous-version=20.29.0&new-version=20.29.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index cd83de715ab..6ab80eb2b4e 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -270,7 +270,7 @@ uvloop==0.21.0 ; platform_system != "Windows" # -r requirements/lint.in 
valkey==6.0.2 # via -r requirements/lint.in -virtualenv==20.29.0 +virtualenv==20.29.1 # via pre-commit wait-for-it==2.3.0 # via -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 5e7dd758f75..acb03f54aa8 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -261,7 +261,7 @@ uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpytho # -r requirements/lint.in valkey==6.0.2 # via -r requirements/lint.in -virtualenv==20.29.0 +virtualenv==20.29.1 # via pre-commit wait-for-it==2.3.0 # via -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 47b42f0425f..510b34d814e 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -102,5 +102,5 @@ uvloop==0.21.0 ; platform_system != "Windows" # via -r requirements/lint.in valkey==6.0.2 # via -r requirements/lint.in -virtualenv==20.29.0 +virtualenv==20.29.1 # via pre-commit From 9fc9ebc13dae1782bb1b3feac6a202f43c6068b5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 20 Jan 2025 21:37:21 +0000 Subject: [PATCH 1157/1511] Bump pydantic from 2.10.4 to 2.10.5 (#10319) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pydantic](https://github.com/pydantic/pydantic) from 2.10.4 to 2.10.5. 
<details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pydantic/pydantic/blob/main/HISTORY.md">pydantic's changelog</a>.</em></p> <blockquote> <h2>v2.10.5 (2025-01-08)</h2> <p><a href="https://github.com/pydantic/pydantic/releases/tag/v2.10.5">GitHub release</a></p> <h3>What's Changed</h3> <ul> <li>Remove custom MRO implementation of Pydantic models by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11184">#11184</a></li> <li>Fix URL serialization for unions by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11233">#11233</a></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pydantic/pydantic/commit/5d34efda82895b8697649e20616aea385d769eaf"><code>5d34efd</code></a> Prepare release v2.10.5 (<a href="https://redirect.github.com/pydantic/pydantic/issues/11237">#11237</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/6e585f925e25f91f365ae6ad6c910a667f9d78e9"><code>6e585f9</code></a> Fix url serialization for unions (<a href="https://redirect.github.com/pydantic/pydantic/issues/11233">#11233</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/5a22e026084044acbf6f24e0760d9903be0bfa5a"><code>5a22e02</code></a> Remove custom MRO implementation of Pydantic models (<a href="https://redirect.github.com/pydantic/pydantic/issues/11195">#11195</a>)</li> <li>See full diff in <a href="https://github.com/pydantic/pydantic/compare/v2.10.4...v2.10.5">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pydantic&package-manager=pip&previous-version=2.10.4&new-version=2.10.5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 6ab80eb2b4e..f79b6a3ce62 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -142,7 +142,7 @@ pycares==4.5.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.10.4 +pydantic==2.10.5 # via python-on-whales pydantic-core==2.27.2 # via pydantic diff --git a/requirements/dev.txt b/requirements/dev.txt index acb03f54aa8..bc61e753db9 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -139,7 +139,7 @@ pycares==4.5.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.10.4 +pydantic==2.10.5 # via python-on-whales pydantic-core==2.27.2 # via pydantic diff --git a/requirements/lint.txt b/requirements/lint.txt index 510b34d814e..8c20d5baa18 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -57,7 +57,7 @@ pycares==4.5.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.10.4 +pydantic==2.10.5 # via python-on-whales pydantic-core==2.27.2 
# via pydantic diff --git a/requirements/test.txt b/requirements/test.txt index 4d101628f08..d4f6156ff04 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -77,7 +77,7 @@ pycares==4.5.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.10.4 +pydantic==2.10.5 # via python-on-whales pydantic-core==2.27.2 # via pydantic From bc9abe475c9c9c5f45336ef7a4c612f606149175 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 21 Jan 2025 02:56:57 +0000 Subject: [PATCH 1158/1511] =?UTF-8?q?[PR=20#10304/b6ffb1d1=20backport][3.1?= =?UTF-8?q?1]=20Add=20aiohttp-openmetrics=20to=20list=20of=20third=20party?= =?UTF-8?q?=20aiohttp-related=20python=E2=80=A6=20(#10305)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **This is a backport of PR #10304 as merged into master (b6ffb1d1a3b710c600f06c9d21cf62f75d324767).** Co-authored-by: Jelmer Vernooij <jelmer@jelmer.uk> --- CHANGES/10304.doc.rst | 1 + docs/third_party.rst | 3 +++ 2 files changed, 4 insertions(+) create mode 100644 CHANGES/10304.doc.rst diff --git a/CHANGES/10304.doc.rst b/CHANGES/10304.doc.rst new file mode 100644 index 00000000000..cedac3ef881 --- /dev/null +++ b/CHANGES/10304.doc.rst @@ -0,0 +1 @@ +Added ``aiohttp-openmetrics`` to list of third-party libraries -- by :user:`jelmer`. diff --git a/docs/third_party.rst b/docs/third_party.rst index e8095c7f09d..145a505a5de 100644 --- a/docs/third_party.rst +++ b/docs/third_party.rst @@ -305,3 +305,6 @@ ask to raise the status. - `aiohttp-asgi-connector <https://github.com/thearchitector/aiohttp-asgi-connector>`_ An aiohttp connector for using a ``ClientSession`` to interface directly with separate ASGI applications. + +- `aiohttp-openmetrics <https://github.com/jelmer/aiohttp-openmetrics>`_ + An aiohttp middleware for exposing Prometheus metrics. 
From ac895b8830a91dec625edf5b1293b8884c5f8956 Mon Sep 17 00:00:00 2001 From: Cycloctane <Cycloctane@outlook.com> Date: Tue, 21 Jan 2025 10:57:27 +0800 Subject: [PATCH 1159/1511] [PR #10300/3d06cc1][3.11] Use kwargs in aiohttp.client.request (#10303) (cherry picked from commit 3d06cc14ad82ce4c207449b593955c23bd5e4d88) --- CHANGES/10300.feature.rst | 2 + aiohttp/client.py | 178 ++++++++++++++------------------ docs/spelling_wordlist.txt | 2 + tests/test_client_functional.py | 16 +++ 4 files changed, 96 insertions(+), 102 deletions(-) create mode 100644 CHANGES/10300.feature.rst diff --git a/CHANGES/10300.feature.rst b/CHANGES/10300.feature.rst new file mode 100644 index 00000000000..3632c3d41a7 --- /dev/null +++ b/CHANGES/10300.feature.rst @@ -0,0 +1,2 @@ +Update :py:func:`~aiohttp.request` to make it accept ``_RequestOptions`` kwargs. +-- by :user:`Cycloctane`. diff --git a/aiohttp/client.py b/aiohttp/client.py index 3b1dc08544f..7c788e825eb 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -1471,106 +1471,80 @@ async def __aexit__( await self._session.close() -def request( - method: str, - url: StrOrURL, - *, - params: Query = None, - data: Any = None, - json: Any = None, - headers: Optional[LooseHeaders] = None, - skip_auto_headers: Optional[Iterable[str]] = None, - auth: Optional[BasicAuth] = None, - allow_redirects: bool = True, - max_redirects: int = 10, - compress: Optional[str] = None, - chunked: Optional[bool] = None, - expect100: bool = False, - raise_for_status: Optional[bool] = None, - read_until_eof: bool = True, - proxy: Optional[StrOrURL] = None, - proxy_auth: Optional[BasicAuth] = None, - timeout: Union[ClientTimeout, object] = sentinel, - cookies: Optional[LooseCookies] = None, - version: HttpVersion = http.HttpVersion11, - connector: Optional[BaseConnector] = None, - read_bufsize: Optional[int] = None, - loop: Optional[asyncio.AbstractEventLoop] = None, - max_line_size: int = 8190, - max_field_size: int = 8190, -) -> 
_SessionRequestContextManager: - """Constructs and sends a request. - - Returns response object. - method - HTTP method - url - request url - params - (optional) Dictionary or bytes to be sent in the query - string of the new request - data - (optional) Dictionary, bytes, or file-like object to - send in the body of the request - json - (optional) Any json compatible python object - headers - (optional) Dictionary of HTTP Headers to send with - the request - cookies - (optional) Dict object to send with the request - auth - (optional) BasicAuth named tuple represent HTTP Basic Auth - auth - aiohttp.helpers.BasicAuth - allow_redirects - (optional) If set to False, do not follow - redirects - version - Request HTTP version. - compress - Set to True if request has to be compressed - with deflate encoding. - chunked - Set to chunk size for chunked transfer encoding. - expect100 - Expect 100-continue response from server. - connector - BaseConnector sub-class instance to support - connection pooling. - read_until_eof - Read response until eof if response - does not have Content-Length header. - loop - Optional event loop. - timeout - Optional ClientTimeout settings structure, 5min - total timeout by default. 
- Usage:: - >>> import aiohttp - >>> resp = await aiohttp.request('GET', 'http://python.org/') - >>> resp - <ClientResponse(python.org/) [200]> - >>> data = await resp.read() - """ - connector_owner = False - if connector is None: - connector_owner = True - connector = TCPConnector(loop=loop, force_close=True) - - session = ClientSession( - loop=loop, - cookies=cookies, - version=version, - timeout=timeout, - connector=connector, - connector_owner=connector_owner, - ) +if sys.version_info >= (3, 11) and TYPE_CHECKING: - return _SessionRequestContextManager( - session._request( - method, - url, - params=params, - data=data, - json=json, - headers=headers, - skip_auto_headers=skip_auto_headers, - auth=auth, - allow_redirects=allow_redirects, - max_redirects=max_redirects, - compress=compress, - chunked=chunked, - expect100=expect100, - raise_for_status=raise_for_status, - read_until_eof=read_until_eof, - proxy=proxy, - proxy_auth=proxy_auth, - read_bufsize=read_bufsize, - max_line_size=max_line_size, - max_field_size=max_field_size, - ), - session, - ) + def request( + method: str, + url: StrOrURL, + *, + version: HttpVersion = http.HttpVersion11, + connector: Optional[BaseConnector] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + **kwargs: Unpack[_RequestOptions], + ) -> _SessionRequestContextManager: ... + +else: + + def request( + method: str, + url: StrOrURL, + *, + version: HttpVersion = http.HttpVersion11, + connector: Optional[BaseConnector] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + **kwargs: Any, + ) -> _SessionRequestContextManager: + """Constructs and sends a request. + + Returns response object. 
+ method - HTTP method + url - request url + params - (optional) Dictionary or bytes to be sent in the query + string of the new request + data - (optional) Dictionary, bytes, or file-like object to + send in the body of the request + json - (optional) Any json compatible python object + headers - (optional) Dictionary of HTTP Headers to send with + the request + cookies - (optional) Dict object to send with the request + auth - (optional) BasicAuth named tuple represent HTTP Basic Auth + auth - aiohttp.helpers.BasicAuth + allow_redirects - (optional) If set to False, do not follow + redirects + version - Request HTTP version. + compress - Set to True if request has to be compressed + with deflate encoding. + chunked - Set to chunk size for chunked transfer encoding. + expect100 - Expect 100-continue response from server. + connector - BaseConnector sub-class instance to support + connection pooling. + read_until_eof - Read response until eof if response + does not have Content-Length header. + loop - Optional event loop. + timeout - Optional ClientTimeout settings structure, 5min + total timeout by default. + Usage:: + >>> import aiohttp + >>> async with aiohttp.request('GET', 'http://python.org/') as resp: + ... print(resp) + ... 
data = await resp.read() + <ClientResponse(https://www.python.org/) [200 OK]> + """ + connector_owner = False + if connector is None: + connector_owner = True + connector = TCPConnector(loop=loop, force_close=True) + + session = ClientSession( + loop=loop, + cookies=kwargs.pop("cookies", None), + version=version, + timeout=kwargs.pop("timeout", sentinel), + connector=connector, + connector_owner=connector_owner, + ) + + return _SessionRequestContextManager( + session._request(method, url, **kwargs), + session, + ) diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index c4e10b44987..3e41af824e4 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -13,6 +13,7 @@ app app’s apps arg +args Arsenic async asyncio @@ -169,6 +170,7 @@ keepaliving kib KiB kwarg +kwargs latin lifecycle linux diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 05af9ae25ad..ba75e8e93c6 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -3382,6 +3382,22 @@ async def handler(request: web.Request) -> web.Response: await server.close() +async def test_aiohttp_request_ssl( + aiohttp_server: AiohttpServer, + ssl_ctx: ssl.SSLContext, + client_ssl_ctx: ssl.SSLContext, +) -> None: + async def handler(request: web.Request) -> web.Response: + return web.Response() + + app = web.Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app, ssl=ssl_ctx) + + async with aiohttp.request("GET", server.make_url("/"), ssl=client_ssl_ctx) as resp: + assert resp.status == 200 + + async def test_yield_from_in_session_request(aiohttp_client: AiohttpClient) -> None: # a test for backward compatibility with yield from syntax async def handler(request): From 76a026f9113502e5b77832771918d71907f153be Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 21 Jan 2025 10:53:18 +0000 Subject: [PATCH 1160/1511] Bump pre-commit from 4.0.1 
to 4.1.0 (#10346) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pre-commit](https://github.com/pre-commit/pre-commit) from 4.0.1 to 4.1.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pre-commit/pre-commit/releases">pre-commit's releases</a>.</em></p> <blockquote> <h2>pre-commit v4.1.0</h2> <h3>Features</h3> <ul> <li>Add <code>language: julia</code>. <ul> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3348">#3348</a> PR by <a href="https://github.com/fredrikekre"><code>@​fredrikekre</code></a>.</li> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/2689">#2689</a> issue <a href="https://github.com/jmuchovej"><code>@​jmuchovej</code></a>.</li> </ul> </li> </ul> <h3>Fixes</h3> <ul> <li>Disable automatic toolchain switching for <code>language: golang</code>. <ul> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3304">#3304</a> PR by <a href="https://github.com/AleksaC"><code>@​AleksaC</code></a>.</li> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3300">#3300</a> issue by <a href="https://github.com/AleksaC"><code>@​AleksaC</code></a>.</li> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3149">#3149</a> issue by <a href="https://github.com/nijel"><code>@​nijel</code></a>.</li> </ul> </li> <li>Fix <code>language: r</code> installation when initiated by RStudio. 
<ul> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3389">#3389</a> PR by <a href="https://github.com/lorenzwalthert"><code>@​lorenzwalthert</code></a>.</li> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3385">#3385</a> issue by <a href="https://github.com/lorenzwalthert"><code>@​lorenzwalthert</code></a>.</li> </ul> </li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pre-commit/pre-commit/blob/main/CHANGELOG.md">pre-commit's changelog</a>.</em></p> <blockquote> <h1>4.1.0 - 2025-01-20</h1> <h3>Features</h3> <ul> <li>Add <code>language: julia</code>. <ul> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3348">#3348</a> PR by <a href="https://github.com/fredrikekre"><code>@​fredrikekre</code></a>.</li> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/2689">#2689</a> issue <a href="https://github.com/jmuchovej"><code>@​jmuchovej</code></a>.</li> </ul> </li> </ul> <h3>Fixes</h3> <ul> <li>Disable automatic toolchain switching for <code>language: golang</code>. <ul> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3304">#3304</a> PR by <a href="https://github.com/AleksaC"><code>@​AleksaC</code></a>.</li> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3300">#3300</a> issue by <a href="https://github.com/AleksaC"><code>@​AleksaC</code></a>.</li> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3149">#3149</a> issue by <a href="https://github.com/nijel"><code>@​nijel</code></a>.</li> </ul> </li> <li>Fix <code>language: r</code> installation when initiated by RStudio. 
<ul> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3389">#3389</a> PR by <a href="https://github.com/lorenzwalthert"><code>@​lorenzwalthert</code></a>.</li> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3385">#3385</a> issue by <a href="https://github.com/lorenzwalthert"><code>@​lorenzwalthert</code></a>.</li> </ul> </li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pre-commit/pre-commit/commit/b152e922ef11a97efe22ca7dc4f90011f0d1711c"><code>b152e92</code></a> v4.1.0</li> <li><a href="https://github.com/pre-commit/pre-commit/commit/c3125a4d36912c768bfa5dcb2b79d6f4179d79ed"><code>c3125a4</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/pre-commit/issues/3389">#3389</a> from lorenzwalthert/dev-always-unset-renv</li> <li><a href="https://github.com/pre-commit/pre-commit/commit/c2c061cf63e00a3ff8c88a9054c47e96a36f2daa"><code>c2c061c</code></a> fix: ensure env patch is applied for vanilla emulation</li> <li><a href="https://github.com/pre-commit/pre-commit/commit/cd429db5e2172e51099716efd58a15e76a1719a7"><code>cd429db</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/pre-commit/issues/3382">#3382</a> from pre-commit/pre-commit-ci-update-config</li> <li><a href="https://github.com/pre-commit/pre-commit/commit/9b9f8e254d46da65c8544244c423596d54260e24"><code>9b9f8e2</code></a> [pre-commit.ci] pre-commit autoupdate</li> <li><a href="https://github.com/pre-commit/pre-commit/commit/86300a4a7e5441aad007d83c1101d8a8eb767ad7"><code>86300a4</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/pre-commit/issues/3376">#3376</a> from pre-commit/r-gone</li> <li><a href="https://github.com/pre-commit/pre-commit/commit/77edad8455e88b403e055d2692c9545085cf3edb"><code>77edad8</code></a> install r on ubuntu runners</li> <li><a 
href="https://github.com/pre-commit/pre-commit/commit/18b393905e24c730eeb15754f6f275a9d27e396f"><code>18b3939</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/pre-commit/issues/3375">#3375</a> from pre-commit/dotnet-tests-ubuntu-latest</li> <li><a href="https://github.com/pre-commit/pre-commit/commit/31cb945ffb860f6f8176642d1a27af40eeec554d"><code>31cb945</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/pre-commit/issues/3374">#3374</a> from pre-commit/docker-image-tests-ubuntu-22-not-pre...</li> <li><a href="https://github.com/pre-commit/pre-commit/commit/28c3d81bd27fe5e62eead459c1963a582e763bd7"><code>28c3d81</code></a> update .net tests to use .net 8</li> <li>Additional commits viewable in <a href="https://github.com/pre-commit/pre-commit/compare/v4.0.1...v4.1.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pre-commit&package-manager=pip&previous-version=4.0.1&new-version=4.1.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index f79b6a3ce62..3ee444267f3 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -130,7 +130,7 @@ platformdirs==4.3.6 # via virtualenv pluggy==1.5.0 # via pytest 
-pre-commit==4.0.1 +pre-commit==4.1.0 # via -r requirements/lint.in propcache==0.2.1 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index bc61e753db9..ec9daddbe35 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -127,7 +127,7 @@ platformdirs==4.3.6 # via virtualenv pluggy==1.5.0 # via pytest -pre-commit==4.0.1 +pre-commit==4.1.0 # via -r requirements/lint.in propcache==0.2.1 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index 8c20d5baa18..e2d26683d30 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -51,7 +51,7 @@ platformdirs==4.3.6 # via virtualenv pluggy==1.5.0 # via pytest -pre-commit==4.0.1 +pre-commit==4.1.0 # via -r requirements/lint.in pycares==4.5.0 # via aiodns From 06e4d51e59cea31e5c9e923dc1c2313744a16c27 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 21 Jan 2025 10:58:16 +0000 Subject: [PATCH 1161/1511] Bump identify from 2.6.5 to 2.6.6 (#10347) Bumps [identify](https://github.com/pre-commit/identify) from 2.6.5 to 2.6.6. 
<details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pre-commit/identify/commit/5bc1845addd0f9600fe719cc27464bde5c19f839"><code>5bc1845</code></a> v2.6.6</li> <li><a href="https://github.com/pre-commit/identify/commit/7b82a63b70054697a4c17598872ada429a424835"><code>7b82a63</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/identify/issues/500">#500</a> from vlotorev/patch-1</li> <li><a href="https://github.com/pre-commit/identify/commit/26e13ca264d23a4c759eee395311bf84dde5bf75"><code>26e13ca</code></a> Add '.asm' file extension as 'asm' filetype</li> <li><a href="https://github.com/pre-commit/identify/commit/f09385549fc272df4e28ad8b771ea62bb32c49ca"><code>f093855</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/identify/issues/499">#499</a> from pre-commit/pre-commit-ci-update-config</li> <li><a href="https://github.com/pre-commit/identify/commit/e9ef72e442a7eb1c835b0451340333d53d04e9df"><code>e9ef72e</code></a> [pre-commit.ci] pre-commit autoupdate</li> <li>See full diff in <a href="https://github.com/pre-commit/identify/compare/v2.6.5...v2.6.6">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=identify&package-manager=pip&previous-version=2.6.5&new-version=2.6.6)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 3ee444267f3..7a3e1866f4e 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -82,7 +82,7 @@ gidgethub==5.3.0 # via cherry-picker gunicorn==23.0.0 # via -r 
requirements/base.in -identify==2.6.5 +identify==2.6.6 # via pre-commit idna==3.3 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index ec9daddbe35..cf3b7b2f7e5 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -80,7 +80,7 @@ gidgethub==5.3.0 # via cherry-picker gunicorn==23.0.0 # via -r requirements/base.in -identify==2.6.5 +identify==2.6.6 # via pre-commit idna==3.4 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index e2d26683d30..5a9793e11e5 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -29,7 +29,7 @@ filelock==3.16.1 # via virtualenv freezegun==1.5.1 # via -r requirements/lint.in -identify==2.6.5 +identify==2.6.6 # via pre-commit idna==3.7 # via trustme From 3c5de4610175f2a00139317f8e70c7fd2b1675b7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 22 Jan 2025 10:55:09 +0000 Subject: [PATCH 1162/1511] Bump filelock from 3.16.1 to 3.17.0 (#10351) Bumps [filelock](https://github.com/tox-dev/py-filelock) from 3.16.1 to 3.17.0. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/tox-dev/py-filelock/releases">filelock's releases</a>.</em></p> <blockquote> <h2>Drop 3.8</h2> <!-- raw HTML omitted --> <p><strong>Full Changelog</strong>: <a href="https://github.com/tox-dev/filelock/compare/3.16.1...3.17.0">https://github.com/tox-dev/filelock/compare/3.16.1...3.17.0</a></p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/tox-dev/filelock/commit/5eb02b2580122e6f42fb8b7a4e08cededcb22b1c"><code>5eb02b2</code></a> Drop 3.8 (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/388">#388</a>)</li> <li><a href="https://github.com/tox-dev/filelock/commit/23a984803c969a75530ba8c2c5fb5a6b73686762"><code>23a9848</code></a> Bump astral-sh/setup-uv from 4 to 5 (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/387">#387</a>)</li> <li><a href="https://github.com/tox-dev/filelock/commit/79a958358f79ea3ae0ae1bc7ee7ce6a96a5ef407"><code>79a9583</code></a> Bump pypa/gh-action-pypi-publish from 1.12.2 to 1.12.3 (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/386">#386</a>)</li> <li><a href="https://github.com/tox-dev/filelock/commit/46dddc89672d1646156c61c4db0a664ba4886d2f"><code>46dddc8</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/385">#385</a>)</li> <li><a href="https://github.com/tox-dev/filelock/commit/57f488ff8fdc2193572efe102408fb63cfefe4e4"><code>57f488f</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/383">#383</a>)</li> <li><a href="https://github.com/tox-dev/filelock/commit/d4feb5a15974754a8fcd7893c196f229652a05ca"><code>d4feb5a</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/382">#382</a>)</li> <li><a 
href="https://github.com/tox-dev/filelock/commit/fb2a4e26c6b391e78ca35d0959e854e9c174fc2b"><code>fb2a4e2</code></a> Bump astral-sh/setup-uv from 3 to 4 (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/381">#381</a>)</li> <li><a href="https://github.com/tox-dev/filelock/commit/07c2840b805d1dd2400ee1b6b193dd3c9bf1f7c0"><code>07c2840</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/379">#379</a>)</li> <li><a href="https://github.com/tox-dev/filelock/commit/ee4c7ba1c863509a4fa9a8a992c7ec41747e1f96"><code>ee4c7ba</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/377">#377</a>)</li> <li><a href="https://github.com/tox-dev/filelock/commit/2e1a1b228dcf9ab82d93be164573def7c630ae54"><code>2e1a1b2</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/374">#374</a>)</li> <li>Additional commits viewable in <a href="https://github.com/tox-dev/py-filelock/compare/3.16.1...3.17.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=filelock&package-manager=pip&previous-version=3.16.1&new-version=3.17.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 7a3e1866f4e..aed5148bd29 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -68,7 +68,7 @@ exceptiongroup==1.2.2 # via pytest execnet==2.1.1 # via pytest-xdist 
-filelock==3.16.1 +filelock==3.17.0 # via virtualenv freezegun==1.5.1 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index cf3b7b2f7e5..1064cfc151b 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -66,7 +66,7 @@ exceptiongroup==1.2.2 # via pytest execnet==2.1.1 # via pytest-xdist -filelock==3.16.1 +filelock==3.17.0 # via virtualenv freezegun==1.5.1 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index 5a9793e11e5..ad34c33a885 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -25,7 +25,7 @@ distlib==0.3.9 # via virtualenv exceptiongroup==1.2.2 # via pytest -filelock==3.16.1 +filelock==3.17.0 # via virtualenv freezegun==1.5.1 # via -r requirements/lint.in From a120800a6c27a8cb1b420cddf03fe53473472ade Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 22 Jan 2025 14:22:45 +0000 Subject: [PATCH 1163/1511] [PR #10330/d3dc087b backport][3.12] Start building riscv64 platform wheels in CI/CD (#10354) **This is a backport of PR #10330 as merged into master (d3dc087b8e9aa665d47045550e5d9f2eddf8f512).** Co-authored-by: E Shattow <eshattow@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 2 ++ CHANGES/10330.packaging.rst | 1 + 2 files changed, 3 insertions(+) create mode 100644 CHANGES/10330.packaging.rst diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 93d1ddded65..d471f5658f6 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -355,6 +355,8 @@ jobs: qemu: aarch64 - os: ubuntu qemu: ppc64le + - os: ubuntu + qemu: riscv64 - os: ubuntu qemu: s390x steps: diff --git a/CHANGES/10330.packaging.rst b/CHANGES/10330.packaging.rst new file mode 100644 index 00000000000..c159cf3a57d --- /dev/null +++ b/CHANGES/10330.packaging.rst @@ -0,0 +1 @@ +Started publishing ``riscv64`` wheels -- by :user:`eshattow`. 
From 7bac47eff1b9e29e71577f982061094edad5a02b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 Jan 2025 10:40:29 +0000 Subject: [PATCH 1164/1511] Bump attrs from 24.3.0 to 25.1.0 (#10364) Bumps [attrs](https://github.com/sponsors/hynek) from 24.3.0 to 25.1.0. <details> <summary>Commits</summary> <ul> <li>See full diff in <a href="https://github.com/sponsors/hynek/commits">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=attrs&package-manager=pip&previous-version=24.3.0&new-version=25.1.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 990179d8b17..ddd4e082f20 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -12,7 +12,7 @@ aiosignal==1.3.2 # via -r requirements/runtime-deps.in async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in -attrs==24.3.0 +attrs==25.1.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index aed5148bd29..4d2e2fa5c31 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -22,7 +22,7 @@ async-timeout==5.0.1 ; python_version < "3.11" # via # -r requirements/runtime-deps.in # valkey -attrs==24.3.0 +attrs==25.1.0 # via -r requirements/runtime-deps.in babel==2.16.0 # via sphinx diff --git a/requirements/dev.txt b/requirements/dev.txt index 
1064cfc151b..e3de751d397 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -22,7 +22,7 @@ async-timeout==5.0.1 ; python_version < "3.11" # via # -r requirements/runtime-deps.in # valkey -attrs==24.3.0 +attrs==25.1.0 # via -r requirements/runtime-deps.in babel==2.16.0 # via sphinx diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 7fb7ffb3589..fcc9a417a1a 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -12,7 +12,7 @@ aiosignal==1.3.2 # via -r requirements/runtime-deps.in async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in -attrs==24.3.0 +attrs==25.1.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index d4f6156ff04..691b1c74b8f 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -14,7 +14,7 @@ annotated-types==0.7.0 # via pydantic async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in -attrs==24.3.0 +attrs==25.1.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in From b5c141570ed20ed35c755b46d7d2f1737d1ba6bc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 Jan 2025 10:44:50 +0000 Subject: [PATCH 1165/1511] Bump pip from 24.3.1 to 25.0 (#10365) Bumps [pip](https://github.com/pypa/pip) from 24.3.1 to 25.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/pip/blob/main/NEWS.rst">pip's changelog</a>.</em></p> <blockquote> <h1>25.0 (2025-01-26)</h1> <h2>Deprecations and Removals</h2> <ul> <li>Deprecate the <code>no-python-version-warning</code> flag as it has long done nothing since Python 2 support was removed in pip 21.0. 
(<code>[#13154](https://github.com/pypa/pip/issues/13154) <https://github.com/pypa/pip/issues/13154></code>_)</li> </ul> <h2>Features</h2> <ul> <li>Prefer to display :pep:<code>639</code> <code>License-Expression</code> in <code>pip show</code> if metadata version is at least 2.4. (<code>[#13112](https://github.com/pypa/pip/issues/13112) <https://github.com/pypa/pip/issues/13112></code>_)</li> <li>Support :pep:<code>639</code> <code>License-Expression</code> and <code>License-File</code> metadata fields in JSON output. <code>pip inspect</code> and <code>pip install --report</code> now emit <code>license_expression</code> and <code>license_file</code> fields in the <code>metadata</code> object, if the corresponding fields are present in the installed <code>METADATA</code> file. (<code>[#13134](https://github.com/pypa/pip/issues/13134) <https://github.com/pypa/pip/issues/13134></code>_)</li> <li>Files in the network cache will inherit the read/write permissions of pip's cache directory (in addition to the current user retaining read/write access). This enables a single cache to be shared among multiple users. (<code>[#11012](https://github.com/pypa/pip/issues/11012) <https://github.com/pypa/pip/issues/11012></code>_)</li> <li>Return the size, along with the number, of files cleared on <code>pip cache purge</code> and <code>pip cache remove</code> (<code>[#12176](https://github.com/pypa/pip/issues/12176) <https://github.com/pypa/pip/issues/12176></code>_)</li> <li>Cache <code>python-requires</code> checks while filtering potential installation candidates. (<code>[#13128](https://github.com/pypa/pip/issues/13128) <https://github.com/pypa/pip/issues/13128></code>_)</li> <li>Optimize package collection by avoiding unnecessary URL parsing and other processing. 
(<code>[#13132](https://github.com/pypa/pip/issues/13132) <https://github.com/pypa/pip/issues/13132></code>_)</li> </ul> <h2>Bug Fixes</h2> <ul> <li>Reorder the encoding detection when decoding a requirements file, relying on UTF-8 over the locale encoding by default, matching the documented behaviour. (<code>[#12771](https://github.com/pypa/pip/issues/12771) <https://github.com/pypa/pip/issues/12771></code>_)</li> <li>The pip version self check is disabled on <code>EXTERNALLY-MANAGED</code> environments. (<code>[#11820](https://github.com/pypa/pip/issues/11820) <https://github.com/pypa/pip/issues/11820></code>_)</li> <li>Fix a security bug allowing a specially crafted wheel to execute code during installation. (<code>[#13079](https://github.com/pypa/pip/issues/13079) <https://github.com/pypa/pip/issues/13079></code>_)</li> <li>The inclusion of <code>packaging</code> 24.2 changes how pre-release specifiers with <code><</code> and <code>></code> behave. Including a pre-release version with these specifiers now implies accepting pre-releases (e.g., <code><2.0dev</code> can include <code>1.0rc1</code>). To avoid implying pre-releases, avoid specifying them (e.g., use <code><2.0</code>). The exception is <code>!=</code>, which never implies pre-releases. (<code>[#13163](https://github.com/pypa/pip/issues/13163) <https://github.com/pypa/pip/issues/13163></code>_)</li> <li>The <code>--cert</code> and <code>--client-cert</code> command-line options are now respected while installing build dependencies. Consequently, the private <code>_PIP_STANDALONE_CERT</code> environment variable is no longer used. (<code>[#5502](https://github.com/pypa/pip/issues/5502) <https://github.com/pypa/pip/issues/5502></code>_)</li> <li>The <code>--proxy</code> command-line option is now respected while installing build dependencies. 
(<code>[#6018](https://github.com/pypa/pip/issues/6018) <https://github.com/pypa/pip/issues/6018></code>_)</li> </ul> <h2>Vendored Libraries</h2> <ul> <li>Upgrade CacheControl to 0.14.1</li> <li>Upgrade idna to 3.10</li> <li>Upgrade msgpack to 1.1.0</li> <li>Upgrade packaging to 24.2</li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/pip/commit/f47b5874299848c688336ae7c8d69534013fe2c6"><code>f47b587</code></a> Bump for release</li> <li><a href="https://github.com/pypa/pip/commit/74a7f3335338712af44be95241daf62e756f27ec"><code>74a7f33</code></a> Update AUTHORS.txt</li> <li><a href="https://github.com/pypa/pip/commit/a008888a5b123e8d5e4667bdd21e4b42f3fc034c"><code>a008888</code></a> Merge pull request <a href="https://redirect.github.com/pypa/pip/issues/13171">#13171</a> from pypa/dependabot/github_actions/github-actions-...</li> <li><a href="https://github.com/pypa/pip/commit/d265fb7427c3ba4dbd10e4874a0bebea2e59350e"><code>d265fb7</code></a> Merge pull request <a href="https://redirect.github.com/pypa/pip/issues/13174">#13174</a> from ichard26/changelog</li> <li><a href="https://github.com/pypa/pip/commit/d35384ef91cb372a5223a01f980e5deb84c8fde5"><code>d35384e</code></a> Copyedit news entries before 25.0</li> <li><a href="https://github.com/pypa/pip/commit/adc4f9951b51b6a06e405b8960dd0c5f030f0fb5"><code>adc4f99</code></a> Reorder requirements file decoding (<a href="https://redirect.github.com/pypa/pip/issues/12795">#12795</a>)</li> <li><a href="https://github.com/pypa/pip/commit/40c42149a51a63e8416c047d5ddc0da1694387ea"><code>40c4214</code></a> Bump pypa/gh-action-pypi-publish in the github-actions group</li> <li><a href="https://github.com/pypa/pip/commit/6b0fb904803fbb3ce7da63966b2759407b6cd9dc"><code>6b0fb90</code></a> Merge pull request <a href="https://redirect.github.com/pypa/pip/issues/13048">#13048</a> from sbidoul/trusted-publisher-sbi</li> 
<li><a href="https://github.com/pypa/pip/commit/c7fb1e13ec79b1b48481ac245144c2b368e64f7d"><code>c7fb1e1</code></a> Merge pull request <a href="https://redirect.github.com/pypa/pip/issues/13145">#13145</a> from befeleme/pip-show-pep639</li> <li><a href="https://github.com/pypa/pip/commit/41c807c5938d269703c6ff2644fb3b7dc88eda4e"><code>41c807c</code></a> Show License-Expression if present in package metadata</li> <li>Additional commits viewable in <a href="https://github.com/pypa/pip/compare/24.3.1...25.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pip&package-manager=pip&previous-version=24.3.1&new-version=25.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 4d2e2fa5c31..32bfe2d3ea9 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -280,7 +280,7 @@ yarl==1.18.3 # via -r requirements/runtime-deps.in # The following packages are considered to be unsafe in a requirements file: -pip==24.3.1 +pip==25.0 # via pip-tools setuptools==75.8.0 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index e3de751d397..f45999ed5cf 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -271,7 +271,7 @@ yarl==1.18.3 # via -r requirements/runtime-deps.in # The following packages are considered to be unsafe in a requirements file: -pip==24.3.1 +pip==25.0 # via pip-tools setuptools==75.8.0 # via From c990568c8b29e5b677c71fde167ae03a7e93783b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 Jan 2025 10:57:02 +0000 Subject: [PATCH 1166/1511] Bump pydantic from 2.10.5 to 2.10.6 (#10357) MIME-Version: 1.0 
Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pydantic](https://github.com/pydantic/pydantic) from 2.10.5 to 2.10.6. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pydantic/pydantic/releases">pydantic's releases</a>.</em></p> <blockquote> <h2>v2.10.6 2025-01-23</h2> <h2>What's Changed</h2> <h3>Fixes</h3> <ul> <li>Fix JSON Schema reference collection with <code>'examples'</code> keys by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11325">#11325</a></li> <li>Fix url python serialization by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11331">#11331</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pydantic/pydantic/compare/v2.10.5...v2.10.6">https://github.com/pydantic/pydantic/compare/v2.10.5...v2.10.6</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pydantic/pydantic/blob/main/HISTORY.md">pydantic's changelog</a>.</em></p> <blockquote> <h2>v2.10.6 (2025-01-23)</h2> <p><a href="https://github.com/pydantic/pydantic/releases/tag/v2.10.6">GitHub release</a></p> <h3>What's Changed</h3> <h4>Fixes</h4> <ul> <li>Fix JSON Schema reference collection with <code>'examples'</code> keys by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11325">#11325</a></li> <li>Fix url python serialization by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11331">#11331</a></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pydantic/pydantic/commit/df05e69a8a3fb37628a0e3a33518ca0425334bc9"><code>df05e69</code></a> Bump version 
to v2.10.6 (<a href="https://redirect.github.com/pydantic/pydantic/issues/11334">#11334</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/416082625aed40ce341faf4b13e366f1ef51838d"><code>4160826</code></a> Fix url python serialization (<a href="https://redirect.github.com/pydantic/pydantic/issues/11331">#11331</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/f94e842692969168ff8ea7ecefa6815fff2883d8"><code>f94e842</code></a> Fix JSON Schema reference collection with <code>"examples"</code> keys (<a href="https://redirect.github.com/pydantic/pydantic/issues/11325">#11325</a>)</li> <li>See full diff in <a href="https://github.com/pydantic/pydantic/compare/v2.10.5...v2.10.6">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pydantic&package-manager=pip&previous-version=2.10.5&new-version=2.10.6)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 32bfe2d3ea9..0cd05bd7cb8 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -142,7 +142,7 @@ pycares==4.5.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.10.5 +pydantic==2.10.6 # via python-on-whales pydantic-core==2.27.2 # via pydantic diff --git a/requirements/dev.txt b/requirements/dev.txt index f45999ed5cf..4fed3549612 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -139,7 +139,7 @@ pycares==4.5.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.10.5 +pydantic==2.10.6 # via python-on-whales pydantic-core==2.27.2 # via pydantic diff --git a/requirements/lint.txt b/requirements/lint.txt index ad34c33a885..821cf156e76 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -57,7 +57,7 @@ pycares==4.5.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.10.5 +pydantic==2.10.6 # via python-on-whales pydantic-core==2.27.2 
# via pydantic diff --git a/requirements/test.txt b/requirements/test.txt index 691b1c74b8f..058eb3d0c72 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -77,7 +77,7 @@ pycares==4.5.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.10.5 +pydantic==2.10.6 # via python-on-whales pydantic-core==2.27.2 # via pydantic From db2a367b718e9228946c6e456333bc9e7e268e6d Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 29 Jan 2025 16:21:49 +0000 Subject: [PATCH 1167/1511] [PR #10366/3fb2c8df backport][3.12] Fix sdist make (#10368) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **This is a backport of PR #10366 as merged into master (3fb2c8dfcc8f52089cb48a1136d3e569d1ac73ff).** Co-authored-by: Michał Górny <mgorny@gentoo.org> --- CHANGES/10366.packaging | 2 ++ MANIFEST.in | 1 + Makefile | 3 +++ tools/gen.py | 2 +- 4 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 CHANGES/10366.packaging diff --git a/CHANGES/10366.packaging b/CHANGES/10366.packaging new file mode 100644 index 00000000000..f0e9f62095a --- /dev/null +++ b/CHANGES/10366.packaging @@ -0,0 +1,2 @@ +Added missing files to the source distribution to fix ``Makefile`` targets. +Added a ``cythonize-nodeps`` target to run Cython without invoking pip to install dependencies. 
diff --git a/MANIFEST.in b/MANIFEST.in index d7c5cef6aad..64cee139a1f 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -7,6 +7,7 @@ graft aiohttp graft docs graft examples graft tests +graft tools graft requirements recursive-include vendor * global-include aiohttp *.pyi diff --git a/Makefile b/Makefile index b0a3ef3226b..c6193fea9e4 100644 --- a/Makefile +++ b/Makefile @@ -81,6 +81,9 @@ generate-llhttp: .llhttp-gen .PHONY: cythonize cythonize: .install-cython $(PYXS:.pyx=.c) aiohttp/_websocket/reader_c.c +.PHONY: cythonize-nodeps +cythonize-nodeps: $(PYXS:.pyx=.c) aiohttp/_websocket/reader_c.c + .install-deps: .install-cython $(PYXS:.pyx=.c) aiohttp/_websocket/reader_c.c $(call to-hash,$(CYS) $(REQS)) @python -m pip install -r requirements/dev.in -c requirements/dev.txt @touch .install-deps diff --git a/tools/gen.py b/tools/gen.py index ab2b39a2df0..24fb71bdd9d 100755 --- a/tools/gen.py +++ b/tools/gen.py @@ -7,7 +7,7 @@ import multidict ROOT = pathlib.Path.cwd() -while ROOT.parent != ROOT and not (ROOT / ".git").exists(): +while ROOT.parent != ROOT and not (ROOT / "pyproject.toml").exists(): ROOT = ROOT.parent From 3ef5f00f4cb4b4f7b2d6080485b399207c6a525f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 31 Jan 2025 11:04:45 +0000 Subject: [PATCH 1168/1511] Bump certifi from 2024.12.14 to 2025.1.31 (#10370) Bumps [certifi](https://github.com/certifi/python-certifi) from 2024.12.14 to 2025.1.31. 
<details> <summary>Commits</summary> <ul> <li><a href="https://github.com/certifi/python-certifi/commit/088f93122ea7c91cfdaeea7fa76ab2f850b8064d"><code>088f931</code></a> 2025.01.31 (<a href="https://redirect.github.com/certifi/python-certifi/issues/336">#336</a>)</li> <li><a href="https://github.com/certifi/python-certifi/commit/1c177954a1d9f46efdff5956fe16de88bdcefc34"><code>1c17795</code></a> Bump pypa/gh-action-pypi-publish from 1.12.3 to 1.12.4 (<a href="https://redirect.github.com/certifi/python-certifi/issues/335">#335</a>)</li> <li><a href="https://github.com/certifi/python-certifi/commit/a2e88f0eb5bab543e97f43dac5d38739bd193bd0"><code>a2e88f0</code></a> Bump actions/upload-artifact from 4.5.0 to 4.6.0 (<a href="https://redirect.github.com/certifi/python-certifi/issues/334">#334</a>)</li> <li><a href="https://github.com/certifi/python-certifi/commit/82284ed1f981c6a3ba4ef9de739cd32918e70a26"><code>82284ed</code></a> Bump peter-evans/create-pull-request from 7.0.5 to 7.0.6 (<a href="https://redirect.github.com/certifi/python-certifi/issues/333">#333</a>)</li> <li><a href="https://github.com/certifi/python-certifi/commit/10d3d1d86c89e4054ce2c021cf2309af8c26aa57"><code>10d3d1d</code></a> Bump actions/upload-artifact from 4.4.3 to 4.5.0 (<a href="https://redirect.github.com/certifi/python-certifi/issues/332">#332</a>)</li> <li>See full diff in <a href="https://github.com/certifi/python-certifi/compare/2024.12.14...2025.01.31">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=certifi&package-manager=pip&previous-version=2024.12.14&new-version=2025.1.31)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 0cd05bd7cb8..d47696d8709 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -30,7 +30,7 @@ brotli==1.1.0 ; 
platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in build==1.2.2.post1 # via pip-tools -certifi==2024.12.14 +certifi==2025.1.31 # via requests cffi==1.17.1 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 4fed3549612..c496d9403bc 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -30,7 +30,7 @@ brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in build==1.2.2.post1 # via pip-tools -certifi==2024.12.14 +certifi==2025.1.31 # via requests cffi==1.17.1 # via diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 8aad3bd4045..1a51f608fd4 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -10,7 +10,7 @@ alabaster==1.0.0 # via sphinx babel==2.16.0 # via sphinx -certifi==2024.12.14 +certifi==2025.1.31 # via requests charset-normalizer==3.4.1 # via requests diff --git a/requirements/doc.txt b/requirements/doc.txt index d94908fb12e..a6fb751909d 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -10,7 +10,7 @@ alabaster==1.0.0 # via sphinx babel==2.16.0 # via sphinx -certifi==2024.12.14 +certifi==2025.1.31 # via requests charset-normalizer==3.4.1 # via requests From 3ddced79dadc4e83606408ab3b1d86392019054e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Feb 2025 11:00:48 +0000 Subject: [PATCH 1169/1511] Bump babel from 2.16.0 to 2.17.0 (#10374) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [babel](https://github.com/python-babel/babel) from 2.16.0 to 2.17.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/python-babel/babel/releases">babel's releases</a>.</em></p> <blockquote> <h2>v2.17.0</h2> <p>Happy 2025! This release is being made from FOSDEM 2025, in Brussels, Belgium. 
🇧🇪</p> <p>Thank you to all contributors, new and old, and here's to another great year of internationalization and localization!</p> <hr /> <p>The changelog below is auto-generated by GitHub.</p> <p>Please see <a href="https://github.com/python-babel/babel/blob/b50a1d2186c20f3359f7e10853d2b2225a46ed40/CHANGES.rst">CHANGELOG.rst</a> for additional details.</p> <hr /> <h2>What's Changed</h2> <ul> <li>Fix deprecation warnings for <code>datetime.utcnow()</code> by <a href="https://github.com/tomasr8"><code>@​tomasr8</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1119">python-babel/babel#1119</a></li> <li>Enclose white spaces in references by <a href="https://github.com/Dunedan"><code>@​Dunedan</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1105">python-babel/babel#1105</a></li> <li>Replace <code>str.index</code> with <code>str.find</code> by <a href="https://github.com/tomasr8"><code>@​tomasr8</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1130">python-babel/babel#1130</a></li> <li>Replace more alternate characters in <code>format_skeleton</code> by <a href="https://github.com/tomasr8"><code>@​tomasr8</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1122">python-babel/babel#1122</a></li> <li>Fix extracted lineno with nested calls by <a href="https://github.com/dylankiss"><code>@​dylankiss</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1126">python-babel/babel#1126</a></li> <li>"Deleted duplicate code in test" by <a href="https://github.com/mattdiaz007"><code>@​mattdiaz007</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1138">python-babel/babel#1138</a></li> <li>Fix of list index out of range error in PoFileParser.add_message when translations is empty by <a href="https://github.com/gabe-sherman"><code>@​gabe-sherman</code></a> in <a 
href="https://redirect.github.com/python-babel/babel/pull/1135">python-babel/babel#1135</a></li> <li>Make seconds optional in <code>parse_time</code> time formats by <a href="https://github.com/tomasr8"><code>@​tomasr8</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1141">python-babel/babel#1141</a></li> <li>Mark <code>wraptext</code> deprecated; use <code>TextWrapper</code> directly in <code>write_po</code> by <a href="https://github.com/akx"><code>@​akx</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1140">python-babel/babel#1140</a></li> <li>Fix the way obsolete messages are stored by <a href="https://github.com/tomasr8"><code>@​tomasr8</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1132">python-babel/babel#1132</a></li> <li>Replace <code>OrderedDict</code> with just <code>dict</code> by <a href="https://github.com/tomasr8"><code>@​tomasr8</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1149">python-babel/babel#1149</a></li> <li>Use CLDR 46 by <a href="https://github.com/tomasr8"><code>@​tomasr8</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1145">python-babel/babel#1145</a></li> <li>Update CI to use python 3.13 and Ubuntu 24.04 by <a href="https://github.com/tomasr8"><code>@​tomasr8</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1153">python-babel/babel#1153</a></li> <li>Adjust docs/conf.py to add compatibility with sphinx 8 by <a href="https://github.com/hrnciar"><code>@​hrnciar</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1155">python-babel/babel#1155</a></li> <li>Allow specifying an explicit format in parse_date/parse_time by <a href="https://github.com/tomasr8"><code>@​tomasr8</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1131">python-babel/babel#1131</a></li> <li>Simplify <code>read_mo</code> logic regarding <code>catalog.charset</code> 
by <a href="https://github.com/tomasr8"><code>@​tomasr8</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1148">python-babel/babel#1148</a></li> <li>Bump CI/tool versions by <a href="https://github.com/akx"><code>@​akx</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1160">python-babel/babel#1160</a></li> <li>fix: check_and_call_extract_file uses the first matching method and options, instead of the first matching method and last matching options by <a href="https://github.com/jpmckinney"><code>@​jpmckinney</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1121">python-babel/babel#1121</a></li> <li>Prevent wrapping file locations containing white space by <a href="https://github.com/tomasr8"><code>@​tomasr8</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1120">python-babel/babel#1120</a></li> <li>Add tzdata as dev dependency and sync with tox.ini by <a href="https://github.com/wandrew004"><code>@​wandrew004</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1159">python-babel/babel#1159</a></li> <li>Support short and narrow formats for format_timedelta when using <code>add_direction</code> by <a href="https://github.com/akx"><code>@​akx</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1163">python-babel/babel#1163</a></li> <li>Improve handling for <code>locale=None</code> by <a href="https://github.com/akx"><code>@​akx</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1164">python-babel/babel#1164</a></li> <li>Use <code>pytest.raises(match=...)</code> by <a href="https://github.com/akx"><code>@​akx</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1166">python-babel/babel#1166</a></li> <li>Strip extra leading slashes in <code>/etc/localtime</code> by <a href="https://github.com/akx"><code>@​akx</code></a> in <a 
href="https://redirect.github.com/python-babel/babel/pull/1165">python-babel/babel#1165</a></li> <li>Remove redundant assignment in <code>Catalog.__setitem__</code> by <a href="https://github.com/tomasr8"><code>@​tomasr8</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1167">python-babel/babel#1167</a></li> <li>Small cleanups by <a href="https://github.com/akx"><code>@​akx</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1170">python-babel/babel#1170</a></li> <li>Small test cleanup by <a href="https://github.com/akx"><code>@​akx</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1172">python-babel/babel#1172</a></li> <li>Add <code>Message.python_brace_format</code> by <a href="https://github.com/tomasr8"><code>@​tomasr8</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1169">python-babel/babel#1169</a></li> <li>Import <code>Literal</code> from the typing module by <a href="https://github.com/tomasr8"><code>@​tomasr8</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1175">python-babel/babel#1175</a></li> <li>Prefer LC_MONETARY when formatting currencies by <a href="https://github.com/akx"><code>@​akx</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1173">python-babel/babel#1173</a></li> <li>Fix dates formatting <code>Y</code>, <code>w</code> and <code>W</code> symbols for week-numbering by <a href="https://github.com/jun66j5"><code>@​jun66j5</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1179">python-babel/babel#1179</a></li> <li>Increase test coverage of the <code>python_format</code> checker by <a href="https://github.com/tomasr8"><code>@​tomasr8</code></a> in <a href="https://redirect.github.com/python-babel/babel/pull/1176">python-babel/babel#1176</a></li> <li>Prepare for 2.17.0 by <a href="https://github.com/akx"><code>@​akx</code></a> in <a 
href="https://redirect.github.com/python-babel/babel/pull/1182">python-babel/babel#1182</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/Dunedan"><code>@​Dunedan</code></a> made their first contribution in <a href="https://redirect.github.com/python-babel/babel/pull/1105">python-babel/babel#1105</a></li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/python-babel/babel/blob/master/CHANGES.rst">babel's changelog</a>.</em></p> <blockquote> <h2>Version 2.17.0</h2> <p>Happy 2025! This release is being made from FOSDEM 2025, in Brussels, Belgium.</p> <p>Thank you to all contributors, new and old, and here's to another great year of internationalization and localization!</p> <p>Features</p> <pre><code> * CLDR: Babel now uses CLDR 46, by @tomasr8 in :gh:`1145` * Dates: Allow specifying an explicit format in parse_date/parse_time by @tomasr8 in :gh:`1131` * Dates: More alternate characters are now supported by `format_skeleton`. By @tomasr8 in :gh:`1122` * Dates: Support short and narrow formats for format_timedelta when using `add_direction`, by @akx in :gh:`1163` * Messages: .po files now enclose white spaces in filenames like GNU gettext does. 
By @Dunedan in :gh:`1105`, and @tomasr8 in :gh:`1120` * Messages: Initial support for `Message.python_brace_format`, by @tomasr8 in :gh:`1169` * Numbers: LC_MONETARY is now preferred when formatting currencies, by @akx in :gh:`1173` <p>Bugfixes<br /> </code></pre></p> <ul> <li>Dates: Make seconds optional in <code>parse_time</code> time formats by <a href="https://github.com/tomasr8"><code>@​tomasr8</code></a> in :gh:<code>1141</code></li> <li>Dates: Replace <code>str.index</code> with <code>str.find</code> by <a href="https://github.com/tomasr8"><code>@​tomasr8</code></a> in :gh:<code>1130</code></li> <li>Dates: Strip extra leading slashes in <code>/etc/localtime</code> by <a href="https://github.com/akx"><code>@​akx</code></a> in :gh:<code>1165</code></li> <li>Dates: Week numbering and formatting of dates with week numbers was repaired by <a href="https://github.com/jun66j5"><code>@​jun66j5</code></a> in :gh:<code>1179</code></li> <li>General: Improve handling for <code>locale=None</code> by <a href="https://github.com/akx"><code>@​akx</code></a> in :gh:<code>1164</code></li> <li>General: Remove redundant assignment in <code>Catalog.__setitem__</code> by <a href="https://github.com/tomasr8"><code>@​tomasr8</code></a> in :gh:<code>1167</code></li> <li>Messages: Fix extracted lineno with nested calls, by <a href="https://github.com/dylankiss"><code>@​dylankiss</code></a> in :gh:<code>1126</code></li> <li>Messages: Fix of list index out of range when translations is empty, by <a href="https://github.com/gabe-sherman"><code>@​gabe-sherman</code></a> in :gh:<code>1135</code></li> <li>Messages: Fix the way obsolete messages are stored by <a href="https://github.com/tomasr8"><code>@​tomasr8</code></a> in :gh:<code>1132</code></li> <li>Messages: Simplify <code>read_mo</code> logic regarding <code>catalog.charset</code> by <a href="https://github.com/tomasr8"><code>@​tomasr8</code></a> in :gh:<code>1148</code></li> <li>Messages: Use the first matching method & options, 
rather than first matching method & last options, by <a href="https://github.com/jpmckinney"><code>@​jpmckinney</code></a> in :gh:<code>1121</code></li> </ul> <p>Deprecation and compatibility</p> <pre><code> * Dates: Fix deprecation warnings for `datetime.utcnow()` by @tomasr8 in :gh:`1119` * Docs: Adjust docs/conf.py to add compatibility with sphinx 8 by @hrnciar in :gh:`1155` * General: Import `Literal` from the typing module by @tomasr8 in :gh:`1175` * General: Replace `OrderedDict` with just `dict` by @tomasr8 in :gh:`1149` * Messages: Mark `wraptext` deprecated; use `TextWrapper` directly in `write_po` by @akx in :gh:`1140` <p>Infrastructure</p> <pre><code> * Add tzdata as dev dependency and sync with tox.ini by @wandrew004 in :gh:`1159` * Duplicate test code was deleted by @mattdiaz007 in :gh:`1138` * Increase test coverage of the `python_format` checker by @tomasr8 in :gh:`1176` * Small cleanups by @akx in :gh:`1160`, :gh:`1166`, :gh:`1170` and :gh:`1172` &lt;/tr&gt;&lt;/table&gt; </code></pre> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/python-babel/babel/commit/b50a1d2186c20f3359f7e10853d2b2225a46ed40"><code>b50a1d2</code></a> Prepare for 2.17.0 (<a href="https://redirect.github.com/python-babel/babel/issues/1182">#1182</a>)</li> <li><a href="https://github.com/python-babel/babel/commit/5f117b2689573aa98acc8a47108c49b99f4d1394"><code>5f117b2</code></a> Increase test coverage of the <code>python_format</code> checker (<a href="https://redirect.github.com/python-babel/babel/issues/1176">#1176</a>)</li> <li><a href="https://github.com/python-babel/babel/commit/363ad7531fb5dcdc3e9844573592b0b44afb914b"><code>363ad75</code></a> Fix dates formatting <code>Y</code>, <code>w</code> and <code>W</code> symbols for week-numbering (<a href="https://redirect.github.com/python-babel/babel/issues/1179">#1179</a>)</li> <li><a href="https://github.com/python-babel/babel/commit/e9c3ef8d0de3080ca59f7f8dbabf9b52983adc7d"><code>e9c3ef8</code></a> Merge pull request <a href="https://redirect.github.com/python-babel/babel/issues/1173">#1173</a> from python-babel/lc-monetary-2</li> <li><a href="https://github.com/python-babel/babel/commit/56ef7c7f578a904917464c187e399abb762bd5e3"><code>56ef7c7</code></a> Prefer LC_MONETARY when formatting currency</li> <li><a href="https://github.com/python-babel/babel/commit/aee6d698b541dc50439280d7e093092cc0d4b832"><code>aee6d69</code></a> <code>default_locale</code>: support multiple keys</li> <li><a href="https://github.com/python-babel/babel/commit/2d8a808864d1aae5d3d02d4f95917c79740c5d35"><code>2d8a808</code></a> Import <code>Literal</code> &amp; <code>TypedDict</code> from the typing module (<a href="https://redirect.github.com/python-babel/babel/issues/1175">#1175</a>)</li> <li><a href="https://github.com/python-babel/babel/commit/98b9562c05e5276038c27ec12c12f3e92dc027b6"><code>98b9562</code></a> Add basic support for <code>Message.python_brace_format</code> (<a 
href="https://redirect.github.com/python-babel/babel/issues/1169">#1169</a>)</li> <li><a href="https://github.com/python-babel/babel/commit/0c1091c9de9543e30bc4b845eb10b5bf84516d7b"><code>0c1091c</code></a> Small test cleanup (<a href="https://redirect.github.com/python-babel/babel/issues/1172">#1172</a>)</li> <li><a href="https://github.com/python-babel/babel/commit/db4879136a7fbcef475f26b75dbdd65d0ce488f9"><code>db48791</code></a> Merge pull request <a href="https://redirect.github.com/python-babel/babel/issues/1170">#1170</a> from python-babel/small-cleanup</li> <li>Additional commits viewable in <a href="https://github.com/python-babel/babel/compare/v2.16.0...v2.17.0">compare view</a></li> </ul> </details> <br /> </code></pre> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=babel&package-manager=pip&previous-version=2.16.0&new-version=2.17.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index d47696d8709..517f98e6f27 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -24,7 +24,7 @@ async-timeout==5.0.1 ; python_version < "3.11" # valkey attrs==25.1.0 # via -r requirements/runtime-deps.in -babel==2.16.0 +babel==2.17.0 # via sphinx brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in diff --git a/requirements/dev.txt b/requirements/dev.txt index c496d9403bc..26c5779c783 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -24,7 +24,7 @@ async-timeout==5.0.1 ; python_version < "3.11" # valkey attrs==25.1.0 # via -r requirements/runtime-deps.in -babel==2.16.0 +babel==2.17.0 # via sphinx brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 1a51f608fd4..4be7358fe8d 
100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -8,7 +8,7 @@ aiohttp-theme==0.1.7 # via -r requirements/doc.in alabaster==1.0.0 # via sphinx -babel==2.16.0 +babel==2.17.0 # via sphinx certifi==2025.1.31 # via requests diff --git a/requirements/doc.txt b/requirements/doc.txt index a6fb751909d..8324f007e3a 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -8,7 +8,7 @@ aiohttp-theme==0.1.7 # via -r requirements/doc.in alabaster==1.0.0 # via sphinx -babel==2.16.0 +babel==2.17.0 # via sphinx certifi==2025.1.31 # via requests From 822c50457efee0e1f39631a95dd9a26ebd26313e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Feb 2025 13:22:10 +0000 Subject: [PATCH 1170/1511] Bump pytest-codspeed from 3.1.2 to 3.2.0 (#10375) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pytest-codspeed](https://github.com/CodSpeedHQ/pytest-codspeed) from 3.1.2 to 3.2.0. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/CodSpeedHQ/pytest-codspeed/releases">pytest-codspeed's releases</a>.</em></p> <blockquote> <h2>v3.2.0</h2> <h2>What's Changed</h2> <p>This release improves the reliability of the measurement, primarily focusing on the wall-time instrument.</p> <h3><!-- raw HTML omitted -->🚀 Features</h3> <ul> <li> <p>Increase the min round time to a bigger value (+/- 1ms) by <a href="https://github.com/art049"><code>@​art049</code></a></p> </li> <li> <p>Fix the random seed while measuring with instruments by <a href="https://github.com/art049"><code>@​art049</code></a> in <a href="https://redirect.github.com/CodSpeedHQ/pytest-codspeed/pull/48">#48</a></p> </li> </ul> <h3><!-- raw HTML omitted -->🐛 Bug Fixes</h3> <ul> <li>Use time per iteration instead of total round time in stats by <a href="https://github.com/art049"><code>@​art049</code></a></li> </ul> <h3><!-- raw HTML omitted -->🏗️ Refactor</h3> <ul> <li>Replace hardcoded outlier factor for improved readability by <a href="https://github.com/art049"><code>@​art049</code></a> in <a href="https://redirect.github.com/CodSpeedHQ/pytest-codspeed/pull/67">#67</a></li> </ul> <h3><!-- raw HTML omitted -->⚙️ Internals</h3> <ul> <li>Fix self-dependency by <a href="https://github.com/adriencaccia"><code>@​adriencaccia</code></a> in <a href="https://redirect.github.com/CodSpeedHQ/pytest-codspeed/pull/66">#66</a></li> <li>Fix uv version in CI by <a href="https://github.com/adriencaccia"><code>@​adriencaccia</code></a></li> <li>Add benchmarks-walltime job to run additional performance benchmarks by <a href="https://github.com/art049"><code>@​art049</code></a> in <a href="https://redirect.github.com/CodSpeedHQ/pytest-codspeed/pull/65">#65</a></li> </ul> <p>⚠️ <em>Since the wall-time instrument is still in beta, this minor release might create unexpected performance changes for this new instrument.</em></p> <p><strong>Full Changelog</strong>: <a 
href="https://github.com/CodSpeedHQ/pytest-codspeed/compare/v3.1.2...v3.2.0">https://github.com/CodSpeedHQ/pytest-codspeed/compare/v3.1.2...v3.2.0</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/CodSpeedHQ/pytest-codspeed/blob/master/CHANGELOG.md">pytest-codspeed's changelog</a>.</em></p> <blockquote> <h2>[3.2.0] - 2025-01-31</h2> <h3><!-- raw HTML omitted -->🚀 Features</h3> <ul> <li>Increase the min round time to a bigger value (+/- 1ms) by <a href="https://github.com/art049"><code>@​art049</code></a></li> <li>Add benchmarks-walltime job to run additional performance benchmarks by <a href="https://github.com/art049"><code>@​art049</code></a> in <a href="https://redirect.github.com/CodSpeedHQ/pytest-codspeed/pull/65">#65</a></li> <li>Fix the random seed while measuring with instruments by <a href="https://github.com/art049"><code>@​art049</code></a> in <a href="https://redirect.github.com/CodSpeedHQ/pytest-codspeed/pull/48">#48</a></li> </ul> <h3><!-- raw HTML omitted -->🐛 Bug Fixes</h3> <ul> <li>Use time per iteration instead of total round time in stats by <a href="https://github.com/art049"><code>@​art049</code></a></li> </ul> <h3><!-- raw HTML omitted -->🏗️ Refactor</h3> <ul> <li>Replace hardcoded outlier factor for improved readability by <a href="https://github.com/art049"><code>@​art049</code></a> in <a href="https://redirect.github.com/CodSpeedHQ/pytest-codspeed/pull/67">#67</a></li> </ul> <h3><!-- raw HTML omitted -->⚙️ Internals</h3> <ul> <li>Fix self-dependency by <a href="https://github.com/adriencaccia"><code>@​adriencaccia</code></a> in <a href="https://redirect.github.com/CodSpeedHQ/pytest-codspeed/pull/66">#66</a></li> <li>Fix uv version in CI by <a href="https://github.com/adriencaccia"><code>@​adriencaccia</code></a></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a 
href="https://github.com/CodSpeedHQ/pytest-codspeed/commit/253b1bea24e909d257c7650265ff34e22af6a3bc"><code>253b1be</code></a> Release v3.2.0 🚀</li> <li><a href="https://github.com/CodSpeedHQ/pytest-codspeed/commit/4333c1f7984a46636b9f1ce4cbd3d2200328eafc"><code>4333c1f</code></a> refactor: replace hardcoded outlier factor for improved readability</li> <li><a href="https://github.com/CodSpeedHQ/pytest-codspeed/commit/6e1d63e087b45c30ddce1923e4579c201d0721a9"><code>6e1d63e</code></a> feat: increase the min round time to a bigger value (+/- 1ms)</li> <li><a href="https://github.com/CodSpeedHQ/pytest-codspeed/commit/cc10330599216271ff217733fa9a19aed9b45fbc"><code>cc10330</code></a> fix: use time per iteration instead of total round time in stats</li> <li><a href="https://github.com/CodSpeedHQ/pytest-codspeed/commit/003c3b4660ce34754990f14b9f949ad0d6a1c46d"><code>003c3b4</code></a> feat(ci): add benchmarks-walltime job to run additional performance benchmarks</li> <li><a href="https://github.com/CodSpeedHQ/pytest-codspeed/commit/768b5b83644b53d8fd7d4a21946033ea2e86d110"><code>768b5b8</code></a> feat: fix the random seed while measuring with instruments</li> <li><a href="https://github.com/CodSpeedHQ/pytest-codspeed/commit/d13743136c5d37efc15b7ecf8428e33e34509a1e"><code>d137431</code></a> chore: fix self-dependency</li> <li><a href="https://github.com/CodSpeedHQ/pytest-codspeed/commit/9df2c77adc5c7488ef3dc16a22fe076cbcee7c33"><code>9df2c77</code></a> chore: fix uv version in CI</li> <li>See full diff in <a href="https://github.com/CodSpeedHQ/pytest-codspeed/compare/v3.1.2...v3.2.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pytest-codspeed&package-manager=pip&previous-version=3.1.2&new-version=3.2.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve 
any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 517f98e6f27..f6f9adbf39d 
100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -168,7 +168,7 @@ pytest==8.3.4 # pytest-cov # pytest-mock # pytest-xdist -pytest-codspeed==3.1.2 +pytest-codspeed==3.2.0 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 26c5779c783..5d87c1ebaba 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -163,7 +163,7 @@ pytest==8.3.4 # pytest-cov # pytest-mock # pytest-xdist -pytest-codspeed==3.1.2 +pytest-codspeed==3.2.0 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 821cf156e76..dd06ec7236f 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -68,7 +68,7 @@ pytest==8.3.4 # -r requirements/lint.in # pytest-codspeed # pytest-mock -pytest-codspeed==3.1.2 +pytest-codspeed==3.2.0 # via -r requirements/lint.in pytest-mock==3.14.0 # via -r requirements/lint.in diff --git a/requirements/test.txt b/requirements/test.txt index 058eb3d0c72..efd44e1ea3f 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -90,7 +90,7 @@ pytest==8.3.4 # pytest-cov # pytest-mock # pytest-xdist -pytest-codspeed==3.1.2 +pytest-codspeed==3.2.0 # via -r requirements/test.in pytest-cov==6.0.0 # via -r requirements/test.in From 6709f5308df9caac997488327af658feebdac8c2 Mon Sep 17 00:00:00 2001 From: Jacob Padilla <jp@jacobpadilla.com> Date: Tue, 4 Feb 2025 08:57:57 -0500 Subject: [PATCH 1171/1511] [PR #10371/f46ccff backport][3.12] 'partitioned' parameter for set_cookie method. 
(#10378) --- CHANGES/9870.misc.rst | 1 + aiohttp/web_response.py | 4 ++++ docs/web_reference.rst | 7 ++++++- tests/test_web_response.py | 14 ++++++++++++++ 4 files changed, 25 insertions(+), 1 deletion(-) create mode 100644 CHANGES/9870.misc.rst diff --git a/CHANGES/9870.misc.rst b/CHANGES/9870.misc.rst new file mode 100644 index 00000000000..caa8f45e522 --- /dev/null +++ b/CHANGES/9870.misc.rst @@ -0,0 +1 @@ +Added support for the ``partitioned`` attribute in the ``set_cookie`` method. diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 99636f2de59..a1955ca0d9e 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -233,6 +233,7 @@ def set_cookie( httponly: Optional[bool] = None, version: Optional[str] = None, samesite: Optional[str] = None, + partitioned: Optional[bool] = None, ) -> None: """Set or update response cookie. @@ -269,6 +270,9 @@ def set_cookie( if samesite is not None: c["samesite"] = samesite + if partitioned is not None: + c["partitioned"] = partitioned + def del_cookie( self, name: str, diff --git a/docs/web_reference.rst b/docs/web_reference.rst index 51f5c43d7e7..62edd4c24aa 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -726,7 +726,7 @@ and :ref:`aiohttp-web-signals` handlers:: .. method:: set_cookie(name, value, *, path='/', expires=None, \ domain=None, max_age=None, \ secure=None, httponly=None, version=None, \ - samesite=None) + samesite=None, partitioned=None) Convenient way for setting :attr:`cookies`, allows to specify some additional properties like *max_age* in a single call. @@ -779,6 +779,11 @@ and :ref:`aiohttp-web-signals` handlers:: .. versionadded:: 3.7 + :param bool partitioned: ``True`` to set a partitioned cookie. + Available in Python 3.14+. (optional) + + .. versionadded:: 3.12 + .. 
warning:: In HTTP version 1.1, ``expires`` was deprecated and replaced with diff --git a/tests/test_web_response.py b/tests/test_web_response.py index 6eb52d480ff..0a2c5273080 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -3,6 +3,7 @@ import gzip import io import json +import sys import zlib from concurrent.futures import ThreadPoolExecutor from typing import AsyncIterator, Optional @@ -900,6 +901,19 @@ def test_response_cookies() -> None: assert str(resp.cookies) == expected +@pytest.mark.skipif(sys.version_info < (3, 14), reason="No partitioned support") +def test_response_cookie_partitioned() -> None: + resp = StreamResponse() + + assert resp.cookies == {} + + resp.set_cookie("name", "value", partitioned=False) + assert str(resp.cookies) == "Set-Cookie: name=value; Path=/" + + resp.set_cookie("name", "value", partitioned=True) + assert str(resp.cookies) == "Set-Cookie: name=value; Partitioned; Path=/" + + def test_response_cookie_path() -> None: resp = StreamResponse() From e38ea3242fa2bb1987c4e4da4c9bbbd1b3321149 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Tue, 4 Feb 2025 20:04:08 -0600 Subject: [PATCH 1172/1511] [3.11] Bump packaging to 24.2 (#10383) --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 1e7c0bbe6c1..d79bdab3893 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -30,7 +30,7 @@ multidict==6.1.0 # via # -r requirements/runtime-deps.in # yarl -packaging==24.1 +packaging==24.2 # via gunicorn propcache==0.2.0 # via diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 740e3e2d559..041a3737ab0 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -129,7 +129,7 @@ mypy-extensions==1.0.0 # via mypy nodeenv==1.9.1 # via pre-commit -packaging==24.1 +packaging==24.2 # via # build # gunicorn diff --git a/requirements/dev.txt b/requirements/dev.txt index 72e49ed9edf..a99644dff81 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -122,7 +122,7 @@ mypy-extensions==1.0.0 # via mypy nodeenv==1.9.1 # via pre-commit -packaging==24.1 +packaging==24.2 # via # build # gunicorn diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 892ae6b164c..43b3822706e 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -34,7 +34,7 @@ jinja2==3.1.4 # towncrier markupsafe==2.1.5 # via jinja2 -packaging==24.1 +packaging==24.2 # via sphinx pyenchant==3.2.2 # via sphinxcontrib-spelling diff --git a/requirements/doc.txt b/requirements/doc.txt index f7f98330e1f..6ddfc47455b 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -34,7 +34,7 @@ jinja2==3.1.4 # towncrier markupsafe==2.1.5 # via jinja2 -packaging==24.1 +packaging==24.2 # via sphinx pygments==2.18.0 # via sphinx diff --git 
a/requirements/lint.txt b/requirements/lint.txt index d7d97277bce..e2547d13da5 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -55,7 +55,7 @@ mypy-extensions==1.0.0 # via mypy nodeenv==1.9.1 # via pre-commit -packaging==24.1 +packaging==24.2 # via pytest platformdirs==4.3.6 # via virtualenv diff --git a/requirements/test.txt b/requirements/test.txt index 33510f18682..cf81a7bf257 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -70,7 +70,7 @@ mypy==1.11.2 ; implementation_name == "cpython" # via -r requirements/test.in mypy-extensions==1.0.0 # via mypy -packaging==24.1 +packaging==24.2 # via # gunicorn # pytest From 65248a877f11c2035982150a118fc6ab8fdea6cb Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 5 Feb 2025 02:22:39 +0000 Subject: [PATCH 1173/1511] [PR #10366/3fb2c8df backport][3.11] Fix sdist make (#10367) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Michał Górny <mgorny@gentoo.org> Co-authored-by: J. Nick Koston <nick@koston.org> fixes related to use of source distribution to build `aiohttp`: --- CHANGES/10366.packaging | 2 ++ MANIFEST.in | 1 + Makefile | 3 +++ tools/gen.py | 2 +- 4 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 CHANGES/10366.packaging diff --git a/CHANGES/10366.packaging b/CHANGES/10366.packaging new file mode 100644 index 00000000000..f0e9f62095a --- /dev/null +++ b/CHANGES/10366.packaging @@ -0,0 +1,2 @@ +Added missing files to the source distribution to fix ``Makefile`` targets. +Added a ``cythonize-nodeps`` target to run Cython without invoking pip to install dependencies. 
diff --git a/MANIFEST.in b/MANIFEST.in index d7c5cef6aad..64cee139a1f 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -7,6 +7,7 @@ graft aiohttp graft docs graft examples graft tests +graft tools graft requirements recursive-include vendor * global-include aiohttp *.pyi diff --git a/Makefile b/Makefile index b0a3ef3226b..c6193fea9e4 100644 --- a/Makefile +++ b/Makefile @@ -81,6 +81,9 @@ generate-llhttp: .llhttp-gen .PHONY: cythonize cythonize: .install-cython $(PYXS:.pyx=.c) aiohttp/_websocket/reader_c.c +.PHONY: cythonize-nodeps +cythonize-nodeps: $(PYXS:.pyx=.c) aiohttp/_websocket/reader_c.c + .install-deps: .install-cython $(PYXS:.pyx=.c) aiohttp/_websocket/reader_c.c $(call to-hash,$(CYS) $(REQS)) @python -m pip install -r requirements/dev.in -c requirements/dev.txt @touch .install-deps diff --git a/tools/gen.py b/tools/gen.py index ab2b39a2df0..24fb71bdd9d 100755 --- a/tools/gen.py +++ b/tools/gen.py @@ -7,7 +7,7 @@ import multidict ROOT = pathlib.Path.cwd() -while ROOT.parent != ROOT and not (ROOT / ".git").exists(): +while ROOT.parent != ROOT and not (ROOT / "pyproject.toml").exists(): ROOT = ROOT.parent From b752e7910770014a619c6b8cc5629241d560550e Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 5 Feb 2025 02:27:45 +0000 Subject: [PATCH 1174/1511] [PR #10330/d3dc087b backport][3.11] Start building riscv64 platform wheels in CI/CD (#10353) Co-authored-by: E Shattow <eshattow@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 2 ++ CHANGES/10330.packaging.rst | 1 + 2 files changed, 3 insertions(+) create mode 100644 CHANGES/10330.packaging.rst diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 93d1ddded65..d471f5658f6 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -355,6 +355,8 @@ jobs: qemu: aarch64 - os: ubuntu qemu: ppc64le + - os: ubuntu + qemu: riscv64 - os: ubuntu qemu: s390x steps: diff --git a/CHANGES/10330.packaging.rst 
b/CHANGES/10330.packaging.rst new file mode 100644 index 00000000000..c159cf3a57d --- /dev/null +++ b/CHANGES/10330.packaging.rst @@ -0,0 +1 @@ +Started publishing ``riscv64`` wheels -- by :user:`eshattow`. From c60a8f455f08865f461668fdfbca26e7b3744064 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 5 Feb 2025 17:37:46 +0000 Subject: [PATCH 1175/1511] [PR #10387/95b28c71 backport][3.11] Restore total_bytes to EmptyStreamReader (#10388) Co-authored-by: J. Nick Koston <nick@koston.org> fixes #10386 --- CHANGES/10387.bugfix.rst | 1 + aiohttp/streams.py | 1 + tests/test_streams.py | 1 + 3 files changed, 3 insertions(+) create mode 100644 CHANGES/10387.bugfix.rst diff --git a/CHANGES/10387.bugfix.rst b/CHANGES/10387.bugfix.rst new file mode 100644 index 00000000000..ad1ead9e363 --- /dev/null +++ b/CHANGES/10387.bugfix.rst @@ -0,0 +1 @@ +Restored the missing ``total_bytes`` attribute to ``EmptyStreamReader`` -- by :user:`bdraco`. diff --git a/aiohttp/streams.py b/aiohttp/streams.py index 6126fb5695d..7a3f64d1289 100644 --- a/aiohttp/streams.py +++ b/aiohttp/streams.py @@ -555,6 +555,7 @@ class EmptyStreamReader(StreamReader): # lgtm [py/missing-call-to-init] def __init__(self) -> None: self._read_eof_chunk = False + self.total_bytes = 0 def __repr__(self) -> str: return "<%s>" % self.__class__.__name__ diff --git a/tests/test_streams.py b/tests/test_streams.py index fcf13a91eb3..1b65f771c77 100644 --- a/tests/test_streams.py +++ b/tests/test_streams.py @@ -1141,6 +1141,7 @@ async def test_empty_stream_reader() -> None: with pytest.raises(asyncio.IncompleteReadError): await s.readexactly(10) assert s.read_nowait() == b"" + assert s.total_bytes == 0 async def test_empty_stream_reader_iter_chunks() -> None: From 4cc98a343e871536032f28ee4098b30bfef154fd Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Wed, 5 Feb 2025 11:42:53 -0600 Subject: [PATCH 1176/1511] [PR #10137/25c7f23 backport][3.11] Restore zero copy writes on Python 3.12.9+/3.13.2+ (#10390) --- .github/workflows/ci-cd.yml | 4 +- CHANGES/10137.misc.rst | 3 + aiohttp/http_writer.py | 17 +++++- tests/test_http_writer.py | 111 +++++++++++++++++++++++++++++++++++- 4 files changed, 130 insertions(+), 5 deletions(-) create mode 100644 CHANGES/10137.misc.rst diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index d471f5658f6..1a95adf975f 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -250,11 +250,11 @@ jobs: uses: actions/checkout@v4 with: submodules: true - - name: Setup Python 3.13 + - name: Setup Python 3.13.2 id: python-install uses: actions/setup-python@v5 with: - python-version: 3.13 + python-version: 3.13.2 cache: pip cache-dependency-path: requirements/*.txt - name: Update pip, wheel, setuptools, build, twine diff --git a/CHANGES/10137.misc.rst b/CHANGES/10137.misc.rst new file mode 100644 index 00000000000..43b19c33f32 --- /dev/null +++ b/CHANGES/10137.misc.rst @@ -0,0 +1,3 @@ +Restored support for zero copy writes when using Python 3.12 versions 3.12.9 and later or Python 3.13.2+ -- by :user:`bdraco`. + +Zero copy writes were previously disabled due to :cve:`2024-12254` which is resolved in these Python versions. 
diff --git a/aiohttp/http_writer.py b/aiohttp/http_writer.py index 28b14f7a791..e031a97708d 100644 --- a/aiohttp/http_writer.py +++ b/aiohttp/http_writer.py @@ -1,6 +1,7 @@ """Http related parsers and protocol.""" import asyncio +import sys import zlib from typing import ( # noqa Any, @@ -24,6 +25,17 @@ __all__ = ("StreamWriter", "HttpVersion", "HttpVersion10", "HttpVersion11") +MIN_PAYLOAD_FOR_WRITELINES = 2048 +IS_PY313_BEFORE_313_2 = (3, 13, 0) <= sys.version_info < (3, 13, 2) +IS_PY_BEFORE_312_9 = sys.version_info < (3, 12, 9) +SKIP_WRITELINES = IS_PY313_BEFORE_313_2 or IS_PY_BEFORE_312_9 +# writelines is not safe for use +# on Python 3.12+ until 3.12.9 +# on Python 3.13+ until 3.13.2 +# and on older versions it not any faster than write +# CVE-2024-12254: https://github.com/python/cpython/pull/127656 + + class HttpVersion(NamedTuple): major: int minor: int @@ -90,7 +102,10 @@ def _writelines(self, chunks: Iterable[bytes]) -> None: transport = self._protocol.transport if transport is None or transport.is_closing(): raise ClientConnectionResetError("Cannot write to closing transport") - transport.write(b"".join(chunks)) + if SKIP_WRITELINES or size < MIN_PAYLOAD_FOR_WRITELINES: + transport.write(b"".join(chunks)) + else: + transport.writelines(chunks) async def write( self, diff --git a/tests/test_http_writer.py b/tests/test_http_writer.py index 5f316fad2f7..677b5bc9678 100644 --- a/tests/test_http_writer.py +++ b/tests/test_http_writer.py @@ -2,7 +2,7 @@ import array import asyncio import zlib -from typing import Iterable +from typing import Generator, Iterable from unittest import mock import pytest @@ -14,7 +14,19 @@ @pytest.fixture -def buf(): +def enable_writelines() -> Generator[None, None, None]: + with mock.patch("aiohttp.http_writer.SKIP_WRITELINES", False): + yield + + +@pytest.fixture +def force_writelines_small_payloads() -> Generator[None, None, None]: + with mock.patch("aiohttp.http_writer.MIN_PAYLOAD_FOR_WRITELINES", 1): + yield + + 
+@pytest.fixture +def buf() -> bytearray: return bytearray() @@ -117,6 +129,33 @@ async def test_write_large_payload_deflate_compression_data_in_eof( assert zlib.decompress(content) == (b"data" * 4096) + payload +@pytest.mark.usefixtures("enable_writelines") +async def test_write_large_payload_deflate_compression_data_in_eof_writelines( + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + msg = http.StreamWriter(protocol, loop) + msg.enable_compression("deflate") + + await msg.write(b"data" * 4096) + assert transport.write.called # type: ignore[attr-defined] + chunks = [c[1][0] for c in list(transport.write.mock_calls)] # type: ignore[attr-defined] + transport.write.reset_mock() # type: ignore[attr-defined] + assert not transport.writelines.called # type: ignore[attr-defined] + + # This payload compresses to 20447 bytes + payload = b"".join( + [bytes((*range(0, i), *range(i, 0, -1))) for i in range(255) for _ in range(64)] + ) + await msg.write_eof(payload) + assert not transport.write.called # type: ignore[attr-defined] + assert transport.writelines.called # type: ignore[attr-defined] + chunks.extend(transport.writelines.mock_calls[0][1][0]) # type: ignore[attr-defined] + content = b"".join(chunks) + assert zlib.decompress(content) == (b"data" * 4096) + payload + + async def test_write_payload_chunked_filter( protocol: BaseProtocol, transport: asyncio.Transport, @@ -185,6 +224,26 @@ async def test_write_payload_deflate_compression_chunked( assert content == expected +@pytest.mark.usefixtures("enable_writelines") +@pytest.mark.usefixtures("force_writelines_small_payloads") +async def test_write_payload_deflate_compression_chunked_writelines( + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + expected = b"2\r\nx\x9c\r\na\r\nKI,I\x04\x00\x04\x00\x01\x9b\r\n0\r\n\r\n" + msg = http.StreamWriter(protocol, loop) + msg.enable_compression("deflate") + 
msg.enable_chunking() + await msg.write(b"data") + await msg.write_eof() + + chunks = [b"".join(c[1][0]) for c in list(transport.writelines.mock_calls)] # type: ignore[attr-defined] + assert all(chunks) + content = b"".join(chunks) + assert content == expected + + async def test_write_payload_deflate_and_chunked( buf: bytearray, protocol: BaseProtocol, @@ -221,6 +280,26 @@ async def test_write_payload_deflate_compression_chunked_data_in_eof( assert content == expected +@pytest.mark.usefixtures("enable_writelines") +@pytest.mark.usefixtures("force_writelines_small_payloads") +async def test_write_payload_deflate_compression_chunked_data_in_eof_writelines( + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + expected = b"2\r\nx\x9c\r\nd\r\nKI,IL\xcdK\x01\x00\x0b@\x02\xd2\r\n0\r\n\r\n" + msg = http.StreamWriter(protocol, loop) + msg.enable_compression("deflate") + msg.enable_chunking() + await msg.write(b"data") + await msg.write_eof(b"end") + + chunks = [b"".join(c[1][0]) for c in list(transport.writelines.mock_calls)] # type: ignore[attr-defined] + assert all(chunks) + content = b"".join(chunks) + assert content == expected + + async def test_write_large_payload_deflate_compression_chunked_data_in_eof( protocol: BaseProtocol, transport: asyncio.Transport, @@ -247,6 +326,34 @@ async def test_write_large_payload_deflate_compression_chunked_data_in_eof( assert zlib.decompress(content) == (b"data" * 4096) + payload +@pytest.mark.usefixtures("enable_writelines") +@pytest.mark.usefixtures("force_writelines_small_payloads") +async def test_write_large_payload_deflate_compression_chunked_data_in_eof_writelines( + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + msg = http.StreamWriter(protocol, loop) + msg.enable_compression("deflate") + msg.enable_chunking() + + await msg.write(b"data" * 4096) + # This payload compresses to 1111 bytes + payload = 
b"".join([bytes((*range(0, i), *range(i, 0, -1))) for i in range(255)]) + await msg.write_eof(payload) + assert not transport.write.called # type: ignore[attr-defined] + + chunks = [] + for write_lines_call in transport.writelines.mock_calls: # type: ignore[attr-defined] + chunked_payload = list(write_lines_call[1][0])[1:] + chunked_payload.pop() + chunks.extend(chunked_payload) + + assert all(chunks) + content = b"".join(chunks) + assert zlib.decompress(content) == (b"data" * 4096) + payload + + async def test_write_payload_deflate_compression_chunked_connection_lost( protocol: BaseProtocol, transport: asyncio.Transport, From 1aa8579f05cd756185985f42de7d47f625aee2a1 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Wed, 5 Feb 2025 11:47:48 -0600 Subject: [PATCH 1177/1511] [PR #10137/25c7f23 backport][3.12] Restore zero copy writes on Python 3.12.9+/3.13.2+ (#10391) --- .github/workflows/ci-cd.yml | 4 +- CHANGES/10137.misc.rst | 3 + aiohttp/http_writer.py | 17 +++++- tests/test_http_writer.py | 111 +++++++++++++++++++++++++++++++++++- 4 files changed, 130 insertions(+), 5 deletions(-) create mode 100644 CHANGES/10137.misc.rst diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index d471f5658f6..1a95adf975f 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -250,11 +250,11 @@ jobs: uses: actions/checkout@v4 with: submodules: true - - name: Setup Python 3.13 + - name: Setup Python 3.13.2 id: python-install uses: actions/setup-python@v5 with: - python-version: 3.13 + python-version: 3.13.2 cache: pip cache-dependency-path: requirements/*.txt - name: Update pip, wheel, setuptools, build, twine diff --git a/CHANGES/10137.misc.rst b/CHANGES/10137.misc.rst new file mode 100644 index 00000000000..43b19c33f32 --- /dev/null +++ b/CHANGES/10137.misc.rst @@ -0,0 +1,3 @@ +Restored support for zero copy writes when using Python 3.12 versions 3.12.9 and later or Python 3.13.2+ -- by :user:`bdraco`. 
+ +Zero copy writes were previously disabled due to :cve:`2024-12254` which is resolved in these Python versions. diff --git a/aiohttp/http_writer.py b/aiohttp/http_writer.py index 28b14f7a791..e031a97708d 100644 --- a/aiohttp/http_writer.py +++ b/aiohttp/http_writer.py @@ -1,6 +1,7 @@ """Http related parsers and protocol.""" import asyncio +import sys import zlib from typing import ( # noqa Any, @@ -24,6 +25,17 @@ __all__ = ("StreamWriter", "HttpVersion", "HttpVersion10", "HttpVersion11") +MIN_PAYLOAD_FOR_WRITELINES = 2048 +IS_PY313_BEFORE_313_2 = (3, 13, 0) <= sys.version_info < (3, 13, 2) +IS_PY_BEFORE_312_9 = sys.version_info < (3, 12, 9) +SKIP_WRITELINES = IS_PY313_BEFORE_313_2 or IS_PY_BEFORE_312_9 +# writelines is not safe for use +# on Python 3.12+ until 3.12.9 +# on Python 3.13+ until 3.13.2 +# and on older versions it not any faster than write +# CVE-2024-12254: https://github.com/python/cpython/pull/127656 + + class HttpVersion(NamedTuple): major: int minor: int @@ -90,7 +102,10 @@ def _writelines(self, chunks: Iterable[bytes]) -> None: transport = self._protocol.transport if transport is None or transport.is_closing(): raise ClientConnectionResetError("Cannot write to closing transport") - transport.write(b"".join(chunks)) + if SKIP_WRITELINES or size < MIN_PAYLOAD_FOR_WRITELINES: + transport.write(b"".join(chunks)) + else: + transport.writelines(chunks) async def write( self, diff --git a/tests/test_http_writer.py b/tests/test_http_writer.py index 5f316fad2f7..677b5bc9678 100644 --- a/tests/test_http_writer.py +++ b/tests/test_http_writer.py @@ -2,7 +2,7 @@ import array import asyncio import zlib -from typing import Iterable +from typing import Generator, Iterable from unittest import mock import pytest @@ -14,7 +14,19 @@ @pytest.fixture -def buf(): +def enable_writelines() -> Generator[None, None, None]: + with mock.patch("aiohttp.http_writer.SKIP_WRITELINES", False): + yield + + +@pytest.fixture +def force_writelines_small_payloads() -> 
Generator[None, None, None]: + with mock.patch("aiohttp.http_writer.MIN_PAYLOAD_FOR_WRITELINES", 1): + yield + + +@pytest.fixture +def buf() -> bytearray: return bytearray() @@ -117,6 +129,33 @@ async def test_write_large_payload_deflate_compression_data_in_eof( assert zlib.decompress(content) == (b"data" * 4096) + payload +@pytest.mark.usefixtures("enable_writelines") +async def test_write_large_payload_deflate_compression_data_in_eof_writelines( + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + msg = http.StreamWriter(protocol, loop) + msg.enable_compression("deflate") + + await msg.write(b"data" * 4096) + assert transport.write.called # type: ignore[attr-defined] + chunks = [c[1][0] for c in list(transport.write.mock_calls)] # type: ignore[attr-defined] + transport.write.reset_mock() # type: ignore[attr-defined] + assert not transport.writelines.called # type: ignore[attr-defined] + + # This payload compresses to 20447 bytes + payload = b"".join( + [bytes((*range(0, i), *range(i, 0, -1))) for i in range(255) for _ in range(64)] + ) + await msg.write_eof(payload) + assert not transport.write.called # type: ignore[attr-defined] + assert transport.writelines.called # type: ignore[attr-defined] + chunks.extend(transport.writelines.mock_calls[0][1][0]) # type: ignore[attr-defined] + content = b"".join(chunks) + assert zlib.decompress(content) == (b"data" * 4096) + payload + + async def test_write_payload_chunked_filter( protocol: BaseProtocol, transport: asyncio.Transport, @@ -185,6 +224,26 @@ async def test_write_payload_deflate_compression_chunked( assert content == expected +@pytest.mark.usefixtures("enable_writelines") +@pytest.mark.usefixtures("force_writelines_small_payloads") +async def test_write_payload_deflate_compression_chunked_writelines( + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + expected = 
b"2\r\nx\x9c\r\na\r\nKI,I\x04\x00\x04\x00\x01\x9b\r\n0\r\n\r\n" + msg = http.StreamWriter(protocol, loop) + msg.enable_compression("deflate") + msg.enable_chunking() + await msg.write(b"data") + await msg.write_eof() + + chunks = [b"".join(c[1][0]) for c in list(transport.writelines.mock_calls)] # type: ignore[attr-defined] + assert all(chunks) + content = b"".join(chunks) + assert content == expected + + async def test_write_payload_deflate_and_chunked( buf: bytearray, protocol: BaseProtocol, @@ -221,6 +280,26 @@ async def test_write_payload_deflate_compression_chunked_data_in_eof( assert content == expected +@pytest.mark.usefixtures("enable_writelines") +@pytest.mark.usefixtures("force_writelines_small_payloads") +async def test_write_payload_deflate_compression_chunked_data_in_eof_writelines( + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + expected = b"2\r\nx\x9c\r\nd\r\nKI,IL\xcdK\x01\x00\x0b@\x02\xd2\r\n0\r\n\r\n" + msg = http.StreamWriter(protocol, loop) + msg.enable_compression("deflate") + msg.enable_chunking() + await msg.write(b"data") + await msg.write_eof(b"end") + + chunks = [b"".join(c[1][0]) for c in list(transport.writelines.mock_calls)] # type: ignore[attr-defined] + assert all(chunks) + content = b"".join(chunks) + assert content == expected + + async def test_write_large_payload_deflate_compression_chunked_data_in_eof( protocol: BaseProtocol, transport: asyncio.Transport, @@ -247,6 +326,34 @@ async def test_write_large_payload_deflate_compression_chunked_data_in_eof( assert zlib.decompress(content) == (b"data" * 4096) + payload +@pytest.mark.usefixtures("enable_writelines") +@pytest.mark.usefixtures("force_writelines_small_payloads") +async def test_write_large_payload_deflate_compression_chunked_data_in_eof_writelines( + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + msg = http.StreamWriter(protocol, loop) + 
msg.enable_compression("deflate") + msg.enable_chunking() + + await msg.write(b"data" * 4096) + # This payload compresses to 1111 bytes + payload = b"".join([bytes((*range(0, i), *range(i, 0, -1))) for i in range(255)]) + await msg.write_eof(payload) + assert not transport.write.called # type: ignore[attr-defined] + + chunks = [] + for write_lines_call in transport.writelines.mock_calls: # type: ignore[attr-defined] + chunked_payload = list(write_lines_call[1][0])[1:] + chunked_payload.pop() + chunks.extend(chunked_payload) + + assert all(chunks) + content = b"".join(chunks) + assert zlib.decompress(content) == (b"data" * 4096) + payload + + async def test_write_payload_deflate_compression_chunked_connection_lost( protocol: BaseProtocol, transport: asyncio.Transport, From 7f7afba1cdb9ce708cd8f22e31d384ffd471720d Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 5 Feb 2025 18:04:22 +0000 Subject: [PATCH 1178/1511] [PR #10387/95b28c71 backport][3.12] Restore total_bytes to EmptyStreamReader (#10389) Co-authored-by: J. Nick Koston <nick@koston.org> fixes #10386 --- CHANGES/10387.bugfix.rst | 1 + aiohttp/streams.py | 1 + tests/test_streams.py | 1 + 3 files changed, 3 insertions(+) create mode 100644 CHANGES/10387.bugfix.rst diff --git a/CHANGES/10387.bugfix.rst b/CHANGES/10387.bugfix.rst new file mode 100644 index 00000000000..ad1ead9e363 --- /dev/null +++ b/CHANGES/10387.bugfix.rst @@ -0,0 +1 @@ +Restored the missing ``total_bytes`` attribute to ``EmptyStreamReader`` -- by :user:`bdraco`. 
diff --git a/aiohttp/streams.py b/aiohttp/streams.py index 6126fb5695d..7a3f64d1289 100644 --- a/aiohttp/streams.py +++ b/aiohttp/streams.py @@ -555,6 +555,7 @@ class EmptyStreamReader(StreamReader): # lgtm [py/missing-call-to-init] def __init__(self) -> None: self._read_eof_chunk = False + self.total_bytes = 0 def __repr__(self) -> str: return "<%s>" % self.__class__.__name__ diff --git a/tests/test_streams.py b/tests/test_streams.py index fcf13a91eb3..1b65f771c77 100644 --- a/tests/test_streams.py +++ b/tests/test_streams.py @@ -1141,6 +1141,7 @@ async def test_empty_stream_reader() -> None: with pytest.raises(asyncio.IncompleteReadError): await s.readexactly(10) assert s.read_nowait() == b"" + assert s.total_bytes == 0 async def test_empty_stream_reader_iter_chunks() -> None: From aed8222e6dfdfed17eb04ff053ce3859f4ee8e15 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Wed, 5 Feb 2025 12:30:12 -0600 Subject: [PATCH 1179/1511] Release 3.11.12 (#10392) --- CHANGES.rst | 107 ++++++++++++++++++++++++++++++++++++ CHANGES/10137.misc.rst | 3 - CHANGES/10270.bugfix.rst | 2 - CHANGES/10281.contrib.rst | 1 - CHANGES/10300.feature.rst | 2 - CHANGES/10304.doc.rst | 1 - CHANGES/10330.packaging.rst | 1 - CHANGES/10332.feature.rst | 1 - CHANGES/10366.packaging | 2 - CHANGES/10387.bugfix.rst | 1 - aiohttp/__init__.py | 2 +- 11 files changed, 108 insertions(+), 15 deletions(-) delete mode 100644 CHANGES/10137.misc.rst delete mode 100644 CHANGES/10270.bugfix.rst delete mode 100644 CHANGES/10281.contrib.rst delete mode 100644 CHANGES/10300.feature.rst delete mode 100644 CHANGES/10304.doc.rst delete mode 100644 CHANGES/10330.packaging.rst delete mode 100644 CHANGES/10332.feature.rst delete mode 100644 CHANGES/10366.packaging delete mode 100644 CHANGES/10387.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index b07cec6a093..7a464aed9eb 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,113 @@ .. 
towncrier release notes start +3.11.12 (2025-02-05) +==================== + +Bug fixes +--------- + +- ``MultipartForm.decode()`` now follows RFC1341 7.2.1 with a ``CRLF`` after the boundary + -- by :user:`imnotjames`. + + + *Related issues and pull requests on GitHub:* + :issue:`10270`. + + + +- Restored the missing ``total_bytes`` attribute to ``EmptyStreamReader`` -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10387`. + + + + +Features +-------- + +- Updated :py:func:`~aiohttp.request` to make it accept ``_RequestOptions`` kwargs. + -- by :user:`Cycloctane`. + + + *Related issues and pull requests on GitHub:* + :issue:`10300`. + + + +- Improved logging of HTTP protocol errors to include the remote address -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10332`. + + + + +Improved documentation +---------------------- + +- Added ``aiohttp-openmetrics`` to list of third-party libraries -- by :user:`jelmer`. + + + *Related issues and pull requests on GitHub:* + :issue:`10304`. + + + + +Packaging updates and notes for downstreams +------------------------------------------- + +- Started publishing ``riscv64`` wheels -- by :user:`eshattow`. + + + *Related issues and pull requests on GitHub:* + :issue:`10330`. + + + +- Added missing files to the source distribution to fix ``Makefile`` targets. + Added a ``cythonize-nodeps`` target to run Cython without invoking pip to install dependencies. + + + *Related issues and pull requests on GitHub:* + :issue:`10366`. + + + + +Contributor-facing changes +-------------------------- + +- The CI/CD workflow has been updated to use `upload-artifact` v4 and `download-artifact` v4 GitHub Actions -- by :user:`silamon`. + + + *Related issues and pull requests on GitHub:* + :issue:`10281`. 
+ + + + +Miscellaneous internal changes +------------------------------ + +- Restored support for zero copy writes when using Python 3.12 versions 3.12.9 and later or Python 3.13.2+ -- by :user:`bdraco`. + + Zero copy writes were previously disabled due to :cve:`2024-12254` which is resolved in these Python versions. + + + *Related issues and pull requests on GitHub:* + :issue:`10137`. + + + + +---- + + 3.11.11 (2024-12-18) ==================== diff --git a/CHANGES/10137.misc.rst b/CHANGES/10137.misc.rst deleted file mode 100644 index 43b19c33f32..00000000000 --- a/CHANGES/10137.misc.rst +++ /dev/null @@ -1,3 +0,0 @@ -Restored support for zero copy writes when using Python 3.12 versions 3.12.9 and later or Python 3.13.2+ -- by :user:`bdraco`. - -Zero copy writes were previously disabled due to :cve:`2024-12254` which is resolved in these Python versions. diff --git a/CHANGES/10270.bugfix.rst b/CHANGES/10270.bugfix.rst deleted file mode 100644 index e3252464dc8..00000000000 --- a/CHANGES/10270.bugfix.rst +++ /dev/null @@ -1,2 +0,0 @@ -``MultipartForm.decode()`` must follow RFC1341 7.2.1 with a ``CRLF`` after the boundary --- by :user:`imnotjames`. diff --git a/CHANGES/10281.contrib.rst b/CHANGES/10281.contrib.rst deleted file mode 100644 index b50b4d2f955..00000000000 --- a/CHANGES/10281.contrib.rst +++ /dev/null @@ -1 +0,0 @@ -The CI/CD workflow has been updated to use `upload-artifact` v4 and `download-artifact` v4 GitHub Actions -- by :user:`silamon`. diff --git a/CHANGES/10300.feature.rst b/CHANGES/10300.feature.rst deleted file mode 100644 index 3632c3d41a7..00000000000 --- a/CHANGES/10300.feature.rst +++ /dev/null @@ -1,2 +0,0 @@ -Update :py:func:`~aiohttp.request` to make it accept ``_RequestOptions`` kwargs. --- by :user:`Cycloctane`. 
diff --git a/CHANGES/10304.doc.rst b/CHANGES/10304.doc.rst deleted file mode 100644 index cedac3ef881..00000000000 --- a/CHANGES/10304.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Added ``aiohttp-openmetrics`` to list of third-party libraries -- by :user:`jelmer`. diff --git a/CHANGES/10330.packaging.rst b/CHANGES/10330.packaging.rst deleted file mode 100644 index c159cf3a57d..00000000000 --- a/CHANGES/10330.packaging.rst +++ /dev/null @@ -1 +0,0 @@ -Started publishing ``riscv64`` wheels -- by :user:`eshattow`. diff --git a/CHANGES/10332.feature.rst b/CHANGES/10332.feature.rst deleted file mode 100644 index e5c84adf50d..00000000000 --- a/CHANGES/10332.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Improved logging of HTTP protocol errors to include the remote address -- by :user:`bdraco`. diff --git a/CHANGES/10366.packaging b/CHANGES/10366.packaging deleted file mode 100644 index f0e9f62095a..00000000000 --- a/CHANGES/10366.packaging +++ /dev/null @@ -1,2 +0,0 @@ -Added missing files to the source distribution to fix ``Makefile`` targets. -Added a ``cythonize-nodeps`` target to run Cython without invoking pip to install dependencies. diff --git a/CHANGES/10387.bugfix.rst b/CHANGES/10387.bugfix.rst deleted file mode 100644 index ad1ead9e363..00000000000 --- a/CHANGES/10387.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Restored the missing ``total_bytes`` attribute to ``EmptyStreamReader`` -- by :user:`bdraco`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 0c3c65fdbe1..4bafa848287 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.12.dev0" +__version__ = "3.11.12" from typing import TYPE_CHECKING, Tuple From 2a3111d2b08e86ccaebf86c8228ec4ed5f4943c9 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 5 Feb 2025 19:26:31 +0000 Subject: [PATCH 1180/1511] [PR #10393/9057364b backport][3.11] Revert "Start building riscv64 platform wheels in CI/CD" (#10394) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- .github/workflows/ci-cd.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 1a95adf975f..0bd2a07f65e 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -355,8 +355,6 @@ jobs: qemu: aarch64 - os: ubuntu qemu: ppc64le - - os: ubuntu - qemu: riscv64 - os: ubuntu qemu: s390x steps: From 22237766eb917860a0f981fc43ce024f4b94584c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Wed, 5 Feb 2025 13:26:46 -0600 Subject: [PATCH 1181/1511] [3.11] Amend changes to remove reverted riscv64 wheels (#10397) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/aio-libs/aiohttp/pull/10393 Note that the release failed so this was never published <img width="464" alt="Screenshot 2025-02-05 at 1 26 20 PM" src="https://github.com/user-attachments/assets/7e21a6ef-abc0-430f-afc8-99e2e4256276" /> --- CHANGES.rst | 8 -------- 1 file changed, 8 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 7a464aed9eb..5775d1041cc 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -70,14 +70,6 @@ Improved documentation Packaging updates and notes for downstreams ------------------------------------------- -- Started publishing ``riscv64`` wheels -- by :user:`eshattow`. - - - *Related issues and pull requests on GitHub:* - :issue:`10330`. - - - - Added missing files to the source distribution to fix ``Makefile`` targets. Added a ``cythonize-nodeps`` target to run Cython without invoking pip to install dependencies. From bc4f5cf0b05356b4634da850f7496d8cb1f56b23 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 5 Feb 2025 19:29:05 +0000 Subject: [PATCH 1182/1511] [PR #10393/9057364b backport][3.12] Revert "Start building riscv64 platform wheels in CI/CD" (#10395) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- .github/workflows/ci-cd.yml | 2 -- CHANGES/10330.packaging.rst | 1 - 2 files changed, 3 deletions(-) delete mode 100644 CHANGES/10330.packaging.rst diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 1a95adf975f..0bd2a07f65e 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -355,8 +355,6 @@ jobs: qemu: aarch64 - os: ubuntu qemu: ppc64le - - os: ubuntu - qemu: riscv64 - os: ubuntu qemu: s390x steps: diff --git a/CHANGES/10330.packaging.rst b/CHANGES/10330.packaging.rst deleted file mode 100644 index c159cf3a57d..00000000000 --- a/CHANGES/10330.packaging.rst +++ /dev/null @@ -1 +0,0 @@ -Started publishing ``riscv64`` wheels -- by :user:`eshattow`. From b53c5f958bbe8e14eebe65031e32ae5f3c23ea72 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 5 Feb 2025 13:34:44 -0600 Subject: [PATCH 1183/1511] [PR #10396/481a8374 backport][3.11] Switch to native arm runners for wheel builds (#10398) **This is a backport of PR #10396 as merged into master (481a8374a6fa7b75269f6e523348b2ce7cb498b6).** Not a user facing change as we should get the same wheels in the end, however it should build a bit faster since its using native runners and musl is now split up Co-authored-by: J. 
Nick Koston <nick@koston.org> --- .github/workflows/ci-cd.yml | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 0bd2a07f65e..b4c65de98c2 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -347,16 +347,27 @@ jobs: needs: pre-deploy strategy: matrix: - os: [ubuntu, windows, macos] + os: [ubuntu, windows, macos, "ubuntu-24.04-arm"] qemu: [''] + musl: [""] include: - # Split ubuntu job for the sake of speed-up + # Split ubuntu/musl jobs for the sake of speed-up - os: ubuntu - qemu: aarch64 + qemu: ppc64le + musl: "" + - os: ubuntu + qemu: s390x + musl: "" - os: ubuntu qemu: ppc64le + musl: musllinux - os: ubuntu qemu: s390x + musl: musllinux + - os: ubuntu + musl: musllinux + - os: ubuntu-24.04-arm + musl: musllinux steps: - name: Checkout uses: actions/checkout@v4 @@ -398,12 +409,13 @@ jobs: - name: Build wheels uses: pypa/cibuildwheel@v2.22.0 env: + CIBW_SKIP: ${{ matrix.musl == 'musllinux' && '*manylinux*' || '*musllinux*' }} CIBW_ARCHS_MACOS: x86_64 arm64 universal2 - name: Upload wheels uses: actions/upload-artifact@v4 with: name: >- - dist-${{ matrix.os }}-${{ + dist-${{ matrix.os }}-${{ matrix.musl }}-${{ matrix.qemu && matrix.qemu || 'native' From 26692da3e79e21bf16c0b63972b097643e5bf8ea Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 5 Feb 2025 13:34:53 -0600 Subject: [PATCH 1184/1511] [PR #10396/481a8374 backport][3.12] Switch to native arm runners for wheel builds (#10399) **This is a backport of PR #10396 as merged into master (481a8374a6fa7b75269f6e523348b2ce7cb498b6).** Not a user facing change as we should get the same wheels in the end, however it should build a bit faster since its using native runners and musl is now split up Co-authored-by: J. 
Nick Koston <nick@koston.org> --- .github/workflows/ci-cd.yml | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 0bd2a07f65e..b4c65de98c2 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -347,16 +347,27 @@ jobs: needs: pre-deploy strategy: matrix: - os: [ubuntu, windows, macos] + os: [ubuntu, windows, macos, "ubuntu-24.04-arm"] qemu: [''] + musl: [""] include: - # Split ubuntu job for the sake of speed-up + # Split ubuntu/musl jobs for the sake of speed-up - os: ubuntu - qemu: aarch64 + qemu: ppc64le + musl: "" + - os: ubuntu + qemu: s390x + musl: "" - os: ubuntu qemu: ppc64le + musl: musllinux - os: ubuntu qemu: s390x + musl: musllinux + - os: ubuntu + musl: musllinux + - os: ubuntu-24.04-arm + musl: musllinux steps: - name: Checkout uses: actions/checkout@v4 @@ -398,12 +409,13 @@ jobs: - name: Build wheels uses: pypa/cibuildwheel@v2.22.0 env: + CIBW_SKIP: ${{ matrix.musl == 'musllinux' && '*manylinux*' || '*musllinux*' }} CIBW_ARCHS_MACOS: x86_64 arm64 universal2 - name: Upload wheels uses: actions/upload-artifact@v4 with: name: >- - dist-${{ matrix.os }}-${{ + dist-${{ matrix.os }}-${{ matrix.musl }}-${{ matrix.qemu && matrix.qemu || 'native' From 785261c834ee227241b823a2669d3a727032724d Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Wed, 5 Feb 2025 14:18:46 -0600 Subject: [PATCH 1185/1511] [PR #10400/9b33be3 backport][3.12] Add workaround for segfaults during wheel builds (#10402) (cherry picked from commit 9b33be33d169f19842ae0a0f537163625fe3af77) --- .github/workflows/ci-cd.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index b4c65de98c2..2299037231e 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -342,7 +342,7 @@ jobs: path: dist build-wheels: - name: Build wheels on ${{ matrix.os }} ${{ matrix.qemu }} + name: Build wheels on ${{ matrix.os }} ${{ matrix.qemu }} ${{ matrix.musl }} runs-on: ${{ matrix.os }}-latest needs: pre-deploy strategy: @@ -378,6 +378,10 @@ jobs: uses: docker/setup-qemu-action@v3 with: platforms: all + # This should be temporary + # xref https://github.com/docker/setup-qemu-action/issues/188 + # xref https://github.com/tonistiigi/binfmt/issues/215 + image: tonistiigi/binfmt:qemu-v8.1.5 id: qemu - name: Prepare emulation run: | From 5f1cd3b198188d7b5ef4745b8c8ca87a0a293a26 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Wed, 5 Feb 2025 14:18:52 -0600 Subject: [PATCH 1186/1511] [PR #10400/9b33be3 backport][3.11] Add workaround for segfaults during wheel builds (#10401) (cherry picked from commit 9b33be33d169f19842ae0a0f537163625fe3af77) --- .github/workflows/ci-cd.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index b4c65de98c2..2299037231e 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -342,7 +342,7 @@ jobs: path: dist build-wheels: - name: Build wheels on ${{ matrix.os }} ${{ matrix.qemu }} + name: Build wheels on ${{ matrix.os }} ${{ matrix.qemu }} ${{ matrix.musl }} runs-on: ${{ matrix.os }}-latest needs: pre-deploy strategy: @@ -378,6 +378,10 @@ jobs: uses: docker/setup-qemu-action@v3 with: platforms: all + # This should be temporary + # xref https://github.com/docker/setup-qemu-action/issues/188 + # xref https://github.com/tonistiigi/binfmt/issues/215 + image: tonistiigi/binfmt:qemu-v8.1.5 id: qemu - name: Prepare emulation run: | From 864bf5a263141d041911f26a012d16dbfdf8af51 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 5 Feb 2025 14:58:29 -0600 Subject: [PATCH 1187/1511] [PR #10403/908145c9 backport][3.11] Disable wheel builds on PyPy (#10405) **This is a backport of PR #10403 as merged into master (908145c97546afb717807d3bcad1f63110fdaa4a).** fixes failure seen in https://github.com/aio-libs/aiohttp/actions/runs/13165870727 When I split these, I forgot to turn these off as we have never tried to build extensions on PyPy Co-authored-by: J. 
Nick Koston <nick@koston.org> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 2299037231e..c056f67ac49 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -413,7 +413,7 @@ jobs: - name: Build wheels uses: pypa/cibuildwheel@v2.22.0 env: - CIBW_SKIP: ${{ matrix.musl == 'musllinux' && '*manylinux*' || '*musllinux*' }} + CIBW_SKIP: pp* ${{ matrix.musl == 'musllinux' && '*manylinux*' || '*musllinux*' }} CIBW_ARCHS_MACOS: x86_64 arm64 universal2 - name: Upload wheels uses: actions/upload-artifact@v4 From a45f26791c3570ffb6acc5f265a90c8bd02ca1f0 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 5 Feb 2025 14:58:36 -0600 Subject: [PATCH 1188/1511] [PR #10403/908145c9 backport][3.12] Disable wheel builds on PyPy (#10406) **This is a backport of PR #10403 as merged into master (908145c97546afb717807d3bcad1f63110fdaa4a).** fixes failure seen in https://github.com/aio-libs/aiohttp/actions/runs/13165870727 When I split these, I forgot to turn these off as we have never tried to build extensions on PyPy Co-authored-by: J. 
Nick Koston <nick@koston.org> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 2299037231e..c056f67ac49 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -413,7 +413,7 @@ jobs: - name: Build wheels uses: pypa/cibuildwheel@v2.22.0 env: - CIBW_SKIP: ${{ matrix.musl == 'musllinux' && '*manylinux*' || '*musllinux*' }} + CIBW_SKIP: pp* ${{ matrix.musl == 'musllinux' && '*manylinux*' || '*musllinux*' }} CIBW_ARCHS_MACOS: x86_64 arm64 universal2 - name: Upload wheels uses: actions/upload-artifact@v4 From 43a36ee8b90cbafd136a59d5f9a73a3f6945d31e Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 5 Feb 2025 15:05:11 -0600 Subject: [PATCH 1189/1511] [PR #10404/6ee81df6 backport][3.11] Start build wheels on armv7l musllinux (#10407) **This is a backport of PR #10404 as merged into master (6ee81df6dd2f50e3df8dc2aafcc4cab035c71b9a).** I realized we were missing these. While I'm fixing up the wheel builds I figured I'd add these as well Co-authored-by: J. Nick Koston <nick@koston.org> --- .github/workflows/ci-cd.yml | 5 ++++- CHANGES/10404.packaging.rst | 1 + docs/spelling_wordlist.txt | 2 ++ 3 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 CHANGES/10404.packaging.rst diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index c056f67ac49..e9cc3608d42 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -359,7 +359,10 @@ jobs: qemu: s390x musl: "" - os: ubuntu - qemu: ppc64le + qemu: armv7l + musl: musllinux + - os: ubuntu + qemu: s390x musl: musllinux - os: ubuntu qemu: s390x diff --git a/CHANGES/10404.packaging.rst b/CHANGES/10404.packaging.rst new file mode 100644 index 00000000000..e27ca91989f --- /dev/null +++ b/CHANGES/10404.packaging.rst @@ -0,0 +1 @@ +Started building armv7l musllinux wheels -- by :user:`bdraco`. 
diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index 3e41af824e4..59ea99c40bb 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -14,6 +14,7 @@ app’s apps arg args +armv Arsenic async asyncio @@ -201,6 +202,7 @@ multidicts Multidicts multipart Multipart +musllinux mypy Nagle Nagle’s From 0a51d1dab1bb5791fb34b5180045c6b93b551eea Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 5 Feb 2025 15:05:23 -0600 Subject: [PATCH 1190/1511] [PR #10404/6ee81df6 backport][3.12] Start build wheels on armv7l musllinux (#10408) **This is a backport of PR #10404 as merged into master (6ee81df6dd2f50e3df8dc2aafcc4cab035c71b9a).** I realized we were missing these. While I'm fixing up the wheel builds I figured I'd add these as well Co-authored-by: J. Nick Koston <nick@koston.org> --- .github/workflows/ci-cd.yml | 5 ++++- CHANGES/10404.packaging.rst | 1 + docs/spelling_wordlist.txt | 2 ++ 3 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 CHANGES/10404.packaging.rst diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index c056f67ac49..e9cc3608d42 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -359,7 +359,10 @@ jobs: qemu: s390x musl: "" - os: ubuntu - qemu: ppc64le + qemu: armv7l + musl: musllinux + - os: ubuntu + qemu: s390x musl: musllinux - os: ubuntu qemu: s390x diff --git a/CHANGES/10404.packaging.rst b/CHANGES/10404.packaging.rst new file mode 100644 index 00000000000..e27ca91989f --- /dev/null +++ b/CHANGES/10404.packaging.rst @@ -0,0 +1 @@ +Started building armv7l musllinux wheels -- by :user:`bdraco`. 
diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index 3e41af824e4..59ea99c40bb 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -14,6 +14,7 @@ app’s apps arg args +armv Arsenic async asyncio @@ -201,6 +202,7 @@ multidicts Multidicts multipart Multipart +musllinux mypy Nagle Nagle’s From d07f577592238dc5c38558223a0148582b2dfeb1 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Wed, 5 Feb 2025 15:12:01 -0600 Subject: [PATCH 1191/1511] [3.11] Adjust changelog messages for armv7l wheels (#10409) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit <img width="375" alt="Screenshot 2025-02-05 at 3 11 36 PM" src="https://github.com/user-attachments/assets/953c9e75-0c88-4183-bbb9-433b364b5647" /> --- CHANGES.rst | 8 ++++++++ CHANGES/10404.packaging.rst | 1 - 2 files changed, 8 insertions(+), 1 deletion(-) delete mode 100644 CHANGES/10404.packaging.rst diff --git a/CHANGES.rst b/CHANGES.rst index 5775d1041cc..104dd7a746d 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -79,6 +79,14 @@ Packaging updates and notes for downstreams +- Started building armv7l musllinux wheels -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10404`. + + + Contributor-facing changes -------------------------- diff --git a/CHANGES/10404.packaging.rst b/CHANGES/10404.packaging.rst deleted file mode 100644 index e27ca91989f..00000000000 --- a/CHANGES/10404.packaging.rst +++ /dev/null @@ -1 +0,0 @@ -Started building armv7l musllinux wheels -- by :user:`bdraco`. 
From 3cca9598a2d58831fac577187e9ea30ec2cda582 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 5 Feb 2025 15:40:55 -0600 Subject: [PATCH 1192/1511] [PR #10410/1009c066 backport][3.11] Fix runs-on for wheel builds for native arm (#10411) **This is a backport of PR #10410 as merged into master (1009c066dc88de1360bfb1d011cd0cc9addeb534).** -latest was being appended to everything but this will not work with the native arm runners Co-authored-by: J. Nick Koston <nick@koston.org> --- .github/workflows/ci-cd.yml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index e9cc3608d42..e2129cf4d34 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -343,31 +343,31 @@ jobs: build-wheels: name: Build wheels on ${{ matrix.os }} ${{ matrix.qemu }} ${{ matrix.musl }} - runs-on: ${{ matrix.os }}-latest + runs-on: ${{ matrix.os }} needs: pre-deploy strategy: matrix: - os: [ubuntu, windows, macos, "ubuntu-24.04-arm"] + os: ["ubuntu-latest", "windows-latest", "macos-latest", "ubuntu-24.04-arm"] qemu: [''] musl: [""] include: # Split ubuntu/musl jobs for the sake of speed-up - - os: ubuntu + - os: ubuntu-latest qemu: ppc64le musl: "" - - os: ubuntu + - os: ubuntu-latest qemu: s390x musl: "" - - os: ubuntu + - os: ubuntu-latest qemu: armv7l musl: musllinux - - os: ubuntu + - os: ubuntu-latest qemu: s390x musl: musllinux - - os: ubuntu + - os: ubuntu-latest qemu: s390x musl: musllinux - - os: ubuntu + - os: ubuntu-latest musl: musllinux - os: ubuntu-24.04-arm musl: musllinux From f4196485dee1d7a19f4087b5bd4dfd065d3f42b5 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 5 Feb 2025 15:41:06 -0600 Subject: [PATCH 1193/1511] [PR #10410/1009c066 backport][3.12] Fix runs-on for wheel builds for native arm (#10412) **This is a backport of PR #10410 as merged 
into master (1009c066dc88de1360bfb1d011cd0cc9addeb534).** -latest was being appended to everything but this will not work with the native arm runners Co-authored-by: J. Nick Koston <nick@koston.org> --- .github/workflows/ci-cd.yml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index e9cc3608d42..e2129cf4d34 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -343,31 +343,31 @@ jobs: build-wheels: name: Build wheels on ${{ matrix.os }} ${{ matrix.qemu }} ${{ matrix.musl }} - runs-on: ${{ matrix.os }}-latest + runs-on: ${{ matrix.os }} needs: pre-deploy strategy: matrix: - os: [ubuntu, windows, macos, "ubuntu-24.04-arm"] + os: ["ubuntu-latest", "windows-latest", "macos-latest", "ubuntu-24.04-arm"] qemu: [''] musl: [""] include: # Split ubuntu/musl jobs for the sake of speed-up - - os: ubuntu + - os: ubuntu-latest qemu: ppc64le musl: "" - - os: ubuntu + - os: ubuntu-latest qemu: s390x musl: "" - - os: ubuntu + - os: ubuntu-latest qemu: armv7l musl: musllinux - - os: ubuntu + - os: ubuntu-latest qemu: s390x musl: musllinux - - os: ubuntu + - os: ubuntu-latest qemu: s390x musl: musllinux - - os: ubuntu + - os: ubuntu-latest musl: musllinux - os: ubuntu-24.04-arm musl: musllinux From 70da1d45342fe4372a795cacfd394ba367bcff3c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 5 Feb 2025 16:30:50 -0600 Subject: [PATCH 1194/1511] [PR #10413/f6dae31d backport][3.11] Fix missing ppc64le musllinux wheels (#10414) **This is a backport of PR #10413 as merged into master (f6dae31db7538882be97e6d56f79c527636f62f3).** I had s390x twice instead of ppc64le. Seems like I could use a little more sleep or coffee today Co-authored-by: J. 
Nick Koston <nick@koston.org> --- .github/workflows/ci-cd.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index e2129cf4d34..3df6a02c1dd 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -355,17 +355,17 @@ jobs: - os: ubuntu-latest qemu: ppc64le musl: "" + - os: ubuntu-latest + qemu: ppc64le + musl: musllinux - os: ubuntu-latest qemu: s390x musl: "" - - os: ubuntu-latest - qemu: armv7l - musl: musllinux - os: ubuntu-latest qemu: s390x musl: musllinux - os: ubuntu-latest - qemu: s390x + qemu: armv7l musl: musllinux - os: ubuntu-latest musl: musllinux From a1da985fdaa1738b40286e84955c8da3e862a9ba Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 5 Feb 2025 16:31:01 -0600 Subject: [PATCH 1195/1511] [PR #10413/f6dae31d backport][3.12] Fix missing ppc64le musllinux wheels (#10415) **This is a backport of PR #10413 as merged into master (f6dae31db7538882be97e6d56f79c527636f62f3).** I had s390x twice instead of ppc64le. Seems like I could use a little more sleep or coffee today Co-authored-by: J. 
Nick Koston <nick@koston.org> --- .github/workflows/ci-cd.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index e2129cf4d34..3df6a02c1dd 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -355,17 +355,17 @@ jobs: - os: ubuntu-latest qemu: ppc64le musl: "" + - os: ubuntu-latest + qemu: ppc64le + musl: musllinux - os: ubuntu-latest qemu: s390x musl: "" - - os: ubuntu-latest - qemu: armv7l - musl: musllinux - os: ubuntu-latest qemu: s390x musl: musllinux - os: ubuntu-latest - qemu: s390x + qemu: armv7l musl: musllinux - os: ubuntu-latest musl: musllinux From 78ea7251bc952b53be1042c7e97f95e0426e6939 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 5 Feb 2025 17:41:13 -0600 Subject: [PATCH 1196/1511] [PR #10416/e92f1874 backport][3.11] Fix wheel download-artifact by setting merge-multiple (#10417) **This is a backport of PR #10416 as merged into master (e92f18746aead3ff6c5363cf4a3ad735e6b44e1e).** Should fix https://github.com/aio-libs/aiohttp/actions/runs/13167811054/job/36753606142 Co-authored-by: J. 
Nick Koston <nick@koston.org> --- .github/workflows/ci-cd.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 3df6a02c1dd..a794dc65d77 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -455,6 +455,7 @@ jobs: with: path: dist pattern: dist-* + merge-multiple: true - name: Collected dists run: | tree dist From 30bc60c583304024c3f53f16f208fcbaa6c9c761 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 5 Feb 2025 17:41:31 -0600 Subject: [PATCH 1197/1511] [PR #10416/e92f1874 backport][3.12] Fix wheel download-artifact by setting merge-multiple (#10418) **This is a backport of PR #10416 as merged into master (e92f18746aead3ff6c5363cf4a3ad735e6b44e1e).** Should fix https://github.com/aio-libs/aiohttp/actions/runs/13167811054/job/36753606142 Co-authored-by: J. Nick Koston <nick@koston.org> --- .github/workflows/ci-cd.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 3df6a02c1dd..a794dc65d77 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -455,6 +455,7 @@ jobs: with: path: dist pattern: dist-* + merge-multiple: true - name: Collected dists run: | tree dist From 1bd7faa1008c3b959af14da382dd28d1d00a8695 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Wed, 5 Feb 2025 18:59:14 -0600 Subject: [PATCH 1198/1511] [3.11] Increment version to 3.11.13.dev0 (#10420) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 4bafa848287..6652a0b979d 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.12" +__version__ = "3.11.13.dev0" from typing import TYPE_CHECKING, Tuple From c4523f3bc1c8972d8436e02e20d749c0b5823b07 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 6 Feb 2025 16:23:31 +0000 Subject: [PATCH 1199/1511] [PR #10426/fae142f5 backport][3.11] Add benchmark for streaming API iter_chunks (#10427) Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_benchmarks_client.py | 32 +++++++++++++++++++++++++++++++- 1 file changed, 31 insertions(+), 1 deletion(-) diff --git a/tests/test_benchmarks_client.py b/tests/test_benchmarks_client.py index 61439183334..ac3131e9750 100644 --- a/tests/test_benchmarks_client.py +++ b/tests/test_benchmarks_client.py @@ -124,7 +124,7 @@ def test_one_hundred_get_requests_with_512kib_chunked_payload( aiohttp_client: AiohttpClient, benchmark: BenchmarkFixture, ) -> None: - """Benchmark 100 GET requests with a payload of 512KiB.""" + """Benchmark 100 GET requests with a payload of 512KiB using read.""" message_count = 100 payload = b"a" * (2**19) @@ -148,6 +148,36 @@ def _run() -> None: loop.run_until_complete(run_client_benchmark()) +def test_one_hundred_get_requests_iter_chunks_on_512kib_chunked_payload( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 100 GET requests with a payload of 512KiB using iter_chunks.""" + message_count = 100 + payload = b"a" * (2**19) + + async def handler(request: web.Request) -> web.Response: + resp = web.Response(body=payload) + 
resp.enable_chunked_encoding() + return resp + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(message_count): + resp = await client.get("/") + async for _ in resp.content.iter_chunks(): + pass + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) + + def test_get_request_with_251308_compressed_chunked_payload( loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, From 655c2f30abfe32f3137925e1663fb0d8a76cf297 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 6 Feb 2025 16:29:44 +0000 Subject: [PATCH 1200/1511] [PR #10426/fae142f5 backport][3.12] Add benchmark for streaming API iter_chunks (#10428) Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_benchmarks_client.py | 32 +++++++++++++++++++++++++++++++- 1 file changed, 31 insertions(+), 1 deletion(-) diff --git a/tests/test_benchmarks_client.py b/tests/test_benchmarks_client.py index 61439183334..ac3131e9750 100644 --- a/tests/test_benchmarks_client.py +++ b/tests/test_benchmarks_client.py @@ -124,7 +124,7 @@ def test_one_hundred_get_requests_with_512kib_chunked_payload( aiohttp_client: AiohttpClient, benchmark: BenchmarkFixture, ) -> None: - """Benchmark 100 GET requests with a payload of 512KiB.""" + """Benchmark 100 GET requests with a payload of 512KiB using read.""" message_count = 100 payload = b"a" * (2**19) @@ -148,6 +148,36 @@ def _run() -> None: loop.run_until_complete(run_client_benchmark()) +def test_one_hundred_get_requests_iter_chunks_on_512kib_chunked_payload( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 100 GET requests with a payload of 512KiB using iter_chunks.""" + message_count = 100 + payload = b"a" * (2**19) + + async def handler(request: web.Request) -> 
web.Response: + resp = web.Response(body=payload) + resp.enable_chunked_encoding() + return resp + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(message_count): + resp = await client.get("/") + async for _ in resp.content.iter_chunks(): + pass + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) + + def test_get_request_with_251308_compressed_chunked_payload( loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, From cae5b2ed18cba9d35d9ca3f03346680f256a50a4 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 6 Feb 2025 16:59:54 +0000 Subject: [PATCH 1201/1511] [PR #10423/51daf719 backport][3.12] Disable writelines for test_write_large_payload_deflate_compression_data_in_eof (#10430) Co-authored-by: J. Nick Koston <nick@koston.org> fixes #10421 --- CHANGES/10423.packaging.rst | 1 + tests/test_http_writer.py | 7 +++++++ 2 files changed, 8 insertions(+) create mode 100644 CHANGES/10423.packaging.rst diff --git a/CHANGES/10423.packaging.rst b/CHANGES/10423.packaging.rst new file mode 100644 index 00000000000..6cf58c5a10b --- /dev/null +++ b/CHANGES/10423.packaging.rst @@ -0,0 +1 @@ +Fixed test ``test_write_large_payload_deflate_compression_data_in_eof_writelines`` failing with Python 3.12.9+ or 3.13.2+ -- by :user:`bdraco`. 
diff --git a/tests/test_http_writer.py b/tests/test_http_writer.py index 677b5bc9678..c39fe3c7251 100644 --- a/tests/test_http_writer.py +++ b/tests/test_http_writer.py @@ -19,6 +19,12 @@ def enable_writelines() -> Generator[None, None, None]: yield +@pytest.fixture +def disable_writelines() -> Generator[None, None, None]: + with mock.patch("aiohttp.http_writer.SKIP_WRITELINES", True): + yield + + @pytest.fixture def force_writelines_small_payloads() -> Generator[None, None, None]: with mock.patch("aiohttp.http_writer.MIN_PAYLOAD_FOR_WRITELINES", 1): @@ -104,6 +110,7 @@ async def test_write_payload_length(protocol, transport, loop) -> None: assert b"da" == content.split(b"\r\n\r\n", 1)[-1] +@pytest.mark.usefixtures("disable_writelines") async def test_write_large_payload_deflate_compression_data_in_eof( protocol: BaseProtocol, transport: asyncio.Transport, From a3d8bd346dd562a32f00f386b28c24cf28d40da8 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 6 Feb 2025 17:04:23 +0000 Subject: [PATCH 1202/1511] [PR #10422/289259d1 backport][3.12] Add human readable error messages for WebSocket PING/PONG timeouts (#10432) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/10422.misc.rst | 3 +++ aiohttp/client_ws.py | 4 +++- aiohttp/web_ws.py | 6 +++++- tests/test_client_ws_functional.py | 1 + tests/test_web_websocket_functional.py | 1 + 5 files changed, 13 insertions(+), 2 deletions(-) create mode 100644 CHANGES/10422.misc.rst diff --git a/CHANGES/10422.misc.rst b/CHANGES/10422.misc.rst new file mode 100644 index 00000000000..7ecb1c0e2e2 --- /dev/null +++ b/CHANGES/10422.misc.rst @@ -0,0 +1,3 @@ +Added human-readable error messages to the exceptions for WebSocket disconnects due to PONG not being received -- by :user:`bdraco`. + +Previously, the error messages were empty strings, which made it hard to determine what went wrong. 
diff --git a/aiohttp/client_ws.py b/aiohttp/client_ws.py index f4cfa1bffe8..daa57d1930b 100644 --- a/aiohttp/client_ws.py +++ b/aiohttp/client_ws.py @@ -163,7 +163,9 @@ def _ping_task_done(self, task: "asyncio.Task[None]") -> None: self._ping_task = None def _pong_not_received(self) -> None: - self._handle_ping_pong_exception(ServerTimeoutError()) + self._handle_ping_pong_exception( + ServerTimeoutError(f"No PONG received after {self._pong_heartbeat} seconds") + ) def _handle_ping_pong_exception(self, exc: BaseException) -> None: """Handle exceptions raised during ping/pong processing.""" diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index 0fb1549a3aa..a448bca101e 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -182,7 +182,11 @@ def _ping_task_done(self, task: "asyncio.Task[None]") -> None: def _pong_not_received(self) -> None: if self._req is not None and self._req.transport is not None: - self._handle_ping_pong_exception(asyncio.TimeoutError()) + self._handle_ping_pong_exception( + asyncio.TimeoutError( + f"No PONG received after {self._pong_heartbeat} seconds" + ) + ) def _handle_ping_pong_exception(self, exc: BaseException) -> None: """Handle exceptions raised during ping/pong processing.""" diff --git a/tests/test_client_ws_functional.py b/tests/test_client_ws_functional.py index 7ede7432adf..54cd5e92f80 100644 --- a/tests/test_client_ws_functional.py +++ b/tests/test_client_ws_functional.py @@ -902,6 +902,7 @@ async def handler(request): assert resp.close_code is WSCloseCode.ABNORMAL_CLOSURE assert msg.type is WSMsgType.ERROR assert isinstance(msg.data, ServerTimeoutError) + assert str(msg.data) == "No PONG received after 0.05 seconds" async def test_close_websocket_while_ping_inflight( diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index b7494d9265f..945096a2af3 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -797,6 +797,7 @@ async def 
handler(request: web.Request) -> NoReturn: assert ws.close_code == WSCloseCode.ABNORMAL_CLOSURE assert ws_server_close_code == WSCloseCode.ABNORMAL_CLOSURE assert isinstance(ws_server_exception, asyncio.TimeoutError) + assert str(ws_server_exception) == "No PONG received after 0.025 seconds" await ws.close() From 5942b0bc37a2175f05d77b0403fc1c098eddf4f6 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 6 Feb 2025 17:11:39 +0000 Subject: [PATCH 1203/1511] [PR #10422/289259d1 backport][3.11] Add human readable error messages for WebSocket PING/PONG timeouts (#10431) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/10422.misc.rst | 3 +++ aiohttp/client_ws.py | 4 +++- aiohttp/web_ws.py | 6 +++++- tests/test_client_ws_functional.py | 1 + tests/test_web_websocket_functional.py | 1 + 5 files changed, 13 insertions(+), 2 deletions(-) create mode 100644 CHANGES/10422.misc.rst diff --git a/CHANGES/10422.misc.rst b/CHANGES/10422.misc.rst new file mode 100644 index 00000000000..7ecb1c0e2e2 --- /dev/null +++ b/CHANGES/10422.misc.rst @@ -0,0 +1,3 @@ +Added human-readable error messages to the exceptions for WebSocket disconnects due to PONG not being received -- by :user:`bdraco`. + +Previously, the error messages were empty strings, which made it hard to determine what went wrong. 
diff --git a/aiohttp/client_ws.py b/aiohttp/client_ws.py index f4cfa1bffe8..daa57d1930b 100644 --- a/aiohttp/client_ws.py +++ b/aiohttp/client_ws.py @@ -163,7 +163,9 @@ def _ping_task_done(self, task: "asyncio.Task[None]") -> None: self._ping_task = None def _pong_not_received(self) -> None: - self._handle_ping_pong_exception(ServerTimeoutError()) + self._handle_ping_pong_exception( + ServerTimeoutError(f"No PONG received after {self._pong_heartbeat} seconds") + ) def _handle_ping_pong_exception(self, exc: BaseException) -> None: """Handle exceptions raised during ping/pong processing.""" diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index 0fb1549a3aa..a448bca101e 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -182,7 +182,11 @@ def _ping_task_done(self, task: "asyncio.Task[None]") -> None: def _pong_not_received(self) -> None: if self._req is not None and self._req.transport is not None: - self._handle_ping_pong_exception(asyncio.TimeoutError()) + self._handle_ping_pong_exception( + asyncio.TimeoutError( + f"No PONG received after {self._pong_heartbeat} seconds" + ) + ) def _handle_ping_pong_exception(self, exc: BaseException) -> None: """Handle exceptions raised during ping/pong processing.""" diff --git a/tests/test_client_ws_functional.py b/tests/test_client_ws_functional.py index 7ede7432adf..54cd5e92f80 100644 --- a/tests/test_client_ws_functional.py +++ b/tests/test_client_ws_functional.py @@ -902,6 +902,7 @@ async def handler(request): assert resp.close_code is WSCloseCode.ABNORMAL_CLOSURE assert msg.type is WSMsgType.ERROR assert isinstance(msg.data, ServerTimeoutError) + assert str(msg.data) == "No PONG received after 0.05 seconds" async def test_close_websocket_while_ping_inflight( diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index b7494d9265f..945096a2af3 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -797,6 +797,7 @@ async def 
handler(request: web.Request) -> NoReturn: assert ws.close_code == WSCloseCode.ABNORMAL_CLOSURE assert ws_server_close_code == WSCloseCode.ABNORMAL_CLOSURE assert isinstance(ws_server_exception, asyncio.TimeoutError) + assert str(ws_server_exception) == "No PONG received after 0.025 seconds" await ws.close() From 0ed7f2fb66831c87463ff834da41385b5e20ad7f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 6 Feb 2025 20:53:15 +0000 Subject: [PATCH 1204/1511] Bump mypy from 1.11.2 to 1.15.0 (#10385) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [mypy](https://github.com/python/mypy) from 1.11.2 to 1.15.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/python/mypy/blob/master/CHANGELOG.md">mypy's changelog</a>.</em></p> <blockquote> <h1>Mypy Release Notes</h1> <h2>Next Release</h2> <p>...</p> <h2>Mypy 1.15</h2> <p>We’ve just uploaded mypy 1.15 to the Python Package Index (<a href="https://pypi.org/project/mypy/">PyPI</a>). Mypy is a static type checker for Python. This release includes new features, performance improvements and bug fixes. You can install it as follows:</p> <pre><code>python3 -m pip install -U mypy </code></pre> <p>You can read the full documentation for this release on <a href="http://mypy.readthedocs.io">Read the Docs</a>.</p> <h3>Performance Improvements</h3> <p>Mypy is up to 40% faster in some use cases. This improvement comes largely from tuning the performance of the garbage collector. 
Additionally, the release includes several micro-optimizations that may be impactful for large projects.</p> <p>Contributed by Jukka Lehtosalo</p> <ul> <li>PR <a href="https://redirect.github.com/python/mypy/pull/18306">18306</a></li> <li>PR <a href="https://redirect.github.com/python/mypy/pull/18302">18302</a></li> <li>PR <a href="https://redirect.github.com/python/mypy/pull/18298">18298</a></li> <li>PR <a href="https://redirect.github.com/python/mypy/pull/18299">18299</a></li> </ul> <h3>Mypyc Accelerated Mypy Wheels for ARM Linux</h3> <p>For best performance, mypy can be compiled to C extension modules using mypyc. This makes mypy 3-5x faster than when interpreted with pure Python. We now build and upload mypyc accelerated mypy wheels for <code>manylinux_aarch64</code> to PyPI, making it easy for Linux users on ARM platforms to realise this speedup -- just <code>pip install</code> the latest mypy.</p> <p>Contributed by Christian Bundy and Marc Mueller (PR <a href="https://redirect.github.com/mypyc/mypy_mypyc-wheels/pull/76">mypy_mypyc-wheels#76</a>, PR <a href="https://redirect.github.com/mypyc/mypy_mypyc-wheels/pull/89">mypy_mypyc-wheels#89</a>).</p> <h3><code>--strict-bytes</code></h3> <p>By default, mypy treats <code>bytearray</code> and <code>memoryview</code> values as assignable to the <code>bytes</code> type, for historical reasons. Use the <code>--strict-bytes</code> flag to disable this behavior. <a href="https://peps.python.org/pep-0688">PEP 688</a> specified the removal of this special case. The flag will be enabled by default in <strong>mypy 2.0</strong>.</p> <p>Contributed by Ali Hamdan (PR <a href="https://redirect.github.com/python/mypy/pull/18263">18263</a>) and Shantanu Jain (PR <a href="https://redirect.github.com/python/mypy/pull/13952">13952</a>).</p> <h3>Improvements to Reachability Analysis and Partial Type Handling in Loops</h3> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/python/mypy/commit/9397454fb5aead107461b089e7cf190bf538d20a"><code>9397454</code></a> remove +dev from version ahead of final release</li> <li><a href="https://github.com/python/mypy/commit/686b591a69db216f714ad50698db785f4ac63eb0"><code>686b591</code></a> remove "unreleased" from 1.15 changelog entry</li> <li><a href="https://github.com/python/mypy/commit/cb4b243a5d9e03173e3e7275e5b92b98afaefb60"><code>cb4b243</code></a> Various small updates to 1.15 changelog (<a href="https://redirect.github.com/python/mypy/issues/18599">#18599</a>)</li> <li><a href="https://github.com/python/mypy/commit/1a265024f901399c701a772e8c1f9e6e110f45e6"><code>1a26502</code></a> Prepare changelog for 1.15 release (<a href="https://redirect.github.com/python/mypy/issues/18583">#18583</a>)</li> <li><a href="https://github.com/python/mypy/commit/d4515e4ad3eee6318744c64cf2eab0ea0b5b7562"><code>d4515e4</code></a> Fix a few PR links in the changelog (<a href="https://redirect.github.com/python/mypy/issues/18586">#18586</a>)</li> <li><a href="https://github.com/python/mypy/commit/f83b6435b0c07a327f6b567dfb5e79ffa36708a2"><code>f83b643</code></a> Add object self-type to tuple test fixture (<a href="https://redirect.github.com/python/mypy/issues/18592">#18592</a>)</li> <li><a href="https://github.com/python/mypy/commit/ebc2cb8befbadfc10b962af018b3fa3842d3fd87"><code>ebc2cb8</code></a> Prevent crash on generic NamedTuple with unresolved typevar bound (<a href="https://redirect.github.com/python/mypy/issues/18585">#18585</a>)</li> <li><a href="https://github.com/python/mypy/commit/63c251e249e52256629dbe8b8334937a092f792d"><code>63c251e</code></a> empty commit to trigger wheel rebuild</li> <li><a href="https://github.com/python/mypy/commit/c30573e7b95eef9d057ff42ebfd326438dac3c42"><code>c30573e</code></a> Fix literal context for ternary expressions (for real) (<a 
href="https://redirect.github.com/python/mypy/issues/18545">#18545</a>)</li> <li><a href="https://github.com/python/mypy/commit/23d862dd6fbb905a69bcb31e88746dc7a1eb4a43"><code>23d862d</code></a> Fix isinstance with explicit (non generic) type alias (<a href="https://redirect.github.com/python/mypy/issues/18512">#18512</a>)</li> <li>Additional commits viewable in <a href="https://github.com/python/mypy/compare/v1.11.2...v1.15.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=mypy&package-manager=pip&previous-version=1.11.2&new-version=1.15.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Sam Bull <git@sambull.org> Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- aiohttp/web.py | 2 +- aiohttp/web_app.py | 4 ++-- aiohttp/web_fileresponse.py | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 7 files changed, 8 insertions(+), 8 deletions(-) diff --git a/aiohttp/web.py b/aiohttp/web.py index d6ab6f6fad4..8307ff405ca 100644 --- a/aiohttp/web.py +++ b/aiohttp/web.py @@ -345,7 +345,7 @@ async def _run_app( try: if host is not None: - if isinstance(host, (str, bytes, bytearray, memoryview)): + if isinstance(host, str): sites.append( TCPSite( runner, diff --git a/aiohttp/web_app.py b/aiohttp/web_app.py index 4bdc54034de..854f9bce88d 100644 --- a/aiohttp/web_app.py +++ b/aiohttp/web_app.py @@ -86,7 +86,7 @@ def _build_middlewares( """Apply middlewares to handler.""" for app in apps[::-1]: for m, _ in app._middlewares_handlers: # type: ignore[union-attr] - handler = update_wrapper(partial(m, handler=handler), handler) # type: ignore[misc] + handler = update_wrapper(partial(m, 
handler=handler), handler) return handler @@ -561,7 +561,7 @@ async def _handle(self, request: Request) -> StreamResponse: for m, new_style in app._middlewares_handlers: # type: ignore[union-attr] if new_style: handler = update_wrapper( - partial(m, handler=handler), handler # type: ignore[misc] + partial(m, handler=handler), handler ) else: handler = await m(app, handler) # type: ignore[arg-type,assignment] diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index be9cf87e069..344611cc495 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -83,7 +83,7 @@ class _FileResponseResult(Enum): # Add custom pairs and clear the encodings map so guess_type ignores them. CONTENT_TYPES.encodings_map.clear() for content_type, extension in ADDITIONAL_CONTENT_TYPES.items(): - CONTENT_TYPES.add_type(content_type, extension) # type: ignore[attr-defined] + CONTENT_TYPES.add_type(content_type, extension) _CLOSE_FUTURES: Set[asyncio.Future[None]] = set() diff --git a/requirements/constraints.txt b/requirements/constraints.txt index f6f9adbf39d..630217808ea 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -110,7 +110,7 @@ multidict==6.1.0 # -r requirements/multidict.in # -r requirements/runtime-deps.in # yarl -mypy==1.11.2 ; implementation_name == "cpython" +mypy==1.15.0 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 5d87c1ebaba..a9526e88f37 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -107,7 +107,7 @@ multidict==6.1.0 # via # -r requirements/runtime-deps.in # yarl -mypy==1.11.2 ; implementation_name == "cpython" +mypy==1.15.0 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index dd06ec7236f..7fff423e368 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -39,7 +39,7 
@@ markdown-it-py==3.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -mypy==1.11.2 ; implementation_name == "cpython" +mypy==1.15.0 ; implementation_name == "cpython" # via -r requirements/lint.in mypy-extensions==1.0.0 # via mypy diff --git a/requirements/test.txt b/requirements/test.txt index efd44e1ea3f..f43cbdf0f40 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -57,7 +57,7 @@ multidict==6.1.0 # via # -r requirements/runtime-deps.in # yarl -mypy==1.11.2 ; implementation_name == "cpython" +mypy==1.15.0 ; implementation_name == "cpython" # via -r requirements/test.in mypy-extensions==1.0.0 # via mypy From cf19b0434bfd2c62d42478333211432b460b7e3a Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 7 Feb 2025 18:27:07 +0000 Subject: [PATCH 1205/1511] [PR #10423/51daf719 backport][3.11] Disable writelines for test_write_large_payload_deflate_compression_data_in_eof (#10429) Co-authored-by: J. Nick Koston <nick@koston.org> fixes #10421 --- CHANGES/10423.packaging.rst | 1 + tests/test_http_writer.py | 7 +++++++ 2 files changed, 8 insertions(+) create mode 100644 CHANGES/10423.packaging.rst diff --git a/CHANGES/10423.packaging.rst b/CHANGES/10423.packaging.rst new file mode 100644 index 00000000000..6cf58c5a10b --- /dev/null +++ b/CHANGES/10423.packaging.rst @@ -0,0 +1 @@ +Fixed test ``test_write_large_payload_deflate_compression_data_in_eof_writelines`` failing with Python 3.12.9+ or 3.13.2+ -- by :user:`bdraco`. 
diff --git a/tests/test_http_writer.py b/tests/test_http_writer.py index 677b5bc9678..c39fe3c7251 100644 --- a/tests/test_http_writer.py +++ b/tests/test_http_writer.py @@ -19,6 +19,12 @@ def enable_writelines() -> Generator[None, None, None]: yield +@pytest.fixture +def disable_writelines() -> Generator[None, None, None]: + with mock.patch("aiohttp.http_writer.SKIP_WRITELINES", True): + yield + + @pytest.fixture def force_writelines_small_payloads() -> Generator[None, None, None]: with mock.patch("aiohttp.http_writer.MIN_PAYLOAD_FOR_WRITELINES", 1): @@ -104,6 +110,7 @@ async def test_write_payload_length(protocol, transport, loop) -> None: assert b"da" == content.split(b"\r\n\r\n", 1)[-1] +@pytest.mark.usefixtures("disable_writelines") async def test_write_large_payload_deflate_compression_data_in_eof( protocol: BaseProtocol, transport: asyncio.Transport, From b020505891a8ef79f175e676841ed29090886b40 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Feb 2025 11:50:58 +0000 Subject: [PATCH 1206/1511] Bump aiohappyeyeballs from 2.4.4 to 2.4.6 (#10441) Bumps [aiohappyeyeballs](https://github.com/aio-libs/aiohappyeyeballs) from 2.4.4 to 2.4.6. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/aiohappyeyeballs/releases">aiohappyeyeballs's releases</a>.</em></p> <blockquote> <h2>v2.4.6 (2025-02-07)</h2> <h3>Bug Fixes</h3> <ul> <li>Ensure all timers are cancelled when after staggered race finishes (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/pull/136">#136</a>, <a href="https://github.com/aio-libs/aiohappyeyeballs/commit/f75891d8974693b24af9789a8981ed7f6bc55c5c"><code>f75891d</code></a>)</li> </ul> <hr /> <p><strong>Detailed Changes</strong>: <a href="https://github.com/aio-libs/aiohappyeyeballs/compare/v2.4.5...v2.4.6">v2.4.5...v2.4.6</a></p> <h2>v2.4.5 (2025-02-07)</h2> <h3>Bug Fixes</h3> <ul> <li>Keep classifiers in project to avoid automatic enrichment (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/pull/134">#134</a>, <a href="https://github.com/aio-libs/aiohappyeyeballs/commit/99edb20e9d3e53ead65b55cb3e93c22c03d06599"><code>99edb20</code></a>)</li> </ul> <p>Co-authored-by: J. Nick Koston <a href="mailto:nick@koston.org">nick@koston.org</a></p> <ul> <li>Move classifiers to prevent recalculation by Poetry (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/pull/131">#131</a>, <a href="https://github.com/aio-libs/aiohappyeyeballs/commit/66e1c90ae81f71c7039cd62c60417a96202d906c"><code>66e1c90</code></a>)</li> </ul> <p>Co-authored-by: Martin Styk <a href="mailto:martin.styk@oracle.com">martin.styk@oracle.com</a></p> <p>Co-authored-by: J. 
Nick Koston <a href="mailto:nick@koston.org">nick@koston.org</a></p> <hr /> <p><strong>Detailed Changes</strong>: <a href="https://github.com/aio-libs/aiohappyeyeballs/compare/v2.4.4...v2.4.5">v2.4.4...v2.4.5</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/aiohappyeyeballs/blob/main/CHANGELOG.md">aiohappyeyeballs's changelog</a>.</em></p> <blockquote> <h2>v2.4.6 (2025-02-07)</h2> <h3>Bug fixes</h3> <ul> <li>Ensure all timers are cancelled when after staggered race finishes (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/136">#136</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/f75891d8974693b24af9789a8981ed7f6bc55c5c"><code>f75891d</code></a>)</li> </ul> <h2>v2.4.5 (2025-02-07)</h2> <h3>Bug fixes</h3> <ul> <li>Keep classifiers in project to avoid automatic enrichment (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/134">#134</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/99edb20e9d3e53ead65b55cb3e93c22c03d06599"><code>99edb20</code></a>)</li> <li>Move classifiers to prevent recalculation by poetry (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/131">#131</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/66e1c90ae81f71c7039cd62c60417a96202d906c"><code>66e1c90</code></a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/f18ad492a3e9f04bf4c89aa4e85c82e8cd113b58"><code>f18ad49</code></a> 2.4.6</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/f75891d8974693b24af9789a8981ed7f6bc55c5c"><code>f75891d</code></a> fix: ensure all timers are cancelled when after staggered race finishes (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/136">#136</a>)</li> <li><a 
href="https://github.com/aio-libs/aiohappyeyeballs/commit/cbc674d4095a49795e077d572fd0fda398432553"><code>cbc674d</code></a> 2.4.5</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/99edb20e9d3e53ead65b55cb3e93c22c03d06599"><code>99edb20</code></a> fix: keep classifiers in project to avoid automatic enrichment (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/134">#134</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/9baf0b340e973541c574f396c055925d947154f7"><code>9baf0b3</code></a> chore(deps-ci): bump the github-actions group with 9 updates (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/135">#135</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/678eab0dd4eabcba0c1ea1f033a41eb76c42bc1b"><code>678eab0</code></a> chore: update dependabot.yml to include GHA (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/133">#133</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/66e1c90ae81f71c7039cd62c60417a96202d906c"><code>66e1c90</code></a> fix: move classifiers to prevent recalculation by Poetry (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/131">#131</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/850640e0f79eb38c4eda33115416d91790fea734"><code>850640e</code></a> chore: migrate to poetry 2.0 (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/132">#132</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/75ec0dcabc2e88c2a54cf6105e927e59f0b2864b"><code>75ec0dc</code></a> chore(pre-commit.ci): pre-commit autoupdate (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/129">#129</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/7d7f1180f290b3547aa12b3bc69c6684d6ab0dd2"><code>7d7f118</code></a> chore(pre-commit.ci): pre-commit autoupdate (<a 
href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/128">#128</a>)</li> <li>Additional commits viewable in <a href="https://github.com/aio-libs/aiohappyeyeballs/compare/v2.4.4...v2.4.6">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=aiohappyeyeballs&package-manager=pip&previous-version=2.4.4&new-version=2.4.6)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index ddd4e082f20..45a1fac6fa4 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.4.4 +aiohappyeyeballs==2.4.6 # via -r requirements/runtime-deps.in aiosignal==1.3.2 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 630217808ea..f980be60a22 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via # -r requirements/lint.in # -r requirements/runtime-deps.in -aiohappyeyeballs==2.4.4 +aiohappyeyeballs==2.4.6 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.7 # via -r requirements/doc.in diff --git a/requirements/dev.txt 
b/requirements/dev.txt index a9526e88f37..59f7ba4db2c 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via # -r requirements/lint.in # -r requirements/runtime-deps.in -aiohappyeyeballs==2.4.4 +aiohappyeyeballs==2.4.6 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.7 # via -r requirements/doc.in diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index fcc9a417a1a..118f584b7dd 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.4.4 +aiohappyeyeballs==2.4.6 # via -r requirements/runtime-deps.in aiosignal==1.3.2 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index f43cbdf0f40..f0a0b0843e5 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.4.4 +aiohappyeyeballs==2.4.6 # via -r requirements/runtime-deps.in aiosignal==1.3.2 # via -r requirements/runtime-deps.in From ba3228ee56a6d10d65e8d0870275470717998048 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Feb 2025 11:51:06 +0000 Subject: [PATCH 1207/1511] Bump pip from 25.0 to 25.0.1 (#10442) Bumps [pip](https://github.com/pypa/pip) from 25.0 to 25.0.1. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/pip/blob/main/NEWS.rst">pip's changelog</a>.</em></p> <blockquote> <h1>25.0.1 (2025-02-09)</h1> <h2>Bug Fixes</h2> <ul> <li>Fix an unsupported type annotation on Python 3.10 and earlier. 
(<code>[#13181](https://github.com/pypa/pip/issues/13181) <https://github.com/pypa/pip/issues/13181></code>_)</li> <li>Fix a regression where truststore would never be used while installing build dependencies. (<code>[#13186](https://github.com/pypa/pip/issues/13186) <https://github.com/pypa/pip/issues/13186></code>_)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/pip/commit/bc7c88cb3de9c9af769c51517833ea48bbe70d9a"><code>bc7c88c</code></a> Bump for release</li> <li><a href="https://github.com/pypa/pip/commit/ebd0a52e123af8f89b0f3e8e18627653f4c83bfe"><code>ebd0a52</code></a> Don't pass --cert to build subprocesses unless also given on CLI</li> <li><a href="https://github.com/pypa/pip/commit/aea86290d9b12ddbd2cb63f16c35d3e22f822bce"><code>aea8629</code></a> Fix locate_file() type hints for older Pythons</li> <li><a href="https://github.com/pypa/pip/commit/e612988a6155466a8da620b237639bc2682ecb68"><code>e612988</code></a> Add build-project.py compatibility note</li> <li><a href="https://github.com/pypa/pip/commit/202344eed3009a2546052b1885bdbcaee8295620"><code>202344e</code></a> Update the release process docs</li> <li><a href="https://github.com/pypa/pip/commit/dc696c28332ade10cfe7ce95bda7d6c2868f2083"><code>dc696c2</code></a> Patch out EXTERNALLY-MANAGED for self-check tests (<a href="https://redirect.github.com/pypa/pip/issues/13179">#13179</a>)</li> <li>See full diff in <a href="https://github.com/pypa/pip/compare/25.0...25.0.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pip&package-manager=pip&previous-version=25.0&new-version=25.0.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. 
You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index f980be60a22..ecbda3707d0 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -280,7 +280,7 @@ yarl==1.18.3 # via -r 
requirements/runtime-deps.in # The following packages are considered to be unsafe in a requirements file: -pip==25.0 +pip==25.0.1 # via pip-tools setuptools==75.8.0 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 59f7ba4db2c..cecd1357f0c 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -271,7 +271,7 @@ yarl==1.18.3 # via -r requirements/runtime-deps.in # The following packages are considered to be unsafe in a requirements file: -pip==25.0 +pip==25.0.1 # via pip-tools setuptools==75.8.0 # via From ccbe9ace5f2f6713ff625b7270c01a6bdb4a3811 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Feb 2025 11:58:16 +0000 Subject: [PATCH 1208/1511] Bump coverage from 7.6.10 to 7.6.11 (#10444) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.6.10 to 7.6.11. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst">coverage's changelog</a>.</em></p> <blockquote> <h2>Version 7.6.11 — 2025-02-08</h2> <ul> <li> <p>Fix: a memory leak in CTracer has been fixed. The details are in <code>issue 1924</code>_ and <code>pytest-dev 676</code>_. This should reduce the memory footprint for everyone even if it hadn't caused a problem before.</p> </li> <li> <p>We now ship a py3-none-any.whl wheel file. Thanks, <code>Russell Keith-Magee <pull 1914_></code>_.</p> </li> </ul> <p>.. _pull 1914: <a href="https://redirect.github.com/nedbat/coveragepy/pull/1914">nedbat/coveragepy#1914</a> .. _issue 1924: <a href="https://redirect.github.com/nedbat/coveragepy/issues/1924">nedbat/coveragepy#1924</a> .. _pytest-dev 676: <a href="https://redirect.github.com/pytest-dev/pytest-cov/issues/676">pytest-dev/pytest-cov#676</a></p> <p>.. 
_changes_7-6-10:</p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/nedbat/coveragepy/commit/a20898d2d48b2fb7ff30d3e63eb33de8e8948fe1"><code>a20898d</code></a> docs: sample HTML for 7.6.11</li> <li><a href="https://github.com/nedbat/coveragepy/commit/938d519ba264ac67cdfcca8a436fe15a68318dc6"><code>938d519</code></a> docs: prep for 7.6.11</li> <li><a href="https://github.com/nedbat/coveragepy/commit/27ee4ffc3b6486bc7b52dc5b18466e05fca8616e"><code>27ee4ff</code></a> test: free-threading builds were failing the old leak test <a href="https://redirect.github.com/nedbat/coveragepy/issues/1924">#1924</a></li> <li><a href="https://github.com/nedbat/coveragepy/commit/f473b87e4116cceacdef1666a75feb4ba3cce0f3"><code>f473b87</code></a> test: it could be useful to disable branch coverage in this helper</li> <li><a href="https://github.com/nedbat/coveragepy/commit/f85d9b7676a480c88d938c1e63a1dbebea45e21d"><code>f85d9b7</code></a> fix: prevent code objects from leaking <a href="https://redirect.github.com/nedbat/coveragepy/issues/1924">#1924</a></li> <li><a href="https://github.com/nedbat/coveragepy/commit/ae8d3b9981bac68f903c63c9e432ef5e30b4865b"><code>ae8d3b9</code></a> chore: make upgrade</li> <li><a href="https://github.com/nedbat/coveragepy/commit/156981f20730a2f4abd5a4efcc8cf1cad1d6a811"><code>156981f</code></a> build: zizmor can't tell this is safe</li> <li><a href="https://github.com/nedbat/coveragepy/commit/66030210bf7438d17830687aafa4504eb6e845db"><code>6603021</code></a> chore: bump the action-dependencies group with 6 updates (<a href="https://redirect.github.com/nedbat/coveragepy/issues/1922">#1922</a>)</li> <li><a href="https://github.com/nedbat/coveragepy/commit/d6a1e5b6ea5bc03d0bf085fbac285b7e4a56d8a6"><code>d6a1e5b</code></a> test: run the pytracer first so .tox is left with a c extension for ad-hoc use</li> <li><a 
href="https://github.com/nedbat/coveragepy/commit/cd2db93276e680aca70bd36806847e82f76e2fa3"><code>cd2db93</code></a> docs: a reminder about when RESUME applies</li> <li>Additional commits viewable in <a href="https://github.com/nedbat/coveragepy/compare/7.6.10...7.6.11">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=coverage&package-manager=pip&previous-version=7.6.10&new-version=7.6.11)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index ecbda3707d0..5d6b50a66aa 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -50,7 +50,7 @@ click==8.1.8 # slotscheck # towncrier # wait-for-it -coverage==7.6.10 +coverage==7.6.11 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/dev.txt b/requirements/dev.txt index cecd1357f0c..1b89ccc6356 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -50,7 +50,7 @@ click==8.1.8 # slotscheck # towncrier # wait-for-it -coverage==7.6.10 +coverage==7.6.11 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/test.txt b/requirements/test.txt index f0a0b0843e5..8212c20a0f4 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -25,7 +25,7 @@ cffi==1.17.1 # pytest-codspeed click==8.1.8 # via wait-for-it -coverage==7.6.10 +coverage==7.6.11 # via # -r requirements/test.in # pytest-cov From 1ea755908423b131636739c8dae95a1e5935048b Mon Sep 17 
00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 11 Feb 2025 00:31:49 +0000 Subject: [PATCH 1209/1511] [PR #10447/343c632d backport][3.12] Fix test when symlink encountered (#10449) **This is a backport of PR #10447 as merged into master (343c632d2a6626c0c46aec588ab50c373550c5c4).** Co-authored-by: Sam Bull <git@sambull.org> --- tests/test_urldispatch.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_urldispatch.py b/tests/test_urldispatch.py index 8ee3df33202..ba6bdff23a0 100644 --- a/tests/test_urldispatch.py +++ b/tests/test_urldispatch.py @@ -358,7 +358,7 @@ def test_add_static_path_resolution(router: any) -> None: """Test that static paths are expanded and absolute.""" res = router.add_static("/", "~/..") directory = str(res.get_info()["directory"]) - assert directory == str(pathlib.Path.home().parent) + assert directory == str(pathlib.Path.home().resolve(strict=True).parent) def test_add_static(router) -> None: From 094bf4eea616b169e585a20c4d8962870fc74a02 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 11 Feb 2025 01:20:46 +0000 Subject: [PATCH 1210/1511] [PR #10447/343c632d backport][3.11] Fix test when symlink encountered (#10448) **This is a backport of PR #10447 as merged into master (343c632d2a6626c0c46aec588ab50c373550c5c4).** Co-authored-by: Sam Bull <git@sambull.org> --- tests/test_urldispatch.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_urldispatch.py b/tests/test_urldispatch.py index 8ee3df33202..ba6bdff23a0 100644 --- a/tests/test_urldispatch.py +++ b/tests/test_urldispatch.py @@ -358,7 +358,7 @@ def test_add_static_path_resolution(router: any) -> None: """Test that static paths are expanded and absolute.""" res = router.add_static("/", "~/..") directory = str(res.get_info()["directory"]) - assert directory == str(pathlib.Path.home().parent) + assert directory == 
str(pathlib.Path.home().resolve(strict=True).parent) def test_add_static(router) -> None: From 75f7b1869d6e3495f9f9cb44c31dcfd274b6a438 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 11 Feb 2025 11:15:48 +0000 Subject: [PATCH 1211/1511] Bump cython from 3.0.11 to 3.0.12 (#10452) Bumps [cython](https://github.com/cython/cython) from 3.0.11 to 3.0.12. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/cython/cython/blob/master/CHANGES.rst">cython's changelog</a>.</em></p> <blockquote> <h1>3.0.12 (2025-02-11)</h1> <h2>Bugs fixed</h2> <ul> <li> <p>Release 3.0.11 introduced some incorrect <code>noexcept</code> warnings. (Github issue :issue:<code>6335</code>)</p> </li> <li> <p>Conditional assignments to variables using the walrus operator could crash. (Github issue :issue:<code>6094</code>)</p> </li> <li> <p>Dict assignments to struct members with reserved C names could generate invalid C code.</p> </li> <li> <p>Fused ctuples with the same entry types but different sizes could fail to compile. (Github issue :issue:<code>6328</code>)</p> </li> <li> <p>In Py3, <code>pyximport</code> was not searching <code>sys.path</code> when looking for importable source files. (Github issue :issue:<code>5615</code>)</p> </li> <li> <p>Using <code>& 0</code> on integers produced with <code>int.from_bytes()</code> could read invalid memory on Python 3.10. (Github issue :issue:<code>6480</code>)</p> </li> <li> <p>Modules could fail to compile in PyPy 3.11 due to missing CPython specific header files. Patch by Matti Picus. 
(Github issue :issue:<code>6482</code>)</p> </li> <li> <p>Minor fix in C++ <code>partial_sum()</code> declaration.</p> </li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/cython/cython/commit/c7dae70222b36f52945e9723acf72d2bd698b08d"><code>c7dae70</code></a> Fix release date.</li> <li><a href="https://github.com/cython/cython/commit/b161c725492a91968a84f3bb7a43d8783706617f"><code>b161c72</code></a> Prepare release of 3.0.12.</li> <li><a href="https://github.com/cython/cython/commit/45c98258971884c6f82aa9944076cc52d11f6a5b"><code>45c9825</code></a> Update changelog.</li> <li><a href="https://github.com/cython/cython/commit/b9a5b7002b78d5be327ea62e1e128e8e393841c4"><code>b9a5b70</code></a> Exclude failing CI target macos-13/Py2.7.</li> <li><a href="https://github.com/cython/cython/commit/07d7cc1ff148ad1ab552a1a2cbcf9ffa6011c5b8"><code>07d7cc1</code></a> Fix spelling errors</li> <li><a href="https://github.com/cython/cython/commit/0824349cf41759d6c837855ecac4d23dfbc495c6"><code>0824349</code></a> Fix test in Py2.7.</li> <li><a href="https://github.com/cython/cython/commit/20ebc99beeac5d8b00cc1c8ad77f69b9c617cdc4"><code>20ebc99</code></a> Fix test in Py2.7.</li> <li><a href="https://github.com/cython/cython/commit/20f6e4f3408f6d190f180eb0a82e1d36265d9f6c"><code>20f6e4f</code></a> Do not include CPython 'internal/*.h' files in PyPy (<a href="https://redirect.github.com/cython/cython/issues/6482">GH-6482</a>)</li> <li><a href="https://github.com/cython/cython/commit/023738288cd210b64b52250e80078eaef028a035"><code>0237382</code></a> Fix libcpp partial_sum definition</li> <li><a href="https://github.com/cython/cython/commit/852286242ee46cc16f0a6efef7eb40a5ab3c57d5"><code>8522862</code></a> Backport ufunc Numpy2 fix</li> <li>Additional commits viewable in <a href="https://github.com/cython/cython/compare/3.0.11...3.0.12">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=cython&package-manager=pip&previous-version=3.0.11&new-version=3.0.12)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/cython.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 5d6b50a66aa..d9012a53c36 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -58,7 +58,7 @@ cryptography==44.0.0 # via # pyjwt # trustme -cython==3.0.11 +cython==3.0.12 # via -r requirements/cython.in distlib==0.3.9 # via virtualenv diff --git a/requirements/cython.txt b/requirements/cython.txt index 7e392bddf91..b34cde941f8 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/cython.txt --resolver=backtracking --strip-extras requirements/cython.in # -cython==3.0.11 +cython==3.0.12 # via -r requirements/cython.in multidict==6.1.0 # via -r requirements/multidict.in From 328275b3121490a202e1c5261298e6c32aef0d23 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 11 Feb 2025 11:31:08 +0000 Subject: [PATCH 1212/1511] Bump virtualenv from 20.29.1 to 20.29.2 (#10453) 
MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [virtualenv](https://github.com/pypa/virtualenv) from 20.29.1 to 20.29.2. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pypa/virtualenv/releases">virtualenv's releases</a>.</em></p> <blockquote> <h2>20.29.2</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>release 20.29.1 by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2828">pypa/virtualenv#2828</a></li> <li>Remove old virtualenv wheel by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2842">pypa/virtualenv#2842</a></li> <li>Bump pip to 25.0.1 by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2843">pypa/virtualenv#2843</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pypa/virtualenv/compare/20.29.1...20.29.2">https://github.com/pypa/virtualenv/compare/20.29.1...20.29.2</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/virtualenv/blob/main/docs/changelog.rst">virtualenv's changelog</a>.</em></p> <blockquote> <h2>v20.29.2 (2025-02-10)</h2> <p>Bugfixes - 20.29.2</p> <pre><code>- Remove old virtualenv wheel from the source distribution - by :user:`gaborbernat`. (:issue:`2841`) - Upgrade embedded wheel pip to ``25.0.1`` from ``24.3.1`` - by :user:`gaborbernat`. 
(:issue:`2843`) </code></pre> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/virtualenv/commit/ca670cda6430254ff474100e770dabe5b4f439de"><code>ca670cd</code></a> release 20.29.2</li> <li><a href="https://github.com/pypa/virtualenv/commit/a2d2c3e48af745764f5194d024480d7128533617"><code>a2d2c3e</code></a> Merge pull request <a href="https://redirect.github.com/pypa/virtualenv/issues/2843">#2843</a> from gaborbernat/bump-2-10</li> <li><a href="https://github.com/pypa/virtualenv/commit/6203457b6d704ac88b0814b58465c53d5a6f141b"><code>6203457</code></a> Merge pull request <a href="https://redirect.github.com/pypa/virtualenv/issues/2842">#2842</a> from gaborbernat/remove-whl</li> <li><a href="https://github.com/pypa/virtualenv/commit/d8a68e6632646598bcec188a1e8c6f6f0d342786"><code>d8a68e6</code></a> Bump pip to 25.0.1</li> <li><a href="https://github.com/pypa/virtualenv/commit/7b8fde41f54b217284ce599afa3e0b1a1e6353a7"><code>7b8fde4</code></a> Remove old virtualenv wheel</li> <li><a href="https://github.com/pypa/virtualenv/commit/cb02b4f6ad3fa6e934c4162f343f073d8cbb36a1"><code>cb02b4f</code></a> Merge pull request <a href="https://redirect.github.com/pypa/virtualenv/issues/2837">#2837</a> from pypa/pre-commit-ci-update-config</li> <li><a href="https://github.com/pypa/virtualenv/commit/8332db87ecfd4fb97caa75f0c771356011dfacaa"><code>8332db8</code></a> [pre-commit.ci] pre-commit autoupdate</li> <li><a href="https://github.com/pypa/virtualenv/commit/192cef7a84f2e1d167ebede7083bb819123d41ff"><code>192cef7</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/pypa/virtualenv/issues/2833">#2833</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/a6a5c45b0c9da5a8823c03fd57e47e5a3773a081"><code>a6a5c45</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/pypa/virtualenv/issues/2830">#2830</a>)</li> <li><a 
href="https://github.com/pypa/virtualenv/commit/d409f6a030c54783482aea12b45e609d1a0e1880"><code>d409f6a</code></a> release 20.29.1 (<a href="https://redirect.github.com/pypa/virtualenv/issues/2828">#2828</a>)</li> <li>See full diff in <a href="https://github.com/pypa/virtualenv/compare/20.29.1...20.29.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=virtualenv&package-manager=pip&previous-version=20.29.1&new-version=20.29.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index d9012a53c36..7adbab8939a 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -270,7 +270,7 @@ uvloop==0.21.0 ; platform_system != "Windows" # -r requirements/lint.in valkey==6.0.2 # via -r requirements/lint.in -virtualenv==20.29.1 +virtualenv==20.29.2 # via pre-commit wait-for-it==2.3.0 # via -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 1b89ccc6356..1b1fd3555e0 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -261,7 +261,7 @@ uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpytho # -r requirements/lint.in valkey==6.0.2 # via -r requirements/lint.in -virtualenv==20.29.1 +virtualenv==20.29.2 # via pre-commit wait-for-it==2.3.0 # via -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 7fff423e368..40713fc30b1 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ 
-102,5 +102,5 @@ uvloop==0.21.0 ; platform_system != "Windows" # via -r requirements/lint.in valkey==6.0.2 # via -r requirements/lint.in -virtualenv==20.29.1 +virtualenv==20.29.2 # via pre-commit From c0871b018f7d9beec5bb4d49e88ea83f58098641 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 12 Feb 2025 11:14:39 +0000 Subject: [PATCH 1213/1511] Bump valkey from 6.0.2 to 6.1.0 (#10460) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [valkey](https://github.com/valkey-io/valkey-py) from 6.0.2 to 6.1.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/valkey-io/valkey-py/releases">valkey's releases</a>.</em></p> <blockquote> <h2>6.1.0</h2> <h1>Changes</h1> <ul> <li>v6.1.0 (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/173">#173</a>)</li> <li>Fix updated linters errors (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/172">#172</a>)</li> <li>Removing my work account from CODEOWNERS (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/168">#168</a>)</li> <li>connection: fix getpid() call on disconnect (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/166">#166</a>)</li> <li>Revert some typing commits (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/163">#163</a>)</li> <li>fixed type hints of hash methods (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/154">#154</a>)</li> <li>fixed type hint of hrandfield method (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/155">#155</a>)</li> <li>Add new CODEOWNERS (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/152">#152</a>)</li> <li>Add more test cases (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/147">#147</a>)</li> <li>refactor: updating typing for xreadgroup / xread / xadd (<a 
href="https://redirect.github.com/valkey-io/valkey-py/issues/130">#130</a>)</li> <li>update typing for copy (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/139">#139</a>)</li> <li>update the return type of scan family (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/135">#135</a>)</li> <li>Issue 131: Fixes Flaky test by increasing timeout to 1s (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/136">#136</a>)</li> <li>Fixes for valkey 8.0 (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/124">#124</a>)</li> <li>Set socket_timeout default value to 5 seconds (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/120">#120</a>)</li> <li>sort methods <code>ACL DELUSER</code> and <code>ACL DRYRUN</code> by alphabetically, checked doc (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/103">#103</a>)</li> <li>drop compose format and commands v1, use supported v2+ (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/105">#105</a>)</li> <li>fix redis-cluster-py link (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/99">#99</a>)</li> <li>make documentation link more obvious (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/98">#98</a>)</li> <li>Added geosearch tests (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/97">#97</a>)</li> </ul> <h2>🔥 Breaking Changes</h2> <ul> <li>Remove expiration/TTL commands that are not supported by Valkey (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/125">#125</a>)</li> </ul> <h2>🚀 New Features</h2> <ul> <li>Add dynamic_startup_nodes parameter to async ValkeyCluster (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/167">#167</a>)</li> <li>fixed problems in <a href="https://redirect.github.com/valkey-io/valkey-py/issues/143">#143</a> (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/144">#144</a>)</li> <li>Add async class aliases for redis-py 
compatibility (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/148">#148</a>)</li> </ul> <h2>🐛 Bug Fixes</h2> <ul> <li>Allow relative path in unix socket URLs (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/153">#153</a>)</li> </ul> <h2>🧰 Maintenance</h2> <ul> <li>connection: add a pointer to os.getpid to ConnectionPool (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/159">#159</a>)</li> <li>fix verify params in getex() (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/145">#145</a>)</li> <li>build(deps): bump rojopolis/spellcheck-github-actions from 0.44.0 to 0.45.0 (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/128">#128</a>)</li> <li>build(deps): bump codecov/codecov-action from 4 to 5 (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/127">#127</a>)</li> <li>Add support for Python 3.13 (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/116">#116</a>)</li> <li>build(deps): bump rojopolis/spellcheck-github-actions from 0.42.0 to 0.44.0 (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/118">#118</a>)</li> <li>Revert "Temporarily fix <a href="https://github.com/actions/runner-images/issu%E2%80%A6">https://github.com/actions/runner-images/issu…</a>" (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/117">#117</a>)</li> <li>parsers: resp3: be less verbose (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/112">#112</a>)</li> <li>Temporarily fix <a href="https://redirect.github.com/actions/runner-images/issues/10781">actions/runner-images#10781</a> (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/113">#113</a>)</li> <li>build(deps): bump rojopolis/spellcheck-github-actions from 0.41.0 to 0.42.0 (<a href="https://redirect.github.com/valkey-io/valkey-py/issues/108">#108</a>)</li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/valkey-io/valkey-py/commit/95c4b68c93190bdb4c4e64fd38c9980d9dd35e8a"><code>95c4b68</code></a> Merge pull request <a href="https://redirect.github.com/valkey-io/valkey-py/issues/173">#173</a> from valkey-io/mkmkme/6.1.0</li> <li><a href="https://github.com/valkey-io/valkey-py/commit/a589997b030ce0041526d99ba3c3389f6a83dc9d"><code>a589997</code></a> v6.1.0</li> <li><a href="https://github.com/valkey-io/valkey-py/commit/2d648a43d570b3f8ecc3209061e0782433d9da12"><code>2d648a4</code></a> Merge pull request <a href="https://redirect.github.com/valkey-io/valkey-py/issues/172">#172</a> from valkey-io/mkmkme/fix-isort</li> <li><a href="https://github.com/valkey-io/valkey-py/commit/05e1f314ba0ff4ed0cc1683d8503e152a6ff137e"><code>05e1f31</code></a> tests: fix floating-point error for georadius tests</li> <li><a href="https://github.com/valkey-io/valkey-py/commit/81f63b343f5461116d76ad47ef80b75dab0145b3"><code>81f63b3</code></a> fix black formatting</li> <li><a href="https://github.com/valkey-io/valkey-py/commit/0b825f1c8273e5dbe027b78b246e29bbb254b108"><code>0b825f1</code></a> tests: fix flynt error</li> <li><a href="https://github.com/valkey-io/valkey-py/commit/d0e5bdf21aa36907ea4024890f030dd50c09e04f"><code>d0e5bdf</code></a> url_parser: fix isort error</li> <li><a href="https://github.com/valkey-io/valkey-py/commit/457be2d773688d311ed501e7d59428720d825317"><code>457be2d</code></a> Merge pull request <a href="https://redirect.github.com/valkey-io/valkey-py/issues/168">#168</a> from valkey-io/aiven-sal/removeme</li> <li><a href="https://github.com/valkey-io/valkey-py/commit/e4025bdaf35b05916683a1966aac76a0a8485142"><code>e4025bd</code></a> Removing my work account from CODEOWNERS</li> <li><a href="https://github.com/valkey-io/valkey-py/commit/bcd5e2cb9e5bd21558feb2fd4a658d9bb905c950"><code>bcd5e2c</code></a> Merge pull request <a 
href="https://redirect.github.com/valkey-io/valkey-py/issues/167">#167</a> from Kakadus/2472redis-add-dynamic-startup-nodes-flag...</li> <li>Additional commits viewable in <a href="https://github.com/valkey-io/valkey-py/compare/v6.0.2...v6.1.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=valkey&package-manager=pip&previous-version=6.0.2&new-version=6.1.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 7adbab8939a..0938b5496a6 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -268,7 +268,7 @@ uvloop==0.21.0 ; platform_system != "Windows" # via # -r requirements/base.in # -r requirements/lint.in -valkey==6.0.2 +valkey==6.1.0 # via -r requirements/lint.in virtualenv==20.29.2 # via pre-commit diff --git a/requirements/dev.txt b/requirements/dev.txt index 1b1fd3555e0..905505a08fa 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -259,7 +259,7 @@ uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpytho # via # -r requirements/base.in # -r requirements/lint.in -valkey==6.0.2 +valkey==6.1.0 # via -r requirements/lint.in virtualenv==20.29.2 # via pre-commit diff --git a/requirements/lint.txt b/requirements/lint.txt index 40713fc30b1..81ab07708bf 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -100,7 +100,7 @@ typing-extensions==4.12.2 # 
rich uvloop==0.21.0 ; platform_system != "Windows" # via -r requirements/lint.in -valkey==6.0.2 +valkey==6.1.0 # via -r requirements/lint.in virtualenv==20.29.2 # via pre-commit From b8b7159e0ff6052528fe5a1cec05c2f3c5047798 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 12 Feb 2025 11:16:09 +0000 Subject: [PATCH 1214/1511] Bump cryptography from 44.0.0 to 44.0.1 (#10461) Bumps [cryptography](https://github.com/pyca/cryptography) from 44.0.0 to 44.0.1. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst">cryptography's changelog</a>.</em></p> <blockquote> <p>44.0.1 - 2025-02-11</p> <pre><code> * Updated Windows, macOS, and Linux wheels to be compiled with OpenSSL 3.4.1. * We now build ``armv7l`` ``manylinux`` wheels and publish them to PyPI. * We now build ``manylinux_2_34`` wheels and publish them to PyPI. <p>.. _v44-0-0: </code></pre></p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pyca/cryptography/commit/adaaaed77db676bbaa9d171175db81dce056e2a7"><code>adaaaed</code></a> Bump for 44.0.1 release (<a href="https://redirect.github.com/pyca/cryptography/issues/12441">#12441</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/ccc61dabe38b86956bf218565cd4e82b918345a1"><code>ccc61da</code></a> [backport] test and build on armv7l (<a href="https://redirect.github.com/pyca/cryptography/issues/12420">#12420</a>) (<a href="https://redirect.github.com/pyca/cryptography/issues/12431">#12431</a>)</li> <li>See full diff in <a href="https://github.com/pyca/cryptography/compare/44.0.0...44.0.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=cryptography&package-manager=pip&previous-version=44.0.0&new-version=44.0.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 0938b5496a6..9cd66c31096 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -54,7 +54,7 @@ coverage==7.6.11 # via # -r requirements/test.in # pytest-cov -cryptography==44.0.0 +cryptography==44.0.1 # via # pyjwt # trustme diff --git a/requirements/dev.txt b/requirements/dev.txt index 905505a08fa..e97b52d4c97 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -54,7 +54,7 @@ coverage==7.6.11 # via # -r requirements/test.in # pytest-cov -cryptography==44.0.0 +cryptography==44.0.1 # via # pyjwt # trustme diff --git a/requirements/lint.txt b/requirements/lint.txt index 81ab07708bf..57fdc34c119 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -19,7 +19,7 @@ cfgv==3.4.0 # via pre-commit click==8.1.8 # via slotscheck -cryptography==44.0.0 +cryptography==44.0.1 # via trustme distlib==0.3.9 # via virtualenv diff --git 
a/requirements/test.txt b/requirements/test.txt index 8212c20a0f4..791bc28e9b4 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -29,7 +29,7 @@ coverage==7.6.11 # via # -r requirements/test.in # pytest-cov -cryptography==44.0.0 +cryptography==44.0.1 # via trustme exceptiongroup==1.2.2 # via pytest From 8b390027b99204c258a334bbce0f046b03aabeac Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 20 Feb 2025 11:08:06 +0000 Subject: [PATCH 1215/1511] Bump actions/cache from 4.2.0 to 4.2.1 (#10472) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/cache](https://github.com/actions/cache) from 4.2.0 to 4.2.1. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/actions/cache/releases">actions/cache's releases</a>.</em></p> <blockquote> <h2>v4.2.1</h2> <h2>What's Changed</h2> <blockquote> <p>[!IMPORTANT] As a reminder, there were important backend changes to release v4.2.0, see <a href="https://github.com/actions/cache/releases/tag/v4.2.0">those release notes</a> and <a href="https://github.com/actions/cache/discussions/1510">the announcement</a> for more details.</p> </blockquote> <ul> <li>docs: GitHub is spelled incorrectly in caching-strategies.md by <a href="https://github.com/janco-absa"><code>@​janco-absa</code></a> in <a href="https://redirect.github.com/actions/cache/pull/1526">actions/cache#1526</a></li> <li>docs: Make the "always save prime numbers" example more clear by <a href="https://github.com/Tobbe"><code>@​Tobbe</code></a> in <a href="https://redirect.github.com/actions/cache/pull/1525">actions/cache#1525</a></li> <li>Update force deletion docs due a recent deprecation by <a href="https://github.com/sebbalex"><code>@​sebbalex</code></a> in <a href="https://redirect.github.com/actions/cache/pull/1500">actions/cache#1500</a></li> <li>Bump <code>@​actions/cache</code> to v4.0.1 by <a 
href="https://github.com/robherley"><code>@​robherley</code></a> in <a href="https://redirect.github.com/actions/cache/pull/1554">actions/cache#1554</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/janco-absa"><code>@​janco-absa</code></a> made their first contribution in <a href="https://redirect.github.com/actions/cache/pull/1526">actions/cache#1526</a></li> <li><a href="https://github.com/Tobbe"><code>@​Tobbe</code></a> made their first contribution in <a href="https://redirect.github.com/actions/cache/pull/1525">actions/cache#1525</a></li> <li><a href="https://github.com/sebbalex"><code>@​sebbalex</code></a> made their first contribution in <a href="https://redirect.github.com/actions/cache/pull/1500">actions/cache#1500</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/actions/cache/compare/v4.2.0...v4.2.1">https://github.com/actions/cache/compare/v4.2.0...v4.2.1</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/actions/cache/blob/main/RELEASES.md">actions/cache's changelog</a>.</em></p> <blockquote> <h3>4.2.1</h3> <ul> <li>Bump <code>@actions/cache</code> to v4.0.1</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/actions/cache/commit/0c907a75c2c80ebcb7f088228285e798b750cf8f"><code>0c907a7</code></a> Merge pull request <a href="https://redirect.github.com/actions/cache/issues/1554">#1554</a> from actions/robherley/v4.2.1</li> <li><a href="https://github.com/actions/cache/commit/710893c2369beb60748049b671f18c43a3656fce"><code>710893c</code></a> bump <code>@​actions/cache</code> to v4.0.1</li> <li><a href="https://github.com/actions/cache/commit/9fa7e61ec7e1f44ac75218e7aaea81da8856fd11"><code>9fa7e61</code></a> Update force deletion docs due a recent deprecation (<a href="https://redirect.github.com/actions/cache/issues/1500">#1500</a>)</li> <li><a 
href="https://github.com/actions/cache/commit/36f1e144e1c8edb0a652766b484448563d8baf46"><code>36f1e14</code></a> docs: Make the "always save prime numbers" example more clear (<a href="https://redirect.github.com/actions/cache/issues/1525">#1525</a>)</li> <li><a href="https://github.com/actions/cache/commit/53aa38c736a561b9c17b62df3fe885a17b78ee6d"><code>53aa38c</code></a> Correct GitHub Spelling in caching-strategies.md (<a href="https://redirect.github.com/actions/cache/issues/1526">#1526</a>)</li> <li>See full diff in <a href="https://github.com/actions/cache/compare/v4.2.0...v4.2.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/cache&package-manager=github_actions&previous-version=4.2.0&new-version=4.2.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index a794dc65d77..95f9fc7c631 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -47,7 +47,7 @@ jobs: with: python-version: 3.11 - name: Cache PyPI - uses: actions/cache@v4.2.0 + uses: actions/cache@v4.2.1 with: key: pip-lint-${{ hashFiles('requirements/*.txt') }} path: ~/.cache/pip @@ -99,7 +99,7 @@ jobs: with: submodules: true - name: Cache llhttp generated files - uses: actions/cache@v4.2.0 + uses: actions/cache@v4.2.1 id: cache with: key: llhttp-${{ hashFiles('vendor/llhttp/package*.json', 'vendor/llhttp/src/**/*') }} @@ -163,7 +163,7 @@ jobs: echo "dir=$(pip cache dir)" >> "${GITHUB_OUTPUT}" shell: bash - name: Cache PyPI - uses: actions/cache@v4.2.0 + uses: actions/cache@v4.2.1 with: key: pip-ci-${{ runner.os }}-${{ matrix.pyver }}-${{ matrix.no-extensions }}-${{ hashFiles('requirements/*.txt') }} path: ${{ steps.pip-cache.outputs.dir }} From 79c5093dc119ca29070200db9db69e324d5c0e65 Mon Sep 17 00:00:00 2001 From: Cycloctane 
<Cycloctane@outlook.com> Date: Fri, 21 Feb 2025 02:52:59 +0800 Subject: [PATCH 1216/1511] [PR #10434/ed84464 backport][3.11] Fix inappropriate "break in finally" (#10476) --- CHANGES/10434.bugfix.rst | 2 ++ aiohttp/web_protocol.py | 28 +++++++++++++++------------- tests/test_web_server.py | 18 ++++++++++++++++++ 3 files changed, 35 insertions(+), 13 deletions(-) create mode 100644 CHANGES/10434.bugfix.rst diff --git a/CHANGES/10434.bugfix.rst b/CHANGES/10434.bugfix.rst new file mode 100644 index 00000000000..c4bc50dc6aa --- /dev/null +++ b/CHANGES/10434.bugfix.rst @@ -0,0 +1,2 @@ +Avoid break statement inside the finally block in :py:class:`~aiohttp.web.RequestHandler` +-- by :user:`Cycloctane`. diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index 32f503474a9..e4c347e5a9e 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -608,26 +608,28 @@ async def start(self) -> None: except asyncio.CancelledError: self.log_debug("Ignored premature client disconnection") + self.force_close() raise except Exception as exc: self.log_exception("Unhandled exception", exc_info=exc) self.force_close() + except BaseException: + self.force_close() + raise finally: if self.transport is None and resp is not None: self.log_debug("Ignored premature client disconnection.") - elif not self._force_close: - if self._keepalive and not self._close: - # start keep-alive timer - if keepalive_timeout is not None: - now = loop.time() - close_time = now + keepalive_timeout - self._next_keepalive_close_time = close_time - if self._keepalive_handle is None: - self._keepalive_handle = loop.call_at( - close_time, self._process_keepalive - ) - else: - break + + if self._keepalive and not self._close and not self._force_close: + # start keep-alive timer + close_time = loop.time() + keepalive_timeout + self._next_keepalive_close_time = close_time + if self._keepalive_handle is None: + self._keepalive_handle = loop.call_at( + close_time, self._process_keepalive + ) + else: + 
break # remove handler, close transport if no handlers left if not self._force_close: diff --git a/tests/test_web_server.py b/tests/test_web_server.py index 910f074e90f..9098ef9e7bf 100644 --- a/tests/test_web_server.py +++ b/tests/test_web_server.py @@ -233,6 +233,24 @@ async def handler(request): logger.debug.assert_called_with("Ignored premature client disconnection") +async def test_raw_server_does_not_swallow_base_exceptions( + aiohttp_raw_server: AiohttpRawServer, aiohttp_client: AiohttpClient +) -> None: + class UnexpectedException(BaseException): + """Dummy base exception.""" + + async def handler(request: web.BaseRequest) -> NoReturn: + raise UnexpectedException() + + loop = asyncio.get_event_loop() + loop.set_debug(True) + server = await aiohttp_raw_server(handler) + cli = await aiohttp_client(server) + + with pytest.raises(client.ServerDisconnectedError): + await cli.get("/path/to", timeout=client.ClientTimeout(10)) + + async def test_raw_server_cancelled_in_write_eof(aiohttp_raw_server, aiohttp_client): async def handler(request): resp = web.Response(text=str(request.rel_url)) From 9f933bffe6e1ed3a09afe360bf1fc0ca0a6e0107 Mon Sep 17 00:00:00 2001 From: Cycloctane <Cycloctane@outlook.com> Date: Fri, 21 Feb 2025 03:03:24 +0800 Subject: [PATCH 1217/1511] [PR #10434/ed84464 backport][3.12] Fix inappropriate "break in finally" (#10475) --- CHANGES/10434.bugfix.rst | 2 ++ aiohttp/web_protocol.py | 28 +++++++++++++++------------- tests/test_web_server.py | 18 ++++++++++++++++++ 3 files changed, 35 insertions(+), 13 deletions(-) create mode 100644 CHANGES/10434.bugfix.rst diff --git a/CHANGES/10434.bugfix.rst b/CHANGES/10434.bugfix.rst new file mode 100644 index 00000000000..c4bc50dc6aa --- /dev/null +++ b/CHANGES/10434.bugfix.rst @@ -0,0 +1,2 @@ +Avoid break statement inside the finally block in :py:class:`~aiohttp.web.RequestHandler` +-- by :user:`Cycloctane`. 
diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index 32f503474a9..e4c347e5a9e 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -608,26 +608,28 @@ async def start(self) -> None: except asyncio.CancelledError: self.log_debug("Ignored premature client disconnection") + self.force_close() raise except Exception as exc: self.log_exception("Unhandled exception", exc_info=exc) self.force_close() + except BaseException: + self.force_close() + raise finally: if self.transport is None and resp is not None: self.log_debug("Ignored premature client disconnection.") - elif not self._force_close: - if self._keepalive and not self._close: - # start keep-alive timer - if keepalive_timeout is not None: - now = loop.time() - close_time = now + keepalive_timeout - self._next_keepalive_close_time = close_time - if self._keepalive_handle is None: - self._keepalive_handle = loop.call_at( - close_time, self._process_keepalive - ) - else: - break + + if self._keepalive and not self._close and not self._force_close: + # start keep-alive timer + close_time = loop.time() + keepalive_timeout + self._next_keepalive_close_time = close_time + if self._keepalive_handle is None: + self._keepalive_handle = loop.call_at( + close_time, self._process_keepalive + ) + else: + break # remove handler, close transport if no handlers left if not self._force_close: diff --git a/tests/test_web_server.py b/tests/test_web_server.py index 910f074e90f..9098ef9e7bf 100644 --- a/tests/test_web_server.py +++ b/tests/test_web_server.py @@ -233,6 +233,24 @@ async def handler(request): logger.debug.assert_called_with("Ignored premature client disconnection") +async def test_raw_server_does_not_swallow_base_exceptions( + aiohttp_raw_server: AiohttpRawServer, aiohttp_client: AiohttpClient +) -> None: + class UnexpectedException(BaseException): + """Dummy base exception.""" + + async def handler(request: web.BaseRequest) -> NoReturn: + raise UnexpectedException() + + loop = 
asyncio.get_event_loop() + loop.set_debug(True) + server = await aiohttp_raw_server(handler) + cli = await aiohttp_client(server) + + with pytest.raises(client.ServerDisconnectedError): + await cli.get("/path/to", timeout=client.ClientTimeout(10)) + + async def test_raw_server_cancelled_in_write_eof(aiohttp_raw_server, aiohttp_client): async def handler(request): resp = web.Response(text=str(request.rel_url)) From b7187a8f83c2ef8daaee23363bd2d29b851378e1 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 20 Feb 2025 19:55:14 +0000 Subject: [PATCH 1218/1511] [PR #10464/182198fc backport][3.11] Close the socket if there's a failure in start_connection() (#10477) Co-authored-by: Andrew Top <142360808+top-oai@users.noreply.github.com> --- CHANGES/10464.bugfix.rst | 1 + CONTRIBUTORS.txt | 1 + aiohttp/connector.py | 13 ++++++++++++- tests/conftest.py | 1 + tests/test_connector.py | 23 +++++++++++++++++++++++ tests/test_proxy.py | 1 + 6 files changed, 39 insertions(+), 1 deletion(-) create mode 100644 CHANGES/10464.bugfix.rst diff --git a/CHANGES/10464.bugfix.rst b/CHANGES/10464.bugfix.rst new file mode 100644 index 00000000000..4e21000a317 --- /dev/null +++ b/CHANGES/10464.bugfix.rst @@ -0,0 +1 @@ +Changed connection creation to explicitly close sockets if an exception is raised in the event loop's ``create_connection`` method -- by :user:`top-oai`. 
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index fb1b87ccc9d..1f0d1e7d2d7 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -42,6 +42,7 @@ Andrej Antonov Andrew Leech Andrew Lytvyn Andrew Svetlov +Andrew Top Andrew Zhou Andrii Soldatenko Anes Abismail diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 7e0986df657..14433ba37e1 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -1108,6 +1108,7 @@ async def _wrap_create_connection( client_error: Type[Exception] = ClientConnectorError, **kwargs: Any, ) -> Tuple[asyncio.Transport, ResponseHandler]: + sock: Union[socket.socket, None] = None try: async with ceil_timeout( timeout.sock_connect, ceil_threshold=timeout.ceil_threshold @@ -1119,7 +1120,11 @@ async def _wrap_create_connection( interleave=self._interleave, loop=self._loop, ) - return await self._loop.create_connection(*args, **kwargs, sock=sock) + connection = await self._loop.create_connection( + *args, **kwargs, sock=sock + ) + sock = None + return connection except cert_errors as exc: raise ClientConnectorCertificateError(req.connection_key, exc) from exc except ssl_errors as exc: @@ -1128,6 +1133,12 @@ async def _wrap_create_connection( if exc.errno is None and isinstance(exc, asyncio.TimeoutError): raise raise client_error(req.connection_key, exc) from exc + finally: + if sock is not None: + # Will be hit if an exception is thrown before the event loop takes the socket. + # In that case, proactively close the socket to guard against event loop leaks. + # For example, see https://github.com/MagicStack/uvloop/issues/653. 
+ sock.close() async def _wrap_existing_connection( self, diff --git a/tests/conftest.py b/tests/conftest.py index 44ae384b633..95a98cd4fc0 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -221,6 +221,7 @@ def start_connection(): "aiohttp.connector.aiohappyeyeballs.start_connection", autospec=True, spec_set=True, + return_value=mock.create_autospec(socket.socket, spec_set=True, instance=True), ) as start_connection_mock: yield start_connection_mock diff --git a/tests/test_connector.py b/tests/test_connector.py index 483759a4180..e79b36a673d 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -617,6 +617,29 @@ async def certificate_error(*args, **kwargs): await conn.close() +async def test_tcp_connector_closes_socket_on_error( + loop: asyncio.AbstractEventLoop, start_connection: mock.AsyncMock +) -> None: + req = ClientRequest("GET", URL("https://127.0.0.1:443"), loop=loop) + + conn = aiohttp.TCPConnector() + with ( + mock.patch.object( + conn._loop, + "create_connection", + autospec=True, + spec_set=True, + side_effect=ValueError, + ), + pytest.raises(ValueError), + ): + await conn.connect(req, [], ClientTimeout()) + + assert start_connection.return_value.close.called + + await conn.close() + + async def test_tcp_connector_server_hostname_default( loop: Any, start_connection: mock.AsyncMock ) -> None: diff --git a/tests/test_proxy.py b/tests/test_proxy.py index 1679b68909f..83457de891f 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -207,6 +207,7 @@ async def make_conn(): "aiohttp.connector.aiohappyeyeballs.start_connection", autospec=True, spec_set=True, + return_value=mock.create_autospec(socket.socket, spec_set=True, instance=True), ) def test_proxy_connection_error(self, start_connection: Any) -> None: async def make_conn(): From a9c8841d62926debef7a876bb8e5a1ee6a180cf8 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 20 Feb 2025 20:38:57 +0000 Subject: [PATCH 
1219/1511] [PR #10464/182198fc backport][3.12] Close the socket if there's a failure in start_connection() (#10478) Co-authored-by: Andrew Top <142360808+top-oai@users.noreply.github.com> --- CHANGES/10464.bugfix.rst | 1 + CONTRIBUTORS.txt | 1 + aiohttp/connector.py | 13 ++++++++++++- tests/conftest.py | 1 + tests/test_connector.py | 23 +++++++++++++++++++++++ tests/test_proxy.py | 1 + 6 files changed, 39 insertions(+), 1 deletion(-) create mode 100644 CHANGES/10464.bugfix.rst diff --git a/CHANGES/10464.bugfix.rst b/CHANGES/10464.bugfix.rst new file mode 100644 index 00000000000..4e21000a317 --- /dev/null +++ b/CHANGES/10464.bugfix.rst @@ -0,0 +1 @@ +Changed connection creation to explicitly close sockets if an exception is raised in the event loop's ``create_connection`` method -- by :user:`top-oai`. diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 7eb48579097..a2511e95511 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -42,6 +42,7 @@ Andrej Antonov Andrew Leech Andrew Lytvyn Andrew Svetlov +Andrew Top Andrew Zhou Andrii Soldatenko Anes Abismail diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 7e0986df657..14433ba37e1 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -1108,6 +1108,7 @@ async def _wrap_create_connection( client_error: Type[Exception] = ClientConnectorError, **kwargs: Any, ) -> Tuple[asyncio.Transport, ResponseHandler]: + sock: Union[socket.socket, None] = None try: async with ceil_timeout( timeout.sock_connect, ceil_threshold=timeout.ceil_threshold @@ -1119,7 +1120,11 @@ async def _wrap_create_connection( interleave=self._interleave, loop=self._loop, ) - return await self._loop.create_connection(*args, **kwargs, sock=sock) + connection = await self._loop.create_connection( + *args, **kwargs, sock=sock + ) + sock = None + return connection except cert_errors as exc: raise ClientConnectorCertificateError(req.connection_key, exc) from exc except ssl_errors as exc: @@ -1128,6 +1133,12 @@ async def 
_wrap_create_connection( if exc.errno is None and isinstance(exc, asyncio.TimeoutError): raise raise client_error(req.connection_key, exc) from exc + finally: + if sock is not None: + # Will be hit if an exception is thrown before the event loop takes the socket. + # In that case, proactively close the socket to guard against event loop leaks. + # For example, see https://github.com/MagicStack/uvloop/issues/653. + sock.close() async def _wrap_existing_connection( self, diff --git a/tests/conftest.py b/tests/conftest.py index 44ae384b633..95a98cd4fc0 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -221,6 +221,7 @@ def start_connection(): "aiohttp.connector.aiohappyeyeballs.start_connection", autospec=True, spec_set=True, + return_value=mock.create_autospec(socket.socket, spec_set=True, instance=True), ) as start_connection_mock: yield start_connection_mock diff --git a/tests/test_connector.py b/tests/test_connector.py index 483759a4180..e79b36a673d 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -617,6 +617,29 @@ async def certificate_error(*args, **kwargs): await conn.close() +async def test_tcp_connector_closes_socket_on_error( + loop: asyncio.AbstractEventLoop, start_connection: mock.AsyncMock +) -> None: + req = ClientRequest("GET", URL("https://127.0.0.1:443"), loop=loop) + + conn = aiohttp.TCPConnector() + with ( + mock.patch.object( + conn._loop, + "create_connection", + autospec=True, + spec_set=True, + side_effect=ValueError, + ), + pytest.raises(ValueError), + ): + await conn.connect(req, [], ClientTimeout()) + + assert start_connection.return_value.close.called + + await conn.close() + + async def test_tcp_connector_server_hostname_default( loop: Any, start_connection: mock.AsyncMock ) -> None: diff --git a/tests/test_proxy.py b/tests/test_proxy.py index 1679b68909f..83457de891f 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -207,6 +207,7 @@ async def make_conn(): 
"aiohttp.connector.aiohappyeyeballs.start_connection", autospec=True, spec_set=True, + return_value=mock.create_autospec(socket.socket, spec_set=True, instance=True), ) def test_proxy_connection_error(self, start_connection: Any) -> None: async def make_conn(): From d896b2104d557776c3816c49a2ae017d8e59a2bf Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 20 Feb 2025 20:39:02 +0000 Subject: [PATCH 1220/1511] Bump proxy-py from 2.4.9 to 2.4.10 (#10471) Bumps [proxy-py](https://github.com/abhinavsingh/proxy.py) from 2.4.9 to 2.4.10. <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/3b9964b683dccf4507380fd17d3403ac0cf64342"><code>3b9964b</code></a> Bump actions cache (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1520">#1520</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/dc952abdaed219ee14aaa425a1b9d4027fc37116"><code>dc952ab</code></a> <code>DEFAULT_CONNECT_TIMEOUT = 10.0</code> (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1519">#1519</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/d5023491d1bb7f66a54b63a4867db616d4939b71"><code>d502349</code></a> Avoid closing upstream if its managed by plugins (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1518">#1518</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/ad2e1074b36f190d1f5d583190b76770a3612405"><code>ad2e107</code></a> Leakage class (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1510">#1510</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/71d796baa961cacda7c1e661a98f10e1e474cd85"><code>71d796b</code></a> Dont crash threadless if work shutdown errors out (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1509">#1509</a>)</li> <li><a 
href="https://github.com/abhinavsingh/proxy.py/commit/5076c3040c2031ce4b666c7a10a4c0eba927ba90"><code>5076c30</code></a> Lint fix</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/edad693fbbc7499e8b3827fe81c61b2e3f8e97af"><code>edad693</code></a> Cast <code>--inactive-conn-cleanup-timeout</code> to float</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/8d81b1426f373c90e1efad19907c82e03b2bcabb"><code>8d81b14</code></a> <code>--inactive-conn-cleanup-timeout</code> (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1504">#1504</a>)</li> <li><a href="https://github.com/abhinavsingh/proxy.py/commit/050ac1c39a83fa47229d69df4ce824a53bc07228"><code>050ac1c</code></a> <code>--rewrite-host-header</code> flag for reverse proxy (<a href="https://redirect.github.com/abhinavsingh/proxy.py/issues/1492">#1492</a>)</li> <li>See full diff in <a href="https://github.com/abhinavsingh/proxy.py/compare/v2.4.9...v2.4.10">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=proxy-py&package-manager=pip&previous-version=2.4.9&new-version=2.4.10)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 9cd66c31096..60b86c6df53 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -136,7 +136,7 @@ propcache==0.2.1 # via # -r requirements/runtime-deps.in # yarl 
-proxy-py==2.4.9 +proxy-py==2.4.10 # via -r requirements/test.in pycares==4.5.0 # via aiodns diff --git a/requirements/dev.txt b/requirements/dev.txt index e97b52d4c97..8c1c027cc09 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -133,7 +133,7 @@ propcache==0.2.1 # via # -r requirements/runtime-deps.in # yarl -proxy-py==2.4.9 +proxy-py==2.4.10 # via -r requirements/test.in pycares==4.5.0 # via aiodns diff --git a/requirements/test.txt b/requirements/test.txt index 791bc28e9b4..5ee3a0751b3 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -71,7 +71,7 @@ propcache==0.2.1 # via # -r requirements/runtime-deps.in # yarl -proxy-py==2.4.9 +proxy-py==2.4.10 # via -r requirements/test.in pycares==4.5.0 # via aiodns From ae6220671b65064bc84909f62a0fa423eb0f6595 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 20 Feb 2025 20:39:04 +0000 Subject: [PATCH 1221/1511] Bump coverage from 7.6.11 to 7.6.12 (#10462) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.6.11 to 7.6.12. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst">coverage's changelog</a>.</em></p> <blockquote> <h2>Version 7.6.12 — 2025-02-11</h2> <ul> <li>Fix: some aarch64 distributions were missing (<code>issue 1927</code>_). These are now building reliably.</li> </ul> <p>.. _issue 1927: <a href="https://redirect.github.com/nedbat/coveragepy/issues/1927">nedbat/coveragepy#1927</a></p> <p>.. 
_changes_7-6-11:</p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/nedbat/coveragepy/commit/7e5373eadff4753bb9273dad43bdee98bff65af0"><code>7e5373e</code></a> docs: sample HTML for 7.6.12</li> <li><a href="https://github.com/nedbat/coveragepy/commit/a4ed38b3f8bbcb8db8998069b2170e6011925c24"><code>a4ed38b</code></a> docs: prep for 7.6.12</li> <li><a href="https://github.com/nedbat/coveragepy/commit/ce4efdcde3a55654624c791b587885d5bfc84b7b"><code>ce4efdc</code></a> build: fix aarch64 kits <a href="https://redirect.github.com/nedbat/coveragepy/issues/1927">#1927</a></li> <li><a href="https://github.com/nedbat/coveragepy/commit/a1f3192673d02e63ce6e1c17439e7983e5769755"><code>a1f3192</code></a> build: don't publish if kit building failed</li> <li><a href="https://github.com/nedbat/coveragepy/commit/bb68f99a01116070d014d4d77d38dbffcd3e8e9b"><code>bb68f99</code></a> chore: bump the action-dependencies group with 2 updates (<a href="https://redirect.github.com/nedbat/coveragepy/issues/1926">#1926</a>)</li> <li><a href="https://github.com/nedbat/coveragepy/commit/f3d6b4a8c28aabb12f8167884dea473b677e18fa"><code>f3d6b4a</code></a> refactor: check for more kinds of constant tests</li> <li><a href="https://github.com/nedbat/coveragepy/commit/67899ea7f08e7d9b015a60e073acd0466f1af9f6"><code>67899ea</code></a> refactor: we no longer care what kind of constant the compile-time constants are</li> <li><a href="https://github.com/nedbat/coveragepy/commit/c850f2005b8d95d7754117a3225e9f5e2130a892"><code>c850f20</code></a> refactor: macOS is MACOS, not OSX</li> <li><a href="https://github.com/nedbat/coveragepy/commit/a1b2c1a17b72a1c9a24a704222f310983f11e51d"><code>a1b2c1a</code></a> build: there are always tweaks to howto.txt</li> <li><a href="https://github.com/nedbat/coveragepy/commit/9c0303959f905b00fc10d0c93173bba0ea3127ac"><code>9c03039</code></a> build: bump version to 7.6.12</li> <li>See full diff in <a 
href="https://github.com/nedbat/coveragepy/compare/7.6.11...7.6.12">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=coverage&package-manager=pip&previous-version=7.6.11&new-version=7.6.12)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 60b86c6df53..97ecbb8cef5 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -50,7 +50,7 @@ click==8.1.8 # slotscheck # towncrier # wait-for-it -coverage==7.6.11 +coverage==7.6.12 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/dev.txt b/requirements/dev.txt index 8c1c027cc09..29e90f1cf51 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -50,7 +50,7 @@ click==8.1.8 # slotscheck # towncrier # wait-for-it -coverage==7.6.11 +coverage==7.6.12 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/test.txt b/requirements/test.txt index 5ee3a0751b3..9e089d12363 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -25,7 +25,7 @@ cffi==1.17.1 # pytest-codspeed click==8.1.8 # via wait-for-it -coverage==7.6.11 +coverage==7.6.12 # via # -r requirements/test.in # pytest-cov From be1159b823703047377348d6f84dfe8165c8bfb9 Mon Sep 17 
00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 20 Feb 2025 20:45:48 +0000 Subject: [PATCH 1222/1511] Bump identify from 2.6.6 to 2.6.7 (#10443) Bumps [identify](https://github.com/pre-commit/identify) from 2.6.6 to 2.6.7. <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pre-commit/identify/commit/157ee8efe3a7be5fab92f23396663cf99d4185de"><code>157ee8e</code></a> v2.6.7</li> <li><a href="https://github.com/pre-commit/identify/commit/76cf7163721a5cb32544776a77bd5d7483ca057a"><code>76cf716</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/identify/issues/503">#503</a> from mheiges/uv-interpreter</li> <li><a href="https://github.com/pre-commit/identify/commit/8130ef98fab826d31e35a65451ec7795bf6ba375"><code>8130ef9</code></a> add uv to interpreters list</li> <li><a href="https://github.com/pre-commit/identify/commit/e09b6899e652358e1fc13ac303e9c9f5f5cfc231"><code>e09b689</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/identify/issues/501">#501</a> from dinoshauer/patch-1</li> <li><a href="https://github.com/pre-commit/identify/commit/5a2b68b54ae2db7efe6093e93a1911762c2bf791"><code>5a2b68b</code></a> add <code>.templ</code> extension</li> <li><a href="https://github.com/pre-commit/identify/commit/4db44f007218ce0b1a2d4b91c0f7ad4cdc60ba13"><code>4db44f0</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/identify/issues/504">#504</a> from pre-commit/all-repos_autofix_all-repos-sed</li> <li><a href="https://github.com/pre-commit/identify/commit/82ab282ad58f1a0110ef8a467f48051efdb06898"><code>82ab282</code></a> upgrade asottile/workflows</li> <li><a href="https://github.com/pre-commit/identify/commit/96281e4bdbe64825574f15bf4aa790579bc58808"><code>96281e4</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/identify/issues/502">#502</a> from pre-commit/pre-commit-ci-update-config</li> 
<li><a href="https://github.com/pre-commit/identify/commit/94c5fb14a3e767e9ed09b1f6db3ac145b1389306"><code>94c5fb1</code></a> [pre-commit.ci] pre-commit autoupdate</li> <li>See full diff in <a href="https://github.com/pre-commit/identify/compare/v2.6.6...v2.6.7">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=identify&package-manager=pip&previous-version=2.6.6&new-version=2.6.7)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 97ecbb8cef5..9955dfc0747 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -82,7 +82,7 @@ gidgethub==5.3.0 # via cherry-picker gunicorn==23.0.0 # via -r requirements/base.in -identify==2.6.6 +identify==2.6.7 # via pre-commit idna==3.3 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 29e90f1cf51..a3b06a8ab8a 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -80,7 +80,7 @@ gidgethub==5.3.0 # via cherry-picker gunicorn==23.0.0 # via -r requirements/base.in -identify==2.6.6 +identify==2.6.7 # via pre-commit idna==3.4 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index 57fdc34c119..616639a8508 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -29,7 +29,7 @@ filelock==3.17.0 # via virtualenv freezegun==1.5.1 # via -r requirements/lint.in -identify==2.6.6 +identify==2.6.7 # via pre-commit idna==3.7 # via trustme From 
af48c8dbb0a11e022c08a88630fc0b93f7970f22 Mon Sep 17 00:00:00 2001 From: Christophe Bornet <cbornet@hotmail.com> Date: Fri, 21 Feb 2025 01:18:09 +0100 Subject: [PATCH 1223/1511] [PR #10433/0c4b1c7 backport][3.12] Detect blocking calls in coroutines using BlockBuster (#10480) --- CHANGES/10433.feature.rst | 1 + CONTRIBUTORS.txt | 1 + aiohttp/client.py | 9 ++- requirements/lint.in | 1 + requirements/lint.txt | 4 ++ requirements/test.in | 1 + requirements/test.txt | 4 ++ tests/conftest.py | 31 +++++++++++ tests/test_client_functional.py | 97 +++++++++++++++++---------------- tests/test_cookiejar.py | 2 +- tests/test_web_functional.py | 28 +++++----- 11 files changed, 113 insertions(+), 66 deletions(-) create mode 100644 CHANGES/10433.feature.rst diff --git a/CHANGES/10433.feature.rst b/CHANGES/10433.feature.rst new file mode 100644 index 00000000000..11a29d6e368 --- /dev/null +++ b/CHANGES/10433.feature.rst @@ -0,0 +1 @@ +Detect blocking calls in coroutines using BlockBuster -- by :user:`cbornet`. 
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index a2511e95511..9dd9d873003 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -80,6 +80,7 @@ Chris AtLee Chris Laws Chris Moore Chris Shucksmith +Christophe Bornet Christopher Schmitt Claudiu Popa Colin Dunklau diff --git a/aiohttp/client.py b/aiohttp/client.py index 2d5a9a4cdce..8ba5e282e2c 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -664,11 +664,14 @@ async def _request( if req_cookies: all_cookies.load(req_cookies) + proxy_: Optional[URL] = None if proxy is not None: - proxy = URL(proxy) + proxy_ = URL(proxy) elif self._trust_env: with suppress(LookupError): - proxy, proxy_auth = get_env_proxy_for_url(url) + proxy_, proxy_auth = await asyncio.to_thread( + get_env_proxy_for_url, url + ) req = self._request_class( method, @@ -685,7 +688,7 @@ async def _request( expect100=expect100, loop=self._loop, response_class=self._response_class, - proxy=proxy, + proxy=proxy_, proxy_auth=proxy_auth, timer=timer, session=self, diff --git a/requirements/lint.in b/requirements/lint.in index 04eea1408da..4759dadc6a9 100644 --- a/requirements/lint.in +++ b/requirements/lint.in @@ -1,4 +1,5 @@ aiodns +blockbuster freezegun mypy; implementation_name == "cpython" pre-commit diff --git a/requirements/lint.txt b/requirements/lint.txt index 616639a8508..fd5a757b7b8 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -10,6 +10,8 @@ annotated-types==0.7.0 # via pydantic async-timeout==5.0.1 # via valkey +blockbuster==1.5.21 + # via -r requirements/lint.in cffi==1.17.1 # via # cryptography @@ -27,6 +29,8 @@ exceptiongroup==1.2.2 # via pytest filelock==3.17.0 # via virtualenv +forbiddenfruit==0.1.4 + # via blockbuster freezegun==1.5.1 # via -r requirements/lint.in identify==2.6.7 diff --git a/requirements/test.in b/requirements/test.in index 6686b373758..c6547bee5e5 100644 --- a/requirements/test.in +++ b/requirements/test.in @@ -1,5 +1,6 @@ -r base.in +blockbuster coverage freezegun mypy; 
implementation_name == "cpython" diff --git a/requirements/test.txt b/requirements/test.txt index 9e089d12363..e3e1c11e5bb 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -16,6 +16,8 @@ async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in attrs==25.1.0 # via -r requirements/runtime-deps.in +blockbuster==1.5.21 + # via -r requirements/test.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in cffi==1.17.1 @@ -35,6 +37,8 @@ exceptiongroup==1.2.2 # via pytest execnet==2.1.1 # via pytest-xdist +forbiddenfruit==0.1.4 + # via blockbuster freezegun==1.5.1 # via -r requirements/test.in frozenlist==1.5.0 diff --git a/tests/conftest.py b/tests/conftest.py index 95a98cd4fc0..f7b3dcdb752 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -12,6 +12,7 @@ from uuid import uuid4 import pytest +from blockbuster import blockbuster_ctx from aiohttp.client_proto import ResponseHandler from aiohttp.http import WS_KEY @@ -33,6 +34,36 @@ IS_LINUX = sys.platform.startswith("linux") +@pytest.fixture(autouse=True) +def blockbuster(request): + # No blockbuster for benchmark tests. + node = request.node.parent + while node: + if node.name.startswith("test_benchmarks"): + yield + return + node = node.parent + with blockbuster_ctx( + "aiohttp", excluded_modules=["aiohttp.pytest_plugin", "aiohttp.test_utils"] + ) as bb: + # TODO: Fix blocking call in ClientRequest's constructor. 
+ # https://github.com/aio-libs/aiohttp/issues/10435 + for func in ["io.TextIOWrapper.read", "os.stat"]: + bb.functions[func].can_block_in("aiohttp/client_reqrep.py", "update_auth") + for func in [ + "os.getcwd", + "os.readlink", + "os.stat", + "os.path.abspath", + "os.path.samestat", + ]: + bb.functions[func].can_block_in( + "aiohttp/web_urldispatcher.py", "add_static" + ) + bb.functions["os.getcwd"].can_block_in("coverage/control.py", "_should_trace") + yield + + @pytest.fixture def tls_certificate_authority(): if not TRUSTME: diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index ba75e8e93c6..9ffe5f523f3 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -477,7 +477,7 @@ async def test_post_data_with_bytesio_file(aiohttp_client) -> None: async def handler(request): post_data = await request.post() assert ["file"] == list(post_data.keys()) - assert data == post_data["file"].file.read() + assert data == await asyncio.to_thread(post_data["file"].file.read) post_data["file"].file.close() # aiohttp < 4 doesn't autoclose files return web.Response() @@ -1595,14 +1595,14 @@ async def handler(request: web.Request) -> web.Response: async def test_POST_FILES(aiohttp_client, fname) -> None: + content1 = fname.read_bytes() + async def handler(request): data = await request.post() assert data["some"].filename == fname.name - with fname.open("rb") as f: - content1 = f.read() - content2 = data["some"].file.read() - assert content1 == content2 - assert data["test"].file.read() == b"data" + content2 = await asyncio.to_thread(data["some"].file.read) + assert content2 == content1 + assert await asyncio.to_thread(data["test"].file.read) == b"data" data["some"].file.close() data["test"].file.close() return web.Response() @@ -1619,14 +1619,14 @@ async def handler(request): async def test_POST_FILES_DEFLATE(aiohttp_client, fname) -> None: + content1 = fname.read_bytes() + async def handler(request): data = await 
request.post() assert data["some"].filename == fname.name - with fname.open("rb") as f: - content1 = f.read() - content2 = data["some"].file.read() + content2 = await asyncio.to_thread(data["some"].file.read) data["some"].file.close() - assert content1 == content2 + assert content2 == content1 return web.Response() app = web.Application() @@ -1676,12 +1676,12 @@ async def handler(request): async def test_POST_FILES_STR(aiohttp_client, fname) -> None: + content1 = fname.read_bytes().decode() + async def handler(request): data = await request.post() - with fname.open("rb") as f: - content1 = f.read().decode() content2 = data["some"] - assert content1 == content2 + assert content2 == content1 return web.Response() app = web.Application() @@ -1694,11 +1694,11 @@ async def handler(request): async def test_POST_FILES_STR_SIMPLE(aiohttp_client, fname) -> None: + content = fname.read_bytes() + async def handler(request): data = await request.read() - with fname.open("rb") as f: - content = f.read() - assert content == data + assert data == content return web.Response() app = web.Application() @@ -1711,12 +1711,12 @@ async def handler(request): async def test_POST_FILES_LIST(aiohttp_client, fname) -> None: + content = fname.read_bytes() + async def handler(request): data = await request.post() assert fname.name == data["some"].filename - with fname.open("rb") as f: - content = f.read() - assert content == data["some"].file.read() + assert await asyncio.to_thread(data["some"].file.read) == content data["some"].file.close() return web.Response() @@ -1730,13 +1730,13 @@ async def handler(request): async def test_POST_FILES_CT(aiohttp_client, fname) -> None: + content = fname.read_bytes() + async def handler(request): data = await request.post() assert fname.name == data["some"].filename assert "text/plain" == data["some"].content_type - with fname.open("rb") as f: - content = f.read() - assert content == data["some"].file.read() + assert await 
asyncio.to_thread(data["some"].file.read) == content data["some"].file.close() return web.Response() @@ -1752,11 +1752,11 @@ async def handler(request): async def test_POST_FILES_SINGLE(aiohttp_client, fname) -> None: + content = fname.read_bytes().decode() + async def handler(request): data = await request.text() - with fname.open("rb") as f: - content = f.read().decode() - assert content == data + assert data == content # if system cannot determine 'text/x-python' MIME type # then use 'application/octet-stream' default assert request.content_type in [ @@ -1778,11 +1778,11 @@ async def handler(request): async def test_POST_FILES_SINGLE_content_disposition(aiohttp_client, fname) -> None: + content = fname.read_bytes().decode() + async def handler(request): data = await request.text() - with fname.open("rb") as f: - content = f.read().decode() - assert content == data + assert data == content # if system cannot determine 'application/pgp-keys' MIME type # then use 'application/octet-stream' default assert request.content_type in [ @@ -1808,11 +1808,11 @@ async def handler(request): async def test_POST_FILES_SINGLE_BINARY(aiohttp_client, fname) -> None: + content = fname.read_bytes() + async def handler(request): data = await request.read() - with fname.open("rb") as f: - content = f.read() - assert content == data + assert data == content # if system cannot determine 'application/pgp-keys' MIME type # then use 'application/octet-stream' default assert request.content_type in [ @@ -1835,7 +1835,7 @@ async def handler(request): async def test_POST_FILES_IO(aiohttp_client) -> None: async def handler(request): data = await request.post() - assert b"data" == data["unknown"].file.read() + assert b"data" == await asyncio.to_thread(data["unknown"].file.read) assert data["unknown"].content_type == "application/octet-stream" assert data["unknown"].filename == "unknown" data["unknown"].file.close() @@ -1856,7 +1856,7 @@ async def handler(request): assert data["test"] == "true" 
assert data["unknown"].content_type == "application/octet-stream" assert data["unknown"].filename == "unknown" - assert data["unknown"].file.read() == b"data" + assert await asyncio.to_thread(data["unknown"].file.read) == b"data" data["unknown"].file.close() assert data.getall("q") == ["t1", "t2"] @@ -1875,6 +1875,8 @@ async def handler(request): async def test_POST_FILES_WITH_DATA(aiohttp_client, fname) -> None: + content = fname.read_bytes() + async def handler(request): data = await request.post() assert data["test"] == "true" @@ -1884,9 +1886,8 @@ async def handler(request): "application/octet-stream", ] assert data["some"].filename == fname.name - with fname.open("rb") as f: - assert data["some"].file.read() == f.read() - data["some"].file.close() + assert await asyncio.to_thread(data["some"].file.read) == content + data["some"].file.close() return web.Response() @@ -1900,13 +1901,13 @@ async def handler(request): async def test_POST_STREAM_DATA(aiohttp_client, fname) -> None: + expected = fname.read_bytes() + async def handler(request): assert request.content_type == "application/octet-stream" content = await request.read() - with fname.open("rb") as f: - expected = f.read() - assert request.content_length == len(expected) - assert content == expected + assert request.content_length == len(expected) + assert content == expected return web.Response() @@ -1922,10 +1923,10 @@ async def handler(request): @aiohttp.streamer async def stream(writer, fname): with fname.open("rb") as f: - data = f.read(100) + data = await asyncio.to_thread(f.read, 100) while data: await writer.write(data) - data = f.read(100) + data = await asyncio.to_thread(f.read, 100) async with client.post( "/", data=stream(fname), headers={"Content-Length": str(data_size)} @@ -1934,13 +1935,13 @@ async def stream(writer, fname): async def test_POST_STREAM_DATA_no_params(aiohttp_client, fname) -> None: + expected = fname.read_bytes() + async def handler(request): assert request.content_type == 
"application/octet-stream" content = await request.read() - with fname.open("rb") as f: - expected = f.read() - assert request.content_length == len(expected) - assert content == expected + assert request.content_length == len(expected) + assert content == expected return web.Response() @@ -1956,10 +1957,10 @@ async def handler(request): @aiohttp.streamer async def stream(writer): with fname.open("rb") as f: - data = f.read(100) + data = await asyncio.to_thread(f.read, 100) while data: await writer.write(data) - data = f.read(100) + data = await asyncio.to_thread(f.read, 100) async with client.post( "/", data=stream, headers={"Content-Length": str(data_size)} diff --git a/tests/test_cookiejar.py b/tests/test_cookiejar.py index 0b440bc2ca6..4c37e962597 100644 --- a/tests/test_cookiejar.py +++ b/tests/test_cookiejar.py @@ -179,7 +179,7 @@ async def test_constructor_with_expired( assert jar._loop is loop -async def test_save_load(tmp_path, loop, cookies_to_send, cookies_to_receive) -> None: +def test_save_load(tmp_path, loop, cookies_to_send, cookies_to_receive) -> None: file_path = pathlib.Path(str(tmp_path)) / "aiohttp.test.cookie" # export cookie jar diff --git a/tests/test_web_functional.py b/tests/test_web_functional.py index e4979851300..47189f7460b 100644 --- a/tests/test_web_functional.py +++ b/tests/test_web_functional.py @@ -403,7 +403,7 @@ async def handler(request): data = await request.post() assert ["data.unknown_mime_type"] == list(data.keys()) for fs in data.values(): - check_file(fs) + await asyncio.to_thread(check_file, fs) fs.file.close() resp = web.Response(body=b"OK") return resp @@ -429,9 +429,9 @@ async def handler(request): for _file in files: assert not _file.file.closed if _file.filename == "test1.jpeg": - assert _file.file.read() == b"binary data 1" + assert await asyncio.to_thread(_file.file.read) == b"binary data 1" if _file.filename == "test2.jpeg": - assert _file.file.read() == b"binary data 2" + assert await 
asyncio.to_thread(_file.file.read) == b"binary data 2" file_names.add(_file.filename) _file.file.close() assert len(files) == 2 @@ -471,7 +471,7 @@ async def handler(request): data = await request.post() assert ["data.unknown_mime_type", "conftest.py"] == list(data.keys()) for fs in data.values(): - check_file(fs) + await asyncio.to_thread(check_file, fs) fs.file.close() resp = web.Response(body=b"OK") return resp @@ -757,7 +757,7 @@ async def test_upload_file(aiohttp_client) -> None: async def handler(request): form = await request.post() - raw_data = form["file"].file.read() + raw_data = await asyncio.to_thread(form["file"].file.read) form["file"].file.close() assert data == raw_data return web.Response() @@ -780,7 +780,7 @@ async def test_upload_file_object(aiohttp_client) -> None: async def handler(request): form = await request.post() - raw_data = form["file"].file.read() + raw_data = await asyncio.to_thread(form["file"].file.read) form["file"].file.close() assert data == raw_data return web.Response() @@ -906,10 +906,10 @@ async def test_response_with_async_gen(aiohttp_client, fname) -> None: async def stream(f_name): with f_name.open("rb") as f: - data = f.read(100) + data = await asyncio.to_thread(f.read, 100) while data: yield data - data = f.read(100) + data = await asyncio.to_thread(f.read, 100) async def handler(request): headers = {"Content-Length": str(data_size)} @@ -940,10 +940,10 @@ async def test_response_with_streamer(aiohttp_client, fname) -> None: @aiohttp.streamer async def stream(writer, f_name): with f_name.open("rb") as f: - data = f.read(100) + data = await asyncio.to_thread(f.read, 100) while data: await writer.write(data) - data = f.read(100) + data = await asyncio.to_thread(f.read, 100) async def handler(request): headers = {"Content-Length": str(data_size)} @@ -969,10 +969,10 @@ async def test_response_with_async_gen_no_params(aiohttp_client, fname) -> None: async def stream(): with fname.open("rb") as f: - data = f.read(100) + data = 
await asyncio.to_thread(f.read, 100) while data: yield data - data = f.read(100) + data = await asyncio.to_thread(f.read, 100) async def handler(request): headers = {"Content-Length": str(data_size)} @@ -1003,10 +1003,10 @@ async def test_response_with_streamer_no_params(aiohttp_client, fname) -> None: @aiohttp.streamer async def stream(writer): with fname.open("rb") as f: - data = f.read(100) + data = await asyncio.to_thread(f.read, 100) while data: await writer.write(data) - data = f.read(100) + data = await asyncio.to_thread(f.read, 100) async def handler(request): headers = {"Content-Length": str(data_size)} From 079a2424c9c5f76d2dffefbb9d6333b9e0ad5c98 Mon Sep 17 00:00:00 2001 From: Christophe Bornet <cbornet@hotmail.com> Date: Fri, 21 Feb 2025 01:46:39 +0100 Subject: [PATCH 1224/1511] [PR #10481/60819de backport][3.12] Bump blockbuster to 1.5.22 (#10481) (#10482) --- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- tests/conftest.py | 1 - 3 files changed, 2 insertions(+), 3 deletions(-) diff --git a/requirements/lint.txt b/requirements/lint.txt index fd5a757b7b8..a8195a21d4b 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -10,7 +10,7 @@ annotated-types==0.7.0 # via pydantic async-timeout==5.0.1 # via valkey -blockbuster==1.5.21 +blockbuster==1.5.22 # via -r requirements/lint.in cffi==1.17.1 # via diff --git a/requirements/test.txt b/requirements/test.txt index e3e1c11e5bb..fc607243bc2 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -16,7 +16,7 @@ async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in attrs==25.1.0 # via -r requirements/runtime-deps.in -blockbuster==1.5.21 +blockbuster==1.5.22 # via -r requirements/test.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in diff --git a/tests/conftest.py b/tests/conftest.py index f7b3dcdb752..5bca52fe67c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -60,7 +60,6 @@ def 
blockbuster(request): bb.functions[func].can_block_in( "aiohttp/web_urldispatcher.py", "add_static" ) - bb.functions["os.getcwd"].can_block_in("coverage/control.py", "_should_trace") yield From 0704705ac58c7cfd113182881b1457ac42a4d745 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 21 Feb 2025 10:57:07 +0000 Subject: [PATCH 1225/1511] Bump propcache from 0.2.1 to 0.3.0 (#10483) Bumps [propcache](https://github.com/aio-libs/propcache) from 0.2.1 to 0.3.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/propcache/releases">propcache's releases</a>.</em></p> <blockquote> <h2>0.3.0</h2> <h2>Features</h2> <ul> <li> <p>Implemented support for the free-threaded build of CPython 3.13 -- by :user:<code>lysnikolaou</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/propcache/issues/84">#84</a>.</p> </li> </ul> <h2>Packaging updates and notes for downstreams</h2> <ul> <li> <p>Started building wheels for the free-threaded build of CPython 3.13 -- by :user:<code>lysnikolaou</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/propcache/issues/84">#84</a>.</p> </li> </ul> <h2>Contributor-facing changes</h2> <ul> <li> <p>GitHub Actions CI/CD is now configured to manage caching pip-ecosystem dependencies using <code>re-actors/cache-python-deps</code>_ -- an action by :user:<code>webknjaz</code> that takes into account ABI stability and the exact version of Python runtime.</p> <p>.. 
_<code>re-actors/cache-python-deps</code>: <a href="https://github.com/marketplace/actions/cache-python-deps">https://github.com/marketplace/actions/cache-python-deps</a></p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/propcache/issues/93">#93</a>.</p> </li> </ul> <hr /> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/propcache/blob/master/CHANGES.rst">propcache's changelog</a>.</em></p> <blockquote> <h1>0.3.0</h1> <p><em>(2025-02-20)</em></p> <h2>Features</h2> <ul> <li> <p>Implemented support for the free-threaded build of CPython 3.13 -- by :user:<code>lysnikolaou</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>84</code>.</p> </li> </ul> <h2>Packaging updates and notes for downstreams</h2> <ul> <li> <p>Started building wheels for the free-threaded build of CPython 3.13 -- by :user:<code>lysnikolaou</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>84</code>.</p> </li> </ul> <h2>Contributor-facing changes</h2> <ul> <li> <p>GitHub Actions CI/CD is now configured to manage caching pip-ecosystem dependencies using <code>re-actors/cache-python-deps</code>_ -- an action by :user:<code>webknjaz</code> that takes into account ABI stability and the exact version of Python runtime.</p> <p>.. 
_<code>re-actors/cache-python-deps</code>: <a href="https://github.com/marketplace/actions/cache-python-deps">https://github.com/marketplace/actions/cache-python-deps</a></p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>93</code>.</p> </li> </ul> <hr /> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/propcache/commit/7b88cc099e5bec0a847ef6099d5934b99a53f61e"><code>7b88cc0</code></a> Revert "Start building wheels for armv7l manylinux (<a href="https://redirect.github.com/aio-libs/propcache/issues/94">#94</a>)" (<a href="https://redirect.github.com/aio-libs/propcache/issues/98">#98</a>)</li> <li><a href="https://github.com/aio-libs/propcache/commit/3d74e2d791c59f401e17f85c4208cc86c62a2953"><code>3d74e2d</code></a> Release 0.3.0 (<a href="https://redirect.github.com/aio-libs/propcache/issues/96">#96</a>)</li> <li><a href="https://github.com/aio-libs/propcache/commit/9f8918b9154022cc74d537f91ec2745384f1cacd"><code>9f8918b</code></a> Start building wheels for armv7l manylinux (<a href="https://redirect.github.com/aio-libs/propcache/issues/94">#94</a>)</li> <li><a href="https://github.com/aio-libs/propcache/commit/50155c340e31e9705b262045d19cc5c03c0d34df"><code>50155c3</code></a> Add qemu workaround to fix wheel builds (<a href="https://redirect.github.com/aio-libs/propcache/issues/95">#95</a>)</li> <li><a href="https://github.com/aio-libs/propcache/commit/ccab7f47d7e4c47dbae3392f6584043b70418660"><code>ccab7f4</code></a> Implement support for the free-threaded build of CPython 3.13 (<a href="https://redirect.github.com/aio-libs/propcache/issues/84">#84</a>)</li> <li><a href="https://github.com/aio-libs/propcache/commit/f5609407cbb52767ec1a337bbe114d875e12c10b"><code>f560940</code></a> Bump sphinx from 8.1.3 to 8.2.0 (<a href="https://redirect.github.com/aio-libs/propcache/issues/92">#92</a>)</li> <li><a 
href="https://github.com/aio-libs/propcache/commit/f203f9755b38f626d8435162a0dc00f8d4c0b9df"><code>f203f97</code></a> Bump cython from 3.0.11 to 3.0.12 (<a href="https://redirect.github.com/aio-libs/propcache/issues/91">#91</a>)</li> <li><a href="https://github.com/aio-libs/propcache/commit/b67c178a5bd83f679636438d3edc56d607e7c7c7"><code>b67c178</code></a> Bump pytest-codspeed from 3.1.2 to 3.2.0 (<a href="https://redirect.github.com/aio-libs/propcache/issues/90">#90</a>)</li> <li><a href="https://github.com/aio-libs/propcache/commit/4fe87bc96f9d5d67466b3050538623fad99a1ee0"><code>4fe87bc</code></a> Switch caching to use re-actors/cache-python-deps (<a href="https://redirect.github.com/aio-libs/propcache/issues/93">#93</a>)</li> <li><a href="https://github.com/aio-libs/propcache/commit/b8aeb2f02eec8bdb6c786591cbf7844963703c5e"><code>b8aeb2f</code></a> Bump pre-commit from 4.0.1 to 4.1.0 (<a href="https://redirect.github.com/aio-libs/propcache/issues/88">#88</a>)</li> <li>Additional commits viewable in <a href="https://github.com/aio-libs/propcache/compare/v0.2.1...v0.3.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=propcache&package-manager=pip&previous-version=0.2.1&new-version=0.3.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 8 +++++++- requirements/dev.txt | 8 +++++++- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 17 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 45a1fac6fa4..fb7a1d43f43 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -32,7 +32,7 @@ multidict==6.1.0 # yarl 
packaging==24.2 # via gunicorn -propcache==0.2.1 +propcache==0.3.0 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 9955dfc0747..adfdd34ce8d 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -26,6 +26,10 @@ attrs==25.1.0 # via -r requirements/runtime-deps.in babel==2.17.0 # via sphinx +blockbuster==1.5.22 + # via + # -r requirements/lint.in + # -r requirements/test.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in build==1.2.2.post1 @@ -70,6 +74,8 @@ execnet==2.1.1 # via pytest-xdist filelock==3.17.0 # via virtualenv +forbiddenfruit==0.1.4 + # via blockbuster freezegun==1.5.1 # via # -r requirements/lint.in @@ -132,7 +138,7 @@ pluggy==1.5.0 # via pytest pre-commit==4.1.0 # via -r requirements/lint.in -propcache==0.2.1 +propcache==0.3.0 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/dev.txt b/requirements/dev.txt index a3b06a8ab8a..3d536fc0a41 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -26,6 +26,10 @@ attrs==25.1.0 # via -r requirements/runtime-deps.in babel==2.17.0 # via sphinx +blockbuster==1.5.22 + # via + # -r requirements/lint.in + # -r requirements/test.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in build==1.2.2.post1 @@ -68,6 +72,8 @@ execnet==2.1.1 # via pytest-xdist filelock==3.17.0 # via virtualenv +forbiddenfruit==0.1.4 + # via blockbuster freezegun==1.5.1 # via # -r requirements/lint.in @@ -129,7 +135,7 @@ pluggy==1.5.0 # via pytest pre-commit==4.1.0 # via -r requirements/lint.in -propcache==0.2.1 +propcache==0.3.0 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 118f584b7dd..4265eab71a3 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -28,7 +28,7 @@ multidict==6.1.0 # via # -r 
requirements/runtime-deps.in # yarl -propcache==0.2.1 +propcache==0.3.0 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/test.txt b/requirements/test.txt index fc607243bc2..2a71315b0f7 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -71,7 +71,7 @@ packaging==24.2 # pytest pluggy==1.5.0 # via pytest -propcache==0.2.1 +propcache==0.3.0 # via # -r requirements/runtime-deps.in # yarl From 17983d356a71e9609b81bee79ccb6abc7b2158e7 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 21 Feb 2025 21:58:42 +0000 Subject: [PATCH 1226/1511] [PR #10474/7379a866 backport][3.12] Expose setsockopt in TCPConnector API (#10486) **This is a backport of PR #10474 as merged into master (7379a866bb9fa41986a01aa6fff2a73a210b66de).** Co-authored-by: Tim Menninger <tmenninger22@gmail.com> --- CHANGES/10474.feature.rst | 2 ++ CONTRIBUTORS.txt | 1 + aiohttp/connector.py | 12 ++++++++++++ docs/client_advanced.rst | 15 +++++++++++++++ docs/client_reference.rst | 9 ++++++++- tests/test_connector.py | 23 +++++++++++++++++++++++ 6 files changed, 61 insertions(+), 1 deletion(-) create mode 100644 CHANGES/10474.feature.rst diff --git a/CHANGES/10474.feature.rst b/CHANGES/10474.feature.rst new file mode 100644 index 00000000000..d5d6e4b40b9 --- /dev/null +++ b/CHANGES/10474.feature.rst @@ -0,0 +1,2 @@ +Added ``tcp_sockopts`` to ``TCPConnector`` to allow specifying custom socket options +-- by :user:`TimMenninger`. 
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 9dd9d873003..fb5217e3e6b 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -332,6 +332,7 @@ Thanos Lefteris Thijs Vermeir Thomas Forbes Thomas Grainger +Tim Menninger Tolga Tezel Tomasz Trebski Toshiaki Tanaka diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 14433ba37e1..75d5796f7d2 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -19,6 +19,7 @@ DefaultDict, Deque, Dict, + Iterable, Iterator, List, Literal, @@ -60,6 +61,11 @@ ) from .resolver import DefaultResolver +if sys.version_info >= (3, 12): + from collections.abc import Buffer +else: + Buffer = Union[bytes, bytearray, "memoryview[int]", "memoryview[bytes]"] + if TYPE_CHECKING: import ssl @@ -828,6 +834,8 @@ class TCPConnector(BaseConnector): the happy eyeballs algorithm, set to None. interleave - “First Address Family Count” as defined in RFC 8305 loop - Optional event loop. + tcp_sockopts - List of tuples of sockopts applied to underlying + socket """ allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"tcp"}) @@ -853,6 +861,7 @@ def __init__( timeout_ceil_threshold: float = 5, happy_eyeballs_delay: Optional[float] = 0.25, interleave: Optional[int] = None, + tcp_sockopts: Iterable[Tuple[int, int, Union[int, Buffer]]] = [], ): super().__init__( keepalive_timeout=keepalive_timeout, @@ -879,6 +888,7 @@ def __init__( self._happy_eyeballs_delay = happy_eyeballs_delay self._interleave = interleave self._resolve_host_tasks: Set["asyncio.Task[List[ResolveResult]]"] = set() + self._tcp_sockopts = tcp_sockopts def close(self) -> Awaitable[None]: """Close all ongoing DNS calls.""" @@ -1120,6 +1130,8 @@ async def _wrap_create_connection( interleave=self._interleave, loop=self._loop, ) + for sockopt in self._tcp_sockopts: + sock.setsockopt(*sockopt) connection = await self._loop.create_connection( *args, **kwargs, sock=sock ) diff --git a/docs/client_advanced.rst b/docs/client_advanced.rst index 
2d00418ffac..eeb0ee98574 100644 --- a/docs/client_advanced.rst +++ b/docs/client_advanced.rst @@ -461,6 +461,21 @@ If your HTTP server uses UNIX domain sockets you can use session = aiohttp.ClientSession(connector=conn) +Setting socket options +^^^^^^^^^^^^^^^^^^^^^^ + +Socket options passed to the :class:`~aiohttp.TCPConnector` will be passed +to the underlying socket when creating a connection. For example, we may +want to change the conditions under which we consider a connection dead. +The following would change that to 9*7200 = 18 hours:: + + import socket + + conn = aiohttp.TCPConnector(tcp_sockopts=[(socket.SOL_SOCKET, socket.SO_KEEPALIVE, True), + (socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 7200), + (socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 9) ]) + + Named pipes in Windows ^^^^^^^^^^^^^^^^^^^^^^ diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 013c43a13e4..1e49b014007 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -1144,7 +1144,8 @@ is controlled by *force_close* constructor's parameter). resolver=None, keepalive_timeout=sentinel, \ force_close=False, limit=100, limit_per_host=0, \ enable_cleanup_closed=False, timeout_ceil_threshold=5, \ - happy_eyeballs_delay=0.25, interleave=None, loop=None) + happy_eyeballs_delay=0.25, interleave=None, loop=None, \ + tcp_sockopts=[]) Connector for working with *HTTP* and *HTTPS* via *TCP* sockets. @@ -1265,6 +1266,12 @@ is controlled by *force_close* constructor's parameter). .. versionadded:: 3.10 + :param list tcp_sockopts: options applied to the socket when a connection is + created. This should be a list of 3-tuples, each a ``(level, optname, value)``. + Each tuple is deconstructed and passed verbatim to ``<socket>.setsockopt``. + + .. versionadded:: 3.12 + .. attribute:: family *TCP* socket family e.g. 
:data:`socket.AF_INET` or diff --git a/tests/test_connector.py b/tests/test_connector.py index e79b36a673d..b7531361287 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -3581,6 +3581,29 @@ def test_connect() -> Literal[True]: assert raw_response_list == [True, True] +async def test_tcp_connector_setsockopts( + loop: asyncio.AbstractEventLoop, start_connection: mock.AsyncMock +) -> None: + """Check that sockopts get passed to socket""" + conn = aiohttp.TCPConnector( + tcp_sockopts=[(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 2)] + ) + + with mock.patch.object( + conn._loop, "create_connection", autospec=True, spec_set=True + ) as create_connection: + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: + start_connection.return_value = s + create_connection.return_value = mock.Mock(), mock.Mock() + + req = ClientRequest("GET", URL("https://127.0.0.1:443"), loop=loop) + + with closing(await conn.connect(req, [], ClientTimeout())): + assert s.getsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT) == 2 + + await conn.close() + + def test_default_ssl_context_creation_without_ssl() -> None: """Verify _make_ssl_context does not raise when ssl is not available.""" with mock.patch.object(connector_module, "ssl", None): From c8143c317c9403f4a5f58cd17056508c756c26a6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 24 Feb 2025 12:28:21 +0000 Subject: [PATCH 1227/1511] Bump identify from 2.6.7 to 2.6.8 (#10489) Bumps [identify](https://github.com/pre-commit/identify) from 2.6.7 to 2.6.8. 
<details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pre-commit/identify/commit/f905a2e20c85a323b0ad6d8fd56bd57dcecb95a6"><code>f905a2e</code></a> v2.6.8</li> <li><a href="https://github.com/pre-commit/identify/commit/6d9a7b62e3bba286e47f9c42c708fc8eeda39e85"><code>6d9a7b6</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/identify/issues/511">#511</a> from cidlik/main</li> <li><a href="https://github.com/pre-commit/identify/commit/9f16bfa253e7632b3a7cc5cea5598f5836e36ee1"><code>9f16bfa</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/identify/issues/510">#510</a> from jodood/feature/sas</li> <li><a href="https://github.com/pre-commit/identify/commit/c10063193f43ab9f762a835835d7d2761e9d8d94"><code>c100631</code></a> extensions: Introduce wsdl</li> <li><a href="https://github.com/pre-commit/identify/commit/4de539aa59bb4128665cfcb0428344723085ae56"><code>4de539a</code></a> feat: Register sas as an extension</li> <li><a href="https://github.com/pre-commit/identify/commit/3b170313bf0e2e9b7c31a190cf04b95c7c8cc0dc"><code>3b17031</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/identify/issues/509">#509</a> from pre-commit/pre-commit-ci-update-config</li> <li><a href="https://github.com/pre-commit/identify/commit/20eb272d4e968ec9e35e6c558fb0e37ddce5f33c"><code>20eb272</code></a> [pre-commit.ci] pre-commit autoupdate</li> <li><a href="https://github.com/pre-commit/identify/commit/9ae7182ad4db05a4e033f8ffb3b74766ee0f75a8"><code>9ae7182</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/identify/issues/507">#507</a> from pre-commit/pre-commit-ci-update-config</li> <li><a href="https://github.com/pre-commit/identify/commit/ce5c8d9c921733acc39bc8aaf4330e929ee1cf34"><code>ce5c8d9</code></a> [pre-commit.ci] pre-commit autoupdate</li> <li>See full diff in <a href="https://github.com/pre-commit/identify/compare/v2.6.7...v2.6.8">compare view</a></li> 
</ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=identify&package-manager=pip&previous-version=2.6.7&new-version=2.6.8)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index adfdd34ce8d..acac48ef15f 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -88,7 +88,7 @@ gidgethub==5.3.0 # via cherry-picker gunicorn==23.0.0 # via -r requirements/base.in -identify==2.6.7 +identify==2.6.8 # via pre-commit idna==3.3 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 3d536fc0a41..0ea100e733e 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -86,7 +86,7 @@ gidgethub==5.3.0 # via cherry-picker gunicorn==23.0.0 # via -r requirements/base.in -identify==2.6.7 +identify==2.6.8 # via pre-commit idna==3.4 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index a8195a21d4b..ff0677c855a 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -33,7 +33,7 @@ forbiddenfruit==0.1.4 # via blockbuster freezegun==1.5.1 # via -r requirements/lint.in -identify==2.6.7 +identify==2.6.8 # via pre-commit idna==3.7 # via trustme From 
94924a32f6e69cabeaff142167458a6c4cd63e90 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 24 Feb 2025 12:36:33 +0000 Subject: [PATCH 1228/1511] Bump blockbuster from 1.5.22 to 1.5.23 (#10490) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [//]: # (dependabot-start) ⚠️ **Dependabot is rebasing this PR** ⚠️ Rebasing might not happen immediately, so don't worry if this takes some time. Note: if you make any changes to this PR yourself, they will take precedence over the rebase. --- [//]: # (dependabot-end) Bumps [blockbuster](https://github.com/cbornet/blockbuster) from 1.5.22 to 1.5.23. <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/cbornet/blockbuster/commit/2d136bad20eb5644af39d34f897e5e3fd837a905"><code>2d136ba</code></a> Exclude subprocess_exec</li> <li>See full diff in <a href="https://github.com/cbornet/blockbuster/compare/v1.5.22...v1.5.23">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=blockbuster&package-manager=pip&previous-version=1.5.22&new-version=1.5.23)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index acac48ef15f..591d7b5af31 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -26,7 +26,7 @@ attrs==25.1.0 # via -r requirements/runtime-deps.in 
babel==2.17.0 # via sphinx -blockbuster==1.5.22 +blockbuster==1.5.23 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 0ea100e733e..78e0fdec604 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -26,7 +26,7 @@ attrs==25.1.0 # via -r requirements/runtime-deps.in babel==2.17.0 # via sphinx -blockbuster==1.5.22 +blockbuster==1.5.23 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index ff0677c855a..26b62bff3fc 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -10,7 +10,7 @@ annotated-types==0.7.0 # via pydantic async-timeout==5.0.1 # via valkey -blockbuster==1.5.22 +blockbuster==1.5.23 # via -r requirements/lint.in cffi==1.17.1 # via diff --git a/requirements/test.txt b/requirements/test.txt index 2a71315b0f7..a4f30c80b76 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -16,7 +16,7 @@ async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in attrs==25.1.0 # via -r requirements/runtime-deps.in -blockbuster==1.5.22 +blockbuster==1.5.23 # via -r requirements/test.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in From 6f09c515993ce754f009428df55b28bb77d202a6 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Mon, 24 Feb 2025 10:04:30 -0500 Subject: [PATCH 1229/1511] Release 3.11.13 (#10491) --- CHANGES.rst | 53 +++++++++++++++++++++++++++++++++++++ CHANGES/10422.misc.rst | 3 --- CHANGES/10423.packaging.rst | 1 - CHANGES/10434.bugfix.rst | 2 -- CHANGES/10464.bugfix.rst | 1 - aiohttp/__init__.py | 2 +- 6 files changed, 54 insertions(+), 8 deletions(-) delete mode 100644 CHANGES/10422.misc.rst delete mode 100644 CHANGES/10423.packaging.rst delete mode 100644 CHANGES/10434.bugfix.rst delete mode 100644 CHANGES/10464.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index 104dd7a746d..39c45196c26 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,59 @@ .. towncrier release notes start +3.11.13 (2025-02-24) +==================== + +Bug fixes +--------- + +- Removed a break statement inside the finally block in :py:class:`~aiohttp.web.RequestHandler` + -- by :user:`Cycloctane`. + + + *Related issues and pull requests on GitHub:* + :issue:`10434`. + + + +- Changed connection creation to explicitly close sockets if an exception is raised in the event loop's ``create_connection`` method -- by :user:`top-oai`. + + + *Related issues and pull requests on GitHub:* + :issue:`10464`. + + + + +Packaging updates and notes for downstreams +------------------------------------------- + +- Fixed test ``test_write_large_payload_deflate_compression_data_in_eof_writelines`` failing with Python 3.12.9+ or 3.13.2+ -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10423`. + + + + +Miscellaneous internal changes +------------------------------ + +- Added human-readable error messages to the exceptions for WebSocket disconnects due to PONG not being received -- by :user:`bdraco`. + + Previously, the error messages were empty strings, which made it hard to determine what went wrong. + + + *Related issues and pull requests on GitHub:* + :issue:`10422`. 
+ + + + +---- + + 3.11.12 (2025-02-05) ==================== diff --git a/CHANGES/10422.misc.rst b/CHANGES/10422.misc.rst deleted file mode 100644 index 7ecb1c0e2e2..00000000000 --- a/CHANGES/10422.misc.rst +++ /dev/null @@ -1,3 +0,0 @@ -Added human-readable error messages to the exceptions for WebSocket disconnects due to PONG not being received -- by :user:`bdraco`. - -Previously, the error messages were empty strings, which made it hard to determine what went wrong. diff --git a/CHANGES/10423.packaging.rst b/CHANGES/10423.packaging.rst deleted file mode 100644 index 6cf58c5a10b..00000000000 --- a/CHANGES/10423.packaging.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed test ``test_write_large_payload_deflate_compression_data_in_eof_writelines`` failing with Python 3.12.9+ or 3.13.2+ -- by :user:`bdraco`. diff --git a/CHANGES/10434.bugfix.rst b/CHANGES/10434.bugfix.rst deleted file mode 100644 index c4bc50dc6aa..00000000000 --- a/CHANGES/10434.bugfix.rst +++ /dev/null @@ -1,2 +0,0 @@ -Avoid break statement inside the finally block in :py:class:`~aiohttp.web.RequestHandler` --- by :user:`Cycloctane`. diff --git a/CHANGES/10464.bugfix.rst b/CHANGES/10464.bugfix.rst deleted file mode 100644 index 4e21000a317..00000000000 --- a/CHANGES/10464.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Changed connection creation to explicitly close sockets if an exception is raised in the event loop's ``create_connection`` method -- by :user:`top-oai`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 6652a0b979d..786eed63650 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.13.dev0" +__version__ = "3.11.13" from typing import TYPE_CHECKING, Tuple From 613a3f0a13ef2b7fd567c09651ac344e3fd50e08 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Mon, 24 Feb 2025 16:25:50 +0000 Subject: [PATCH 1230/1511] Increment version to 3.11.14.dev0 --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 786eed63650..96eced5960d 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.13" +__version__ = "3.11.14.dev0" from typing import TYPE_CHECKING, Tuple From f4cdb6e347a5e3da9f3ccc0c7b4d0f4d29c66186 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 26 Feb 2025 11:21:14 +0100 Subject: [PATCH 1231/1511] [PR #10493/8e8fa959 backport][3.12] Document adjustments to the release process (#10496) **This is a backport of PR #10493 as merged into master (8e8fa959e811f95c73bf09966f534e5abf3c452e).** - Adding some notes here so I do not forget the social media posting - Add note to check RTD to verify the changelog looks good - Sign the tags Co-authored-by: J. Nick Koston <nick@koston.org> --- docs/contributing-admins.rst | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/docs/contributing-admins.rst b/docs/contributing-admins.rst index acfaebc0e97..b17cbe1019a 100644 --- a/docs/contributing-admins.rst +++ b/docs/contributing-admins.rst @@ -21,9 +21,9 @@ To create a new release: #. Run ``towncrier``. #. Check and cleanup the changes in ``CHANGES.rst``. #. Checkout a new branch: e.g. ``git checkout -b release/v3.8.6`` -#. Commit and create a PR. Once PR is merged, continue. +#. Commit and create a PR. Verify the changelog and release notes look good on Read the Docs. Once PR is merged, continue. #. Go back to the release branch: e.g. ``git checkout 3.8 && git pull`` -#. Add a tag: e.g. ``git tag -a v3.8.6 -m 'Release 3.8.6'`` +#. Add a tag: e.g. ``git tag -a v3.8.6 -m 'Release 3.8.6' -s`` #. Push the tag: e.g. ``git push origin v3.8.6`` #. 
Monitor CI to ensure release process completes without errors. @@ -49,6 +49,10 @@ first merge into the newer release branch (e.g. 3.8 into 3.9) and then to master Back on the original release branch, bump the version number and append ``.dev0`` in ``__init__.py``. +Post the release announcement to social media: + - BlueSky: https://bsky.app/profile/aiohttp.org and re-post to https://bsky.app/profile/aio-libs.org + - Mastodon: https://fosstodon.org/@aiohttp and re-post to https://fosstodon.org/@aio_libs + If doing a minor release: #. Create a new release branch for future features to go to: e.g. ``git checkout -b 3.10 3.9 && git push`` From 40fe535ca81be698eafa6f43f9ae2cb19016b795 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 26 Feb 2025 11:21:37 +0100 Subject: [PATCH 1232/1511] [PR #10493/8e8fa959 backport][3.11] Document adjustments to the release process (#10495) **This is a backport of PR #10493 as merged into master (8e8fa959e811f95c73bf09966f534e5abf3c452e).** - Adding some notes here so I do not forget the social media posting - Add note to check RTD to verify the changelog looks good - Sign the tags Co-authored-by: J. Nick Koston <nick@koston.org> --- docs/contributing-admins.rst | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/docs/contributing-admins.rst b/docs/contributing-admins.rst index acfaebc0e97..b17cbe1019a 100644 --- a/docs/contributing-admins.rst +++ b/docs/contributing-admins.rst @@ -21,9 +21,9 @@ To create a new release: #. Run ``towncrier``. #. Check and cleanup the changes in ``CHANGES.rst``. #. Checkout a new branch: e.g. ``git checkout -b release/v3.8.6`` -#. Commit and create a PR. Once PR is merged, continue. +#. Commit and create a PR. Verify the changelog and release notes look good on Read the Docs. Once PR is merged, continue. #. Go back to the release branch: e.g. ``git checkout 3.8 && git pull`` -#. Add a tag: e.g. 
``git tag -a v3.8.6 -m 'Release 3.8.6'`` +#. Add a tag: e.g. ``git tag -a v3.8.6 -m 'Release 3.8.6' -s`` #. Push the tag: e.g. ``git push origin v3.8.6`` #. Monitor CI to ensure release process completes without errors. @@ -49,6 +49,10 @@ first merge into the newer release branch (e.g. 3.8 into 3.9) and then to master Back on the original release branch, bump the version number and append ``.dev0`` in ``__init__.py``. +Post the release announcement to social media: + - BlueSky: https://bsky.app/profile/aiohttp.org and re-post to https://bsky.app/profile/aio-libs.org + - Mastodon: https://fosstodon.org/@aiohttp and re-post to https://fosstodon.org/@aio_libs + If doing a minor release: #. Create a new release branch for future features to go to: e.g. ``git checkout -b 3.10 3.9 && git push`` From 6ede5e021d755eef5b7bb1575252375a57d2a75c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 26 Feb 2025 11:02:19 +0000 Subject: [PATCH 1233/1511] Bump setuptools from 75.8.0 to 75.8.1 (#10498) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [setuptools](https://github.com/pypa/setuptools) from 75.8.0 to 75.8.1. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/setuptools/blob/main/NEWS.rst">setuptools's changelog</a>.</em></p> <blockquote> <h1>v75.8.1</h1> <h2>Bugfixes</h2> <ul> <li>Fix wheel file naming to follow binary distribution specification -- by :user:<code>di</code> (<a href="https://redirect.github.com/pypa/setuptools/issues/4766">#4766</a>)</li> <li>Fixed crash generating error message printed when building wheels for the free-threaded build using the limited API. -- by :user:<code>ngoldbaum</code> (<a href="https://redirect.github.com/pypa/setuptools/issues/4809">#4809</a>)</li> <li>Fix documentation for recent CFLAGS distutils change. 
-- by :user:<code>thesamesam</code> (<a href="https://redirect.github.com/pypa/setuptools/issues/4836">#4836</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/setuptools/commit/56c055b653f080544f490e198605974c71b6fe35"><code>56c055b</code></a> Bump version: 75.8.0 → 75.8.1</li> <li><a href="https://github.com/pypa/setuptools/commit/e46cfbf739f6c1149e4401eab9e6b7c9079d34f0"><code>e46cfbf</code></a> Update CFLAGS docs to reflect distutils change (<a href="https://redirect.github.com/pypa/setuptools/issues/4846">#4846</a>)</li> <li><a href="https://github.com/pypa/setuptools/commit/d31c5b9fc3d79e2b95f6809384e730813b0ba518"><code>d31c5b9</code></a> Merge <a href="https://github.com/jaraco/skeleton">https://github.com/jaraco/skeleton</a></li> <li><a href="https://github.com/pypa/setuptools/commit/5b5b2ab55c9454f756a962ec9584aee549f10414"><code>5b5b2ab</code></a> Update CFLAGS docs to reflect distutils change</li> <li><a href="https://github.com/pypa/setuptools/commit/ba243756233d0afe944db6e02ddcb70064dcd22c"><code>ba24375</code></a> [CI] Address problems with <code>cygwin</code> (<a href="https://redirect.github.com/pypa/setuptools/issues/4832">#4832</a>)</li> <li><a href="https://github.com/pypa/setuptools/commit/9d6ade8c85fa799700bdcdf4572cbc2755a1732e"><code>9d6ade8</code></a> Try to install tox using 'pip' on cygwin</li> <li><a href="https://github.com/pypa/setuptools/commit/32332e4728f46e9e37b1c775bbcc6c4ade282ed0"><code>32332e4</code></a> Attempt to install missing dependencies</li> <li><a href="https://github.com/pypa/setuptools/commit/94a84c2f90f2c36b9f8ec3631ed7cba80b6fad7c"><code>94a84c2</code></a> Add command for debugging purposes</li> <li><a href="https://github.com/pypa/setuptools/commit/6f809a0741386fad5146d63c656049d1473cc5c7"><code>6f809a0</code></a> Attempt to solve problems with cygwin in the CI</li> <li><a 
href="https://github.com/pypa/setuptools/commit/aee344d781920bba42ddbee4b4b44af29d7bab6e"><code>aee344d</code></a> Removing dependabot config. Closes <a href="https://redirect.github.com/jaraco/skeleton/issues/156">jaraco/skeleton#156</a></li> <li>Additional commits viewable in <a href="https://github.com/pypa/setuptools/compare/v75.8.0...v75.8.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=setuptools&package-manager=pip&previous-version=75.8.0&new-version=75.8.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 591d7b5af31..69a93b02ce3 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -288,7 +288,7 @@ yarl==1.18.3 # The following packages are considered to be unsafe in a requirements file: pip==25.0.1 # via pip-tools -setuptools==75.8.0 +setuptools==75.8.1 # via # incremental # pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index 78e0fdec604..cb00e041889 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -279,7 +279,7 @@ yarl==1.18.3 # The following packages are considered to be unsafe in a requirements file: pip==25.0.1 # via pip-tools -setuptools==75.8.0 +setuptools==75.8.1 # via # incremental # pip-tools diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 4be7358fe8d..a5408f08014 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -76,5 +76,5 @@ urllib3==2.3.0 # 
via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==75.8.0 +setuptools==75.8.1 # via incremental diff --git a/requirements/doc.txt b/requirements/doc.txt index 8324f007e3a..a69dbe4b14d 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -69,5 +69,5 @@ urllib3==2.3.0 # via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==75.8.0 +setuptools==75.8.1 # via incremental From 470ae18ab89799a57681e0c5c12d186dcb7411b7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 27 Feb 2025 10:38:28 +0000 Subject: [PATCH 1234/1511] Bump setuptools from 75.8.1 to 75.8.2 (#10499) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [setuptools](https://github.com/pypa/setuptools) from 75.8.1 to 75.8.2. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/setuptools/blob/main/NEWS.rst">setuptools's changelog</a>.</em></p> <blockquote> <h1>v75.8.2</h1> <h2>Bugfixes</h2> <ul> <li>Fixed <code>pkg_resources.require(...)</code> to also consider standardised <code>dist-info</code> directories. 
(<a href="https://redirect.github.com/pypa/setuptools/issues/4856">#4856</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/setuptools/commit/9aa3771f57f070bd13b9a3e83bd469413afc98c7"><code>9aa3771</code></a> Bump version: 75.8.1 → 75.8.2</li> <li><a href="https://github.com/pypa/setuptools/commit/c0f39c04c5e7632b3f1f4800c8776f5d4709fabf"><code>c0f39c0</code></a> Update WorkingSet.find to consider standardised .dist-info directory names (#...</li> <li><a href="https://github.com/pypa/setuptools/commit/edca1811df4daa15d18eb06d0dd5da11eda8b3af"><code>edca181</code></a> Add news fragment</li> <li><a href="https://github.com/pypa/setuptools/commit/22355fcb3337317d4f6ca675aa60947692c9af3a"><code>22355fc</code></a> Also consider '-' separator in tests</li> <li><a href="https://github.com/pypa/setuptools/commit/8280e2c4fc2f32a5da1ec3ba322c534e2f5369a3"><code>8280e2c</code></a> Attempt to solve path normalisation issue in windows tests</li> <li><a href="https://github.com/pypa/setuptools/commit/a3718c8099235fb3b40d013f97530d5aeb5ba0ce"><code>a3718c8</code></a> Slightly change test, so that we are sure about the correct distribution bein...</li> <li><a href="https://github.com/pypa/setuptools/commit/23b73aaef2cb95650a997f80ea74c8d51bc5f01c"><code>23b73aa</code></a> Fix mypy errors</li> <li><a href="https://github.com/pypa/setuptools/commit/2c242238f536c4b942812632ba9dd0b1c48b4b85"><code>2c24223</code></a> Update WorkingSet.find to consider standardised dist-info names</li> <li><a href="https://github.com/pypa/setuptools/commit/79d6e46d4949a77238ca8884ed8137d5d6175d31"><code>79d6e46</code></a> Add regression test for issue 4853</li> <li>See full diff in <a href="https://github.com/pypa/setuptools/compare/v75.8.1...v75.8.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=setuptools&package-manager=pip&previous-version=75.8.1&new-version=75.8.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 69a93b02ce3..265115d0b1f 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -288,7 +288,7 @@ yarl==1.18.3 # The following packages are considered to be unsafe in a requirements file: pip==25.0.1 # via pip-tools -setuptools==75.8.1 +setuptools==75.8.2 # via # incremental # pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index cb00e041889..2cb8d351b72 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -279,7 +279,7 @@ yarl==1.18.3 # The following packages are considered to be unsafe in a requirements file: pip==25.0.1 # via pip-tools -setuptools==75.8.1 +setuptools==75.8.2 # via # incremental # pip-tools diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index a5408f08014..687b6827211 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -76,5 +76,5 @@ urllib3==2.3.0 # 
via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==75.8.1 +setuptools==75.8.2 # via incremental diff --git a/requirements/doc.txt b/requirements/doc.txt index a69dbe4b14d..ab132b5678e 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -69,5 +69,5 @@ urllib3==2.3.0 # via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==75.8.1 +setuptools==75.8.2 # via incremental From f1a4d2eec84257981cf7e9e3e4ab094aabc561cd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 28 Feb 2025 10:41:50 +0000 Subject: [PATCH 1235/1511] Bump actions/cache from 4.2.1 to 4.2.2 (#10503) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/cache](https://github.com/actions/cache) from 4.2.1 to 4.2.2. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/actions/cache/releases">actions/cache's releases</a>.</em></p> <blockquote> <h2>v4.2.2</h2> <h2>What's Changed</h2> <blockquote> <p>[!IMPORTANT] As a reminder, there were important backend changes to release v4.2.0, see <a href="https://github.com/actions/cache/releases/tag/v4.2.0">those release notes</a> and <a href="https://github.com/actions/cache/discussions/1510">the announcement</a> for more details.</p> </blockquote> <ul> <li>Bump <code>@​actions/cache</code> to v4.0.2 by <a href="https://github.com/robherley"><code>@​robherley</code></a> in <a href="https://redirect.github.com/actions/cache/pull/1560">actions/cache#1560</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/actions/cache/compare/v4.2.1...v4.2.2">https://github.com/actions/cache/compare/v4.2.1...v4.2.2</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/actions/cache/blob/main/RELEASES.md">actions/cache's 
changelog</a>.</em></p> <blockquote> <h3>4.2.2</h3> <ul> <li>Bump <code>@actions/cache</code> to v4.0.2</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/actions/cache/commit/d4323d4df104b026a6aa633fdb11d772146be0bf"><code>d4323d4</code></a> Merge pull request <a href="https://redirect.github.com/actions/cache/issues/1560">#1560</a> from actions/robherley/v4.2.2</li> <li><a href="https://github.com/actions/cache/commit/da26677639ccfb4615f1acc52d1fc3dc89152490"><code>da26677</code></a> bump <code>@​actions/cache</code> to v4.0.2, prep for v4.2.2 release</li> <li><a href="https://github.com/actions/cache/commit/7921ae235bdcb376cc8f22558dc5f8ddc3c3c2f9"><code>7921ae2</code></a> Merge pull request <a href="https://redirect.github.com/actions/cache/issues/1557">#1557</a> from actions/robherley/ia-workflow-released</li> <li><a href="https://github.com/actions/cache/commit/393773170624981bfaa3aac1cb736e3004eac1de"><code>3937731</code></a> Update publish-immutable-actions.yml</li> <li>See full diff in <a href="https://github.com/actions/cache/compare/v4.2.1...v4.2.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/cache&package-manager=github_actions&previous-version=4.2.1&new-version=4.2.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 95f9fc7c631..a4f6d03c806 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -47,7 +47,7 @@ jobs: with: python-version: 3.11 - name: Cache PyPI - uses: actions/cache@v4.2.1 + uses: actions/cache@v4.2.2 with: key: pip-lint-${{ 
hashFiles('requirements/*.txt') }} path: ~/.cache/pip @@ -99,7 +99,7 @@ jobs: with: submodules: true - name: Cache llhttp generated files - uses: actions/cache@v4.2.1 + uses: actions/cache@v4.2.2 id: cache with: key: llhttp-${{ hashFiles('vendor/llhttp/package*.json', 'vendor/llhttp/src/**/*') }} @@ -163,7 +163,7 @@ jobs: echo "dir=$(pip cache dir)" >> "${GITHUB_OUTPUT}" shell: bash - name: Cache PyPI - uses: actions/cache@v4.2.1 + uses: actions/cache@v4.2.2 with: key: pip-ci-${{ runner.os }}-${{ matrix.pyver }}-${{ matrix.no-extensions }}-${{ hashFiles('requirements/*.txt') }} path: ${{ steps.pip-cache.outputs.dir }} From 5124966aab34cf28ff0a9e5adbd976327323673c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 28 Feb 2025 10:57:42 +0000 Subject: [PATCH 1236/1511] Bump sphinxcontrib-towncrier from 0.4.0a0 to 0.5.0a0 (#10504) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [sphinxcontrib-towncrier](https://github.com/sphinx-contrib/sphinxcontrib-towncrier) from 0.4.0a0 to 0.5.0a0. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/sphinx-contrib/sphinxcontrib-towncrier/releases">sphinxcontrib-towncrier's releases</a>.</em></p> <blockquote> <h2>v0.5.0a0</h2> <!-- raw HTML omitted --> <h1>Release v0.5.0a0</h1> <p>This release is published to <a href="https://pypi.org/project/sphinxcontrib-towncrier/0.5.0a0">https://pypi.org/project/sphinxcontrib-towncrier/0.5.0a0</a>.</p> <p>This release has been produced by the following workflow run: <a href="https://github.com/sphinx-contrib/sphinxcontrib-towncrier/actions/runs/13579622041">https://github.com/sphinx-contrib/sphinxcontrib-towncrier/actions/runs/13579622041</a>.</p> <h2>🐛 What's Fixed</h2> <p>The main improvement is that <a href="https://github.com/bennyrowland"><code>@​bennyrowland</code></a><a href="https://github.com/sponsors/bennyrowland">💰</a> added support for Towncrier 24.7.0rc1 and higher via <a href="https://redirect.github.com/sphinx-contrib/sphinxcontrib-towncrier/issues/96">#96</a>. Towncrier versions of year 2024 are now integrated into the CI as well (<a href="https://redirect.github.com/sphinx-contrib/sphinxcontrib-towncrier/issues/93">#93</a>).</p> <p>There are a few more corner case adjustments and fixes that we didn't bother recording as they aren't met in the “happy path”. 
They handle missing configs, files and directories.</p> <h2>🛠️ Internal Updates</h2> <p><a href="https://github.com/dvzrv"><code>@​dvzrv</code></a><a href="https://github.com/sponsors/dvzrv">💰</a> upgraded <code>setuptools-scm</code> in packaging to rely on modern handling of <code>git archives</code> in <a href="https://redirect.github.com/sphinx-contrib/sphinxcontrib-towncrier/issues/80">#80</a></p> <p><a href="https://github.com/webknjaz"><code>@​webknjaz</code></a><a href="https://github.com/sponsors/webknjaz">💰</a> heavily refactored the CI/CD, introducing a reusable workflow generalizing tox invocations @ <a href="https://redirect.github.com/sphinx-contrib/sphinxcontrib-towncrier/pull/106">sphinx-contrib/sphinxcontrib-towncrier#106</a> and a few direct commits.</p> <p>As a part of the testing improvements, the code coverage level has been raised to about 20% higher than before. Additionally, a typing has been fixed here and there.</p> <h2>☣️ Anything else I might care about?</h2> <blockquote> <p>[!caution] This release heavily reduced the support matrix. The minimum supported Python version is now 3.9. 
And the lowest required Towncrier is 23.</p> </blockquote> <h2>💪 New Contributors</h2> <ul> <li><a href="https://github.com/dvzrv"><code>@​dvzrv</code></a><a href="https://github.com/sponsors/dvzrv">💰</a> made their first contribution in <a href="https://redirect.github.com/sphinx-contrib/sphinxcontrib-towncrier/issues/80">#80</a></li> <li><a href="https://github.com/bennyrowland"><code>@​bennyrowland</code></a><a href="https://github.com/sponsors/bennyrowland">💰</a> made their first contribution in <a href="https://redirect.github.com/sphinx-contrib/sphinxcontrib-towncrier/issues/96">#96</a></li> </ul> <p><strong>🪞 Full Diff</strong>: <a href="https://github.com/sphinx-contrib/sphinxcontrib-towncrier/compare/v0.4.0a0...v0.5.0a0">https://github.com/sphinx-contrib/sphinxcontrib-towncrier/compare/v0.4.0a0...v0.5.0a0</a></p> <p><strong>🧔‍♂️ Release Manager:</strong> <a href="https://github.com/sponsors/webknjaz"><code>@​webknjaz</code></a> <a href="https://stand-with-ukraine.pp.ua">🇺🇦</a></p> <p><strong>🙏 Special Thanks</strong> to <a href="https://github.com/bennyrowland"><code>@​bennyrowland</code></a><a href="https://github.com/sponsors/bennyrowland">💰</a> for driving the Towncrier compatibility research and proposing fixes! 
They laid the foundation for this entire release.</p> <p><strong>💬 Discuss</strong> <a href="https://bsky.app/profile/webknjaz.me/post/3lj7dd2lus22y">on Bluesky 🦋</a>, <a href="https://mastodon.social/@webknjaz/114079069834270824">on Mastodon 🐘</a> and <a href="https://github.com/sphinx-contrib/sphinxcontrib-towncrier/discussions/107">on GitHub</a>.</p> <p><a href="https://github.com/sponsors/webknjaz"><img src="https://img.shields.io/badge/%40webknjaz-transparent?logo=githubsponsors&logoColor=%23EA4AAA&label=Sponsor&color=2a313c" alt="GH Sponsors badge" /></a></p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/sphinx-contrib/sphinxcontrib-towncrier/commit/8f76a5515289b5d0d5a82a445bbaa74b25a18648"><code>8f76a55</code></a> 🧪 Skip running pylint @ pre-commit.ci</li> <li><a href="https://github.com/sphinx-contrib/sphinxcontrib-towncrier/commit/70143285a705ee65fcde454d67f83dcca732c624"><code>7014328</code></a> 🧪 Bump Towncrier to v24 @ pre-commit</li> <li><a href="https://github.com/sphinx-contrib/sphinxcontrib-towncrier/commit/28c6db95f6f45f2dd06e42ee07a45b86a65abbe8"><code>28c6db9</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/sphinx-contrib/sphinxcontrib-towncrier/issues/105">#105</a>)</li> <li><a href="https://github.com/sphinx-contrib/sphinxcontrib-towncrier/commit/20c16675da14eb5eb63e600d628faee532632586"><code>20c1667</code></a> Bump jinja2 from 3.1.4 to 3.1.5 in /docs (<a href="https://redirect.github.com/sphinx-contrib/sphinxcontrib-towncrier/issues/104">#104</a>)</li> <li><a href="https://github.com/sphinx-contrib/sphinxcontrib-towncrier/commit/13b228cb260ab843c3aca1d19bebc1b89f929453"><code>13b228c</code></a> Merge branch 'maintenance/tox4-win'</li> <li><a href="https://github.com/sphinx-contrib/sphinxcontrib-towncrier/commit/ac010591ed8b4cf5ebcb1bd480bfd25b79584ec7"><code>ac01059</code></a> 🚑 Force UTF-8 mode in CPython calling tox</li> <li><a 
href="https://github.com/sphinx-contrib/sphinxcontrib-towncrier/commit/a93edfe1027b05bfe9c60fd5caa5e9b3be1d76d1"><code>a93edfe</code></a> 🧪📦 Sync expected dist basename computation</li> <li><a href="https://github.com/sphinx-contrib/sphinxcontrib-towncrier/commit/7b6a89a99af8f2265735a6fe442bf1df45709094"><code>7b6a89a</code></a> 🧪 Move cron runs into a separate GHA workflow</li> <li><a href="https://github.com/sphinx-contrib/sphinxcontrib-towncrier/commit/7bda7eeef898fb4a33f02ccb2edbfc172b96d5de"><code>7bda7ee</code></a> Merge branch 'maintenance/gha-reusable-tox'</li> <li><a href="https://github.com/sphinx-contrib/sphinxcontrib-towncrier/commit/26c1d04af75c2346ab4a65aa55bd3c5dc7e3397a"><code>26c1d04</code></a> [pre-commit.ci] pre-commit autoupdate</li> <li>Additional commits viewable in <a href="https://github.com/sphinx-contrib/sphinxcontrib-towncrier/compare/v0.4.0a0...v0.5.0a0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=sphinxcontrib-towncrier&package-manager=pip&previous-version=0.4.0a0&new-version=0.5.0a0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 265115d0b1f..e1081b455f1 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -232,7 +232,7 @@ sphinxcontrib-serializinghtml==2.0.0 # via 
sphinx sphinxcontrib-spelling==8.0.1 ; platform_system != "Windows" # via -r requirements/doc-spelling.in -sphinxcontrib-towncrier==0.4.0a0 +sphinxcontrib-towncrier==0.5.0a0 # via -r requirements/doc.in stamina==24.3.0 # via cherry-picker diff --git a/requirements/dev.txt b/requirements/dev.txt index 2cb8d351b72..d294cc0203e 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -223,7 +223,7 @@ sphinxcontrib-qthelp==2.0.0 # via sphinx sphinxcontrib-serializinghtml==2.0.0 # via sphinx -sphinxcontrib-towncrier==0.4.0a0 +sphinxcontrib-towncrier==0.5.0a0 # via -r requirements/doc.in stamina==24.3.0 # via cherry-picker diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 687b6827211..48b8d4bce60 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -61,7 +61,7 @@ sphinxcontrib-serializinghtml==2.0.0 # via sphinx sphinxcontrib-spelling==8.0.1 ; platform_system != "Windows" # via -r requirements/doc-spelling.in -sphinxcontrib-towncrier==0.4.0a0 +sphinxcontrib-towncrier==0.5.0a0 # via -r requirements/doc.in tomli==2.2.1 # via diff --git a/requirements/doc.txt b/requirements/doc.txt index ab132b5678e..b2fce4162ca 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -54,7 +54,7 @@ sphinxcontrib-qthelp==2.0.0 # via sphinx sphinxcontrib-serializinghtml==2.0.0 # via sphinx -sphinxcontrib-towncrier==0.4.0a0 +sphinxcontrib-towncrier==0.5.0a0 # via -r requirements/doc.in tomli==2.2.1 # via From f5d41407765c76c6e06675b7f0cedef5d14e2b20 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Mar 2025 18:27:21 -1000 Subject: [PATCH 1237/1511] Bump pypa/cibuildwheel from 2.22.0 to 2.23.0 (#10511) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.22.0 to 2.23.0. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pypa/cibuildwheel/releases">pypa/cibuildwheel's releases</a>.</em></p> <blockquote> <h2>v2.23.0</h2> <ul> <li>✨ Adds official support for the new GitHub Actions Arm runners. In fact these worked out-of-the-box, now we include them in our tests and example configs. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2135">#2135</a> via <a href="https://redirect.github.com/pypa/cibuildwheel/issues/2281">#2281</a>)</li> <li>✨ Adds support for building PyPy 3.11 wheels (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2268">#2268</a> via <a href="https://redirect.github.com/pypa/cibuildwheel/issues/2281">#2281</a>)</li> <li>🛠 Adopts the beta pypa/manylinux image for armv7l builds (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2269">#2269</a> via <a href="https://redirect.github.com/pypa/cibuildwheel/issues/2281">#2281</a>)</li> <li>🛠 Dependency updates, including Pyodide 0.27 (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2117">#2117</a> and <a href="https://redirect.github.com/pypa/cibuildwheel/issues/2281">#2281</a>)</li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md">pypa/cibuildwheel's changelog</a>.</em></p> <blockquote> <h3>v2.23.0</h3> <p><em>1 March 2025</em></p> <ul> <li>✨ Adds official support for the new GitHub Actions Arm runners. In fact these worked out-of-the-box, now we include them in our tests and example configs. 
(<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2135">#2135</a> via <a href="https://redirect.github.com/pypa/cibuildwheel/issues/2281">#2281</a>)</li> <li>✨ Adds support for building PyPy 3.11 wheels (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2268">#2268</a> via <a href="https://redirect.github.com/pypa/cibuildwheel/issues/2281">#2281</a>)</li> <li>🛠 Adopts the beta pypa/manylinux image for armv7l builds (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2269">#2269</a> via <a href="https://redirect.github.com/pypa/cibuildwheel/issues/2281">#2281</a>)</li> <li>🛠 Dependency updates, including Pyodide 0.27 (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2117">#2117</a> and <a href="https://redirect.github.com/pypa/cibuildwheel/issues/2281">#2281</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/cibuildwheel/commit/6cccd09a31908ffd175b012fb8bf4e1dbda3bc6c"><code>6cccd09</code></a> Bump version: v2.23.0</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/aa1534d2a3274835efb753e3a0cedea94eb488fa"><code>aa1534d</code></a> chore(2.x): update dependencies (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2294">#2294</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/ee23dd147b0862283ddb4744f91240fb1ddc4b8a"><code>ee23dd1</code></a> fix(test): implement retry for test_container_removed</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/55037e9716e60e89c94226470c51f178af3da5f4"><code>55037e9</code></a> ci/doc: move azure macOS build to <code>macOS-13</code> (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2101">#2101</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/8874ee204820309af33b85e21cc3d336d06a2f73"><code>8874ee2</code></a> Remove specific Python versions from the update-dependencies job</li> <li><a 
href="https://github.com/pypa/cibuildwheel/commit/f76d345e81295537e69528a5df4d1f88c9195b56"><code>f76d345</code></a> chore: bump dependencies</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/5a5e514fbd240b7fe9b66feda05d86c9130ad7da"><code>5a5e514</code></a> Bump to Pyodide 0.27 (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2117">#2117</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/017abbba683629357f1871875b9a616b8bf46044"><code>017abbb</code></a> feature: add PyPy 3.11 (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2268">#2268</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/bb1fd3320e57a5f3d2491612ae9ad3e0b1694742"><code>bb1fd33</code></a> feat: Add support for ubuntu-24.04-arm GHA runner (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2135">#2135</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/98a6f3bf64389f680e35e62dcae589087a711d27"><code>98a6f3b</code></a> chore: use pypa/manylinux for manylinux_2_31_armv7l (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2269">#2269</a>)</li> <li>See full diff in <a href="https://github.com/pypa/cibuildwheel/compare/v2.22.0...v2.23.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pypa/cibuildwheel&package-manager=github_actions&previous-version=2.22.0&new-version=2.23.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index a4f6d03c806..cade6835b92 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -414,7 +414,7 @@ jobs: run: | make cythonize - name: Build wheels - uses: pypa/cibuildwheel@v2.22.0 + uses: pypa/cibuildwheel@v2.23.0 env: CIBW_SKIP: pp* ${{ 
matrix.musl == 'musllinux' && '*manylinux*' || '*musllinux*' }} CIBW_ARCHS_MACOS: x86_64 arm64 universal2 From 29ca84306e3ef658721a2d88b8a0b6dc07a87cd6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Mar 2025 18:27:36 -1000 Subject: [PATCH 1238/1511] Bump pytest from 8.3.4 to 8.3.5 (#10510) Bumps [pytest](https://github.com/pytest-dev/pytest) from 8.3.4 to 8.3.5. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pytest-dev/pytest/releases">pytest's releases</a>.</em></p> <blockquote> <h2>8.3.5</h2> <h1>pytest 8.3.5 (2025-03-02)</h1> <h2>Bug fixes</h2> <ul> <li><a href="https://redirect.github.com/pytest-dev/pytest/issues/11777">#11777</a>: Fixed issue where sequences were still being shortened even with <code>-vv</code> verbosity.</li> <li><a href="https://redirect.github.com/pytest-dev/pytest/issues/12888">#12888</a>: Fixed broken input when using Python 3.13+ and a <code>libedit</code> build of Python, such as on macOS or with uv-managed Python binaries from the <code>python-build-standalone</code> project. This could manifest e.g. 
by a broken prompt when using <code>Pdb</code>, or seeing empty inputs with manual usage of <code>input()</code> and suspended capturing.</li> <li><a href="https://redirect.github.com/pytest-dev/pytest/issues/13026">#13026</a>: Fixed <code>AttributeError</code>{.interpreted-text role="class"} crash when using <code>--import-mode=importlib</code> when top-level directory same name as another module of the standard library.</li> <li><a href="https://redirect.github.com/pytest-dev/pytest/issues/13053">#13053</a>: Fixed a regression in pytest 8.3.4 where, when using <code>--import-mode=importlib</code>, a directory containing py file with the same name would cause an <code>ImportError</code></li> <li><a href="https://redirect.github.com/pytest-dev/pytest/issues/13083">#13083</a>: Fixed issue where pytest could crash if one of the collected directories got removed during collection.</li> </ul> <h2>Improved documentation</h2> <ul> <li> <p><a href="https://redirect.github.com/pytest-dev/pytest/issues/12842">#12842</a>: Added dedicated page about using types with pytest.</p> <p>See <code>types</code>{.interpreted-text role="ref"} for detailed usage.</p> </li> </ul> <h2>Contributor-facing changes</h2> <ul> <li><a href="https://redirect.github.com/pytest-dev/pytest/issues/13112">#13112</a>: Fixed selftest failures in <code>test_terminal.py</code> with Pygments >= 2.19.0</li> <li><a href="https://redirect.github.com/pytest-dev/pytest/issues/13256">#13256</a>: Support for Towncrier versions released in 2024 has been re-enabled when building Sphinx docs -- by <code>webknjaz</code>{.interpreted-text role="user"}.</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pytest-dev/pytest/commit/b55ab2aabb68c0ce94c3903139b062d0c2790152"><code>b55ab2a</code></a> Prepare release version 8.3.5</li> <li><a href="https://github.com/pytest-dev/pytest/commit/e217726d2a0edfaf58eae95bf835b85834b96da3"><code>e217726</code></a> Added 
dedicated page about using types with pytest <a href="https://redirect.github.com/pytest-dev/pytest/issues/12842">#12842</a> (<a href="https://redirect.github.com/pytest-dev/pytest/issues/12963">#12963</a>) (<a href="https://redirect.github.com/pytest-dev/pytest/issues/13260">#13260</a>)</li> <li><a href="https://github.com/pytest-dev/pytest/commit/2fa3f8306c3da4aad7f7349a4947ac37ba6c652f"><code>2fa3f83</code></a> Add more resources and studies to flaky tests page in docs (<a href="https://redirect.github.com/pytest-dev/pytest/issues/13250">#13250</a>) (<a href="https://redirect.github.com/pytest-dev/pytest/issues/13259">#13259</a>)</li> <li><a href="https://github.com/pytest-dev/pytest/commit/e5c2efe3c36199731b41fd68bbf4df5e21404a8b"><code>e5c2efe</code></a> Merge pull request <a href="https://redirect.github.com/pytest-dev/pytest/issues/13256">#13256</a> from webknjaz/maintenance/towncrier-bump (<a href="https://redirect.github.com/pytest-dev/pytest/issues/13258">#13258</a>)</li> <li><a href="https://github.com/pytest-dev/pytest/commit/3419674225a3a7b7d6f93650d75f6de52fe637d5"><code>3419674</code></a> Merge pull request <a href="https://redirect.github.com/pytest-dev/pytest/issues/13187">#13187</a> from pytest-dev/patchback/backports/8.3.x/b4009b319...</li> <li><a href="https://github.com/pytest-dev/pytest/commit/b75cfb162dbb927739698effa3fbcf279655da49"><code>b75cfb1</code></a> Add readline workaround for libedit (<a href="https://redirect.github.com/pytest-dev/pytest/issues/13176">#13176</a>)</li> <li><a href="https://github.com/pytest-dev/pytest/commit/edbfff72a4051ed9c5f3d9b5d6f316b407cb6961"><code>edbfff7</code></a> doc: Clarify capturing .readouterr() return value (<a href="https://redirect.github.com/pytest-dev/pytest/issues/13222">#13222</a>) (<a href="https://redirect.github.com/pytest-dev/pytest/issues/13225">#13225</a>)</li> <li><a href="https://github.com/pytest-dev/pytest/commit/2ebba0063c66b77a7bd171221de059f3b3e47b86"><code>2ebba00</code></a> Merge 
pull request <a href="https://redirect.github.com/pytest-dev/pytest/issues/13199">#13199</a> from jakkdl/tox_docs_no_fetch (<a href="https://redirect.github.com/pytest-dev/pytest/issues/13200">#13200</a>)</li> <li><a href="https://github.com/pytest-dev/pytest/commit/eb6496b79759f9acde581ed9d7a0777a49b5f820"><code>eb6496b</code></a> doc: Change training to remote only (<a href="https://redirect.github.com/pytest-dev/pytest/issues/13196">#13196</a>) (<a href="https://redirect.github.com/pytest-dev/pytest/issues/13197">#13197</a>)</li> <li><a href="https://github.com/pytest-dev/pytest/commit/78cf1f67f707fc07372a89775fd10d2065b5f17a"><code>78cf1f6</code></a> ci: Bump build-and-inspect-python-package (<a href="https://redirect.github.com/pytest-dev/pytest/issues/13188">#13188</a>)</li> <li>Additional commits viewable in <a href="https://github.com/pytest-dev/pytest/compare/8.3.4...8.3.5">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pytest&package-manager=pip&previous-version=8.3.4&new-version=8.3.5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index e1081b455f1..3f5589421be 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -166,7 +166,7 @@ pyproject-hooks==1.2.0 # via # build # pip-tools 
-pytest==8.3.4 +pytest==8.3.5 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index d294cc0203e..0e329c562ec 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -161,7 +161,7 @@ pyproject-hooks==1.2.0 # via # build # pip-tools -pytest==8.3.4 +pytest==8.3.5 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 26b62bff3fc..ceb4fce019d 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -67,7 +67,7 @@ pydantic-core==2.27.2 # via pydantic pygments==2.19.1 # via rich -pytest==8.3.4 +pytest==8.3.5 # via # -r requirements/lint.in # pytest-codspeed diff --git a/requirements/test.txt b/requirements/test.txt index a4f30c80b76..3fcfcbcb869 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -87,7 +87,7 @@ pydantic-core==2.27.2 # via pydantic pygments==2.19.1 # via rich -pytest==8.3.4 +pytest==8.3.5 # via # -r requirements/test.in # pytest-codspeed From 9ccca2585e8447344a47430aaa1bb98baadcee74 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Mar 2025 18:27:54 -1000 Subject: [PATCH 1239/1511] Bump cryptography from 44.0.1 to 44.0.2 (#10509) Bumps [cryptography](https://github.com/pyca/cryptography) from 44.0.1 to 44.0.2. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst">cryptography's changelog</a>.</em></p> <blockquote> <p>44.0.2 - 2025-03-01</p> <pre><code> * We now build wheels for PyPy 3.11. <p>.. 
_v44-0-1:<br /> </code></pre></p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pyca/cryptography/commit/56cfce682c8bd2ee5101b654a429b05d0f610f0e"><code>56cfce6</code></a> 44.0.2 release (<a href="https://redirect.github.com/pyca/cryptography/issues/12537">#12537</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/7b4cc268cb5d95180ab5cfa32d55f6045bb2dcf0"><code>7b4cc26</code></a> Backport PyPy3.11 support (<a href="https://redirect.github.com/pyca/cryptography/issues/12536">#12536</a>)</li> <li>See full diff in <a href="https://github.com/pyca/cryptography/compare/44.0.1...44.0.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=cryptography&package-manager=pip&previous-version=44.0.1&new-version=44.0.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 3f5589421be..dae27da7eda 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -58,7 +58,7 @@ coverage==7.6.12 # via # -r requirements/test.in # pytest-cov -cryptography==44.0.1 +cryptography==44.0.2 # via # pyjwt # trustme diff --git a/requirements/dev.txt b/requirements/dev.txt index 0e329c562ec..594302de133 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -58,7 +58,7 @@ coverage==7.6.12 # via # -r requirements/test.in # pytest-cov -cryptography==44.0.1 +cryptography==44.0.2 # via # pyjwt # trustme diff --git a/requirements/lint.txt b/requirements/lint.txt index ceb4fce019d..5740ddb58a0 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -21,7 +21,7 @@ cfgv==3.4.0 # via pre-commit click==8.1.8 # via slotscheck -cryptography==44.0.1 +cryptography==44.0.2 # via trustme distlib==0.3.9 # via virtualenv diff --git 
a/requirements/test.txt b/requirements/test.txt index 3fcfcbcb869..9b35aab8cb7 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -31,7 +31,7 @@ coverage==7.6.12 # via # -r requirements/test.in # pytest-cov -cryptography==44.0.1 +cryptography==44.0.2 # via trustme exceptiongroup==1.2.2 # via pytest From bc3e5cdb5abfd1043af9e12517fd55221f6a01cc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 4 Mar 2025 05:52:30 -1000 Subject: [PATCH 1240/1511] Bump aiohappyeyeballs from 2.4.6 to 2.4.8 (#10517) Bumps [aiohappyeyeballs](https://github.com/aio-libs/aiohappyeyeballs) from 2.4.6 to 2.4.8. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/aiohappyeyeballs/releases">aiohappyeyeballs's releases</a>.</em></p> <blockquote> <h2>v2.4.8 (2025-03-04)</h2> <h3>Bug Fixes</h3> <ul> <li>Close runner up sockets in the event there are multiple winners (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/pull/143">#143</a>, <a href="https://github.com/aio-libs/aiohappyeyeballs/commit/476a05b956627700baa84eb6aac28c395da92a9f"><code>476a05b</code></a>)</li> </ul> <p>The first attempt to fix this was to use the cpython staggered race updates in <a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/142">#142</a> but there is still a race there where there can be multiple winners. Instead we now accept that we will not be able to cancel all coros in time and there will always be a risk of multiple winners. 
We store all sockets in a set that were not already cleaned up and we close all but the first winner after the staggered race finishes.</p> <hr /> <p><strong>Detailed Changes</strong>: <a href="https://github.com/aio-libs/aiohappyeyeballs/compare/v2.4.7...v2.4.8">v2.4.7...v2.4.8</a></p> <h2>v2.4.7 (2025-03-04)</h2> <h3>Bug Fixes</h3> <ul> <li> <p>Resolve warnings when running tests (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/pull/144">#144</a>, <a href="https://github.com/aio-libs/aiohappyeyeballs/commit/e96264aec89b9bd34d37413f610d039c56393a48"><code>e96264a</code></a>)</p> </li> <li> <p>Instead of raising SystemExit which causes a RuntimeError, mock out SystemExit to a new exception</p> </li> <li> <p>Make sure the event loop is closed in tests</p> </li> </ul> <p>fixes <a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/97">#97</a></p> <hr /> <p><strong>Detailed Changes</strong>: <a href="https://github.com/aio-libs/aiohappyeyeballs/compare/v2.4.6...v2.4.7">v2.4.6...v2.4.7</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/aiohappyeyeballs/blob/main/CHANGELOG.md">aiohappyeyeballs's changelog</a>.</em></p> <blockquote> <h2>v2.4.8 (2025-03-04)</h2> <h3>Bug fixes</h3> <ul> <li>Close runner up sockets in the event there are multiple winners (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/143">#143</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/476a05b956627700baa84eb6aac28c395da92a9f"><code>476a05b</code></a>)</li> </ul> <h2>v2.4.7 (2025-03-04)</h2> <h3>Bug fixes</h3> <ul> <li>Resolve warnings when running tests (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/144">#144</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/e96264aec89b9bd34d37413f610d039c56393a48"><code>e96264a</code></a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a 
href="https://github.com/aio-libs/aiohappyeyeballs/commit/c4ab1e5409fe086c12932e47dc4a305eb15b4091"><code>c4ab1e5</code></a> 2.4.8</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/476a05b956627700baa84eb6aac28c395da92a9f"><code>476a05b</code></a> fix: close runner up sockets in the event there are multiple winners (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/143">#143</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/9c55c91ca72a53d1944becb80ef893e6f6f7b181"><code>9c55c91</code></a> 2.4.7</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/e96264aec89b9bd34d37413f610d039c56393a48"><code>e96264a</code></a> fix: resolve warnings when running tests (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/144">#144</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/ece56a5e5ac2ce66a1f0ada7466d312e8d456fbd"><code>ece56a5</code></a> chore: manually update deps (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/145">#145</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/a77bee707dc850ce485c9f1facae62ccb4f4f233"><code>a77bee7</code></a> chore(pre-commit.ci): pre-commit autoupdate (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/141">#141</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/035d976dee1f5e731852649f3fffd4e1aca21825"><code>035d976</code></a> chore(deps-ci): bump the github-actions group with 2 updates (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/140">#140</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/2e130292b873d2e7236106236c43b42c773961a6"><code>2e13029</code></a> chore(pre-commit.ci): pre-commit autoupdate (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/139">#139</a>)</li> <li><a 
href="https://github.com/aio-libs/aiohappyeyeballs/commit/253bd757ac4e43f0ed7fe3dc59263b7ec2d76780"><code>253bd75</code></a> chore(pre-commit.ci): pre-commit autoupdate (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/138">#138</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/9e02614bfbf8fb51bfeb027c787b2f0099db079f"><code>9e02614</code></a> chore(pre-commit.ci): pre-commit autoupdate (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/137">#137</a>)</li> <li>See full diff in <a href="https://github.com/aio-libs/aiohappyeyeballs/compare/v2.4.6...v2.4.8">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=aiohappyeyeballs&package-manager=pip&previous-version=2.4.6&new-version=2.4.8)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index fb7a1d43f43..f56b3bda93c 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.4.6 +aiohappyeyeballs==2.4.8 # via -r requirements/runtime-deps.in aiosignal==1.3.2 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index dae27da7eda..38b472dc453 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via # -r requirements/lint.in # -r requirements/runtime-deps.in -aiohappyeyeballs==2.4.6 +aiohappyeyeballs==2.4.8 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.7 # via -r requirements/doc.in diff --git a/requirements/dev.txt 
b/requirements/dev.txt index 594302de133..2ab4fb7bb57 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via # -r requirements/lint.in # -r requirements/runtime-deps.in -aiohappyeyeballs==2.4.6 +aiohappyeyeballs==2.4.8 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.7 # via -r requirements/doc.in diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 4265eab71a3..cb9deee6bd2 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.4.6 +aiohappyeyeballs==2.4.8 # via -r requirements/runtime-deps.in aiosignal==1.3.2 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 9b35aab8cb7..4b3daedf2c8 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.4.6 +aiohappyeyeballs==2.4.8 # via -r requirements/runtime-deps.in aiosignal==1.3.2 # via -r requirements/runtime-deps.in From 76a9dc692e08ec38e6f15107165d44b1d792ab49 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 6 Mar 2025 11:09:15 +0000 Subject: [PATCH 1241/1511] Bump aiohappyeyeballs from 2.4.8 to 2.5.0 (#10523) Bumps [aiohappyeyeballs](https://github.com/aio-libs/aiohappyeyeballs) from 2.4.8 to 2.5.0. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/aiohappyeyeballs/releases">aiohappyeyeballs's releases</a>.</em></p> <blockquote> <h2>v2.5.0 (2025-03-06)</h2> <h3>Features</h3> <ul> <li>Add callback for users to customize socket creation (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/pull/147">#147</a>, <a href="https://github.com/aio-libs/aiohappyeyeballs/commit/8e1bc6a4bc6282ccf29db441c33dd8d806003ffd"><code>8e1bc6a</code></a>)</li> </ul> <p>Co-authored-by: Kieren <!-- raw HTML omitted --></p> <hr /> <p><strong>Detailed Changes</strong>: <a href="https://github.com/aio-libs/aiohappyeyeballs/compare/v2.4.8...v2.5.0">v2.4.8...v2.5.0</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/aiohappyeyeballs/blob/main/CHANGELOG.md">aiohappyeyeballs's changelog</a>.</em></p> <blockquote> <h2>v2.5.0 (2025-03-06)</h2> <h3>Features</h3> <ul> <li>Add callback for users to customize socket creation (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/147">#147</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/8e1bc6a4bc6282ccf29db441c33dd8d806003ffd"><code>8e1bc6a</code></a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/3fed4432036740392483608a9da86637cf1bd64b"><code>3fed443</code></a> 2.5.0</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/8e1bc6a4bc6282ccf29db441c33dd8d806003ffd"><code>8e1bc6a</code></a> feat: add callback for users to customize socket creation (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/147">#147</a>)</li> <li>See full diff in <a href="https://github.com/aio-libs/aiohappyeyeballs/compare/v2.4.8...v2.5.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=aiohappyeyeballs&package-manager=pip&previous-version=2.4.8&new-version=2.5.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index f56b3bda93c..de111dd8268 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.4.8 +aiohappyeyeballs==2.5.0 # via -r requirements/runtime-deps.in aiosignal==1.3.2 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 38b472dc453..57c94399c91 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via # -r requirements/lint.in # -r requirements/runtime-deps.in -aiohappyeyeballs==2.4.8 +aiohappyeyeballs==2.5.0 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.7 # via -r requirements/doc.in diff --git a/requirements/dev.txt 
b/requirements/dev.txt index 2ab4fb7bb57..e490f947f44 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via # -r requirements/lint.in # -r requirements/runtime-deps.in -aiohappyeyeballs==2.4.8 +aiohappyeyeballs==2.5.0 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.7 # via -r requirements/doc.in diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index cb9deee6bd2..128b0ed2d6f 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.4.8 +aiohappyeyeballs==2.5.0 # via -r requirements/runtime-deps.in aiosignal==1.3.2 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 4b3daedf2c8..ea0451e9ea6 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.4.8 +aiohappyeyeballs==2.5.0 # via -r requirements/runtime-deps.in aiosignal==1.3.2 # via -r requirements/runtime-deps.in From 2761ce82a031c3d4fb8ff9896175f38960051702 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 6 Mar 2025 11:34:05 +0000 Subject: [PATCH 1242/1511] Bump jinja2 from 3.1.5 to 3.1.6 (#10522) Bumps [jinja2](https://github.com/pallets/jinja) from 3.1.5 to 3.1.6. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pallets/jinja/releases">jinja2's releases</a>.</em></p> <blockquote> <h2>3.1.6</h2> <p>This is the Jinja 3.1.6 security release, which fixes security issues but does not otherwise change behavior and should not result in breaking changes compared to the latest feature release.</p> <p>PyPI: <a href="https://pypi.org/project/Jinja2/3.1.6/">https://pypi.org/project/Jinja2/3.1.6/</a> Changes: <a href="https://jinja.palletsprojects.com/en/stable/changes/#version-3-1-6">https://jinja.palletsprojects.com/en/stable/changes/#version-3-1-6</a></p> <ul> <li>The <code>|attr</code> filter does not bypass the environment's attribute lookup, allowing the sandbox to apply its checks. <a href="https://github.com/pallets/jinja/security/advisories/GHSA-cpwx-vrp4-4pq7">https://github.com/pallets/jinja/security/advisories/GHSA-cpwx-vrp4-4pq7</a></li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pallets/jinja/blob/main/CHANGES.rst">jinja2's changelog</a>.</em></p> <blockquote> <h2>Version 3.1.6</h2> <p>Released 2025-03-05</p> <ul> <li>The <code>|attr</code> filter does not bypass the environment's attribute lookup, allowing the sandbox to apply its checks. 
:ghsa:<code>cpwx-vrp4-4pq7</code></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pallets/jinja/commit/15206881c006c79667fe5154fe80c01c65410679"><code>1520688</code></a> release version 3.1.6</li> <li><a href="https://github.com/pallets/jinja/commit/90457bbf33b8662926ae65cdde4c4c32e756e403"><code>90457bb</code></a> Merge commit from fork</li> <li><a href="https://github.com/pallets/jinja/commit/065334d1ee5b7210e1a0a93c37238c86858f2af7"><code>065334d</code></a> attr filter uses env.getattr</li> <li><a href="https://github.com/pallets/jinja/commit/033c20015c7ca899ab52eb921bb0f08e6d3dd145"><code>033c200</code></a> start version 3.1.6</li> <li><a href="https://github.com/pallets/jinja/commit/bc68d4efa99c5f77334f0e519628558059ae8c35"><code>bc68d4e</code></a> use global contributing guide (<a href="https://redirect.github.com/pallets/jinja/issues/2070">#2070</a>)</li> <li><a href="https://github.com/pallets/jinja/commit/247de5e0c5062a792eb378e50e13e692885ee486"><code>247de5e</code></a> use global contributing guide</li> <li><a href="https://github.com/pallets/jinja/commit/ab8218c7a1b66b62e0ad6b941bd514e3a64a358f"><code>ab8218c</code></a> use project advisory link instead of global</li> <li><a href="https://github.com/pallets/jinja/commit/b4ffc8ff299dfd360064bea4cd2f862364601ad2"><code>b4ffc8f</code></a> release version 3.1.5 (<a href="https://redirect.github.com/pallets/jinja/issues/2066">#2066</a>)</li> <li>See full diff in <a href="https://github.com/pallets/jinja/compare/3.1.5...3.1.6">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=jinja2&package-manager=pip&previous-version=3.1.5&new-version=3.1.6)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you 
don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 57c94399c91..ccfb8ea989e 100644 --- 
a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -101,7 +101,7 @@ incremental==24.7.2 # via towncrier iniconfig==2.0.0 # via pytest -jinja2==3.1.5 +jinja2==3.1.6 # via # sphinx # towncrier diff --git a/requirements/dev.txt b/requirements/dev.txt index e490f947f44..bb0e6b6bba2 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -99,7 +99,7 @@ incremental==24.7.2 # via towncrier iniconfig==2.0.0 # via pytest -jinja2==3.1.5 +jinja2==3.1.6 # via # sphinx # towncrier diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 48b8d4bce60..07c0c6b8b1c 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -24,7 +24,7 @@ imagesize==1.4.1 # via sphinx incremental==24.7.2 # via towncrier -jinja2==3.1.5 +jinja2==3.1.6 # via # sphinx # towncrier diff --git a/requirements/doc.txt b/requirements/doc.txt index b2fce4162ca..74e0ebfcda7 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -24,7 +24,7 @@ imagesize==1.4.1 # via sphinx incremental==24.7.2 # via towncrier -jinja2==3.1.5 +jinja2==3.1.6 # via # sphinx # towncrier From 505d307e98d1b970fd16696aced4a968f8e9e49d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 7 Mar 2025 10:52:37 +0000 Subject: [PATCH 1243/1511] Bump virtualenv from 20.29.2 to 20.29.3 (#10527) Bumps [virtualenv](https://github.com/pypa/virtualenv) from 20.29.2 to 20.29.3. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/virtualenv/blob/main/docs/changelog.rst">virtualenv's changelog</a>.</em></p> <blockquote> <h2>v20.29.3 (2025-03-06)</h2> <p>Bugfixes - 20.29.3</p> <pre><code>- Ignore unreadable directories in ``PATH``. 
(:issue:`2794`) </code></pre> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/virtualenv/commit/b1666e9096324b4ea37da49b15ae3b3ce29931b8"><code>b1666e9</code></a> release 20.29.3</li> <li><a href="https://github.com/pypa/virtualenv/commit/e05b2860950392a5fa47da5a3344a430e52b1fe6"><code>e05b286</code></a> Merge pull request <a href="https://redirect.github.com/pypa/virtualenv/issues/2853">#2853</a> from pypa/pre-commit-ci-update-config</li> <li><a href="https://github.com/pypa/virtualenv/commit/d6bc4a943def45d14aacaad83d5a3363188c8e67"><code>d6bc4a9</code></a> [pre-commit.ci] pre-commit autoupdate</li> <li><a href="https://github.com/pypa/virtualenv/commit/1fc647edb1c63a690b26b3d472e076e8012f9e30"><code>1fc647e</code></a> Merge pull request <a href="https://redirect.github.com/pypa/virtualenv/issues/2852">#2852</a> from barneygale/fix-2794</li> <li><a href="https://github.com/pypa/virtualenv/commit/4567521ecf35d3ee494067e6f1c0e6e9c945284f"><code>4567521</code></a> Add changelog entry</li> <li><a href="https://github.com/pypa/virtualenv/commit/220b6b850f12a47cc3b93bfefd5c61bb93c9c377"><code>220b6b8</code></a> Add test</li> <li><a href="https://github.com/pypa/virtualenv/commit/ee9d84cbb098e76dec9414347621137ceb7c9d3c"><code>ee9d84c</code></a> Ignore directories in PATH that can't be opened (<a href="https://redirect.github.com/pypa/virtualenv/issues/2794">#2794</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/7365ad287729e786ede24da2e3afab3409c60dfa"><code>7365ad2</code></a> Merge pull request <a href="https://redirect.github.com/pypa/virtualenv/issues/2849">#2849</a> from tipabu/logging-interpolation</li> <li><a href="https://github.com/pypa/virtualenv/commit/5b74c9a78b60d7c390fb749c282cee397dccdf0b"><code>5b74c9a</code></a> Merge pull request <a href="https://redirect.github.com/pypa/virtualenv/issues/2850">#2850</a> from shenxianpeng/remove-duplicate-template</li> <li><a 
href="https://github.com/pypa/virtualenv/commit/ca5935ad6c71da93ee6336cf4f815e3f2e78e870"><code>ca5935a</code></a> Remove duplicate bug report template</li> <li>Additional commits viewable in <a href="https://github.com/pypa/virtualenv/compare/20.29.2...20.29.3">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=virtualenv&package-manager=pip&previous-version=20.29.2&new-version=20.29.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index ccfb8ea989e..7a00dcae42f 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -276,7 +276,7 @@ uvloop==0.21.0 ; platform_system != "Windows" # -r requirements/lint.in valkey==6.1.0 # via -r requirements/lint.in -virtualenv==20.29.2 +virtualenv==20.29.3 # via pre-commit wait-for-it==2.3.0 # via -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index bb0e6b6bba2..281c2c0a08b 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -267,7 +267,7 @@ uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpytho # -r requirements/lint.in valkey==6.1.0 # via -r requirements/lint.in -virtualenv==20.29.2 +virtualenv==20.29.3 # via pre-commit wait-for-it==2.3.0 # via -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 5740ddb58a0..d2dba87daf8 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ 
-106,5 +106,5 @@ uvloop==0.21.0 ; platform_system != "Windows" # via -r requirements/lint.in valkey==6.1.0 # via -r requirements/lint.in -virtualenv==20.29.2 +virtualenv==20.29.3 # via pre-commit From e1ef02d2058cb9bcc55c79b970216503f16dbbe8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Mar 2025 11:11:32 +0000 Subject: [PATCH 1244/1511] Bump setuptools from 75.8.2 to 76.0.0 (#10530) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [setuptools](https://github.com/pypa/setuptools) from 75.8.2 to 76.0.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/setuptools/blob/main/NEWS.rst">setuptools's changelog</a>.</em></p> <blockquote> <h1>v76.0.0</h1> <h2>Deprecations and Removals</h2> <ul> <li>Synced with pypa/distutils@5589d7527 including a simplified shebang generation when building scripts (<a href="https://redirect.github.com/pypa/setuptools/issues/4863">#4863</a>). (<a href="https://redirect.github.com/pypa/setuptools/issues/4865">#4865</a>)</li> </ul> <h1>v75.9.1</h1> <h2>Bugfixes</h2> <ul> <li>Fix ImportError in distutils when configuring for linking. 
(<a href="https://redirect.github.com/pypa/setuptools/issues/4866">#4866</a>)</li> </ul> <h1>v75.9.0</h1> <h2>Features</h2> <ul> <li><code>pypa/distutils#327</code><a href="https://redirect.github.com/pypa/setuptools/issues/4852">#4852</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/setuptools/commit/c11a4940deba04a6f8f8e1410686116f587f6f70"><code>c11a494</code></a> Bump version: 75.9.1 → 76.0.0</li> <li><a href="https://github.com/pypa/setuptools/commit/427babbd960fac659167061b9a030c3041112107"><code>427babb</code></a> Merge pull request <a href="https://redirect.github.com/pypa/setuptools/issues/4865">#4865</a> from pypa/feature/distutils-5589d7527</li> <li><a href="https://github.com/pypa/setuptools/commit/7530d69a8decc3fac377e361f17d00c0878cd6ea"><code>7530d69</code></a> Bump version: 75.9.0 → 75.9.1</li> <li><a href="https://github.com/pypa/setuptools/commit/d8620a8a8d1ce9648716cf3379718a257d721530"><code>d8620a8</code></a> Merge pull request <a href="https://redirect.github.com/pypa/setuptools/issues/4867">#4867</a> from pypa/bugfix/distutils-6d7cc0ff9</li> <li><a href="https://github.com/pypa/setuptools/commit/7cff740272fcb99ef1f7c17cc074029acaa9ca42"><code>7cff740</code></a> Add news fragment.</li> <li><a href="https://github.com/pypa/setuptools/commit/149a28a70b02b724b72cf16a9b44db36d347e06b"><code>149a28a</code></a> Merge commit '6d7cc0ff9' into bugfix/distutils-6d7cc0ff9</li> <li><a href="https://github.com/pypa/setuptools/commit/6d7cc0ff91349ce0a9d3b5902a93302d0073d7b0"><code>6d7cc0f</code></a> In config command, move to eager imports. 
Restore LinkError to ccompilers mod...</li> <li><a href="https://github.com/pypa/setuptools/commit/5cc292799ced115f6404b65d59eed0e629741bd7"><code>5cc2927</code></a> Add news fragment.</li> <li><a href="https://github.com/pypa/setuptools/commit/c7e97a0012edcd583001336268bb18fb985bce1e"><code>c7e97a0</code></a> Merge with pypa/distutils@5589d7527</li> <li><a href="https://github.com/pypa/setuptools/commit/0cffd6186ecc7b76d8544a099473cc262b9335f3"><code>0cffd61</code></a> Bump version: 75.8.2 → 75.9.0</li> <li>Additional commits viewable in <a href="https://github.com/pypa/setuptools/compare/v75.8.2...v76.0.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=setuptools&package-manager=pip&previous-version=75.8.2&new-version=76.0.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 7a00dcae42f..0a6748b9704 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -288,7 +288,7 @@ yarl==1.18.3 # The following packages are considered to be unsafe in a requirements file: pip==25.0.1 # via pip-tools -setuptools==75.8.2 +setuptools==76.0.0 # via # incremental # pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index 281c2c0a08b..998cc2fdee5 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -279,7 +279,7 @@ yarl==1.18.3 # The following packages are considered to be unsafe in a requirements file: pip==25.0.1 # via pip-tools -setuptools==75.8.2 +setuptools==76.0.0 # via # incremental # pip-tools diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 07c0c6b8b1c..ff527ae486b 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -76,5 +76,5 @@ urllib3==2.3.0 # 
via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==75.8.2 +setuptools==76.0.0 # via incremental diff --git a/requirements/doc.txt b/requirements/doc.txt index 74e0ebfcda7..f00d523f092 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -69,5 +69,5 @@ urllib3==2.3.0 # via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==75.8.2 +setuptools==76.0.0 # via incremental From 98804dcb6e92d10abbfe1e95c2301b19031d9df6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Mar 2025 11:21:39 +0000 Subject: [PATCH 1245/1511] Bump identify from 2.6.8 to 2.6.9 (#10531) Bumps [identify](https://github.com/pre-commit/identify) from 2.6.8 to 2.6.9. <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pre-commit/identify/commit/856f0162b1bdb01e47bd31c2cf1786a8ea41bcc5"><code>856f016</code></a> v2.6.9</li> <li><a href="https://github.com/pre-commit/identify/commit/d7518a7cc915abcbce988c40d32ebe8db4a7f333"><code>d7518a7</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/identify/issues/513">#513</a> from pre-commit/revert-503-uv-interpreter</li> <li><a href="https://github.com/pre-commit/identify/commit/45f344bf720858256602a400766531993f9adac9"><code>45f344b</code></a> Revert "add uv to interpreters list"</li> <li>See full diff in <a href="https://github.com/pre-commit/identify/compare/v2.6.8...v2.6.9">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=identify&package-manager=pip&previous-version=2.6.8&new-version=2.6.9)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. 
You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 0a6748b9704..184642b29bb 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -88,7 +88,7 @@ 
gidgethub==5.3.0 # via cherry-picker gunicorn==23.0.0 # via -r requirements/base.in -identify==2.6.8 +identify==2.6.9 # via pre-commit idna==3.3 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 998cc2fdee5..30afd855beb 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -86,7 +86,7 @@ gidgethub==5.3.0 # via cherry-picker gunicorn==23.0.0 # via -r requirements/base.in -identify==2.6.8 +identify==2.6.9 # via pre-commit idna==3.4 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index d2dba87daf8..d25a756f73e 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -33,7 +33,7 @@ forbiddenfruit==0.1.4 # via blockbuster freezegun==1.5.1 # via -r requirements/lint.in -identify==2.6.8 +identify==2.6.9 # via pre-commit idna==3.7 # via trustme From c1fdd1af0aa03d9c73ba104256706ea477abfeb0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 12 Mar 2025 10:52:35 +0000 Subject: [PATCH 1246/1511] Bump attrs from 25.1.0 to 25.2.0 (#10537) Bumps [attrs](https://github.com/sponsors/hynek) from 25.1.0 to 25.2.0. <details> <summary>Commits</summary> <ul> <li>See full diff in <a href="https://github.com/sponsors/hynek/commits">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=attrs&package-manager=pip&previous-version=25.1.0&new-version=25.2.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index de111dd8268..d4ee6ef8c8f 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -12,7 +12,7 @@ aiosignal==1.3.2 # via -r 
requirements/runtime-deps.in async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in -attrs==25.1.0 +attrs==25.2.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 184642b29bb..7da904d3412 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -22,7 +22,7 @@ async-timeout==5.0.1 ; python_version < "3.11" # via # -r requirements/runtime-deps.in # valkey -attrs==25.1.0 +attrs==25.2.0 # via -r requirements/runtime-deps.in babel==2.17.0 # via sphinx diff --git a/requirements/dev.txt b/requirements/dev.txt index 30afd855beb..d2c3960bae9 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -22,7 +22,7 @@ async-timeout==5.0.1 ; python_version < "3.11" # via # -r requirements/runtime-deps.in # valkey -attrs==25.1.0 +attrs==25.2.0 # via -r requirements/runtime-deps.in babel==2.17.0 # via sphinx diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 128b0ed2d6f..1f60317174d 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -12,7 +12,7 @@ aiosignal==1.3.2 # via -r requirements/runtime-deps.in async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in -attrs==25.1.0 +attrs==25.2.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index ea0451e9ea6..53cee8b83e5 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -14,7 +14,7 @@ annotated-types==0.7.0 # via pydantic async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in -attrs==25.1.0 +attrs==25.2.0 # via -r requirements/runtime-deps.in blockbuster==1.5.23 # via -r requirements/test.in From 532cf6548c88b86551d88b22d98a74f46f57b4b3 Mon 
Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 12 Mar 2025 10:54:35 +0000 Subject: [PATCH 1247/1511] Bump stamina from 24.3.0 to 25.1.0 (#10538) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [stamina](https://github.com/hynek/stamina) from 24.3.0 to 25.1.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/hynek/stamina/releases">stamina's releases</a>.</em></p> <blockquote> <h2>25.1.0</h2> <h2>Highlights</h2> <p>The context manager edition! You can now use <code>stamina.set_testing()</code> as a context manager and also return one from an instrumentation retry hook which is entered and exited before and after sleep which allows you to emit spans for back off times!</p> <p><em>Full changelog below!</em></p> <h2>Special Thanks</h2> <p>This release would not be possible without my generous sponsors! Thank you to all of you making sustainable maintenance possible! 
If <em>you</em> would like to join them, go to <a href="https://github.com/sponsors/hynek">https://github.com/sponsors/hynek</a> and check out the sweet perks!</p> <h3>Above and Beyond</h3> <p><a href="https://www.variomedia.de/">Variomedia AG</a> (@<a href="https://github.com/variomedia">variomedia</a>), <a href="https://www.tidelift.com/">Tidelift</a> (@<a href="https://github.com/tidelift">tidelift</a>), <a href="https://ecosyste.ms">Ecosystems</a> (@<a href="https://github.com/ecosyste-ms">ecosyste-ms</a>), <a href="https://www.klaviyo.com">Klaviyo</a> (@<a href="https://github.com/klaviyo">klaviyo</a>), <a href="https://privacy-solutions.org">Privacy Solutions GmbH</a> (@<a href="https://github.com/privacy-solutions">privacy-solutions</a>), <a href="http://filepreviews.io/">FilePreviews</a> (@<a href="https://github.com/filepreviews">filepreviews</a>), <a href="https://blog.alternatebuild.dev">nate nowack</a> (@<a href="https://github.com/zzstoatzz">zzstoatzz</a>), Daniel Fortunov (@<a href="https://github.com/asqui">asqui</a>), and Kevin P. 
Fleming (@<a href="https://github.com/kpfleming">kpfleming</a>).</p> <h3>Maintenance Sustainers</h3> <p><a href="https://buttondown.com">Buttondown</a> (@<a href="https://github.com/buttondown">buttondown</a>), <a href="https://christopher.xyz">Christopher Dignam</a> (@<a href="https://github.com/chdsbd">chdsbd</a>), Magnus Watn (@<a href="https://github.com/magnuswatn">magnuswatn</a>), <a href="https://cra.mr">David Cramer</a> (@<a href="https://github.com/dcramer">dcramer</a>), Jesse Snyder (@<a href="https://github.com/jessesnyder">jessesnyder</a>), <a href="https://rivolaks.com">Rivo Laks</a> (@<a href="https://github.com/rivol">rivol</a>), <a href="https://polar.sh">Polar</a> (@<a href="https://github.com/polarsource">polarsource</a>), <a href="https://www.miketheman.net">Mike Fiedler</a> (@<a href="https://github.com/miketheman">miketheman</a>), Duncan Hill (@<a href="https://github.com/cricalix">cricalix</a>), Colin Marquardt (@<a href="https://github.com/cmarqu">cmarqu</a>), <a href="https://blog.journeythatcounts.nl">Pieter Swinkels</a> (@<a href="https://github.com/swinkels">swinkels</a>), Nick Libertini (@<a href="https://github.com/libertininick">libertininick</a>), <a href="https://mpr.crossjam.net/">Brian M. 
Dennis</a> (@<a href="https://github.com/crossjam">crossjam</a>), Moving Content AG (@<a href="https://github.com/moving-content">moving-content</a>), <a href="https://proteinqure.com/">ProteinQure</a> (@<a href="https://github.com/ProteinQure">ProteinQure</a>), <a href="https://westervelt.com">The Westervelt Company</a> (@<a href="https://github.com/westerveltco">westerveltco</a>), <a href="https://slafs.net">Sławomir Ehlert</a> (@<a href="https://github.com/slafs">slafs</a>), Mostafa Khalil (@<a href="https://github.com/khadrawy">khadrawy</a>), <a href="https://fmularczyk.pl">Filip Mularczyk</a> (@<a href="https://github.com/mukiblejlok">mukiblejlok</a>), Thomas Klinger (@<a href="https://github.com/thmsklngr">thmsklngr</a>), <a href="https://poehlmann.io">Andreas Poehlmann</a> (@<a href="https://github.com/ap--">ap--</a>), <a href="https://atbigelow.com">August Trapper Bigelow</a> (@<a href="https://github.com/atbigelow">atbigelow</a>), <a href="https://noumenal.es/">Carlton Gibson</a> (@<a href="https://github.com/carltongibson">carltongibson</a>), and <a href="https://roboflow.com">Roboflow</a> (@<a href="https://github.com/roboflow">roboflow</a>).</p> <p>Not to forget 14 more amazing humans who chose to be generous but anonymous!</p> <h2>Full Changelog</h2> <h3>Added</h3> <ul> <li> <p><em>cap</em> argument to <code>stamina.set_testing()</code>. By default, the value passed as <em>attempts</em> is used strictly. When <code>cap=True</code>, it is used as an upper cap; that means that if the original attempts number is lower, it's not changed. <a href="https://redirect.github.com/hynek/stamina/pull/80">#80</a></p> </li> <li> <p><code>stamina.set_testing()</code> can now be used as a context manager. <a href="https://redirect.github.com/hynek/stamina/pull/94">#94</a></p> </li> <li> <p>Instrumentation hooks can now can return context managers. If they do, they are entered when a retry is scheduled and exited right before the retry is attempted. 
<a href="https://redirect.github.com/hynek/stamina/pull/95">#95</a></p> </li> </ul> <hr /> <p>This release contains contributions from <a href="https://github.com/hynek"><code>@​hynek</code></a> and <a href="https://github.com/sparkiegeek"><code>@​sparkiegeek</code></a>.</p> <h2>Artifact Attestations</h2> <p>You can verify this release's <a href="https://docs.github.com/en/actions/security-guides/using-artifact-attestations-to-establish-provenance-for-builds">artifact attestions</a> using <a href="https://cli.github.com">GitHub's CLI tool</a> by downloading the sdist and wheel from <a href="https://pypi.org/project/stamina">PyPI</a> and running:</p> <pre lang="console"><code>$ gh attestation verify --owner hynek stamina-25.1.0.tar.gz </code></pre> <p>and</p> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/hynek/stamina/blob/main/CHANGELOG.md">stamina's changelog</a>.</em></p> <blockquote> <h2><a href="https://github.com/hynek/stamina/compare/24.3.0...25.1.0">25.1.0</a> - 2025-03-12</h2> <h3>Added</h3> <ul> <li> <p><em>cap</em> argument to <code>stamina.set_testing()</code>. By default, the value passed as <em>attempts</em> is used strictly. When <code>cap=True</code>, it is used as an upper cap; that means that if the original attempts number is lower, it's not changed. <a href="https://redirect.github.com/hynek/stamina/pull/80">#80</a></p> </li> <li> <p><code>stamina.set_testing()</code> can now be used as a context manager. <a href="https://redirect.github.com/hynek/stamina/pull/94">#94</a></p> </li> <li> <p>Instrumentation hooks can now can return context managers. If they do, they are entered when a retry is scheduled and exited right before the retry is attempted. 
<a href="https://redirect.github.com/hynek/stamina/pull/95">#95</a></p> </li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/hynek/stamina/commit/25288ed058f9723bf113768b1ae26cf8857d65c1"><code>25288ed</code></a> Prepare 25.1.0</li> <li><a href="https://github.com/hynek/stamina/commit/8e16d3b8052c0e2033381f09e8b71822ba172305"><code>8e16d3b</code></a> Leave some space</li> <li><a href="https://github.com/hynek/stamina/commit/5730d029cacbeb5ce63e676578e968c2435f19e1"><code>5730d02</code></a> Use consistent language</li> <li><a href="https://github.com/hynek/stamina/commit/6d99d49e00c8a795a096b0252ff5c64302df43b9"><code>6d99d49</code></a> update ruff</li> <li><a href="https://github.com/hynek/stamina/commit/b163605ed98863dcf1f4130dd87f45cd5301747c"><code>b163605</code></a> Allow instrumentation hooks to be contextmanagers (<a href="https://redirect.github.com/hynek/stamina/issues/95">#95</a>)</li> <li><a href="https://github.com/hynek/stamina/commit/5eb259d170cf689658b592b026d6050a2e2b02ad"><code>5eb259d</code></a> [docs] add stamina video link to motivation (<a href="https://redirect.github.com/hynek/stamina/issues/96">#96</a>)</li> <li><a href="https://github.com/hynek/stamina/commit/51b8072095969f2576555c9aea2b7032f0fb43fa"><code>51b8072</code></a> Allow set_testing to be used as a context manager (<a href="https://redirect.github.com/hynek/stamina/issues/94">#94</a>)</li> <li><a href="https://github.com/hynek/stamina/commit/5b51f2cbda79607edfdbaf9d26efdeefe9193bec"><code>5b51f2c</code></a> Stop using setup-python (<a href="https://redirect.github.com/hynek/stamina/issues/93">#93</a>)</li> <li><a href="https://github.com/hynek/stamina/commit/16504cad9d86efd1ffd9c0bfc21002a3faea6115"><code>16504ca</code></a> update ruff</li> <li><a href="https://github.com/hynek/stamina/commit/c62b0a8c37fba0aae13ea0f7d428f546c56fcd65"><code>c62b0a8</code></a> [pre-commit.ci] pre-commit autoupdate (<a 
href="https://redirect.github.com/hynek/stamina/issues/91">#91</a>)</li> <li>Additional commits viewable in <a href="https://github.com/hynek/stamina/compare/24.3.0...25.1.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=stamina&package-manager=pip&previous-version=24.3.0&new-version=25.1.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 7da904d3412..24e0145ccd6 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -234,7 +234,7 @@ sphinxcontrib-spelling==8.0.1 ; platform_system != "Windows" # via -r requirements/doc-spelling.in sphinxcontrib-towncrier==0.5.0a0 # via -r requirements/doc.in -stamina==24.3.0 +stamina==25.1.0 # via cherry-picker tenacity==9.0.0 # via stamina diff --git a/requirements/dev.txt b/requirements/dev.txt index d2c3960bae9..e5c130da871 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -225,7 +225,7 @@ sphinxcontrib-serializinghtml==2.0.0 # via sphinx sphinxcontrib-towncrier==0.5.0a0 # via -r requirements/doc.in -stamina==24.3.0 +stamina==25.1.0 # via cherry-picker tenacity==9.0.0 # via stamina From 0b17481eda21b9a7f7c08341520b12987f51c4a2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 14 Mar 2025 23:53:28 +0000 Subject: [PATCH 1248/1511] Bump 
aiohappyeyeballs from 2.5.0 to 2.6.1 (#10539) Bumps [aiohappyeyeballs](https://github.com/aio-libs/aiohappyeyeballs) from 2.5.0 to 2.6.1. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/aiohappyeyeballs/releases">aiohappyeyeballs's releases</a>.</em></p> <blockquote> <h2>v2.6.1 (2025-03-12)</h2> <h3>Bug Fixes</h3> <ul> <li>Resolve TypeError on import for Python < 3.9.2 (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/pull/151">#151</a>, <a href="https://github.com/aio-libs/aiohappyeyeballs/commit/2042c82f9978f41c31b58aa4e3d8fc3b9c3ec2ec"><code>2042c82</code></a>)</li> </ul> <hr /> <p><strong>Detailed Changes</strong>: <a href="https://github.com/aio-libs/aiohappyeyeballs/compare/v2.6.0...v2.6.1">v2.6.0...v2.6.1</a></p> <h2>v2.6.0 (2025-03-11)</h2> <h3>Features</h3> <ul> <li><strong>docs</strong>: Publish documentation (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/pull/149">#149</a>, <a href="https://github.com/aio-libs/aiohappyeyeballs/commit/42352736d12c60d500c63b9598ffab05ef5e8829"><code>4235273</code></a>)</li> </ul> <p>Creates an api_reference.rst file to expose the existing documentation for the few functions that have docstrings, as well as add documentation for AddrInfoType and SocketFactoryType. 
Now, these can be properly pointed to by other projects' documentation.</p> <hr /> <p><strong>Detailed Changes</strong>: <a href="https://github.com/aio-libs/aiohappyeyeballs/compare/v2.5.0...v2.6.0">v2.5.0...v2.6.0</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/aiohappyeyeballs/blob/main/CHANGELOG.md">aiohappyeyeballs's changelog</a>.</em></p> <blockquote> <h2>v2.6.1 (2025-03-12)</h2> <h3>Bug fixes</h3> <ul> <li>Resolve typeerror on import for python < 3.9.2 (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/151">#151</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/2042c82f9978f41c31b58aa4e3d8fc3b9c3ec2ec"><code>2042c82</code></a>)</li> </ul> <h2>v2.6.0 (2025-03-11)</h2> <h3>Features</h3> <ul> <li>Publish documentation (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/149">#149</a>) (<a href="https://github.com/aio-libs/aiohappyeyeballs/commit/42352736d12c60d500c63b9598ffab05ef5e8829"><code>4235273</code></a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/e3bd5bdf44f5d187802de6dcb08d27e1ca6da048"><code>e3bd5bd</code></a> 2.6.1</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/2042c82f9978f41c31b58aa4e3d8fc3b9c3ec2ec"><code>2042c82</code></a> fix: resolve TypeError on import for Python < 3.9.2 (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/151">#151</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/b23d9194d6db1c4d237da0da623a081573385915"><code>b23d919</code></a> chore: remove unused CI exclude for Python 3.8.0 (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/153">#153</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/36149e7194d9a24fc8109265dee80fecd362f449"><code>36149e7</code></a> 2.6.0</li> <li><a 
href="https://github.com/aio-libs/aiohappyeyeballs/commit/42352736d12c60d500c63b9598ffab05ef5e8829"><code>4235273</code></a> feat(docs): publish documentation (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/149">#149</a>)</li> <li><a href="https://github.com/aio-libs/aiohappyeyeballs/commit/17d5e48488f7b533efea33ebb89c25e7b4eda7ad"><code>17d5e48</code></a> chore(pre-commit.ci): pre-commit autoupdate (<a href="https://redirect.github.com/aio-libs/aiohappyeyeballs/issues/148">#148</a>)</li> <li>See full diff in <a href="https://github.com/aio-libs/aiohappyeyeballs/compare/v2.5.0...v2.6.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=aiohappyeyeballs&package-manager=pip&previous-version=2.5.0&new-version=2.6.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index d4ee6ef8c8f..e76578f430c 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.5.0 +aiohappyeyeballs==2.6.1 # via -r requirements/runtime-deps.in aiosignal==1.3.2 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 24e0145ccd6..49e6f58d6f1 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via # -r requirements/lint.in # -r requirements/runtime-deps.in -aiohappyeyeballs==2.5.0 +aiohappyeyeballs==2.6.1 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.7 # via -r requirements/doc.in diff --git a/requirements/dev.txt 
b/requirements/dev.txt index e5c130da871..6c6d5274712 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via # -r requirements/lint.in # -r requirements/runtime-deps.in -aiohappyeyeballs==2.5.0 +aiohappyeyeballs==2.6.1 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.7 # via -r requirements/doc.in diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 1f60317174d..dd279106314 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.5.0 +aiohappyeyeballs==2.6.1 # via -r requirements/runtime-deps.in aiosignal==1.3.2 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 53cee8b83e5..81f10090fbb 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.5.0 +aiohappyeyeballs==2.6.1 # via -r requirements/runtime-deps.in aiosignal==1.3.2 # via -r requirements/runtime-deps.in From 1f309113c09e2873a803c0b85b0a4bbc3aa82979 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 15 Mar 2025 05:42:59 +0000 Subject: [PATCH 1249/1511] Bump attrs from 25.2.0 to 25.3.0 (#10546) Bumps [attrs](https://github.com/sponsors/hynek) from 25.2.0 to 25.3.0. 
<details> <summary>Commits</summary> <ul> <li>See full diff in <a href="https://github.com/sponsors/hynek/commits">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=attrs&package-manager=pip&previous-version=25.2.0&new-version=25.3.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index e76578f430c..db4712426a9 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -12,7 +12,7 @@ aiosignal==1.3.2 # via -r requirements/runtime-deps.in async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in -attrs==25.2.0 +attrs==25.3.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 49e6f58d6f1..e744bc6f708 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -22,7 +22,7 @@ async-timeout==5.0.1 ; python_version < "3.11" # via # -r requirements/runtime-deps.in # valkey -attrs==25.2.0 +attrs==25.3.0 # via -r requirements/runtime-deps.in babel==2.17.0 # via sphinx diff --git a/requirements/dev.txt b/requirements/dev.txt index 
6c6d5274712..c1cd350baf5 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -22,7 +22,7 @@ async-timeout==5.0.1 ; python_version < "3.11" # via # -r requirements/runtime-deps.in # valkey -attrs==25.2.0 +attrs==25.3.0 # via -r requirements/runtime-deps.in babel==2.17.0 # via sphinx diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index dd279106314..c3863f8e5e3 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -12,7 +12,7 @@ aiosignal==1.3.2 # via -r requirements/runtime-deps.in async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in -attrs==25.2.0 +attrs==25.3.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 81f10090fbb..fb20a21e251 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -14,7 +14,7 @@ annotated-types==0.7.0 # via pydantic async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in -attrs==25.2.0 +attrs==25.3.0 # via -r requirements/runtime-deps.in blockbuster==1.5.23 # via -r requirements/test.in From e9b657fa110d7d11a9782ea6b5e9ba162ab88317 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 15 Mar 2025 05:43:03 +0000 Subject: [PATCH 1250/1511] Bump filelock from 3.17.0 to 3.18.0 (#10547) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [filelock](https://github.com/tox-dev/py-filelock) from 3.17.0 to 3.18.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/tox-dev/py-filelock/releases">filelock's releases</a>.</em></p> <blockquote> <h2>3.18.0</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>Indicate that locks are exclusive/write locks. 
by <a href="https://github.com/bicarlsen"><code>@​bicarlsen</code></a> in <a href="https://redirect.github.com/tox-dev/filelock/pull/394">tox-dev/filelock#394</a></li> <li>Support fcntl check on Emscripten by <a href="https://github.com/juntyr"><code>@​juntyr</code></a> in <a href="https://redirect.github.com/tox-dev/filelock/pull/398">tox-dev/filelock#398</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/bicarlsen"><code>@​bicarlsen</code></a> made their first contribution in <a href="https://redirect.github.com/tox-dev/filelock/pull/394">tox-dev/filelock#394</a></li> <li><a href="https://github.com/juntyr"><code>@​juntyr</code></a> made their first contribution in <a href="https://redirect.github.com/tox-dev/filelock/pull/398">tox-dev/filelock#398</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/tox-dev/filelock/compare/3.17.0...3.18.0">https://github.com/tox-dev/filelock/compare/3.17.0...3.18.0</a></p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/tox-dev/filelock/commit/129a1efb6a0e1c40ff7c0b094db1b888fcb46d93"><code>129a1ef</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/405">#405</a>)</li> <li><a href="https://github.com/tox-dev/filelock/commit/717d7e018c6e3c3cd00a0f35c775ebbbafda41cc"><code>717d7e0</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/403">#403</a>)</li> <li><a href="https://github.com/tox-dev/filelock/commit/efba779022cf62956efe88ea3c65620098fbf75c"><code>efba779</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/401">#401</a>)</li> <li><a href="https://github.com/tox-dev/filelock/commit/82f5a2d6c260a43dc88b51121087bdfa21fb205f"><code>82f5a2d</code></a> Support fcntl check on Emscripten (<a 
href="https://redirect.github.com/tox-dev/py-filelock/issues/398">#398</a>)</li> <li><a href="https://github.com/tox-dev/filelock/commit/468ba431d1d40b494724f3269d3241a5d1ddd411"><code>468ba43</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/396">#396</a>)</li> <li><a href="https://github.com/tox-dev/filelock/commit/0ac68266c62f3b7ae1bc5d4a35c88f1d3913b98d"><code>0ac6826</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/395">#395</a>)</li> <li><a href="https://github.com/tox-dev/filelock/commit/b5f98a66d0fb59e62894ad2b537deb877279adda"><code>b5f98a6</code></a> Indicate that locks are exclusive/write locks. (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/394">#394</a>)</li> <li><a href="https://github.com/tox-dev/filelock/commit/3c9b68049961d1b4aade1cc806e731f90b5d2c24"><code>3c9b680</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/393">#393</a>)</li> <li><a href="https://github.com/tox-dev/filelock/commit/2760759ac82f7e8583f708d9bf801c3a79ccd340"><code>2760759</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/391">#391</a>)</li> <li><a href="https://github.com/tox-dev/filelock/commit/18c9571afba87bb6329aef9c2c7798cbe8adfea5"><code>18c9571</code></a> Bump pypa/gh-action-pypi-publish from 1.12.3 to 1.12.4 (<a href="https://redirect.github.com/tox-dev/py-filelock/issues/390">#390</a>)</li> <li>See full diff in <a href="https://github.com/tox-dev/py-filelock/compare/3.17.0...3.18.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=filelock&package-manager=pip&previous-version=3.17.0&new-version=3.18.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index e744bc6f708..4f88e08420a 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -72,7 +72,7 @@ exceptiongroup==1.2.2 # via pytest execnet==2.1.1 # via pytest-xdist -filelock==3.17.0 +filelock==3.18.0 # via virtualenv forbiddenfruit==0.1.4 # via blockbuster diff --git a/requirements/dev.txt b/requirements/dev.txt index c1cd350baf5..1a70923b154 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -70,7 +70,7 @@ exceptiongroup==1.2.2 # via pytest execnet==2.1.1 # via pytest-xdist -filelock==3.17.0 +filelock==3.18.0 # via virtualenv forbiddenfruit==0.1.4 # via blockbuster diff --git a/requirements/lint.txt b/requirements/lint.txt index d25a756f73e..964750637c3 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -27,7 +27,7 @@ distlib==0.3.9 # via virtualenv exceptiongroup==1.2.2 # via pytest -filelock==3.17.0 +filelock==3.18.0 # via virtualenv forbiddenfruit==0.1.4 # via 
blockbuster From 7205661affce22fb8978d670342898295c265d43 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 15 Mar 2025 22:29:12 +0000 Subject: [PATCH 1251/1511] [PR #10553/55c5f1fc backport][3.11] Add benchmark for JSON post requests that check the content type (#10554) **This is a backport of PR #10553 as merged into master (55c5f1fc16e61f576f05d31c1f9bbd324943729c).** <!-- Thank you for your contribution! --> ## What do these changes do? Add benchmark for JSON post requests that check the content type ## Are there changes in behavior for the user? no ## Is it a substantial burden for the maintainers to support this? no Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_benchmarks_client.py | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/tests/test_benchmarks_client.py b/tests/test_benchmarks_client.py index ac3131e9750..ae89bc1f667 100644 --- a/tests/test_benchmarks_client.py +++ b/tests/test_benchmarks_client.py @@ -319,3 +319,30 @@ async def run_client_benchmark() -> None: @benchmark def _run() -> None: loop.run_until_complete(run_client_benchmark()) + + +def test_one_hundred_json_post_requests( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 100 JSON POST requests that check the content-type.""" + message_count = 100 + + async def handler(request: web.Request) -> web.Response: + _ = request.content_type + _ = request.charset + return web.Response() + + app = web.Application() + app.router.add_route("POST", "/", handler) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(message_count): + await client.post("/", json={"key": "value"}) + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) From f39062116245f15b58889dea03aaf710de28a8e1 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" 
<45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 15 Mar 2025 22:41:35 +0000 Subject: [PATCH 1252/1511] [PR #10553/55c5f1fc backport][3.12] Add benchmark for JSON post requests that check the content type (#10555) **This is a backport of PR #10553 as merged into master (55c5f1fc16e61f576f05d31c1f9bbd324943729c).** <!-- Thank you for your contribution! --> ## What do these changes do? Add benchmark for JSON post requests that check the content type ## Are there changes in behavior for the user? no ## Is it a substantial burden for the maintainers to support this? no Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_benchmarks_client.py | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/tests/test_benchmarks_client.py b/tests/test_benchmarks_client.py index ac3131e9750..ae89bc1f667 100644 --- a/tests/test_benchmarks_client.py +++ b/tests/test_benchmarks_client.py @@ -319,3 +319,30 @@ async def run_client_benchmark() -> None: @benchmark def _run() -> None: loop.run_until_complete(run_client_benchmark()) + + +def test_one_hundred_json_post_requests( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 100 JSON POST requests that check the content-type.""" + message_count = 100 + + async def handler(request: web.Request) -> web.Response: + _ = request.content_type + _ = request.charset + return web.Response() + + app = web.Application() + app.router.add_route("POST", "/", handler) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(message_count): + await client.post("/", json={"key": "value"}) + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) From 928e6d70480c662d7e3a84a8b9615c2d9f1b46ac Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 15 Mar 2025 23:21:49 +0000 Subject: [PATCH 
1253/1511] [PR #10552/44e669be backport][3.11] Cache parsing of the content-type (#10557) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **This is a backport of PR #10552 as merged into master (44e669be1ab1a60c40183f92f172670d912cb834).** <!-- Thank you for your contribution! --> ## What do these changes do? When profiling some frequent POST requests, I found the bulk of the time was spent parsing the content-type string. Use the same strategy as we do for `parse_mimetype` to cache the parsing. ## Are there changes in behavior for the user? performance improvement ## Is it a substantial burden for the maintainers to support this? no ## Related issue number <!-- Are there any issues opened that will be resolved by merging this change? --> <!-- Remember to prefix with 'Fixes' if it should close the issue (e.g. 'Fixes #123'). --> ## Checklist - [x] I think the code is well written - [ ] Unit tests for the changes exist - [ ] Documentation reflects the changes - [ ] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. - [ ] Add a new news fragment into the `CHANGES/` folder * name it `<issue_or_pr_num>.<type>.rst` (e.g. `588.bugfix.rst`) * if you don't have an issue number, change it to the pull request number after creating the PR * `.bugfix`: A bug fix for something the maintainers deemed an improper undesired behavior that got corrected to match pre-agreed expectations. * `.feature`: A new behavior, public APIs. That sort of stuff. * `.deprecation`: A declaration of future API removals and breaking changes in behavior. * `.breaking`: When something public is removed in a breaking way. Could be deprecated in an earlier release. * `.doc`: Notable updates to the documentation structure or build process. * `.packaging`: Notes for downstreams about unobvious side effects and tooling. 
Changes in the test invocation considerations and runtime assumptions. * `.contrib`: Stuff that affects the contributor experience. e.g. Running tests, building the docs, setting up the development environment. * `.misc`: Changes that are hard to assign to any of the above categories. * Make sure to use full sentences with correct case and punctuation, for example: ```rst Fixed issue with non-ascii contents in doctest text files -- by :user:`contributor-gh-handle`. ``` Use the past tense or the present tense a non-imperative mood, referring to what's changed compared to the last released version of this project. <img width="570" alt="Screenshot 2025-03-15 at 11 25 10 AM" src="https://github.com/user-attachments/assets/cabaaa7c-3a39-4f90-b450-a6a0559d22d6" /> Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/10552.misc.rst | 1 + aiohttp/helpers.py | 24 +++++++++++++++++++----- 2 files changed, 20 insertions(+), 5 deletions(-) create mode 100644 CHANGES/10552.misc.rst diff --git a/CHANGES/10552.misc.rst b/CHANGES/10552.misc.rst new file mode 100644 index 00000000000..6755cbf7396 --- /dev/null +++ b/CHANGES/10552.misc.rst @@ -0,0 +1 @@ +Improved performance of parsing content types by adding a cache in the same manner currently done with mime types -- by :user:`bdraco`. diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index 8038931ebec..ace4f0e9b53 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -21,7 +21,7 @@ from email.utils import parsedate from math import ceil from pathlib import Path -from types import TracebackType +from types import MappingProxyType, TracebackType from typing import ( Any, Callable, @@ -357,6 +357,20 @@ def parse_mimetype(mimetype: str) -> MimeType: ) +@functools.lru_cache(maxsize=56) +def parse_content_type(raw: str) -> Tuple[str, MappingProxyType[str, str]]: + """Parse Content-Type header. + + Returns a tuple of the parsed content type and a + MappingProxyType of parameters. 
+ """ + msg = HeaderParser().parsestr(f"Content-Type: {raw}") + content_type = msg.get_content_type() + params = msg.get_params(()) + content_dict = dict(params[1:]) # First element is content type again + return content_type, MappingProxyType(content_dict) + + def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]: name = getattr(obj, "name", None) if name and isinstance(name, str) and name[0] != "<" and name[-1] != ">": @@ -710,10 +724,10 @@ def _parse_content_type(self, raw: Optional[str]) -> None: self._content_type = "application/octet-stream" self._content_dict = {} else: - msg = HeaderParser().parsestr("Content-Type: " + raw) - self._content_type = msg.get_content_type() - params = msg.get_params(()) - self._content_dict = dict(params[1:]) # First element is content type again + content_type, content_mapping_proxy = parse_content_type(raw) + self._content_type = content_type + # _content_dict needs to be mutable so we can update it + self._content_dict = content_mapping_proxy.copy() @property def content_type(self) -> str: From a95e38f78ce424575ea4667e16139a9926c55155 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 15 Mar 2025 23:28:25 +0000 Subject: [PATCH 1254/1511] [PR #10552/44e669be backport][3.12] Cache parsing of the content-type (#10558) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **This is a backport of PR #10552 as merged into master (44e669be1ab1a60c40183f92f172670d912cb834).** <!-- Thank you for your contribution! --> ## What do these changes do? When profiling some frequent POST requests, I found the bulk of the time was spent parsing the content-type string. Use the same strategy as we do for `parse_mimetype` to cache the parsing. ## Are there changes in behavior for the user? performance improvement ## Is it a substantial burden for the maintainers to support this? 
no ## Related issue number <!-- Are there any issues opened that will be resolved by merging this change? --> <!-- Remember to prefix with 'Fixes' if it should close the issue (e.g. 'Fixes #123'). --> ## Checklist - [x] I think the code is well written - [ ] Unit tests for the changes exist - [ ] Documentation reflects the changes - [ ] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. - [ ] Add a new news fragment into the `CHANGES/` folder * name it `<issue_or_pr_num>.<type>.rst` (e.g. `588.bugfix.rst`) * if you don't have an issue number, change it to the pull request number after creating the PR * `.bugfix`: A bug fix for something the maintainers deemed an improper undesired behavior that got corrected to match pre-agreed expectations. * `.feature`: A new behavior, public APIs. That sort of stuff. * `.deprecation`: A declaration of future API removals and breaking changes in behavior. * `.breaking`: When something public is removed in a breaking way. Could be deprecated in an earlier release. * `.doc`: Notable updates to the documentation structure or build process. * `.packaging`: Notes for downstreams about unobvious side effects and tooling. Changes in the test invocation considerations and runtime assumptions. * `.contrib`: Stuff that affects the contributor experience. e.g. Running tests, building the docs, setting up the development environment. * `.misc`: Changes that are hard to assign to any of the above categories. * Make sure to use full sentences with correct case and punctuation, for example: ```rst Fixed issue with non-ascii contents in doctest text files -- by :user:`contributor-gh-handle`. ``` Use the past tense or the present tense a non-imperative mood, referring to what's changed compared to the last released version of this project. 
<img width="570" alt="Screenshot 2025-03-15 at 11 25 10 AM" src="https://github.com/user-attachments/assets/cabaaa7c-3a39-4f90-b450-a6a0559d22d6" /> Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/10552.misc.rst | 1 + aiohttp/helpers.py | 24 +++++++++++++++++++----- 2 files changed, 20 insertions(+), 5 deletions(-) create mode 100644 CHANGES/10552.misc.rst diff --git a/CHANGES/10552.misc.rst b/CHANGES/10552.misc.rst new file mode 100644 index 00000000000..6755cbf7396 --- /dev/null +++ b/CHANGES/10552.misc.rst @@ -0,0 +1 @@ +Improved performance of parsing content types by adding a cache in the same manner currently done with mime types -- by :user:`bdraco`. diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index 8038931ebec..ace4f0e9b53 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -21,7 +21,7 @@ from email.utils import parsedate from math import ceil from pathlib import Path -from types import TracebackType +from types import MappingProxyType, TracebackType from typing import ( Any, Callable, @@ -357,6 +357,20 @@ def parse_mimetype(mimetype: str) -> MimeType: ) +@functools.lru_cache(maxsize=56) +def parse_content_type(raw: str) -> Tuple[str, MappingProxyType[str, str]]: + """Parse Content-Type header. + + Returns a tuple of the parsed content type and a + MappingProxyType of parameters. 
+ """ + msg = HeaderParser().parsestr(f"Content-Type: {raw}") + content_type = msg.get_content_type() + params = msg.get_params(()) + content_dict = dict(params[1:]) # First element is content type again + return content_type, MappingProxyType(content_dict) + + def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]: name = getattr(obj, "name", None) if name and isinstance(name, str) and name[0] != "<" and name[-1] != ">": @@ -710,10 +724,10 @@ def _parse_content_type(self, raw: Optional[str]) -> None: self._content_type = "application/octet-stream" self._content_dict = {} else: - msg = HeaderParser().parsestr("Content-Type: " + raw) - self._content_type = msg.get_content_type() - params = msg.get_params(()) - self._content_dict = dict(params[1:]) # First element is content type again + content_type, content_mapping_proxy = parse_content_type(raw) + self._content_type = content_type + # _content_dict needs to be mutable so we can update it + self._content_dict = content_mapping_proxy.copy() @property def content_type(self) -> str: From e9f3f03144451d65eac6bf4cf782a7f8b89e820a Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 16 Mar 2025 00:02:33 +0000 Subject: [PATCH 1255/1511] [PR #10529/492f63dc backport][3.11] Fixed bug that lead to infinite wait for dns futures (#10559) **This is a backport of PR #10529 as merged into master (492f63dc252e76ce892e459081286fb46bde87cf).** <!-- Thank you for your contribution! --> ## What do these changes do? Fixed bug that lead to infinite wait for dns futures when exception occured in trace.send_dns_cache_miss call. ## Are there changes in behavior for the user? No ## Is it a substantial burden for the maintainers to support this? No ## Related issue number No issue. 
## Checklist - [x] I think the code is well written - [x] Unit tests for the changes exist - [x] Documentation reflects the changes - [x] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. - [x] Add a new news fragment into the `CHANGES/` folder * name it `<issue_or_pr_num>.<type>.rst` (e.g. `588.bugfix.rst`) * if you don't have an issue number, change it to the pull request number after creating the PR * `.bugfix`: A bug fix for something the maintainers deemed an improper undesired behavior that got corrected to match pre-agreed expectations. * `.feature`: A new behavior, public APIs. That sort of stuff. * `.deprecation`: A declaration of future API removals and breaking changes in behavior. * `.breaking`: When something public is removed in a breaking way. Could be deprecated in an earlier release. * `.doc`: Notable updates to the documentation structure or build process. * `.packaging`: Notes for downstreams about unobvious side effects and tooling. Changes in the test invocation considerations and runtime assumptions. * `.contrib`: Stuff that affects the contributor experience. e.g. Running tests, building the docs, setting up the development environment. * `.misc`: Changes that are hard to assign to any of the above categories. * Make sure to use full sentences with correct case and punctuation, for example: ```rst Fixed issue with non-ascii contents in doctest text files -- by :user:`contributor-gh-handle`. 
``` Co-authored-by: Alexey Stavrov <logioniz@ya.ru> --- CHANGES/10529.bugfix.rst | 2 ++ CONTRIBUTORS.txt | 1 + aiohttp/connector.py | 6 ++--- tests/test_connector.py | 55 ++++++++++++++++++++++++++++++++++++++++ 4 files changed, 61 insertions(+), 3 deletions(-) create mode 100644 CHANGES/10529.bugfix.rst diff --git a/CHANGES/10529.bugfix.rst b/CHANGES/10529.bugfix.rst new file mode 100644 index 00000000000..d6714ffd043 --- /dev/null +++ b/CHANGES/10529.bugfix.rst @@ -0,0 +1,2 @@ +Fixed an issue where dns queries were delayed indefinitely when an exception occurred in a ``trace.send_dns_cache_miss`` +-- by :user:`logioniz`. diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 1f0d1e7d2d7..953af52498a 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -31,6 +31,7 @@ Alexandru Mihai Alexey Firsov Alexey Nikitin Alexey Popravka +Alexey Stavrov Alexey Stepanov Amin Etesamian Amit Tulshyan diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 14433ba37e1..b3a918b3cee 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -1015,11 +1015,11 @@ async def _resolve_host_with_throttle( This method must be run in a task and shielded from cancellation to avoid cancelling the underlying lookup. 
""" - if traces: - for trace in traces: - await trace.send_dns_cache_miss(host) try: if traces: + for trace in traces: + await trace.send_dns_cache_miss(host) + for trace in traces: await trace.send_dns_resolvehost_start(host) diff --git a/tests/test_connector.py b/tests/test_connector.py index e79b36a673d..e8cc46c54d2 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -3497,6 +3497,61 @@ async def send_dns_cache_hit(self, *args: object, **kwargs: object) -> None: await connector.close() +async def test_connector_resolve_in_case_of_trace_cache_miss_exception( + loop: asyncio.AbstractEventLoop, +) -> None: + token: ResolveResult = { + "hostname": "localhost", + "host": "127.0.0.1", + "port": 80, + "family": socket.AF_INET, + "proto": 0, + "flags": socket.AI_NUMERICHOST, + } + + request_count = 0 + + class DummyTracer(Trace): + def __init__(self) -> None: + """Dummy""" + + async def send_dns_cache_hit(self, *args: object, **kwargs: object) -> None: + """Dummy send_dns_cache_hit""" + + async def send_dns_resolvehost_start( + self, *args: object, **kwargs: object + ) -> None: + """Dummy send_dns_resolvehost_start""" + + async def send_dns_resolvehost_end( + self, *args: object, **kwargs: object + ) -> None: + """Dummy send_dns_resolvehost_end""" + + async def send_dns_cache_miss(self, *args: object, **kwargs: object) -> None: + nonlocal request_count + request_count += 1 + if request_count <= 1: + raise Exception("first attempt") + + async def resolve_response() -> List[ResolveResult]: + await asyncio.sleep(0) + return [token] + + with mock.patch("aiohttp.connector.DefaultResolver") as m_resolver: + m_resolver().resolve.return_value = resolve_response() + + connector = TCPConnector() + traces = [DummyTracer()] + + with pytest.raises(Exception): + await connector._resolve_host("", 0, traces) + + await connector._resolve_host("", 0, traces) == [token] + + await connector.close() + + async def test_connector_does_not_remove_needed_waiters( loop: 
asyncio.AbstractEventLoop, key: ConnectionKey ) -> None: From 25c9ab8b727db02188500471719275755d7849cc Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 16 Mar 2025 00:06:40 +0000 Subject: [PATCH 1256/1511] [PR #10529/492f63dc backport][3.12] Fixed bug that lead to infinite wait for dns futures (#10560) **This is a backport of PR #10529 as merged into master (492f63dc252e76ce892e459081286fb46bde87cf).** <!-- Thank you for your contribution! --> ## What do these changes do? Fixed bug that lead to infinite wait for dns futures when exception occured in trace.send_dns_cache_miss call. ## Are there changes in behavior for the user? No ## Is it a substantial burden for the maintainers to support this? No ## Related issue number No issue. ## Checklist - [x] I think the code is well written - [x] Unit tests for the changes exist - [x] Documentation reflects the changes - [x] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. - [x] Add a new news fragment into the `CHANGES/` folder * name it `<issue_or_pr_num>.<type>.rst` (e.g. `588.bugfix.rst`) * if you don't have an issue number, change it to the pull request number after creating the PR * `.bugfix`: A bug fix for something the maintainers deemed an improper undesired behavior that got corrected to match pre-agreed expectations. * `.feature`: A new behavior, public APIs. That sort of stuff. * `.deprecation`: A declaration of future API removals and breaking changes in behavior. * `.breaking`: When something public is removed in a breaking way. Could be deprecated in an earlier release. * `.doc`: Notable updates to the documentation structure or build process. * `.packaging`: Notes for downstreams about unobvious side effects and tooling. Changes in the test invocation considerations and runtime assumptions. 
* `.contrib`: Stuff that affects the contributor experience. e.g. Running tests, building the docs, setting up the development environment. * `.misc`: Changes that are hard to assign to any of the above categories. * Make sure to use full sentences with correct case and punctuation, for example: ```rst Fixed issue with non-ascii contents in doctest text files -- by :user:`contributor-gh-handle`. ``` Co-authored-by: Alexey Stavrov <logioniz@ya.ru> --- CHANGES/10529.bugfix.rst | 2 ++ CONTRIBUTORS.txt | 1 + aiohttp/connector.py | 6 ++--- tests/test_connector.py | 55 ++++++++++++++++++++++++++++++++++++++++ 4 files changed, 61 insertions(+), 3 deletions(-) create mode 100644 CHANGES/10529.bugfix.rst diff --git a/CHANGES/10529.bugfix.rst b/CHANGES/10529.bugfix.rst new file mode 100644 index 00000000000..d6714ffd043 --- /dev/null +++ b/CHANGES/10529.bugfix.rst @@ -0,0 +1,2 @@ +Fixed an issue where dns queries were delayed indefinitely when an exception occurred in a ``trace.send_dns_cache_miss`` +-- by :user:`logioniz`. diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index fb5217e3e6b..4c44c5f4001 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -31,6 +31,7 @@ Alexandru Mihai Alexey Firsov Alexey Nikitin Alexey Popravka +Alexey Stavrov Alexey Stepanov Amin Etesamian Amit Tulshyan diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 75d5796f7d2..081ff330d38 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -1025,11 +1025,11 @@ async def _resolve_host_with_throttle( This method must be run in a task and shielded from cancellation to avoid cancelling the underlying lookup. 
""" - if traces: - for trace in traces: - await trace.send_dns_cache_miss(host) try: if traces: + for trace in traces: + await trace.send_dns_cache_miss(host) + for trace in traces: await trace.send_dns_resolvehost_start(host) diff --git a/tests/test_connector.py b/tests/test_connector.py index b7531361287..b199d9b5703 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -3497,6 +3497,61 @@ async def send_dns_cache_hit(self, *args: object, **kwargs: object) -> None: await connector.close() +async def test_connector_resolve_in_case_of_trace_cache_miss_exception( + loop: asyncio.AbstractEventLoop, +) -> None: + token: ResolveResult = { + "hostname": "localhost", + "host": "127.0.0.1", + "port": 80, + "family": socket.AF_INET, + "proto": 0, + "flags": socket.AI_NUMERICHOST, + } + + request_count = 0 + + class DummyTracer(Trace): + def __init__(self) -> None: + """Dummy""" + + async def send_dns_cache_hit(self, *args: object, **kwargs: object) -> None: + """Dummy send_dns_cache_hit""" + + async def send_dns_resolvehost_start( + self, *args: object, **kwargs: object + ) -> None: + """Dummy send_dns_resolvehost_start""" + + async def send_dns_resolvehost_end( + self, *args: object, **kwargs: object + ) -> None: + """Dummy send_dns_resolvehost_end""" + + async def send_dns_cache_miss(self, *args: object, **kwargs: object) -> None: + nonlocal request_count + request_count += 1 + if request_count <= 1: + raise Exception("first attempt") + + async def resolve_response() -> List[ResolveResult]: + await asyncio.sleep(0) + return [token] + + with mock.patch("aiohttp.connector.DefaultResolver") as m_resolver: + m_resolver().resolve.return_value = resolve_response() + + connector = TCPConnector() + traces = [DummyTracer()] + + with pytest.raises(Exception): + await connector._resolve_host("", 0, traces) + + await connector._resolve_host("", 0, traces) == [token] + + await connector.close() + + async def test_connector_does_not_remove_needed_waiters( loop: 
asyncio.AbstractEventLoop, key: ConnectionKey ) -> None: From 6357c055f543908dbaf167422231c09877e9b955 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 16 Mar 2025 00:45:45 +0000 Subject: [PATCH 1257/1511] [PR #10551/d067260d backport][3.11] Re-raise OSError as ClientConnectionError when failing to explicitly close connector socket (#10561) **This is a backport of PR #10551 as merged into master (d067260df75e4d04a23a0481cf9cf8f7194c80f1).** <!-- Thank you for your contribution! --> ## What do these changes do? This is a followup to #10464 to handle the case where `socket.close()` can also raise. This matches the logic we have in aiohappyeyeballs: https://github.com/aio-libs/aiohappyeyeballs/blob/e3bd5bdf44f5d187802de6dcb08d27e1ca6da048/src/aiohappyeyeballs/impl.py#L227 We shouldn't raising `OSError` externally from this method as callers expect a `ClientError` ## Are there changes in behavior for the user? bugfix ## Is it a substantial burden for the maintainers to support this? no ## Related issue number fixes #10506 ## Checklist - [x] I think the code is well written - [x] Unit tests for the changes exist - [x] Documentation reflects the changes - [x] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. - [x] Add a new news fragment into the `CHANGES/` folder * name it `<issue_or_pr_num>.<type>.rst` (e.g. `588.bugfix.rst`) * if you don't have an issue number, change it to the pull request number after creating the PR * `.bugfix`: A bug fix for something the maintainers deemed an improper undesired behavior that got corrected to match pre-agreed expectations. * `.feature`: A new behavior, public APIs. That sort of stuff. * `.deprecation`: A declaration of future API removals and breaking changes in behavior. * `.breaking`: When something public is removed in a breaking way. 
Could be deprecated in an earlier release. * `.doc`: Notable updates to the documentation structure or build process. * `.packaging`: Notes for downstreams about unobvious side effects and tooling. Changes in the test invocation considerations and runtime assumptions. * `.contrib`: Stuff that affects the contributor experience. e.g. Running tests, building the docs, setting up the development environment. * `.misc`: Changes that are hard to assign to any of the above categories. * Make sure to use full sentences with correct case and punctuation, for example: ```rst Fixed issue with non-ascii contents in doctest text files -- by :user:`contributor-gh-handle`. ``` Use the past tense or the present tense a non-imperative mood, referring to what's changed compared to the last released version of this project. Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/10551.bugfix.rst | 1 + aiohttp/connector.py | 5 ++++- tests/test_connector.py | 27 +++++++++++++++++++++++++++ 3 files changed, 32 insertions(+), 1 deletion(-) create mode 100644 CHANGES/10551.bugfix.rst diff --git a/CHANGES/10551.bugfix.rst b/CHANGES/10551.bugfix.rst new file mode 100644 index 00000000000..8f3eb24d6ae --- /dev/null +++ b/CHANGES/10551.bugfix.rst @@ -0,0 +1 @@ +The connector now raises :exc:`aiohttp.ClientConnectionError` instead of :exc:`OSError` when failing to explicitly close the socket after :py:meth:`asyncio.loop.create_connection` fails -- by :user:`bdraco`. diff --git a/aiohttp/connector.py b/aiohttp/connector.py index b3a918b3cee..e5cf3674cba 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -1138,7 +1138,10 @@ async def _wrap_create_connection( # Will be hit if an exception is thrown before the event loop takes the socket. # In that case, proactively close the socket to guard against event loop leaks. # For example, see https://github.com/MagicStack/uvloop/issues/653. 
- sock.close() + try: + sock.close() + except OSError as exc: + raise client_error(req.connection_key, exc) from exc async def _wrap_existing_connection( self, diff --git a/tests/test_connector.py b/tests/test_connector.py index e8cc46c54d2..a86a2417423 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -640,6 +640,33 @@ async def test_tcp_connector_closes_socket_on_error( await conn.close() +async def test_tcp_connector_closes_socket_on_error_results_in_another_error( + loop: asyncio.AbstractEventLoop, start_connection: mock.AsyncMock +) -> None: + """Test that when error occurs while closing the socket.""" + req = ClientRequest("GET", URL("https://127.0.0.1:443"), loop=loop) + start_connection.return_value.close.side_effect = OSError( + 1, "error from closing socket" + ) + + conn = aiohttp.TCPConnector() + with ( + mock.patch.object( + conn._loop, + "create_connection", + autospec=True, + spec_set=True, + side_effect=ValueError, + ), + pytest.raises(aiohttp.ClientConnectionError, match="error from closing socket"), + ): + await conn.connect(req, [], ClientTimeout()) + + assert start_connection.return_value.close.called + + await conn.close() + + async def test_tcp_connector_server_hostname_default( loop: Any, start_connection: mock.AsyncMock ) -> None: From a3c36ad3dd11bbf916cd673ca8c480db90361f12 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 16 Mar 2025 01:02:06 +0000 Subject: [PATCH 1258/1511] [PR #10551/d067260d backport][3.12] Re-raise OSError as ClientConnectionError when failing to explicitly close connector socket (#10562) **This is a backport of PR #10551 as merged into master (d067260df75e4d04a23a0481cf9cf8f7194c80f1).** <!-- Thank you for your contribution! --> ## What do these changes do? This is a followup to #10464 to handle the case where `socket.close()` can also raise. 
This matches the logic we have in aiohappyeyeballs: https://github.com/aio-libs/aiohappyeyeballs/blob/e3bd5bdf44f5d187802de6dcb08d27e1ca6da048/src/aiohappyeyeballs/impl.py#L227 We shouldn't raising `OSError` externally from this method as callers expect a `ClientError` ## Are there changes in behavior for the user? bugfix ## Is it a substantial burden for the maintainers to support this? no ## Related issue number fixes #10506 ## Checklist - [x] I think the code is well written - [x] Unit tests for the changes exist - [x] Documentation reflects the changes - [x] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. - [x] Add a new news fragment into the `CHANGES/` folder * name it `<issue_or_pr_num>.<type>.rst` (e.g. `588.bugfix.rst`) * if you don't have an issue number, change it to the pull request number after creating the PR * `.bugfix`: A bug fix for something the maintainers deemed an improper undesired behavior that got corrected to match pre-agreed expectations. * `.feature`: A new behavior, public APIs. That sort of stuff. * `.deprecation`: A declaration of future API removals and breaking changes in behavior. * `.breaking`: When something public is removed in a breaking way. Could be deprecated in an earlier release. * `.doc`: Notable updates to the documentation structure or build process. * `.packaging`: Notes for downstreams about unobvious side effects and tooling. Changes in the test invocation considerations and runtime assumptions. * `.contrib`: Stuff that affects the contributor experience. e.g. Running tests, building the docs, setting up the development environment. * `.misc`: Changes that are hard to assign to any of the above categories. * Make sure to use full sentences with correct case and punctuation, for example: ```rst Fixed issue with non-ascii contents in doctest text files -- by :user:`contributor-gh-handle`. 
``` Use the past tense or the present tense a non-imperative mood, referring to what's changed compared to the last released version of this project. Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/10551.bugfix.rst | 1 + aiohttp/connector.py | 5 ++++- tests/test_connector.py | 27 +++++++++++++++++++++++++++ 3 files changed, 32 insertions(+), 1 deletion(-) create mode 100644 CHANGES/10551.bugfix.rst diff --git a/CHANGES/10551.bugfix.rst b/CHANGES/10551.bugfix.rst new file mode 100644 index 00000000000..8f3eb24d6ae --- /dev/null +++ b/CHANGES/10551.bugfix.rst @@ -0,0 +1 @@ +The connector now raises :exc:`aiohttp.ClientConnectionError` instead of :exc:`OSError` when failing to explicitly close the socket after :py:meth:`asyncio.loop.create_connection` fails -- by :user:`bdraco`. diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 081ff330d38..de9062e8ae3 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -1150,7 +1150,10 @@ async def _wrap_create_connection( # Will be hit if an exception is thrown before the event loop takes the socket. # In that case, proactively close the socket to guard against event loop leaks. # For example, see https://github.com/MagicStack/uvloop/issues/653. 
- sock.close() + try: + sock.close() + except OSError as exc: + raise client_error(req.connection_key, exc) from exc async def _wrap_existing_connection( self, diff --git a/tests/test_connector.py b/tests/test_connector.py index b199d9b5703..2aaa50985a1 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -640,6 +640,33 @@ async def test_tcp_connector_closes_socket_on_error( await conn.close() +async def test_tcp_connector_closes_socket_on_error_results_in_another_error( + loop: asyncio.AbstractEventLoop, start_connection: mock.AsyncMock +) -> None: + """Test that when error occurs while closing the socket.""" + req = ClientRequest("GET", URL("https://127.0.0.1:443"), loop=loop) + start_connection.return_value.close.side_effect = OSError( + 1, "error from closing socket" + ) + + conn = aiohttp.TCPConnector() + with ( + mock.patch.object( + conn._loop, + "create_connection", + autospec=True, + spec_set=True, + side_effect=ValueError, + ), + pytest.raises(aiohttp.ClientConnectionError, match="error from closing socket"), + ): + await conn.connect(req, [], ClientTimeout()) + + assert start_connection.return_value.close.called + + await conn.close() + + async def test_tcp_connector_server_hostname_default( loop: Any, start_connection: mock.AsyncMock ) -> None: From 077e4fa7680f75d9928a333e3d838896a7027af0 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 16 Mar 2025 11:39:32 -1000 Subject: [PATCH 1259/1511] [PR #10556/9d4e1161 backport][3.12] Break cyclic references at connection close when there was a traceback (#10567) **This is a backport of PR #10556 as merged into master (9d4e11617a1e596926d11e483280183224fc7ee6).** <!-- Thank you for your contribution! --> ## What do these changes do? Clears the exception on the `DataQueue` and `WebSocketDataQueue` when the connection is closed to break cyclic references. ## Are there changes in behavior for the user? 
bugfix ## Is it a substantial burden for the maintainers to support this? no ## Related issue number fixes #10535 ## Checklist - [ ] I think the code is well written - [ ] Unit tests for the changes exist - [ ] Documentation reflects the changes - [ ] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. - [ ] Add a new news fragment into the `CHANGES/` folder * name it `<issue_or_pr_num>.<type>.rst` (e.g. `588.bugfix.rst`) * if you don't have an issue number, change it to the pull request number after creating the PR * `.bugfix`: A bug fix for something the maintainers deemed an improper undesired behavior that got corrected to match pre-agreed expectations. * `.feature`: A new behavior, public APIs. That sort of stuff. * `.deprecation`: A declaration of future API removals and breaking changes in behavior. * `.breaking`: When something public is removed in a breaking way. Could be deprecated in an earlier release. * `.doc`: Notable updates to the documentation structure or build process. * `.packaging`: Notes for downstreams about unobvious side effects and tooling. Changes in the test invocation considerations and runtime assumptions. * `.contrib`: Stuff that affects the contributor experience. e.g. Running tests, building the docs, setting up the development environment. * `.misc`: Changes that are hard to assign to any of the above categories. * Make sure to use full sentences with correct case and punctuation, for example: ```rst Fixed issue with non-ascii contents in doctest text files -- by :user:`contributor-gh-handle`. ``` Use the past tense or the present tense a non-imperative mood, referring to what's changed compared to the last released version of this project. Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/10556.bugfix.rst | 3 ++ aiohttp/_websocket/reader_py.py | 1 + aiohttp/client_proto.py | 1 + .../check_for_client_response_leak.py | 47 +++++++++++++++++++ tests/test_leaks.py | 25 ++++++++++ 5 files changed, 77 insertions(+) create mode 100644 CHANGES/10556.bugfix.rst create mode 100644 tests/isolated/check_for_client_response_leak.py create mode 100644 tests/test_leaks.py diff --git a/CHANGES/10556.bugfix.rst b/CHANGES/10556.bugfix.rst new file mode 100644 index 00000000000..aad4eccbe48 --- /dev/null +++ b/CHANGES/10556.bugfix.rst @@ -0,0 +1,3 @@ +Break cyclic references at connection close when there was a traceback -- by :user:`bdraco`. + +Special thanks to :user:`availov` for reporting the issue. diff --git a/aiohttp/_websocket/reader_py.py b/aiohttp/_websocket/reader_py.py index 94d20010890..1645b3949b1 100644 --- a/aiohttp/_websocket/reader_py.py +++ b/aiohttp/_websocket/reader_py.py @@ -93,6 +93,7 @@ def _release_waiter(self) -> None: def feed_eof(self) -> None: self._eof = True self._release_waiter() + self._exception = None # Break cyclic references def feed_data(self, data: "WSMessage", size: "int_") -> None: self._size += size diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index 79f033e3e12..2d64b3f3644 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -64,6 +64,7 @@ def force_close(self) -> None: self._should_close = True def close(self) -> None: + self._exception = None # Break cyclic references transport = self.transport if transport is not None: transport.close() diff --git a/tests/isolated/check_for_client_response_leak.py b/tests/isolated/check_for_client_response_leak.py new file mode 100644 index 00000000000..67393c2c2d8 --- /dev/null +++ b/tests/isolated/check_for_client_response_leak.py @@ -0,0 +1,47 @@ +import asyncio +import contextlib +import gc +import sys + +from aiohttp import ClientError, ClientSession, web +from aiohttp.test_utils import get_unused_port_socket 
+ +gc.set_debug(gc.DEBUG_LEAK) + + +async def main() -> None: + app = web.Application() + + async def stream_handler(request: web.Request) -> web.Response: + assert request.transport is not None + request.transport.close() # Forcefully closing connection + return web.Response() + + app.router.add_get("/stream", stream_handler) + sock = get_unused_port_socket("127.0.0.1") + port = sock.getsockname()[1] + + runner = web.AppRunner(app) + await runner.setup() + site = web.SockSite(runner, sock) + await site.start() + + session = ClientSession() + + async def fetch_stream(url: str) -> None: + """Fetch a stream and read a few bytes from it.""" + with contextlib.suppress(ClientError): + await session.get(url) + + client_task = asyncio.create_task(fetch_stream(f"http://localhost:{port}/stream")) + await client_task + gc.collect() + client_response_present = any( + type(obj).__name__ == "ClientResponse" for obj in gc.garbage + ) + await session.close() + await runner.cleanup() + sys.exit(1 if client_response_present else 0) + + +asyncio.run(main()) diff --git a/tests/test_leaks.py b/tests/test_leaks.py new file mode 100644 index 00000000000..a3b6b624346 --- /dev/null +++ b/tests/test_leaks.py @@ -0,0 +1,25 @@ +import pathlib +import platform +import subprocess +import sys + +import pytest + +IS_PYPY = platform.python_implementation() == "PyPy" + + +@pytest.mark.skipif(IS_PYPY, reason="gc.DEBUG_LEAK not available on PyPy") +def test_client_response_does_not_leak_on_server_disconnected_error() -> None: + """Test that ClientResponse is collected after server disconnects. 
+ + https://github.com/aio-libs/aiohttp/issues/10535 + """ + leak_test_script = pathlib.Path(__file__).parent.joinpath( + "isolated", "check_for_client_response_leak.py" + ) + + with subprocess.Popen( + [sys.executable, "-u", str(leak_test_script)], + stdout=subprocess.PIPE, + ) as proc: + assert proc.wait() == 0, "ClientResponse leaked" From 771d203472fa356c4f53dde30c5ae94212817838 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 16 Mar 2025 11:39:44 -1000 Subject: [PATCH 1260/1511] [PR #10556/9d4e1161 backport][3.11] Break cyclic references at connection close when there was a traceback (#10566) **This is a backport of PR #10556 as merged into master (9d4e11617a1e596926d11e483280183224fc7ee6).** <!-- Thank you for your contribution! --> ## What do these changes do? Clears the exception on the `DataQueue` and `WebSocketDataQueue` when the connection is closed to break cyclic references. ## Are there changes in behavior for the user? bugfix ## Is it a substantial burden for the maintainers to support this? no ## Related issue number fixes #10535 ## Checklist - [ ] I think the code is well written - [ ] Unit tests for the changes exist - [ ] Documentation reflects the changes - [ ] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. - [ ] Add a new news fragment into the `CHANGES/` folder * name it `<issue_or_pr_num>.<type>.rst` (e.g. `588.bugfix.rst`) * if you don't have an issue number, change it to the pull request number after creating the PR * `.bugfix`: A bug fix for something the maintainers deemed an improper undesired behavior that got corrected to match pre-agreed expectations. * `.feature`: A new behavior, public APIs. That sort of stuff. * `.deprecation`: A declaration of future API removals and breaking changes in behavior. 
* `.breaking`: When something public is removed in a breaking way. Could be deprecated in an earlier release. * `.doc`: Notable updates to the documentation structure or build process. * `.packaging`: Notes for downstreams about unobvious side effects and tooling. Changes in the test invocation considerations and runtime assumptions. * `.contrib`: Stuff that affects the contributor experience. e.g. Running tests, building the docs, setting up the development environment. * `.misc`: Changes that are hard to assign to any of the above categories. * Make sure to use full sentences with correct case and punctuation, for example: ```rst Fixed issue with non-ascii contents in doctest text files -- by :user:`contributor-gh-handle`. ``` Use the past tense or the present tense a non-imperative mood, referring to what's changed compared to the last released version of this project. Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/10556.bugfix.rst | 3 ++ aiohttp/_websocket/reader_py.py | 1 + aiohttp/client_proto.py | 1 + .../check_for_client_response_leak.py | 47 +++++++++++++++++++ tests/test_leaks.py | 25 ++++++++++ 5 files changed, 77 insertions(+) create mode 100644 CHANGES/10556.bugfix.rst create mode 100644 tests/isolated/check_for_client_response_leak.py create mode 100644 tests/test_leaks.py diff --git a/CHANGES/10556.bugfix.rst b/CHANGES/10556.bugfix.rst new file mode 100644 index 00000000000..aad4eccbe48 --- /dev/null +++ b/CHANGES/10556.bugfix.rst @@ -0,0 +1,3 @@ +Break cyclic references at connection close when there was a traceback -- by :user:`bdraco`. + +Special thanks to :user:`availov` for reporting the issue. 
diff --git a/aiohttp/_websocket/reader_py.py b/aiohttp/_websocket/reader_py.py index 94d20010890..1645b3949b1 100644 --- a/aiohttp/_websocket/reader_py.py +++ b/aiohttp/_websocket/reader_py.py @@ -93,6 +93,7 @@ def _release_waiter(self) -> None: def feed_eof(self) -> None: self._eof = True self._release_waiter() + self._exception = None # Break cyclic references def feed_data(self, data: "WSMessage", size: "int_") -> None: self._size += size diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index 79f033e3e12..2d64b3f3644 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -64,6 +64,7 @@ def force_close(self) -> None: self._should_close = True def close(self) -> None: + self._exception = None # Break cyclic references transport = self.transport if transport is not None: transport.close() diff --git a/tests/isolated/check_for_client_response_leak.py b/tests/isolated/check_for_client_response_leak.py new file mode 100644 index 00000000000..67393c2c2d8 --- /dev/null +++ b/tests/isolated/check_for_client_response_leak.py @@ -0,0 +1,47 @@ +import asyncio +import contextlib +import gc +import sys + +from aiohttp import ClientError, ClientSession, web +from aiohttp.test_utils import get_unused_port_socket + +gc.set_debug(gc.DEBUG_LEAK) + + +async def main() -> None: + app = web.Application() + + async def stream_handler(request: web.Request) -> web.Response: + assert request.transport is not None + request.transport.close() # Forcefully closing connection + return web.Response() + + app.router.add_get("/stream", stream_handler) + sock = get_unused_port_socket("127.0.0.1") + port = sock.getsockname()[1] + + runner = web.AppRunner(app) + await runner.setup() + site = web.SockSite(runner, sock) + await site.start() + + session = ClientSession() + + async def fetch_stream(url: str) -> None: + """Fetch a stream and read a few bytes from it.""" + with contextlib.suppress(ClientError): + await session.get(url) + + client_task = 
asyncio.create_task(fetch_stream(f"http://localhost:{port}/stream")) + await client_task + gc.collect() + client_response_present = any( + type(obj).__name__ == "ClientResponse" for obj in gc.garbage + ) + await session.close() + await runner.cleanup() + sys.exit(1 if client_response_present else 0) + + +asyncio.run(main()) diff --git a/tests/test_leaks.py b/tests/test_leaks.py new file mode 100644 index 00000000000..a3b6b624346 --- /dev/null +++ b/tests/test_leaks.py @@ -0,0 +1,25 @@ +import pathlib +import platform +import subprocess +import sys + +import pytest + +IS_PYPY = platform.python_implementation() == "PyPy" + + +@pytest.mark.skipif(IS_PYPY, reason="gc.DEBUG_LEAK not available on PyPy") +def test_client_response_does_not_leak_on_server_disconnected_error() -> None: + """Test that ClientResponse is collected after server disconnects. + + https://github.com/aio-libs/aiohttp/issues/10535 + """ + leak_test_script = pathlib.Path(__file__).parent.joinpath( + "isolated", "check_for_client_response_leak.py" + ) + + with subprocess.Popen( + [sys.executable, "-u", str(leak_test_script)], + stdout=subprocess.PIPE, + ) as proc: + assert proc.wait() == 0, "ClientResponse leaked" From 6ae2570bf2b53ff07d0fac2bbf1ffe81837ef415 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 16 Mar 2025 22:11:37 +0000 Subject: [PATCH 1261/1511] [PR #10569/dfbf782b backport][3.11] Break cyclic references when there is an exception handling a request (#10571) **This is a backport of PR #10569 as merged into master (dfbf782ba4ea3eabfe052e6224727cf83efdffb5).** <!-- Thank you for your contribution! --> ## What do these changes do? This is a partial fix for #10548 - There is still another case for `SystemRoute`s that needs to be addressed. No reproducer available yet. 
- There is also another case on the client side on connection refused that still needs to be addressed https://github.com/aio-libs/aiohttp/issues/10548#issuecomment-2727643763 ## Are there changes in behavior for the user? fixes memory leak ## Is it a substantial burden for the maintainers to support this? no Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/10569.bugfix.rst | 1 + aiohttp/web_protocol.py | 14 ++++++-- tests/isolated/check_for_request_leak.py | 41 ++++++++++++++++++++++++ tests/test_leaks.py | 17 ++++++++++ 4 files changed, 70 insertions(+), 3 deletions(-) create mode 100644 CHANGES/10569.bugfix.rst create mode 100644 tests/isolated/check_for_request_leak.py diff --git a/CHANGES/10569.bugfix.rst b/CHANGES/10569.bugfix.rst new file mode 100644 index 00000000000..7d817e867d4 --- /dev/null +++ b/CHANGES/10569.bugfix.rst @@ -0,0 +1 @@ +Break cyclic references when there is an exception handling a request -- by :user:`bdraco`. diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index e4c347e5a9e..1dba9606ea0 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -520,8 +520,6 @@ async def start(self) -> None: keep_alive(True) specified. """ loop = self._loop - handler = asyncio.current_task(loop) - assert handler is not None manager = self._manager assert manager is not None keepalive_timeout = self._keepalive_timeout @@ -551,7 +549,16 @@ async def start(self) -> None: else: request_handler = self._request_handler - request = self._request_factory(message, payload, self, writer, handler) + # Important don't hold a reference to the current task + # as on traceback it will prevent the task from being + # collected and will cause a memory leak. 
+ request = self._request_factory( + message, + payload, + self, + writer, + self._task_handler or asyncio.current_task(loop), # type: ignore[arg-type] + ) try: # a new task is used for copy context vars (#3406) coro = self._handle_request(request, start, request_handler) @@ -617,6 +624,7 @@ async def start(self) -> None: self.force_close() raise finally: + request._task = None # type: ignore[assignment] # Break reference cycle in case of exception if self.transport is None and resp is not None: self.log_debug("Ignored premature client disconnection.") diff --git a/tests/isolated/check_for_request_leak.py b/tests/isolated/check_for_request_leak.py new file mode 100644 index 00000000000..6f340a05277 --- /dev/null +++ b/tests/isolated/check_for_request_leak.py @@ -0,0 +1,41 @@ +import asyncio +import gc +import sys +from typing import NoReturn + +from aiohttp import ClientSession, web +from aiohttp.test_utils import get_unused_port_socket + +gc.set_debug(gc.DEBUG_LEAK) + + +async def main() -> None: + app = web.Application() + + async def handler(request: web.Request) -> NoReturn: + await request.json() + assert False + + app.router.add_route("GET", "/json", handler) + sock = get_unused_port_socket("127.0.0.1") + port = sock.getsockname()[1] + + runner = web.AppRunner(app) + await runner.setup() + site = web.SockSite(runner, sock) + await site.start() + + async with ClientSession() as session: + async with session.get(f"http://127.0.0.1:{port}/json") as resp: + await resp.read() + + # Give time for the cancelled task to be collected + await asyncio.sleep(0.5) + gc.collect() + request_present = any(type(obj).__name__ == "Request" for obj in gc.garbage) + await session.close() + await runner.cleanup() + sys.exit(1 if request_present else 0) + + +asyncio.run(main()) diff --git a/tests/test_leaks.py b/tests/test_leaks.py index a3b6b624346..f527ce18cae 100644 --- a/tests/test_leaks.py +++ b/tests/test_leaks.py @@ -23,3 +23,20 @@ def 
test_client_response_does_not_leak_on_server_disconnected_error() -> None: stdout=subprocess.PIPE, ) as proc: assert proc.wait() == 0, "ClientResponse leaked" + + +@pytest.mark.skipif(IS_PYPY, reason="gc.DEBUG_LEAK not available on PyPy") +def test_request_does_not_leak_when_request_handler_raises() -> None: + """Test that the Request object is collected when the handler raises. + + https://github.com/aio-libs/aiohttp/issues/10548 + """ + leak_test_script = pathlib.Path(__file__).parent.joinpath( + "isolated", "check_for_request_leak.py" + ) + + with subprocess.Popen( + [sys.executable, "-u", str(leak_test_script)], + stdout=subprocess.PIPE, + ) as proc: + assert proc.wait() == 0, "Request leaked" From 833792744a06ef725cc06804cc9a06582c6116cd Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 16 Mar 2025 12:26:18 -1000 Subject: [PATCH 1262/1511] [PR #10569/dfbf782b backport][3.12] Break cyclic references when there is an exception handling a request (#10572) **This is a backport of PR #10569 as merged into master (dfbf782ba4ea3eabfe052e6224727cf83efdffb5).** <!-- Thank you for your contribution! --> ## What do these changes do? fixes #10548 ## Are there changes in behavior for the user? fixes a potential memory leak ## Is it a substantial burden for the maintainers to support this? no Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/10569.bugfix.rst | 1 + aiohttp/web_protocol.py | 14 ++++++-- tests/isolated/check_for_request_leak.py | 41 ++++++++++++++++++++++++ tests/test_leaks.py | 17 ++++++++++ 4 files changed, 70 insertions(+), 3 deletions(-) create mode 100644 CHANGES/10569.bugfix.rst create mode 100644 tests/isolated/check_for_request_leak.py diff --git a/CHANGES/10569.bugfix.rst b/CHANGES/10569.bugfix.rst new file mode 100644 index 00000000000..7d817e867d4 --- /dev/null +++ b/CHANGES/10569.bugfix.rst @@ -0,0 +1 @@ +Break cyclic references when there is an exception handling a request -- by :user:`bdraco`. diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index e4c347e5a9e..1dba9606ea0 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -520,8 +520,6 @@ async def start(self) -> None: keep_alive(True) specified. """ loop = self._loop - handler = asyncio.current_task(loop) - assert handler is not None manager = self._manager assert manager is not None keepalive_timeout = self._keepalive_timeout @@ -551,7 +549,16 @@ async def start(self) -> None: else: request_handler = self._request_handler - request = self._request_factory(message, payload, self, writer, handler) + # Important don't hold a reference to the current task + # as on traceback it will prevent the task from being + # collected and will cause a memory leak. 
+ request = self._request_factory( + message, + payload, + self, + writer, + self._task_handler or asyncio.current_task(loop), # type: ignore[arg-type] + ) try: # a new task is used for copy context vars (#3406) coro = self._handle_request(request, start, request_handler) @@ -617,6 +624,7 @@ async def start(self) -> None: self.force_close() raise finally: + request._task = None # type: ignore[assignment] # Break reference cycle in case of exception if self.transport is None and resp is not None: self.log_debug("Ignored premature client disconnection.") diff --git a/tests/isolated/check_for_request_leak.py b/tests/isolated/check_for_request_leak.py new file mode 100644 index 00000000000..6f340a05277 --- /dev/null +++ b/tests/isolated/check_for_request_leak.py @@ -0,0 +1,41 @@ +import asyncio +import gc +import sys +from typing import NoReturn + +from aiohttp import ClientSession, web +from aiohttp.test_utils import get_unused_port_socket + +gc.set_debug(gc.DEBUG_LEAK) + + +async def main() -> None: + app = web.Application() + + async def handler(request: web.Request) -> NoReturn: + await request.json() + assert False + + app.router.add_route("GET", "/json", handler) + sock = get_unused_port_socket("127.0.0.1") + port = sock.getsockname()[1] + + runner = web.AppRunner(app) + await runner.setup() + site = web.SockSite(runner, sock) + await site.start() + + async with ClientSession() as session: + async with session.get(f"http://127.0.0.1:{port}/json") as resp: + await resp.read() + + # Give time for the cancelled task to be collected + await asyncio.sleep(0.5) + gc.collect() + request_present = any(type(obj).__name__ == "Request" for obj in gc.garbage) + await session.close() + await runner.cleanup() + sys.exit(1 if request_present else 0) + + +asyncio.run(main()) diff --git a/tests/test_leaks.py b/tests/test_leaks.py index a3b6b624346..f527ce18cae 100644 --- a/tests/test_leaks.py +++ b/tests/test_leaks.py @@ -23,3 +23,20 @@ def 
test_client_response_does_not_leak_on_server_disconnected_error() -> None: stdout=subprocess.PIPE, ) as proc: assert proc.wait() == 0, "ClientResponse leaked" + + +@pytest.mark.skipif(IS_PYPY, reason="gc.DEBUG_LEAK not available on PyPy") +def test_request_does_not_leak_when_request_handler_raises() -> None: + """Test that the Request object is collected when the handler raises. + + https://github.com/aio-libs/aiohttp/issues/10548 + """ + leak_test_script = pathlib.Path(__file__).parent.joinpath( + "isolated", "check_for_request_leak.py" + ) + + with subprocess.Popen( + [sys.executable, "-u", str(leak_test_script)], + stdout=subprocess.PIPE, + ) as proc: + assert proc.wait() == 0, "Request leaked" From 240fb9e7b58225f58c62bfb27cbbbf577e2b0b52 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 16 Mar 2025 12:39:33 -1000 Subject: [PATCH 1263/1511] [PR #10534/3b9bb1cd backport][3.12] Replace tcp_sockopts with socket_factory (#10574) replaces and closes #10565 Instead of TCPConnector taking a list of sockopts to be applied sockets created, take a socket_factory callback that allows the caller to implement socket creation entirely. Fixes #10520 <!-- Thank you for your contribution! --> Replace `tcp_sockopts` parameter with a `socket_factory` parameter that is a callback allowing the caller to own socket creation. If passed, all sockets created by `TCPConnector` are expected to come from the `socket_factory` callback. <!-- Please give a short brief about these changes. --> The only users to experience a change in behavior are those who are using the un-released `tcp_sockopts` argument to `TCPConnector`. However, using unreleased code comes with caveat emptor, and is why I felt entitled to remove the option entirely without warning. <!-- Outline any notable behaviour for the end users. --> The burden will be minimal and would only arise if `aiohappyeyeballs` changes their interface. <!-- Stop right there! Pause. Just for a minute... 
Can you think of anything obvious that would complicate the ongoing development of this project? Try to consider if you'd be able to maintain it throughout the next 5 years. Does it seem viable? Tell us your thoughts! We'd very much love to hear what the consequences of merging this patch might be... This will help us assess if your change is something we'd want to entertain early in the review process. Thank you in advance! --> <!-- Are there any issues opened that will be resolved by merging this change? --> <!-- Remember to prefix with 'Fixes' if it should close the issue (e.g. 'Fixes #123'). --> - [x] I think the code is well written - [x] Unit tests for the changes exist - [x] Documentation reflects the changes - [x] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. - [x] Add a new news fragment into the `CHANGES/` folder * name it `<issue_or_pr_num>.<type>.rst` (e.g. `588.bugfix.rst`) * if you don't have an issue number, change it to the pull request number after creating the PR * `.bugfix`: A bug fix for something the maintainers deemed an improper undesired behavior that got corrected to match pre-agreed expectations. * `.feature`: A new behavior, public APIs. That sort of stuff. * `.deprecation`: A declaration of future API removals and breaking changes in behavior. * `.breaking`: When something public is removed in a breaking way. Could be deprecated in an earlier release. * `.doc`: Notable updates to the documentation structure or build process. * `.packaging`: Notes for downstreams about unobvious side effects and tooling. Changes in the test invocation considerations and runtime assumptions. * `.contrib`: Stuff that affects the contributor experience. e.g. Running tests, building the docs, setting up the development environment. * `.misc`: Changes that are hard to assign to any of the above categories. 
* Make sure to use full sentences with correct case and punctuation, for example: ```rst Fixed issue with non-ascii contents in doctest text files -- by :user:`contributor-gh-handle`. ``` Use the past tense or the present tense a non-imperative mood, referring to what's changed compared to the last released version of this project. --------- Co-authored-by: J. Nick Koston <nick@koston.org> (cherry picked from commit 3b9bb1cd5677a8c8443d16184ed36856ae105cd7) <!-- Thank you for your contribution! --> ## What do these changes do? <!-- Please give a short brief about these changes. --> ## Are there changes in behavior for the user? <!-- Outline any notable behaviour for the end users. --> ## Is it a substantial burden for the maintainers to support this? <!-- Stop right there! Pause. Just for a minute... Can you think of anything obvious that would complicate the ongoing development of this project? Try to consider if you'd be able to maintain it throughout the next 5 years. Does it seem viable? Tell us your thoughts! We'd very much love to hear what the consequences of merging this patch might be... This will help us assess if your change is something we'd want to entertain early in the review process. Thank you in advance! --> ## Related issue number <!-- Are there any issues opened that will be resolved by merging this change? --> <!-- Remember to prefix with 'Fixes' if it should close the issue (e.g. 'Fixes #123'). --> ## Checklist - [ ] I think the code is well written - [ ] Unit tests for the changes exist - [ ] Documentation reflects the changes - [ ] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. - [ ] Add a new news fragment into the `CHANGES/` folder * name it `<issue_or_pr_num>.<type>.rst` (e.g. 
`588.bugfix.rst`) * if you don't have an issue number, change it to the pull request number after creating the PR * `.bugfix`: A bug fix for something the maintainers deemed an improper undesired behavior that got corrected to match pre-agreed expectations. * `.feature`: A new behavior, public APIs. That sort of stuff. * `.deprecation`: A declaration of future API removals and breaking changes in behavior. * `.breaking`: When something public is removed in a breaking way. Could be deprecated in an earlier release. * `.doc`: Notable updates to the documentation structure or build process. * `.packaging`: Notes for downstreams about unobvious side effects and tooling. Changes in the test invocation considerations and runtime assumptions. * `.contrib`: Stuff that affects the contributor experience. e.g. Running tests, building the docs, setting up the development environment. * `.misc`: Changes that are hard to assign to any of the above categories. * Make sure to use full sentences with correct case and punctuation, for example: ```rst Fixed issue with non-ascii contents in doctest text files -- by :user:`contributor-gh-handle`. ``` Use the past tense or the present tense a non-imperative mood, referring to what's changed compared to the last released version of this project. 
Co-authored-by: Tim Menninger <tmenninger22@gmail.com> --- CHANGES/10474.feature.rst | 2 -- CHANGES/10520.feature.rst | 2 ++ aiohttp/__init__.py | 6 ++++ aiohttp/connector.py | 29 +++++++++------ docs/client_advanced.rst | 23 +++++++----- docs/client_reference.rst | 36 ++++++++++++++++--- docs/conf.py | 3 ++ requirements/runtime-deps.in | 2 +- setup.cfg | 2 +- tests/test_connector.py | 69 ++++++++++++++++++++++++++---------- 10 files changed, 128 insertions(+), 46 deletions(-) delete mode 100644 CHANGES/10474.feature.rst create mode 100644 CHANGES/10520.feature.rst diff --git a/CHANGES/10474.feature.rst b/CHANGES/10474.feature.rst deleted file mode 100644 index d5d6e4b40b9..00000000000 --- a/CHANGES/10474.feature.rst +++ /dev/null @@ -1,2 +0,0 @@ -Added ``tcp_sockopts`` to ``TCPConnector`` to allow specifying custom socket options --- by :user:`TimMenninger`. diff --git a/CHANGES/10520.feature.rst b/CHANGES/10520.feature.rst new file mode 100644 index 00000000000..3d2877b5c09 --- /dev/null +++ b/CHANGES/10520.feature.rst @@ -0,0 +1,2 @@ +Added ``socket_factory`` to :py:class:`aiohttp.TCPConnector` to allow specifying custom socket options +-- by :user:`TimMenninger`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 49eaf4541de..66645143fc9 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -47,6 +47,10 @@ WSServerHandshakeError, request, ) +from .connector import ( + AddrInfoType as AddrInfoType, + SocketFactoryType as SocketFactoryType, +) from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar from .formdata import FormData as FormData from .helpers import BasicAuth, ChainMapProxy, ETag @@ -126,6 +130,7 @@ __all__: Tuple[str, ...] 
= ( "hdrs", # client + "AddrInfoType", "BaseConnector", "ClientConnectionError", "ClientConnectionResetError", @@ -161,6 +166,7 @@ "ServerDisconnectedError", "ServerFingerprintMismatch", "ServerTimeoutError", + "SocketFactoryType", "SocketTimeoutError", "TCPConnector", "TooManyRedirects", diff --git a/aiohttp/connector.py b/aiohttp/connector.py index de9062e8ae3..1c2d8d73e07 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -19,7 +19,6 @@ DefaultDict, Deque, Dict, - Iterable, Iterator, List, Literal, @@ -33,6 +32,7 @@ ) import aiohappyeyeballs +from aiohappyeyeballs import AddrInfoType, SocketFactoryType from . import hdrs, helpers from .abc import AbstractResolver, ResolveResult @@ -95,7 +95,14 @@ # which first appeared in Python 3.12.7 and 3.13.1 -__all__ = ("BaseConnector", "TCPConnector", "UnixConnector", "NamedPipeConnector") +__all__ = ( + "BaseConnector", + "TCPConnector", + "UnixConnector", + "NamedPipeConnector", + "AddrInfoType", + "SocketFactoryType", +) if TYPE_CHECKING: @@ -834,8 +841,9 @@ class TCPConnector(BaseConnector): the happy eyeballs algorithm, set to None. interleave - “First Address Family Count” as defined in RFC 8305 loop - Optional event loop. - tcp_sockopts - List of tuples of sockopts applied to underlying - socket + socket_factory - A SocketFactoryType function that, if supplied, + will be used to create sockets given an + AddrInfoType. 
""" allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"tcp"}) @@ -861,7 +869,7 @@ def __init__( timeout_ceil_threshold: float = 5, happy_eyeballs_delay: Optional[float] = 0.25, interleave: Optional[int] = None, - tcp_sockopts: Iterable[Tuple[int, int, Union[int, Buffer]]] = [], + socket_factory: Optional[SocketFactoryType] = None, ): super().__init__( keepalive_timeout=keepalive_timeout, @@ -888,7 +896,7 @@ def __init__( self._happy_eyeballs_delay = happy_eyeballs_delay self._interleave = interleave self._resolve_host_tasks: Set["asyncio.Task[List[ResolveResult]]"] = set() - self._tcp_sockopts = tcp_sockopts + self._socket_factory = socket_factory def close(self) -> Awaitable[None]: """Close all ongoing DNS calls.""" @@ -1112,7 +1120,7 @@ def _get_fingerprint(self, req: ClientRequest) -> Optional["Fingerprint"]: async def _wrap_create_connection( self, *args: Any, - addr_infos: List[aiohappyeyeballs.AddrInfoType], + addr_infos: List[AddrInfoType], req: ClientRequest, timeout: "ClientTimeout", client_error: Type[Exception] = ClientConnectorError, @@ -1129,9 +1137,8 @@ async def _wrap_create_connection( happy_eyeballs_delay=self._happy_eyeballs_delay, interleave=self._interleave, loop=self._loop, + socket_factory=self._socket_factory, ) - for sockopt in self._tcp_sockopts: - sock.setsockopt(*sockopt) connection = await self._loop.create_connection( *args, **kwargs, sock=sock ) @@ -1331,13 +1338,13 @@ async def _start_tls_connection( def _convert_hosts_to_addr_infos( self, hosts: List[ResolveResult] - ) -> List[aiohappyeyeballs.AddrInfoType]: + ) -> List[AddrInfoType]: """Converts the list of hosts to a list of addr_infos. The list of hosts is the result of a DNS lookup. The list of addr_infos is the result of a call to `socket.getaddrinfo()`. 
""" - addr_infos: List[aiohappyeyeballs.AddrInfoType] = [] + addr_infos: List[AddrInfoType] = [] for hinfo in hosts: host = hinfo["host"] is_ipv6 = ":" in host diff --git a/docs/client_advanced.rst b/docs/client_advanced.rst index eeb0ee98574..1116e0bdc45 100644 --- a/docs/client_advanced.rst +++ b/docs/client_advanced.rst @@ -461,19 +461,26 @@ If your HTTP server uses UNIX domain sockets you can use session = aiohttp.ClientSession(connector=conn) -Setting socket options +Custom socket creation ^^^^^^^^^^^^^^^^^^^^^^ -Socket options passed to the :class:`~aiohttp.TCPConnector` will be passed -to the underlying socket when creating a connection. For example, we may -want to change the conditions under which we consider a connection dead. -The following would change that to 9*7200 = 18 hours:: +If the default socket is insufficient for your use case, pass an optional +`socket_factory` to the :class:`~aiohttp.TCPConnector`, which implements +`SocketFactoryType`. This will be used to create all sockets for the +lifetime of the class object. For example, we may want to change the +conditions under which we consider a connection dead. 
The following would +make all sockets respect 9*7200 = 18 hours:: import socket - conn = aiohttp.TCPConnector(tcp_sockopts=[(socket.SOL_SOCKET, socket.SO_KEEPALIVE, True), - (socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 7200), - (socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 9) ]) + def socket_factory(addr_info): + family, type_, proto, _, _, _ = addr_info + sock = socket.socket(family=family, type=type_, proto=proto) + sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, True) + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 7200) + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 9) + return sock + conn = aiohttp.TCPConnector(socket_factory=socket_factory) Named pipes in Windows diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 1e49b014007..42b45e589ff 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -1138,6 +1138,34 @@ is controlled by *force_close* constructor's parameter). overridden in subclasses. +.. autodata:: AddrInfoType + +.. note:: + + Refer to :py:data:`aiohappyeyeballs.AddrInfoType` for more info. + +.. warning:: + + Be sure to use ``aiohttp.AddrInfoType`` rather than + ``aiohappyeyeballs.AddrInfoType`` to avoid import breakage, as + it is likely to be removed from ``aiohappyeyeballs`` in the + future. + + +.. autodata:: SocketFactoryType + +.. note:: + + Refer to :py:data:`aiohappyeyeballs.SocketFactoryType` for more info. + +.. warning:: + + Be sure to use ``aiohttp.SocketFactoryType`` rather than + ``aiohappyeyeballs.SocketFactoryType`` to avoid import breakage, + as it is likely to be removed from ``aiohappyeyeballs`` in the + future. + + .. class:: TCPConnector(*, ssl=True, verify_ssl=True, fingerprint=None, \ use_dns_cache=True, ttl_dns_cache=10, \ family=0, ssl_context=None, local_addr=None, \ @@ -1145,7 +1173,7 @@ is controlled by *force_close* constructor's parameter). 
force_close=False, limit=100, limit_per_host=0, \ enable_cleanup_closed=False, timeout_ceil_threshold=5, \ happy_eyeballs_delay=0.25, interleave=None, loop=None, \ - tcp_sockopts=[]) + socket_factory=None) Connector for working with *HTTP* and *HTTPS* via *TCP* sockets. @@ -1266,9 +1294,9 @@ is controlled by *force_close* constructor's parameter). .. versionadded:: 3.10 - :param list tcp_sockopts: options applied to the socket when a connection is - created. This should be a list of 3-tuples, each a ``(level, optname, value)``. - Each tuple is deconstructed and passed verbatim to ``<socket>.setsockopt``. + :param :py:data:``SocketFactoryType`` socket_factory: This function takes an + :py:data:``AddrInfoType`` and is used in lieu of ``socket.socket()`` when + creating TCP connections. .. versionadded:: 3.12 diff --git a/docs/conf.py b/docs/conf.py index f60c8ffcf8c..dcab6acf247 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -54,6 +54,7 @@ # ones. extensions = [ # stdlib-party extensions: + "sphinx.ext.autodoc", "sphinx.ext.extlinks", "sphinx.ext.graphviz", "sphinx.ext.intersphinx", @@ -83,6 +84,7 @@ "aiohttpsession": ("https://aiohttp-session.readthedocs.io/en/stable/", None), "aiohttpdemos": ("https://aiohttp-demos.readthedocs.io/en/latest/", None), "aiojobs": ("https://aiojobs.readthedocs.io/en/stable/", None), + "aiohappyeyeballs": ("https://aiohappyeyeballs.readthedocs.io/en/stable/", None), } # Add any paths that contain templates here, relative to this directory. 
@@ -441,6 +443,7 @@ ("py:exc", "HTTPMethodNotAllowed"), # undocumented ("py:class", "HTTPMethodNotAllowed"), # undocumented ("py:class", "HTTPUnavailableForLegalReasons"), # undocumented + ("py:class", "socket.SocketKind"), # undocumented ] # -- Options for towncrier_draft extension ----------------------------------- diff --git a/requirements/runtime-deps.in b/requirements/runtime-deps.in index 50c6e41f9e4..425abdc85f6 100644 --- a/requirements/runtime-deps.in +++ b/requirements/runtime-deps.in @@ -1,7 +1,7 @@ # Extracted from `setup.cfg` via `make sync-direct-runtime-deps` aiodns >= 3.2.0; sys_platform=="linux" or sys_platform=="darwin" -aiohappyeyeballs >= 2.3.0 +aiohappyeyeballs >= 2.5.0 aiosignal >= 1.1.2 async-timeout >= 4.0, < 6.0 ; python_version < "3.11" attrs >= 17.3.0 diff --git a/setup.cfg b/setup.cfg index a2b9e3b29e1..9da34e0b5ce 100644 --- a/setup.cfg +++ b/setup.cfg @@ -51,7 +51,7 @@ zip_safe = False include_package_data = True install_requires = - aiohappyeyeballs >= 2.3.0 + aiohappyeyeballs >= 2.5.0 aiosignal >= 1.1.2 async-timeout >= 4.0, < 6.0 ; python_version < "3.11" attrs >= 17.3.0 diff --git a/tests/test_connector.py b/tests/test_connector.py index 2aaa50985a1..f148fdf0bbe 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -10,11 +10,20 @@ from collections import defaultdict, deque from concurrent import futures from contextlib import closing, suppress -from typing import Any, DefaultDict, Deque, List, Literal, Optional, Sequence, Tuple +from typing import ( + Any, + Callable, + DefaultDict, + Deque, + List, + Literal, + Optional, + Sequence, + Tuple, +) from unittest import mock import pytest -from aiohappyeyeballs import AddrInfoType from pytest_mock import MockerFixture from yarl import URL @@ -26,6 +35,7 @@ from aiohttp.connector import ( _SSL_CONTEXT_UNVERIFIED, _SSL_CONTEXT_VERIFIED, + AddrInfoType, Connection, TCPConnector, _DNSCacheTable, @@ -3663,27 +3673,48 @@ def test_connect() -> Literal[True]: assert 
raw_response_list == [True, True] -async def test_tcp_connector_setsockopts( +async def test_tcp_connector_socket_factory( loop: asyncio.AbstractEventLoop, start_connection: mock.AsyncMock ) -> None: - """Check that sockopts get passed to socket""" - conn = aiohttp.TCPConnector( - tcp_sockopts=[(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 2)] - ) - - with mock.patch.object( - conn._loop, "create_connection", autospec=True, spec_set=True - ) as create_connection: - with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: - start_connection.return_value = s - create_connection.return_value = mock.Mock(), mock.Mock() - - req = ClientRequest("GET", URL("https://127.0.0.1:443"), loop=loop) + """Check that socket factory is called""" + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: + start_connection.return_value = s + + local_addr = None + socket_factory: Callable[[AddrInfoType], socket.socket] = lambda _: s + happy_eyeballs_delay = 0.123 + interleave = 3 + conn = aiohttp.TCPConnector( + interleave=interleave, + local_addr=local_addr, + happy_eyeballs_delay=happy_eyeballs_delay, + socket_factory=socket_factory, + ) + with mock.patch.object( + conn._loop, + "create_connection", + autospec=True, + spec_set=True, + return_value=(mock.Mock(), mock.Mock()), + ): + host = "127.0.0.1" + port = 443 + req = ClientRequest("GET", URL(f"https://{host}:{port}"), loop=loop) with closing(await conn.connect(req, [], ClientTimeout())): - assert s.getsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT) == 2 - - await conn.close() + pass + await conn.close() + + start_connection.assert_called_with( + addr_infos=[ + (socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP, "", (host, port)) + ], + local_addr_infos=local_addr, + happy_eyeballs_delay=happy_eyeballs_delay, + interleave=interleave, + loop=loop, + socket_factory=socket_factory, + ) def test_default_ssl_context_creation_without_ssl() -> None: From 4005080efe8b8f42c0eb367d3edc4086b9b127ed Mon Sep 17 00:00:00 2001 From: 
"patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 16 Mar 2025 22:49:25 +0000 Subject: [PATCH 1264/1511] [PR #10564/a59e74b7 backport][3.11] Log offending websocket client address when no protocols overlap (#10575) **This is a backport of PR #10564 as merged into master (a59e74b7b504375bc0ac3daf1dc1306d5d056d28).** <!-- Thank you for your contribution! --> ## What do these changes do? Logs the remote address of a WebSocket client that has no overlapping protocols ## Are there changes in behavior for the user? Which client has the problem should be a bit more discoverable ## Is it a substantial burden for the maintainers to support this? no ## Related issue number closes #10563 Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/10564.feature.rst | 1 + aiohttp/web_ws.py | 3 ++- tests/test_websocket_handshake.py | 2 +- 3 files changed, 4 insertions(+), 2 deletions(-) create mode 100644 CHANGES/10564.feature.rst diff --git a/CHANGES/10564.feature.rst b/CHANGES/10564.feature.rst new file mode 100644 index 00000000000..24e2ecad76d --- /dev/null +++ b/CHANGES/10564.feature.rst @@ -0,0 +1 @@ +Improved logging on non-overlapping WebSocket client protocols to include the remote address -- by :user:`bdraco`. 
diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index a448bca101e..439b8049987 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -252,7 +252,8 @@ def _handshake( else: # No overlap found: Return no protocol as per spec ws_logger.warning( - "Client protocols %r don’t overlap server-known ones %r", + "%s: Client protocols %r don’t overlap server-known ones %r", + request.remote, req_protocols, self._protocols, ) diff --git a/tests/test_websocket_handshake.py b/tests/test_websocket_handshake.py index bbfa1d9260d..53d5d9152bb 100644 --- a/tests/test_websocket_handshake.py +++ b/tests/test_websocket_handshake.py @@ -174,7 +174,7 @@ async def test_handshake_protocol_unsupported(caplog) -> None: assert ( caplog.records[-1].msg - == "Client protocols %r don’t overlap server-known ones %r" + == "%s: Client protocols %r don’t overlap server-known ones %r" ) assert ws.ws_protocol is None From 70036a9557cc52cb80d660219913921636145246 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 16 Mar 2025 23:09:38 +0000 Subject: [PATCH 1265/1511] [PR #10564/a59e74b7 backport][3.12] Log offending websocket client address when no protocols overlap (#10576) **This is a backport of PR #10564 as merged into master (a59e74b7b504375bc0ac3daf1dc1306d5d056d28).** <!-- Thank you for your contribution! --> ## What do these changes do? Logs the remote address of a WebSocket client that has no overlapping protocols ## Are there changes in behavior for the user? Which client has the problem should be a bit more discoverable ## Is it a substantial burden for the maintainers to support this? no ## Related issue number closes #10563 Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/10564.feature.rst | 1 + aiohttp/web_ws.py | 3 ++- tests/test_websocket_handshake.py | 2 +- 3 files changed, 4 insertions(+), 2 deletions(-) create mode 100644 CHANGES/10564.feature.rst diff --git a/CHANGES/10564.feature.rst b/CHANGES/10564.feature.rst new file mode 100644 index 00000000000..24e2ecad76d --- /dev/null +++ b/CHANGES/10564.feature.rst @@ -0,0 +1 @@ +Improved logging on non-overlapping WebSocket client protocols to include the remote address -- by :user:`bdraco`. diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index a448bca101e..439b8049987 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -252,7 +252,8 @@ def _handshake( else: # No overlap found: Return no protocol as per spec ws_logger.warning( - "Client protocols %r don’t overlap server-known ones %r", + "%s: Client protocols %r don’t overlap server-known ones %r", + request.remote, req_protocols, self._protocols, ) diff --git a/tests/test_websocket_handshake.py b/tests/test_websocket_handshake.py index bbfa1d9260d..53d5d9152bb 100644 --- a/tests/test_websocket_handshake.py +++ b/tests/test_websocket_handshake.py @@ -174,7 +174,7 @@ async def test_handshake_protocol_unsupported(caplog) -> None: assert ( caplog.records[-1].msg - == "Client protocols %r don’t overlap server-known ones %r" + == "%s: Client protocols %r don’t overlap server-known ones %r" ) assert ws.ws_protocol is None From 9396ef1547eb6908cc5212bb8768a9a1eae627f8 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 16 Mar 2025 23:31:18 +0000 Subject: [PATCH 1266/1511] [PR #10542/e1d2d77c backport][3.11] only use `AI_ADDRCONFIG` when supported by getaddrinfo (#10578) --- CHANGES/10542.bugfix | 1 + aiohttp/resolver.py | 7 +++++-- 2 files changed, 6 insertions(+), 2 deletions(-) create mode 100644 CHANGES/10542.bugfix diff --git a/CHANGES/10542.bugfix b/CHANGES/10542.bugfix new file mode 100644 index 00000000000..fc3192308ad --- 
/dev/null +++ b/CHANGES/10542.bugfix @@ -0,0 +1 @@ +Fixed DNS resolution on platforms that don't support ``socket.AI_ADDRCONFIG`` -- by :user:`maxbachmann`. diff --git a/aiohttp/resolver.py b/aiohttp/resolver.py index 9c744514fae..e14179cc8a2 100644 --- a/aiohttp/resolver.py +++ b/aiohttp/resolver.py @@ -18,6 +18,9 @@ _NUMERIC_SOCKET_FLAGS = socket.AI_NUMERICHOST | socket.AI_NUMERICSERV _NAME_SOCKET_FLAGS = socket.NI_NUMERICHOST | socket.NI_NUMERICSERV +_AI_ADDRCONFIG = socket.AI_ADDRCONFIG +if hasattr(socket, "AI_MASK"): + _AI_ADDRCONFIG &= socket.AI_MASK class ThreadedResolver(AbstractResolver): @@ -38,7 +41,7 @@ async def resolve( port, type=socket.SOCK_STREAM, family=family, - flags=socket.AI_ADDRCONFIG, + flags=_AI_ADDRCONFIG, ) hosts: List[ResolveResult] = [] @@ -105,7 +108,7 @@ async def resolve( port=port, type=socket.SOCK_STREAM, family=family, - flags=socket.AI_ADDRCONFIG, + flags=_AI_ADDRCONFIG, ) except aiodns.error.DNSError as exc: msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed" From 666523b0fc6d6b19b4bf39058f04148ebbf12002 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 16 Mar 2025 23:51:44 +0000 Subject: [PATCH 1267/1511] [PR #10542/e1d2d77c backport][3.12] only use `AI_ADDRCONFIG` when supported by getaddrinfo (#10579) --- CHANGES/10542.bugfix | 1 + aiohttp/resolver.py | 7 +++++-- 2 files changed, 6 insertions(+), 2 deletions(-) create mode 100644 CHANGES/10542.bugfix diff --git a/CHANGES/10542.bugfix b/CHANGES/10542.bugfix new file mode 100644 index 00000000000..fc3192308ad --- /dev/null +++ b/CHANGES/10542.bugfix @@ -0,0 +1 @@ +Fixed DNS resolution on platforms that don't support ``socket.AI_ADDRCONFIG`` -- by :user:`maxbachmann`. 
diff --git a/aiohttp/resolver.py b/aiohttp/resolver.py index 9c744514fae..e14179cc8a2 100644 --- a/aiohttp/resolver.py +++ b/aiohttp/resolver.py @@ -18,6 +18,9 @@ _NUMERIC_SOCKET_FLAGS = socket.AI_NUMERICHOST | socket.AI_NUMERICSERV _NAME_SOCKET_FLAGS = socket.NI_NUMERICHOST | socket.NI_NUMERICSERV +_AI_ADDRCONFIG = socket.AI_ADDRCONFIG +if hasattr(socket, "AI_MASK"): + _AI_ADDRCONFIG &= socket.AI_MASK class ThreadedResolver(AbstractResolver): @@ -38,7 +41,7 @@ async def resolve( port, type=socket.SOCK_STREAM, family=family, - flags=socket.AI_ADDRCONFIG, + flags=_AI_ADDRCONFIG, ) hosts: List[ResolveResult] = [] @@ -105,7 +108,7 @@ async def resolve( port=port, type=socket.SOCK_STREAM, family=family, - flags=socket.AI_ADDRCONFIG, + flags=_AI_ADDRCONFIG, ) except aiodns.error.DNSError as exc: msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed" From c6fedfa1b4c8e942ae7bf1cbad572c9a6a48052a Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 16 Mar 2025 13:56:29 -1000 Subject: [PATCH 1268/1511] [PR #10577/3c60cd22 backport][3.12] Parametrize leak tests (#10581) **This is a backport of PR #10577 as merged into master (3c60cd220a8a393e5ab7cff1d39087ca77639166).** Small cleanup to the leak tests https://github.com/aio-libs/aiohttp/pull/10569#discussion_r1997747632 Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_leaks.py | 47 ++++++++++++++++++++------------------------- 1 file changed, 21 insertions(+), 26 deletions(-) diff --git a/tests/test_leaks.py b/tests/test_leaks.py index f527ce18cae..07b506bdb99 100644 --- a/tests/test_leaks.py +++ b/tests/test_leaks.py @@ -9,34 +9,29 @@ @pytest.mark.skipif(IS_PYPY, reason="gc.DEBUG_LEAK not available on PyPy") -def test_client_response_does_not_leak_on_server_disconnected_error() -> None: - """Test that ClientResponse is collected after server disconnects. 
- - https://github.com/aio-libs/aiohttp/issues/10535 - """ - leak_test_script = pathlib.Path(__file__).parent.joinpath( - "isolated", "check_for_client_response_leak.py" - ) - - with subprocess.Popen( - [sys.executable, "-u", str(leak_test_script)], - stdout=subprocess.PIPE, - ) as proc: - assert proc.wait() == 0, "ClientResponse leaked" - - -@pytest.mark.skipif(IS_PYPY, reason="gc.DEBUG_LEAK not available on PyPy") -def test_request_does_not_leak_when_request_handler_raises() -> None: - """Test that the Request object is collected when the handler raises. - - https://github.com/aio-libs/aiohttp/issues/10548 - """ - leak_test_script = pathlib.Path(__file__).parent.joinpath( - "isolated", "check_for_request_leak.py" - ) +@pytest.mark.parametrize( + ("script", "message"), + [ + ( + # Test that ClientResponse is collected after server disconnects. + # https://github.com/aio-libs/aiohttp/issues/10535 + "check_for_client_response_leak.py", + "ClientResponse leaked", + ), + ( + # Test that Request object is collected when the handler raises. 
+ # https://github.com/aio-libs/aiohttp/issues/10548 + "check_for_request_leak.py", + "Request leaked", + ), + ], +) +def test_leak(script: str, message: str) -> None: + """Run isolated leak test script and check for leaks.""" + leak_test_script = pathlib.Path(__file__).parent.joinpath("isolated", script) with subprocess.Popen( [sys.executable, "-u", str(leak_test_script)], stdout=subprocess.PIPE, ) as proc: - assert proc.wait() == 0, "Request leaked" + assert proc.wait() == 0, message From d40e2270f96eae7ae4bfea9ced14062694ecfdc3 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 17 Mar 2025 00:01:29 +0000 Subject: [PATCH 1269/1511] [PR #10577/3c60cd22 backport][3.11] Parametrize leak tests (#10580) **This is a backport of PR #10577 as merged into master (3c60cd220a8a393e5ab7cff1d39087ca77639166).** Small cleanup to the leak tests https://github.com/aio-libs/aiohttp/pull/10569#discussion_r1997747632 Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_leaks.py | 47 ++++++++++++++++++++------------------------- 1 file changed, 21 insertions(+), 26 deletions(-) diff --git a/tests/test_leaks.py b/tests/test_leaks.py index f527ce18cae..07b506bdb99 100644 --- a/tests/test_leaks.py +++ b/tests/test_leaks.py @@ -9,34 +9,29 @@ @pytest.mark.skipif(IS_PYPY, reason="gc.DEBUG_LEAK not available on PyPy") -def test_client_response_does_not_leak_on_server_disconnected_error() -> None: - """Test that ClientResponse is collected after server disconnects. 
- - https://github.com/aio-libs/aiohttp/issues/10535 - """ - leak_test_script = pathlib.Path(__file__).parent.joinpath( - "isolated", "check_for_client_response_leak.py" - ) - - with subprocess.Popen( - [sys.executable, "-u", str(leak_test_script)], - stdout=subprocess.PIPE, - ) as proc: - assert proc.wait() == 0, "ClientResponse leaked" - - -@pytest.mark.skipif(IS_PYPY, reason="gc.DEBUG_LEAK not available on PyPy") -def test_request_does_not_leak_when_request_handler_raises() -> None: - """Test that the Request object is collected when the handler raises. - - https://github.com/aio-libs/aiohttp/issues/10548 - """ - leak_test_script = pathlib.Path(__file__).parent.joinpath( - "isolated", "check_for_request_leak.py" - ) +@pytest.mark.parametrize( + ("script", "message"), + [ + ( + # Test that ClientResponse is collected after server disconnects. + # https://github.com/aio-libs/aiohttp/issues/10535 + "check_for_client_response_leak.py", + "ClientResponse leaked", + ), + ( + # Test that Request object is collected when the handler raises. + # https://github.com/aio-libs/aiohttp/issues/10548 + "check_for_request_leak.py", + "Request leaked", + ), + ], +) +def test_leak(script: str, message: str) -> None: + """Run isolated leak test script and check for leaks.""" + leak_test_script = pathlib.Path(__file__).parent.joinpath("isolated", script) with subprocess.Popen( [sys.executable, "-u", str(leak_test_script)], stdout=subprocess.PIPE, ) as proc: - assert proc.wait() == 0, "Request leaked" + assert proc.wait() == 0, message From 1a48a62fbbd4920a154451b99d90a2589c441512 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sun, 16 Mar 2025 14:46:05 -1000 Subject: [PATCH 1270/1511] Release 3.11.14 (#10582) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit <img width="466" alt="Screenshot 2025-03-16 at 2 32 10 PM" src="https://github.com/user-attachments/assets/635511fd-6b63-49c7-bb1a-cf514545b604" /> --- CHANGES.rst | 77 +++++++++++++++++++++++++++++++++++++++ CHANGES/10529.bugfix.rst | 2 - CHANGES/10542.bugfix | 1 - CHANGES/10551.bugfix.rst | 1 - CHANGES/10552.misc.rst | 1 - CHANGES/10556.bugfix.rst | 3 -- CHANGES/10564.feature.rst | 1 - CHANGES/10569.bugfix.rst | 1 - aiohttp/__init__.py | 2 +- 9 files changed, 78 insertions(+), 11 deletions(-) delete mode 100644 CHANGES/10529.bugfix.rst delete mode 100644 CHANGES/10542.bugfix delete mode 100644 CHANGES/10551.bugfix.rst delete mode 100644 CHANGES/10552.misc.rst delete mode 100644 CHANGES/10556.bugfix.rst delete mode 100644 CHANGES/10564.feature.rst delete mode 100644 CHANGES/10569.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index 39c45196c26..3c8c12b8d95 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,83 @@ .. towncrier release notes start +3.11.14 (2025-03-16) +==================== + +Bug fixes +--------- + +- Fixed an issue where dns queries were delayed indefinitely when an exception occurred in a ``trace.send_dns_cache_miss`` + -- by :user:`logioniz`. + + + *Related issues and pull requests on GitHub:* + :issue:`10529`. + + + +- Fixed DNS resolution on platforms that don't support ``socket.AI_ADDRCONFIG`` -- by :user:`maxbachmann`. + + + *Related issues and pull requests on GitHub:* + :issue:`10542`. + + + +- The connector now raises :exc:`aiohttp.ClientConnectionError` instead of :exc:`OSError` when failing to explicitly close the socket after :py:meth:`asyncio.loop.create_connection` fails -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10551`. 
+ + + +- Break cyclic references at connection close when there was a traceback -- by :user:`bdraco`. + + Special thanks to :user:`availov` for reporting the issue. + + + *Related issues and pull requests on GitHub:* + :issue:`10556`. + + + +- Break cyclic references when there is an exception handling a request -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10569`. + + + + +Features +-------- + +- Improved logging on non-overlapping WebSocket client protocols to include the remote address -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10564`. + + + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of parsing content types by adding a cache in the same manner currently done with mime types -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10552`. + + + + +---- + + 3.11.13 (2025-02-24) ==================== diff --git a/CHANGES/10529.bugfix.rst b/CHANGES/10529.bugfix.rst deleted file mode 100644 index d6714ffd043..00000000000 --- a/CHANGES/10529.bugfix.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fixed an issue where dns queries were delayed indefinitely when an exception occurred in a ``trace.send_dns_cache_miss`` --- by :user:`logioniz`. diff --git a/CHANGES/10542.bugfix b/CHANGES/10542.bugfix deleted file mode 100644 index fc3192308ad..00000000000 --- a/CHANGES/10542.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fixed DNS resolution on platforms that don't support ``socket.AI_ADDRCONFIG`` -- by :user:`maxbachmann`. diff --git a/CHANGES/10551.bugfix.rst b/CHANGES/10551.bugfix.rst deleted file mode 100644 index 8f3eb24d6ae..00000000000 --- a/CHANGES/10551.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -The connector now raises :exc:`aiohttp.ClientConnectionError` instead of :exc:`OSError` when failing to explicitly close the socket after :py:meth:`asyncio.loop.create_connection` fails -- by :user:`bdraco`. 
diff --git a/CHANGES/10552.misc.rst b/CHANGES/10552.misc.rst deleted file mode 100644 index 6755cbf7396..00000000000 --- a/CHANGES/10552.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performance of parsing content types by adding a cache in the same manner currently done with mime types -- by :user:`bdraco`. diff --git a/CHANGES/10556.bugfix.rst b/CHANGES/10556.bugfix.rst deleted file mode 100644 index aad4eccbe48..00000000000 --- a/CHANGES/10556.bugfix.rst +++ /dev/null @@ -1,3 +0,0 @@ -Break cyclic references at connection close when there was a traceback -- by :user:`bdraco`. - -Special thanks to :user:`availov` for reporting the issue. diff --git a/CHANGES/10564.feature.rst b/CHANGES/10564.feature.rst deleted file mode 100644 index 24e2ecad76d..00000000000 --- a/CHANGES/10564.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Improved logging on non-overlapping WebSocket client protocols to include the remote address -- by :user:`bdraco`. diff --git a/CHANGES/10569.bugfix.rst b/CHANGES/10569.bugfix.rst deleted file mode 100644 index 7d817e867d4..00000000000 --- a/CHANGES/10569.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Break cyclic references when there is an exception handling a request -- by :user:`bdraco`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 96eced5960d..0628433d35b 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.14.dev0" +__version__ = "3.11.14" from typing import TYPE_CHECKING, Tuple From 9920bc248ed023fbfd485f1851b653f1e47f80dd Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sun, 16 Mar 2025 17:07:45 -1000 Subject: [PATCH 1271/1511] Increment version to 3.11.15.dev0 (#10584) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 0628433d35b..4ff7bbbc759 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.14" +__version__ = "3.11.15.dev0" from typing import TYPE_CHECKING, Tuple From e5e280b44aba7029a40b887d225fc928098c70de Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 Mar 2025 11:51:28 +0000 Subject: [PATCH 1272/1511] Bump coverage from 7.6.12 to 7.7.0 (#10589) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.6.12 to 7.7.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst">coverage's changelog</a>.</em></p> <blockquote> <h2>Version 7.7.0 — 2025-03-16</h2> <ul> <li> <p>The Coverage object has a new method, :meth:<code>.Coverage.branch_stats</code> for getting simple branch information for a module. Closes <code>issue 1888</code>_.</p> </li> <li> <p>The :class:<code>Coverage constructor<.Coverage></code> now has a <code>plugins</code> parameter for passing in plugin objects directly, thanks to <code>Alex Gaynor <pull 1919_></code>_.</p> </li> <li> <p>Many constant tests in if statements are now recognized as being optimized away. For example, previously <code>if 13:</code> would have been considered a branch with one path not taken. Now it is understood as always true and no coverage is missing.</p> </li> <li> <p>The experimental sys.monitoring support now works for branch coverage if you are using Python 3.14.0 alpha 6 or newer. This should reduce the overhead coverage.py imposes on your test suite. 
Set the environment variable <code>COVERAGE_CORE=sysmon</code> to try it out.</p> </li> <li> <p>Confirmed support for PyPy 3.11. Thanks Michał Górny.</p> </li> </ul> <p>.. _issue 1888: <a href="https://redirect.github.com/nedbat/coveragepy/issues/1888">nedbat/coveragepy#1888</a> .. _pull 1919: <a href="https://redirect.github.com/nedbat/coveragepy/pull/1919">nedbat/coveragepy#1919</a></p> <p>.. _changes_7-6-12:</p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/nedbat/coveragepy/commit/61dcf7188c18db699ebf542f362f00a5d0481281"><code>61dcf71</code></a> docs: sample HTML for 7.7.0</li> <li><a href="https://github.com/nedbat/coveragepy/commit/35a31c5c2d642280e4f247469779a76ee5d03709"><code>35a31c5</code></a> docs: prep for 7.7.0</li> <li><a href="https://github.com/nedbat/coveragepy/commit/8f6e00641e607307d77ae16cbe3cd5afb555c58c"><code>8f6e006</code></a> docs: edit the changelog</li> <li><a href="https://github.com/nedbat/coveragepy/commit/33f12dfa365f3faa349d844e603b0432ba3476d3"><code>33f12df</code></a> feat: Coverage.branch_stats() <a href="https://redirect.github.com/nedbat/coveragepy/issues/1888">#1888</a></li> <li><a href="https://github.com/nedbat/coveragepy/commit/c4919cbf84efef68c1e9572d8438204e64b32d4c"><code>c4919cb</code></a> lint: somehow these snuck through</li> <li><a href="https://github.com/nedbat/coveragepy/commit/7e9f2f20c81a6ab49a19c42b1cd3c5ba4282d223"><code>7e9f2f2</code></a> perf(sysmon): silly mistake</li> <li><a href="https://github.com/nedbat/coveragepy/commit/1e99d2881b1b60af1b889ea364e1ede018401740"><code>1e99d28</code></a> perf(sysmon): improve speed</li> <li><a href="https://github.com/nedbat/coveragepy/commit/c9908d731187f515093105f76b5b4cc39dbff9ba"><code>c9908d7</code></a> test: benchmark improvements</li> <li><a href="https://github.com/nedbat/coveragepy/commit/0973f44cc4a73bab859af74a3dd42078b0b2cc85"><code>0973f44</code></a> chore: bump the action-dependencies group with 2 updates 
(<a href="https://redirect.github.com/nedbat/coveragepy/issues/1935">#1935</a>)</li> <li><a href="https://github.com/nedbat/coveragepy/commit/824b3ba3efbbe07f188ce13e9a8143604747fece"><code>824b3ba</code></a> style: fix one change from updated pylint</li> <li>Additional commits viewable in <a href="https://github.com/nedbat/coveragepy/compare/7.6.12...7.7.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=coverage&package-manager=pip&previous-version=7.6.12&new-version=7.7.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 4f88e08420a..5dcd2e87602 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -54,7 +54,7 @@ click==8.1.8 # slotscheck # towncrier # wait-for-it -coverage==7.6.12 +coverage==7.7.0 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/dev.txt b/requirements/dev.txt index 1a70923b154..fc0ffdab494 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -54,7 +54,7 @@ click==8.1.8 # slotscheck # towncrier # wait-for-it -coverage==7.6.12 +coverage==7.7.0 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/test.txt b/requirements/test.txt index fb20a21e251..6d053ed54cb 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -27,7 +27,7 @@ cffi==1.17.1 # pytest-codspeed click==8.1.8 # via wait-for-it -coverage==7.6.12 +coverage==7.7.0 # via # -r requirements/test.in # pytest-cov From 2dc0d9e225da3f2a7bbe7190b82c0b2d1477b1bb Mon Sep 17 00:00:00 
2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 Mar 2025 09:54:22 -1000 Subject: [PATCH 1273/1511] Bump pypa/cibuildwheel from 2.23.0 to 2.23.1 (#10586) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.23.0 to 2.23.1. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pypa/cibuildwheel/releases">pypa/cibuildwheel's releases</a>.</em></p> <blockquote> <h2>v2.23.1</h2> <ul> <li>⚠️ Added warnings when the shorthand values <code>manylinux1</code>, <code>manylinux2010</code>, <code>manylinux_2_24</code>, and <code>musllinux_1_1</code> are used to specify the images in linux builds. The shorthand to these (unmaintainted) images will be removed in v3.0. If you want to keep using these images, explicitly opt-in using the full image URL, which can be found in <a href="https://github.com/pypa/cibuildwheel/blob/v2.23.1/cibuildwheel/resources/pinned_docker_images.cfg">this file</a>. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2312">#2312</a>)</li> <li>🛠 Dependency updates, including a manylinux update which fixes an <a href="https://redirect.github.com/pypa/cibuildwheel/issues/2303">issue with rustup</a>. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2315">#2315</a>)</li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/cibuildwheel/blob/v2.23.1/docs/changelog.md">pypa/cibuildwheel's changelog</a>.</em></p> <blockquote> <h3>v2.23.1</h3> <p><em>15 March 2025</em></p> <ul> <li>⚠️ Added warnings when the shorthand values <code>manylinux1</code>, <code>manylinux2010</code>, <code>manylinux_2_24</code>, and <code>musllinux_1_1</code> are used to specify the images in linux builds. The shorthand to these (unmaintainted) images will be removed in v3.0. 
If you want to keep using these images, explicitly opt-in using the full image URL, which can be found in <a href="https://github.com/pypa/cibuildwheel/blob/v2.23.1/cibuildwheel/resources/pinned_docker_images.cfg">this file</a>. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2312">#2312</a>)</li> <li>🛠 Dependency updates, including a manylinux update which fixes an <a href="https://redirect.github.com/pypa/cibuildwheel/issues/2303">issue with rustup</a>. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2315">#2315</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/cibuildwheel/commit/42728e866bbc80d544a70825bd9990b9a26f1a50"><code>42728e8</code></a> Bump version: v2.23.1</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/6e1527b153c481c51f987a5e3a1bed216b16a260"><code>6e1527b</code></a> Fix unit test when other warnings are present</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/c25fe603855d4d4dbbb013375765345f346aece0"><code>c25fe60</code></a> fix: image deprecation warning (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2314">#2314</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/a880bf5105e70f0add65d840db82a7cc6c1555e4"><code>a880bf5</code></a> fix: warn on deprecated images being set (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2312">#2312</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/c087d85b69012f624e0321468b40345d921c72e5"><code>c087d85</code></a> Update dependencies</li> <li>See full diff in <a href="https://github.com/pypa/cibuildwheel/compare/v2.23.0...v2.23.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pypa/cibuildwheel&package-manager=github_actions&previous-version=2.23.0&new-version=2.23.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index cade6835b92..a61a67137c2 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -414,7 +414,7 @@ jobs: run: | make cythonize - name: Build wheels - uses: pypa/cibuildwheel@v2.23.0 + uses: pypa/cibuildwheel@v2.23.1 env: CIBW_SKIP: pp* ${{ matrix.musl == 'musllinux' && '*manylinux*' || '*musllinux*' }} CIBW_ARCHS_MACOS: x86_64 arm64 universal2 From 87e9248f362176250bda3ebb1932cda712ccfa2c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 18 Mar 2025 10:51:19 +0000 Subject: [PATCH 1274/1511] Bump setuptools from 76.0.0 to 76.1.0 (#10590) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [setuptools](https://github.com/pypa/setuptools) from 76.0.0 to 76.1.0. 
<details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/setuptools/blob/main/NEWS.rst">setuptools's changelog</a>.</em></p> <blockquote> <h1>v76.1.0</h1> <h2>Features</h2> <ul> <li>In setuptools.msvc.EnvironmentInfo, now honor the correct paths when on an ARM host. (<a href="https://redirect.github.com/pypa/setuptools/issues/4786">#4786</a>)</li> </ul> <h2>Bugfixes</h2> <ul> <li>Restored implicit distutils.ccompiler import for g-ir-scanner. (<a href="https://redirect.github.com/pypa/setuptools/issues/4871">#4871</a>)</li> <li>Restore <code>distutils.ccompiler.compiler_class</code> -- by :user:<code>Avasam</code> (<a href="https://redirect.github.com/pypa/setuptools/issues/4876">#4876</a>)</li> </ul> <h1>v75.3.2</h1> <ul> <li>Fixed version error in changelog.</li> </ul> <h1>v75.3.1</h1> <h2>Bugfixes</h2> <ul> <li>Fix wheel file naming to follow binary distribution specification -- by :user:<code>di</code> (<a href="https://redirect.github.com/pypa/setuptools/issues/4877">#4877</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/setuptools/commit/07e96b107adb799a754122e49701c73dc34fbabe"><code>07e96b1</code></a> Bump version: 76.0.0 → 76.1.0</li> <li><a href="https://github.com/pypa/setuptools/commit/3018692baac909355c46f9e8724845c72f6ca7e2"><code>3018692</code></a> Fix reference in changelog.</li> <li><a href="https://github.com/pypa/setuptools/commit/aeefe34600bec7f156ed105ace57d145a6ea3879"><code>aeefe34</code></a> Merge <a href="https://github.com/pypa/distutils">https://github.com/pypa/distutils</a></li> <li><a href="https://github.com/pypa/setuptools/commit/6c0427b8e8870ab33872b15ae12ad0685061b2c0"><code>6c0427b</code></a> Merge pull request <a href="https://redirect.github.com/pypa/distutils/issues/337">pypa/distutils#337</a> from Avasam/add-back-compiler.compiler_...</li> <li><a 
href="https://github.com/pypa/setuptools/commit/408274b81b15e3e10a99a3067d3dc00753a06124"><code>408274b</code></a> Restore missing public symbols after compilers move</li> <li><a href="https://github.com/pypa/setuptools/commit/b6a539a7620880b9ea41102983e1ab6a2ca2ef29"><code>b6a539a</code></a> Restore implicit expectation that importing unixccompiler makes distutils.cco...</li> <li><a href="https://github.com/pypa/setuptools/commit/a36b7bac0a8ba62bf90a3d71a992ba7e3839583c"><code>a36b7ba</code></a> Merge pull request <a href="https://redirect.github.com/pypa/setuptools/issues/4786">#4786</a> from swt2c/fix_msvc_host_dir_arm64</li> <li><a href="https://github.com/pypa/setuptools/commit/5b66977169135a986f87d96a7df49a2378192203"><code>5b66977</code></a> Add news fragment.</li> <li><a href="https://github.com/pypa/setuptools/commit/3218d666e49245b0878db8b19ba20d8cd72d6424"><code>3218d66</code></a> Calculate host_id in one expression.</li> <li><a href="https://github.com/pypa/setuptools/commit/d69bf04f749a310bef197df3237901b924c8ffd7"><code>d69bf04</code></a> Merge failing test into fix_msvc_host_dir_arm64.</li> <li>Additional commits viewable in <a href="https://github.com/pypa/setuptools/compare/v76.0.0...v76.1.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=setuptools&package-manager=pip&previous-version=76.0.0&new-version=76.1.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 5dcd2e87602..d4d993c66c3 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -288,7 +288,7 @@ yarl==1.18.3 # The following packages are 
considered to be unsafe in a requirements file: pip==25.0.1 # via pip-tools -setuptools==76.0.0 +setuptools==76.1.0 # via # incremental # pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index fc0ffdab494..90f21ec5529 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -279,7 +279,7 @@ yarl==1.18.3 # The following packages are considered to be unsafe in a requirements file: pip==25.0.1 # via pip-tools -setuptools==76.0.0 +setuptools==76.1.0 # via # incremental # pip-tools diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index ff527ae486b..9d69778d2d0 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -76,5 +76,5 @@ urllib3==2.3.0 # via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==76.0.0 +setuptools==76.1.0 # via incremental diff --git a/requirements/doc.txt b/requirements/doc.txt index f00d523f092..ff57762abf7 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -69,5 +69,5 @@ urllib3==2.3.0 # via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==76.0.0 +setuptools==76.1.0 # via incremental From 310a7dd7e9e1e68bcccfa764494d315bd24340d4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 18 Mar 2025 11:15:06 +0000 Subject: [PATCH 1275/1511] Bump multidict from 6.1.0 to 6.2.0 (#10593) Bumps [multidict](https://github.com/aio-libs/multidict) from 6.1.0 to 6.2.0. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/multidict/releases">multidict's releases</a>.</em></p> <blockquote> <h2>6.2.0</h2> <h2>Bug fixes</h2> <ul> <li> <p>Fixed <code>in</code> checks throwing an exception instead of returning :data:<code>False</code> when testing non-strings.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/multidict/issues/1045">#1045</a>.</p> </li> <li> <p>Fix a leak when the last accessed module in <code>PyInit__multidict</code> init is not released.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/multidict/issues/1061">#1061</a>.</p> </li> </ul> <h2>Features</h2> <ul> <li> <p>Implemented support for the free-threaded build of CPython 3.13 -- by :user:<code>lysnikolaou</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/multidict/issues/1015">#1015</a>.</p> </li> </ul> <h2>Packaging updates and notes for downstreams</h2> <ul> <li> <p>Started publishing wheels made for the free-threaded build of CPython 3.13 -- by :user:<code>lysnikolaou</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/multidict/issues/1015">#1015</a>.</p> </li> </ul> <h2>Miscellaneous internal changes</h2> <ul> <li> <p>Used stricter typing across the code base, resulting in improved typing accuracy across multidict classes. Funded by an <code>NLnet</code> grant.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/multidict/issues/1046">#1046</a>.</p> </li> </ul> <hr /> <h1>6.1.0 (2024-09-09)</h1> <p>Bug fixes</p> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/multidict/blob/master/CHANGES.rst">multidict's changelog</a>.</em></p> <blockquote> <h1>6.2.0</h1> <p><em>(2025-03-17)</em></p> <h2>Bug fixes</h2> <ul> <li> <p>Fixed <code>in</code> checks throwing an exception instead of returning :data:<code>False</code> when testing non-strings.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1045</code>.</p> </li> <li> <p>Fix a leak when the last accessed module in <code>PyInit__multidict</code> init is not released.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1061</code>.</p> </li> </ul> <h2>Features</h2> <ul> <li> <p>Implemented support for the free-threaded build of CPython 3.13 -- by :user:<code>lysnikolaou</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1015</code>.</p> </li> </ul> <h2>Packaging updates and notes for downstreams</h2> <ul> <li> <p>Started publishing wheels made for the free-threaded build of CPython 3.13 -- by :user:<code>lysnikolaou</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1015</code>.</p> </li> </ul> <h2>Miscellaneous internal changes</h2> <ul> <li> <p>Used stricter typing across the code base, resulting in improved typing accuracy across multidict classes. 
Funded by an <code>NLnet</code> grant.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1046</code>.</p> </li> </ul> <hr /> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/multidict/commit/af07d5b2b18bb30e041983db2bea1fcb3e1c4b79"><code>af07d5b</code></a> Synchronize towncrier settings (<a href="https://redirect.github.com/aio-libs/multidict/issues/1067">#1067</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/8314bdc8ce55ba745fe4640be17ef145639cc9f4"><code>8314bdc</code></a> Tune CHANGES markup (<a href="https://redirect.github.com/aio-libs/multidict/issues/1066">#1066</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/09693c12e8028c94b5ac7e54fd1b33bbc41a0826"><code>09693c1</code></a> Fix towncrier markup (<a href="https://redirect.github.com/aio-libs/multidict/issues/1065">#1065</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/0e8d24aa4862d7fc34d1814ba307bd274b3ec0e8"><code>0e8d24a</code></a> Increase timeout for qemu based builds (<a href="https://redirect.github.com/aio-libs/multidict/issues/1064">#1064</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/a9bb3a0fea4baf3bf9f10f0e9514be1dbd46acb7"><code>a9bb3a0</code></a> Release 6.2.0 (<a href="https://redirect.github.com/aio-libs/multidict/issues/1062">#1062</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/f893493ca25b21fed156bce6566ddfeb537a75ba"><code>f893493</code></a> Fix a leak when the last accessed module in multidict init is not released (#...</li> <li><a href="https://github.com/aio-libs/multidict/commit/14cf7662d7ab4edae38cc7227bfccadae8ecb6c1"><code>14cf766</code></a> Bump pypa/cibuildwheel from 2.22.0 to 2.23.0 (<a href="https://redirect.github.com/aio-libs/multidict/issues/1059">#1059</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/1256899739190ae7ed53a43912a7a1862ed22115"><code>1256899</code></a> Bump pytest 
from 8.3.4 to 8.3.5 (<a href="https://redirect.github.com/aio-libs/multidict/issues/1058">#1058</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/c79c379ed1127f6f2d8ff184fbd7774eea360951"><code>c79c379</code></a> Bump sphinx from 8.2.1 to 8.2.3 (<a href="https://redirect.github.com/aio-libs/multidict/issues/1057">#1057</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/4cb2f08584909a0b30d770aeee805eae399d40ef"><code>4cb2f08</code></a> Relax test_leak to run reliable against free-threaded python (<a href="https://redirect.github.com/aio-libs/multidict/issues/1060">#1060</a>)</li> <li>Additional commits viewable in <a href="https://github.com/aio-libs/multidict/compare/v6.1.0...v6.2.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=multidict&package-manager=pip&previous-version=6.1.0&new-version=6.2.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/cython.txt | 2 +- requirements/dev.txt | 2 +- requirements/multidict.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index db4712426a9..7b5bc1ea8bd 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -26,7 +26,7 @@ gunicorn==23.0.0 # via -r requirements/base.in idna==3.4 # via yarl -multidict==6.1.0 +multidict==6.2.0 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/constraints.txt b/requirements/constraints.txt index d4d993c66c3..5a836f3542d 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -111,7 +111,7 @@ markupsafe==3.0.2 # via jinja2 mdurl==0.1.2 # via markdown-it-py -multidict==6.1.0 +multidict==6.2.0 # via # -r requirements/multidict.in # -r requirements/runtime-deps.in diff --git a/requirements/cython.txt b/requirements/cython.txt index b34cde941f8..fc290ab6688 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -6,7 +6,7 @@ # 
cython==3.0.12 # via -r requirements/cython.in -multidict==6.1.0 +multidict==6.2.0 # via -r requirements/multidict.in typing-extensions==4.12.2 # via multidict diff --git a/requirements/dev.txt b/requirements/dev.txt index 90f21ec5529..caaae25cdbb 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -109,7 +109,7 @@ markupsafe==3.0.2 # via jinja2 mdurl==0.1.2 # via markdown-it-py -multidict==6.1.0 +multidict==6.2.0 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/multidict.txt b/requirements/multidict.txt index e9f433aa07d..be4d86595fc 100644 --- a/requirements/multidict.txt +++ b/requirements/multidict.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/multidict.txt --resolver=backtracking --strip-extras requirements/multidict.in # -multidict==6.1.0 +multidict==6.2.0 # via -r requirements/multidict.in typing-extensions==4.12.2 # via multidict diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index c3863f8e5e3..1b515fc1178 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -24,7 +24,7 @@ frozenlist==1.5.0 # aiosignal idna==3.4 # via yarl -multidict==6.1.0 +multidict==6.2.0 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/test.txt b/requirements/test.txt index 6d053ed54cb..e6f0277889e 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -57,7 +57,7 @@ markdown-it-py==3.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -multidict==6.1.0 +multidict==6.2.0 # via # -r requirements/runtime-deps.in # yarl From 0c7a9c73f23bcbe09f709b0447254f5ca7a0631f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 18 Mar 2025 11:22:54 +0000 Subject: [PATCH 1276/1511] Bump blockbuster from 1.5.23 to 1.5.24 (#10594) Bumps [blockbuster](https://github.com/cbornet/blockbuster) from 1.5.23 to 1.5.24. 
<details> <summary>Commits</summary> <ul> <li>See full diff in <a href="https://github.com/cbornet/blockbuster/commits">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=blockbuster&package-manager=pip&previous-version=1.5.23&new-version=1.5.24)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 5a836f3542d..68de2c8a86d 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -26,7 +26,7 @@ attrs==25.3.0 # via -r requirements/runtime-deps.in babel==2.17.0 # via sphinx -blockbuster==1.5.23 +blockbuster==1.5.24 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index caaae25cdbb..15458d10b65 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -26,7 +26,7 @@ attrs==25.3.0 # via -r requirements/runtime-deps.in babel==2.17.0 # via sphinx -blockbuster==1.5.23 +blockbuster==1.5.24 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 964750637c3..e1562ae4221 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -10,7 +10,7 @@ annotated-types==0.7.0 # via pydantic async-timeout==5.0.1 # via valkey 
-blockbuster==1.5.23 +blockbuster==1.5.24 # via -r requirements/lint.in cffi==1.17.1 # via diff --git a/requirements/test.txt b/requirements/test.txt index e6f0277889e..8962714e994 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -16,7 +16,7 @@ async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in attrs==25.3.0 # via -r requirements/runtime-deps.in -blockbuster==1.5.23 +blockbuster==1.5.24 # via -r requirements/test.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in From 84bb351974f1ba8e35b4c430b418f9d4e4c1d2e8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 19 Mar 2025 11:12:13 +0000 Subject: [PATCH 1277/1511] Bump pre-commit from 4.1.0 to 4.2.0 (#10598) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pre-commit](https://github.com/pre-commit/pre-commit) from 4.1.0 to 4.2.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pre-commit/pre-commit/releases">pre-commit's releases</a>.</em></p> <blockquote> <h2>pre-commit v4.2.0</h2> <h3>Features</h3> <ul> <li>For <code>language: python</code> first attempt a versioned python executable for the default language version before consulting a potentially unversioned <code>sys.executable</code>. 
<ul> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3430">#3430</a> PR by <a href="https://github.com/asottile"><code>@​asottile</code></a>.</li> </ul> </li> </ul> <h3>Fixes</h3> <ul> <li>Handle error during conflict detection when a file is named "HEAD" <ul> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3425">#3425</a> PR by <a href="https://github.com/tusharsadhwani"><code>@​tusharsadhwani</code></a>.</li> </ul> </li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pre-commit/pre-commit/blob/main/CHANGELOG.md">pre-commit's changelog</a>.</em></p> <blockquote> <h1>4.2.0 - 2025-03-18</h1> <h3>Features</h3> <ul> <li>For <code>language: python</code> first attempt a versioned python executable for the default language version before consulting a potentially unversioned <code>sys.executable</code>. <ul> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3430">#3430</a> PR by <a href="https://github.com/asottile"><code>@​asottile</code></a>.</li> </ul> </li> </ul> <h3>Fixes</h3> <ul> <li>Handle error during conflict detection when a file is named "HEAD" <ul> <li><a href="https://redirect.github.com/pre-commit/pre-commit/issues/3425">#3425</a> PR by <a href="https://github.com/tusharsadhwani"><code>@​tusharsadhwani</code></a>.</li> </ul> </li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pre-commit/pre-commit/commit/aa48766b888990e7b118d12cf757109d96e65a7e"><code>aa48766</code></a> v4.2.0</li> <li><a href="https://github.com/pre-commit/pre-commit/commit/bf6f11dc6ce59f2f12e5d02a6449ea2449aa64c4"><code>bf6f11d</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/pre-commit/issues/3430">#3430</a> from pre-commit/preferential-sys-impl</li> <li><a 
href="https://github.com/pre-commit/pre-commit/commit/3e8d0f5e1c449381272b80241140e985631f9912"><code>3e8d0f5</code></a> adjust python default_language_version to prefer versioned exe</li> <li><a href="https://github.com/pre-commit/pre-commit/commit/ff7256cedf8c78b326f4503373d142a5a9827e90"><code>ff7256c</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/pre-commit/issues/3425">#3425</a> from tusharsadhwani/ambiguous-ref</li> <li><a href="https://github.com/pre-commit/pre-commit/commit/b7eb412c798424a94ca83c72eed6f97271545dc4"><code>b7eb412</code></a> fix: crash on ambiguous ref 'HEAD'</li> <li><a href="https://github.com/pre-commit/pre-commit/commit/7b88c63ae691cb243c3137bce8fb870523e0a884"><code>7b88c63</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/pre-commit/issues/3404">#3404</a> from pre-commit/pre-commit-ci-update-config</li> <li><a href="https://github.com/pre-commit/pre-commit/commit/94b97e28f7cc7d9bcb536d7a3cf7ef6311e076fd"><code>94b97e2</code></a> [pre-commit.ci] pre-commit autoupdate</li> <li><a href="https://github.com/pre-commit/pre-commit/commit/2f93b804849e9237561417fbca29cb8d8ea4c905"><code>2f93b80</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/pre-commit/issues/3401">#3401</a> from pre-commit/pre-commit-ci-update-config</li> <li><a href="https://github.com/pre-commit/pre-commit/commit/4f90a1e88a80dd460f36e21d774d06bf0e73921b"><code>4f90a1e</code></a> [pre-commit.ci] pre-commit autoupdate</li> <li><a href="https://github.com/pre-commit/pre-commit/commit/aba1ce04e70162ca48b12f809ceffb253b788fe6"><code>aba1ce0</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/pre-commit/issues/3396">#3396</a> from pre-commit/all-repos_autofix_all-repos-sed</li> <li>Additional commits viewable in <a href="https://github.com/pre-commit/pre-commit/compare/v4.1.0...v4.2.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pre-commit&package-manager=pip&previous-version=4.1.0&new-version=4.2.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 68de2c8a86d..7e746233c35 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -136,7 +136,7 @@ platformdirs==4.3.6 # via virtualenv pluggy==1.5.0 # via pytest -pre-commit==4.1.0 +pre-commit==4.2.0 # via -r requirements/lint.in propcache==0.3.0 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 15458d10b65..dbc2cfce58e 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -133,7 +133,7 @@ platformdirs==4.3.6 # via virtualenv pluggy==1.5.0 # via pytest -pre-commit==4.1.0 +pre-commit==4.2.0 # via -r requirements/lint.in propcache==0.3.0 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index e1562ae4221..40f1aa23193 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -55,7 +55,7 @@ platformdirs==4.3.6 # via virtualenv pluggy==1.5.0 # via pytest -pre-commit==4.1.0 +pre-commit==4.2.0 # via -r requirements/lint.in pycares==4.5.0 # via 
aiodns From 71d903d5934378c001b9b3cdf3484b93785556ae Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 20 Mar 2025 10:34:32 +0000 Subject: [PATCH 1278/1511] Bump actions/cache from 4.2.2 to 4.2.3 (#10604) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/cache](https://github.com/actions/cache) from 4.2.2 to 4.2.3. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/actions/cache/releases">actions/cache's releases</a>.</em></p> <blockquote> <h2>v4.2.3</h2> <h2>What's Changed</h2> <ul> <li>Update to use <code>@​actions/cache</code> 4.0.3 package & prepare for new release by <a href="https://github.com/salmanmkc"><code>@​salmanmkc</code></a> in <a href="https://redirect.github.com/actions/cache/pull/1577">actions/cache#1577</a> (SAS tokens for cache entries are now masked in debug logs)</li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/salmanmkc"><code>@​salmanmkc</code></a> made their first contribution in <a href="https://redirect.github.com/actions/cache/pull/1577">actions/cache#1577</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/actions/cache/compare/v4.2.2...v4.2.3">https://github.com/actions/cache/compare/v4.2.2...v4.2.3</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/actions/cache/blob/main/RELEASES.md">actions/cache's changelog</a>.</em></p> <blockquote> <h3>4.2.3</h3> <ul> <li>Bump <code>@actions/cache</code> to v4.0.3 (obfuscates SAS token in debug logs for cache entries)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/actions/cache/commit/5a3ec84eff668545956fd18022155c47e93e2684"><code>5a3ec84</code></a> Merge pull request <a href="https://redirect.github.com/actions/cache/issues/1577">#1577</a> from 
salmanmkc/salmanmkc/4-test</li> <li><a href="https://github.com/actions/cache/commit/7de21022a7b6824c106a9847befcbd8154b45b6a"><code>7de2102</code></a> Update releases.md</li> <li><a href="https://github.com/actions/cache/commit/76d40dd347779762a1c829bbeeda5da4d81ca8c1"><code>76d40dd</code></a> Update to use the latest version of the cache package to obfuscate the SAS</li> <li><a href="https://github.com/actions/cache/commit/76dd5eb692f606c28d4b7a4ea7cfdffc926ba06a"><code>76dd5eb</code></a> update cache with main</li> <li><a href="https://github.com/actions/cache/commit/8c80c27c5e4498d5675b05fb1eff96a56c593b06"><code>8c80c27</code></a> new package</li> <li><a href="https://github.com/actions/cache/commit/45cfd0e7fffd1869ea4d5bfb54a464d825c1f742"><code>45cfd0e</code></a> updates</li> <li><a href="https://github.com/actions/cache/commit/edd449b9cf39c2a20dc7c3d505ff6dc193c48a02"><code>edd449b</code></a> updated cache with latest changes</li> <li><a href="https://github.com/actions/cache/commit/0576707e373f92196b81695442ed3f80c347f9c7"><code>0576707</code></a> latest test before pr</li> <li><a href="https://github.com/actions/cache/commit/3105dc9754dd9cd935ffcf45c091ed2cadbf42b9"><code>3105dc9</code></a> update</li> <li><a href="https://github.com/actions/cache/commit/9450d42d15022999ad2fa60a8b91f01fc92a0563"><code>9450d42</code></a> mask</li> <li>Additional commits viewable in <a href="https://github.com/actions/cache/compare/v4.2.2...v4.2.3">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/cache&package-manager=github_actions&previous-version=4.2.2&new-version=4.2.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. 
You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index a61a67137c2..67c98e21878 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -47,7 +47,7 @@ jobs: with: python-version: 3.11 - name: Cache PyPI - uses: 
actions/cache@v4.2.2 + uses: actions/cache@v4.2.3 with: key: pip-lint-${{ hashFiles('requirements/*.txt') }} path: ~/.cache/pip @@ -99,7 +99,7 @@ jobs: with: submodules: true - name: Cache llhttp generated files - uses: actions/cache@v4.2.2 + uses: actions/cache@v4.2.3 id: cache with: key: llhttp-${{ hashFiles('vendor/llhttp/package*.json', 'vendor/llhttp/src/**/*') }} @@ -163,7 +163,7 @@ jobs: echo "dir=$(pip cache dir)" >> "${GITHUB_OUTPUT}" shell: bash - name: Cache PyPI - uses: actions/cache@v4.2.2 + uses: actions/cache@v4.2.3 with: key: pip-ci-${{ runner.os }}-${{ matrix.pyver }}-${{ matrix.no-extensions }}-${{ hashFiles('requirements/*.txt') }} path: ${{ steps.pip-cache.outputs.dir }} From e7a7188b21665d70975188c93510856fe1cfa466 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 20 Mar 2025 11:25:11 +0000 Subject: [PATCH 1279/1511] Bump setuptools from 76.1.0 to 77.0.1 (#10607) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [setuptools](https://github.com/pypa/setuptools) from 76.1.0 to 77.0.1. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/setuptools/blob/main/NEWS.rst">setuptools's changelog</a>.</em></p> <blockquote> <h1>v77.0.1</h1> <h2>Bugfixes</h2> <ul> <li>Manually fix news fragment entries causing CI to crash when building docs. (<a href="https://redirect.github.com/pypa/setuptools/issues/4891">#4891</a>)</li> </ul> <h1>v77.0.0</h1> <h2>Features</h2> <ul> <li>Added initial support for license expression (PEP :pep:<code>639 <639#add-license-expression-field></code>). -- by :user:<code>cdce8p</code> (<a href="https://redirect.github.com/pypa/setuptools/issues/4706">#4706</a>)</li> <li>Store <code>License-File</code>\s in <code>.dist-info/licenses</code> subfolder and added support for recursive globs for <code>license_files</code> (PEP :pep:<code>639 <639#add-license-expression-field></code>). 
-- by :user:<code>cdce8p</code> (<a href="https://redirect.github.com/pypa/setuptools/issues/4728">#4728</a>)</li> <li>Bump core metadata version to <code>2.4</code>. -- by :user:<code>cdce8p</code> (<a href="https://redirect.github.com/pypa/setuptools/issues/4830">#4830</a>)</li> <li>Updated vendored copy of <code>wheel</code> to <code>v0.45.1</code>. (<a href="https://redirect.github.com/pypa/setuptools/issues/4869">#4869</a>)</li> </ul> <h2>Deprecations and Removals</h2> <ul> <li>Added initial implementation of :pep:<code>639</code>. Users relying on pre- :pep:<code>639</code> implementation details (like precise license file paths inside <code>dist-info</code> directory) may need to adjust their code base to avoid problems. Deprecations and stronger validation were also introduced (<a href="https://redirect.github.com/pypa/setuptools/issues/4829">#4829</a>).</li> <li>Added exception (or warning) when deprecated license classifiers are used, according to PEP :pep:<code>639 <639#deprecate-license-classifiers></code>. (<a href="https://redirect.github.com/pypa/setuptools/issues/4833">#4833</a>)</li> <li>Deprecated <code>tools.setuptools.license-files</code> in favor of <code>project.license-files</code> and added exception if <code>project.license-files</code> and <code>tools.setuptools.license-files</code> are used together. -- by :user:<code>cdce8p</code> (<a href="https://redirect.github.com/pypa/setuptools/issues/4837">#4837</a>)</li> <li>Deprecated <code>project.license</code> as a TOML table in <code>pyproject.toml</code>. Users are expected to move towards using <code>project.license-files</code> and/or SPDX expressions (as strings) in <code>pyproject.license</code>. See PEP :pep:<code>639 <639#deprecate-license-key-table-subkeys></code>. (<a href="https://redirect.github.com/pypa/setuptools/issues/4840">#4840</a>)</li> <li>Added simple validation for given glob patterns in <code>license-files</code>: a warning will be generated if no file is matched. 
Invalid glob patterns can raise an exception. -- thanks :user:<code>cdce8p</code> for contributions. (<a href="https://redirect.github.com/pypa/setuptools/issues/4838">#4838</a>)</li> </ul> <h2>Misc</h2> <ul> <li><a href="https://redirect.github.com/pypa/setuptools/issues/4734">#4734</a></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/setuptools/commit/f57746186b1476fac7701490f3e8c23bd0eea491"><code>f577461</code></a> Bump version: 77.0.0 → 77.0.1</li> <li><a href="https://github.com/pypa/setuptools/commit/85677af8f6922bed5c2f2c6608749a4771c94e56"><code>85677af</code></a> Manually fix news fragment entries (<a href="https://redirect.github.com/pypa/setuptools/issues/4891">#4891</a>)</li> <li><a href="https://github.com/pypa/setuptools/commit/ce2e283acfef4d87eee63b5f745f81fc0904342c"><code>ce2e283</code></a> Change news fragment name to imply patch version bump</li> <li><a href="https://github.com/pypa/setuptools/commit/76531493d84c27461b73589e6a4e54c56d0cd6fb"><code>7653149</code></a> Add news fragments</li> <li><a href="https://github.com/pypa/setuptools/commit/7db26a1a5148ef57c8471a410f57258cad11b336"><code>7db26a1</code></a> Manually fix news fragment entries</li> <li><a href="https://github.com/pypa/setuptools/commit/5d58b454a5f720f9afa09b47fe15913d0bef8cc4"><code>5d58b45</code></a> Bump version: 76.1.0 → 77.0.0</li> <li><a href="https://github.com/pypa/setuptools/commit/f49d58939a249bf3571992e18a7c237eea27d5f7"><code>f49d589</code></a> Update URL in warning</li> <li><a href="https://github.com/pypa/setuptools/commit/dee0a5e7e41b0eedc2bd4f754098140044748482"><code>dee0a5e</code></a> Add news fragment for PEP 639 marking as 'breaking'</li> <li><a href="https://github.com/pypa/setuptools/commit/74725de239297dc54da770c612981845ad339b34"><code>74725de</code></a> Update vendored copy of wheel (<a href="https://redirect.github.com/pypa/setuptools/issues/4869">#4869</a>)</li> <li><a 
href="https://github.com/pypa/setuptools/commit/5585c1c1f603e04ad778fc72066fad45c7366233"><code>5585c1c</code></a> Add news fragment</li> <li>Additional commits viewable in <a href="https://github.com/pypa/setuptools/compare/v76.1.0...v77.0.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=setuptools&package-manager=pip&previous-version=76.1.0&new-version=77.0.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 7e746233c35..1499be4d611 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -288,7 +288,7 @@ yarl==1.18.3 # The following packages are considered to be unsafe in a requirements file: pip==25.0.1 # via pip-tools -setuptools==76.1.0 +setuptools==77.0.1 # via # incremental # pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index dbc2cfce58e..cdf10d2f365 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -279,7 +279,7 @@ yarl==1.18.3 # The following packages are considered to be unsafe in a requirements file: pip==25.0.1 # via pip-tools -setuptools==76.1.0 +setuptools==77.0.1 # via # incremental # pip-tools diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 9d69778d2d0..9f4b1f71c47 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -76,5 +76,5 @@ urllib3==2.3.0 # 
via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==76.1.0 +setuptools==77.0.1 # via incremental diff --git a/requirements/doc.txt b/requirements/doc.txt index ff57762abf7..22882176b08 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -69,5 +69,5 @@ urllib3==2.3.0 # via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==76.1.0 +setuptools==77.0.1 # via incremental From 4c5a82ca97b12fe962a836d77f2f248722663de8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 20 Mar 2025 11:39:49 +0000 Subject: [PATCH 1280/1511] Bump iniconfig from 2.0.0 to 2.1.0 (#10609) Bumps [iniconfig](https://github.com/pytest-dev/iniconfig) from 2.0.0 to 2.1.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pytest-dev/iniconfig/blob/main/CHANGELOG">iniconfig's changelog</a>.</em></p> <blockquote> <h1>2.1.0</h1> <ul> <li>fix artifact building - pin minimal version of hatch</li> <li>drop eol python 3.8</li> <li>add python 3.12 and 3.13</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pytest-dev/iniconfig/commit/34793a6417e35e511d0c17ffdd8a02ade0f9568e"><code>34793a6</code></a> pre-commit</li> <li><a href="https://github.com/pytest-dev/iniconfig/commit/136435d0a4345bb048dca98e7c2f4e938d74521b"><code>136435d</code></a> update changelog</li> <li><a href="https://github.com/pytest-dev/iniconfig/commit/0bb99ad0f7a6b21d5eb24214a260132317a11e43"><code>0bb99ad</code></a> fix <a href="https://redirect.github.com/pytest-dev/iniconfig/issues/62">#62</a>: require a minimal hatch version with correct metadata</li> <li><a href="https://github.com/pytest-dev/iniconfig/commit/16793eaddac67de0b8d621ae4e42e05b927e8d67"><code>16793ea</code></a> Merge pull request <a href="https://redirect.github.com/pytest-dev/iniconfig/issues/56">#56</a> from 
hugovk/add-3.12</li> <li><a href="https://github.com/pytest-dev/iniconfig/commit/3dc2b2d6bf03e4b7cc92949a8ef0983fdd7ee442"><code>3dc2b2d</code></a> Add support for Python 3.13</li> <li><a href="https://github.com/pytest-dev/iniconfig/commit/2eb8abfd9d3843100d7f138efe5bcd989a885024"><code>2eb8abf</code></a> Bump GitHub Actions</li> <li><a href="https://github.com/pytest-dev/iniconfig/commit/8c4bb5bf57dde613c8cdcf0e02a09b2fff630907"><code>8c4bb5b</code></a> Set python-version for pre-commit to remove CI warning</li> <li><a href="https://github.com/pytest-dev/iniconfig/commit/58b22b2d672564ff1925df51f4a8e07486765ed2"><code>58b22b2</code></a> Drop support for EOL Python 3.7</li> <li><a href="https://github.com/pytest-dev/iniconfig/commit/4a53042252a109bcdd64422058de91c5dffff763"><code>4a53042</code></a> Add support for Python 3.12</li> <li><a href="https://github.com/pytest-dev/iniconfig/commit/9cae43103df70bac6fde7b9f35ad11a9f1be0cb4"><code>9cae431</code></a> Merge pull request <a href="https://redirect.github.com/pytest-dev/iniconfig/issues/54">#54</a> from webknjaz/patch-1</li> <li>Additional commits viewable in <a href="https://github.com/pytest-dev/iniconfig/compare/v2.0.0...v2.1.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=iniconfig&package-manager=pip&previous-version=2.0.0&new-version=2.1.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 1499be4d611..b0832a39ff4 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -99,7 +99,7 @@ imagesize==1.4.1 # via sphinx incremental==24.7.2 # 
via towncrier -iniconfig==2.0.0 +iniconfig==2.1.0 # via pytest jinja2==3.1.6 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index cdf10d2f365..ebb0dbbca54 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -97,7 +97,7 @@ imagesize==1.4.1 # via sphinx incremental==24.7.2 # via towncrier -iniconfig==2.0.0 +iniconfig==2.1.0 # via pytest jinja2==3.1.6 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index 40f1aa23193..c1bbc71c01f 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -37,7 +37,7 @@ identify==2.6.9 # via pre-commit idna==3.7 # via trustme -iniconfig==2.0.0 +iniconfig==2.1.0 # via pytest markdown-it-py==3.0.0 # via rich diff --git a/requirements/test.txt b/requirements/test.txt index 8962714e994..6b3a91d4e72 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -51,7 +51,7 @@ idna==3.4 # via # trustme # yarl -iniconfig==2.0.0 +iniconfig==2.1.0 # via pytest markdown-it-py==3.0.0 # via rich From 57250118ddf8fe402c0f5279597ad516bf6fb3f7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 20 Mar 2025 11:45:08 +0000 Subject: [PATCH 1281/1511] Bump platformdirs from 4.3.6 to 4.3.7 (#10610) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [platformdirs](https://github.com/tox-dev/platformdirs) from 4.3.6 to 4.3.7. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/tox-dev/platformdirs/releases">platformdirs's releases</a>.</em></p> <blockquote> <h2>4.3.7</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>Chunk dependabot updates into a single PR by <a href="https://github.com/ofek"><code>@​ofek</code></a> in <a href="https://redirect.github.com/tox-dev/platformdirs/pull/311">tox-dev/platformdirs#311</a></li> <li>Drop support for EOL Python 3.8 by <a href="https://github.com/hugovk"><code>@​hugovk</code></a> in <a href="https://redirect.github.com/tox-dev/platformdirs/pull/330">tox-dev/platformdirs#330</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/tox-dev/platformdirs/compare/4.3.6...4.3.7">https://github.com/tox-dev/platformdirs/compare/4.3.6...4.3.7</a></p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/tox-dev/platformdirs/commit/00c695b99a2776e0f271bfcb43d15f908bbf361b"><code>00c695b</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/platformdirs/issues/342">#342</a>)</li> <li><a href="https://github.com/tox-dev/platformdirs/commit/6a81b38d7926f8ba99ebf82c837034bdd8d27039"><code>6a81b38</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/platformdirs/issues/341">#341</a>)</li> <li><a href="https://github.com/tox-dev/platformdirs/commit/59d09e5ac0802a62636b7600c9ffbde9894e14db"><code>59d09e5</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/platformdirs/issues/340">#340</a>)</li> <li><a href="https://github.com/tox-dev/platformdirs/commit/263f12a29c87c3c6607a9a66964272fd5ead6b63"><code>263f12a</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/platformdirs/issues/339">#339</a>)</li> <li><a 
href="https://github.com/tox-dev/platformdirs/commit/11e9d9a1a60338e5abc21ea8fa5973e8e8200171"><code>11e9d9a</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/platformdirs/issues/338">#338</a>)</li> <li><a href="https://github.com/tox-dev/platformdirs/commit/330deb217bfc64fb2875f733514bebb6deb47448"><code>330deb2</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/platformdirs/issues/336">#336</a>)</li> <li><a href="https://github.com/tox-dev/platformdirs/commit/9bfd6e8d3bd0478759ffe3fd8723271c894e094b"><code>9bfd6e8</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/platformdirs/issues/335">#335</a>)</li> <li><a href="https://github.com/tox-dev/platformdirs/commit/6e544b3cee6baabd9756b14eb9ddc3a49e5b1f54"><code>6e544b3</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/platformdirs/issues/333">#333</a>)</li> <li><a href="https://github.com/tox-dev/platformdirs/commit/55f7c0f021d02072d460684f1c6191af0d537b07"><code>55f7c0f</code></a> Bump pypa/gh-action-pypi-publish from 1.12.3 to 1.12.4 in the all group (<a href="https://redirect.github.com/tox-dev/platformdirs/issues/332">#332</a>)</li> <li><a href="https://github.com/tox-dev/platformdirs/commit/d0ab5502c891d43c9864a7b1d1f885c24794bdfe"><code>d0ab550</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/platformdirs/issues/326">#326</a>)</li> <li>Additional commits viewable in <a href="https://github.com/tox-dev/platformdirs/compare/4.3.6...4.3.7">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=platformdirs&package-manager=pip&previous-version=4.3.6&new-version=4.3.7)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index b0832a39ff4..66ed58c695b 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -132,7 +132,7 @@ packaging==24.2 # sphinx pip-tools==7.4.1 # via -r requirements/dev.in -platformdirs==4.3.6 +platformdirs==4.3.7 # via virtualenv pluggy==1.5.0 # via pytest diff --git a/requirements/dev.txt b/requirements/dev.txt index ebb0dbbca54..10332f99ed7 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -129,7 +129,7 @@ packaging==24.2 # sphinx pip-tools==7.4.1 # via -r requirements/dev.in -platformdirs==4.3.6 +platformdirs==4.3.7 # via virtualenv pluggy==1.5.0 # via pytest diff --git a/requirements/lint.txt b/requirements/lint.txt index c1bbc71c01f..c568abeb027 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -51,7 +51,7 @@ nodeenv==1.9.1 # via pre-commit packaging==24.2 # via pytest -platformdirs==4.3.6 +platformdirs==4.3.7 # via virtualenv pluggy==1.5.0 # via pytest From 
7ae00b080644c4a6363994ee8e2dd37c064dfd63 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 20 Mar 2025 14:00:37 +0000 Subject: [PATCH 1282/1511] [PR #10602/b6f34d4b backport][3.12] Replace "requests" to "aiohttp" in docs (#10613) **This is a backport of PR #10602 as merged into master (b6f34d4b27ffc45c138bdba428f6e1a5cf9367e4).** Co-authored-by: Hypercube <hypercube@0x01.me> --- docs/client_quickstart.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/client_quickstart.rst b/docs/client_quickstart.rst index f99339cf4a6..0e03f104e90 100644 --- a/docs/client_quickstart.rst +++ b/docs/client_quickstart.rst @@ -93,7 +93,7 @@ Passing Parameters In URLs You often want to send some sort of data in the URL's query string. If you were constructing the URL by hand, this data would be given as key/value pairs in the URL after a question mark, e.g. ``httpbin.org/get?key=val``. -Requests allows you to provide these arguments as a :class:`dict`, using the +aiohttp allows you to provide these arguments as a :class:`dict`, using the ``params`` keyword argument. 
As an example, if you wanted to pass ``key1=value1`` and ``key2=value2`` to ``httpbin.org/get``, you would use the following code:: From 7b79f0c02e91fd172aa0d75a3d8315f0717b5c2d Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 20 Mar 2025 14:00:51 +0000 Subject: [PATCH 1283/1511] [PR #10602/b6f34d4b backport][3.11] Replace "requests" to "aiohttp" in docs (#10612) **This is a backport of PR #10602 as merged into master (b6f34d4b27ffc45c138bdba428f6e1a5cf9367e4).** Co-authored-by: Hypercube <hypercube@0x01.me> --- docs/client_quickstart.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/client_quickstart.rst b/docs/client_quickstart.rst index f99339cf4a6..0e03f104e90 100644 --- a/docs/client_quickstart.rst +++ b/docs/client_quickstart.rst @@ -93,7 +93,7 @@ Passing Parameters In URLs You often want to send some sort of data in the URL's query string. If you were constructing the URL by hand, this data would be given as key/value pairs in the URL after a question mark, e.g. ``httpbin.org/get?key=val``. -Requests allows you to provide these arguments as a :class:`dict`, using the +aiohttp allows you to provide these arguments as a :class:`dict`, using the ``params`` keyword argument. 
As an example, if you wanted to pass ``key1=value1`` and ``key2=value2`` to ``httpbin.org/get``, you would use the following code:: From 5e20fe12c8d9990c253fa43231b2e70481c1ecc2 Mon Sep 17 00:00:00 2001 From: Dmitry Marakasov <474217+AMDmi3@users.noreply.github.com> Date: Fri, 21 Mar 2025 00:34:16 +0300 Subject: [PATCH 1284/1511] [PR #10597/01b2a86a backport][3.12] Skip test_autobahn if python_on_whales is missing (#10614) (cherry picked from commit 01b2a86a6f9e768b24ca3c6f880defe8b895af65) --- tests/autobahn/test_autobahn.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/tests/autobahn/test_autobahn.py b/tests/autobahn/test_autobahn.py index 651183d5f92..0b70aacd471 100644 --- a/tests/autobahn/test_autobahn.py +++ b/tests/autobahn/test_autobahn.py @@ -2,12 +2,16 @@ import subprocess import sys from pathlib import Path -from typing import Any, Dict, Generator, List +from typing import TYPE_CHECKING, Any, Dict, Generator, List import pytest -import python_on_whales from pytest import TempPathFactory +if TYPE_CHECKING: + import python_on_whales +else: + python_on_whales = pytest.importorskip("python_on_whales") + @pytest.fixture(scope="session") def report_dir(tmp_path_factory: TempPathFactory) -> Path: From 8a5efd4cd873172ba375d5a25523983b310cf487 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 21 Mar 2025 10:50:51 +0000 Subject: [PATCH 1285/1511] Bump setuptools from 77.0.1 to 77.0.3 (#10615) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [setuptools](https://github.com/pypa/setuptools) from 77.0.1 to 77.0.3. 
<details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/setuptools/blob/main/NEWS.rst">setuptools's changelog</a>.</em></p> <blockquote> <h1>v77.0.3</h1> <h2>Bugfixes</h2> <ul> <li>Temporarily convert error for license glob patterns containing <code>../</code> into a deprecation warning to allow an accomodation period. (<a href="https://redirect.github.com/pypa/setuptools/issues/4896">#4896</a>)</li> <li>Better error messages for <code>packaging.licenses</code> import errors in environments with <code>packaging<24.2</code>. The import statement was also deferred to spare users that are not using license expressions. (<a href="https://redirect.github.com/pypa/setuptools/issues/4898">#4898</a>)</li> <li>Avoided eagerly raising an exception when <code>license-files</code> is defined simultaneously inside and outside of <code>pyproject.toml</code>. Instead we rely on the existing deprecation error. (<a href="https://redirect.github.com/pypa/setuptools/issues/4899">#4899</a>)</li> </ul> <h1>v77.0.2</h1> <h2>Bugfixes</h2> <ul> <li>Restore <code>distutils.ccompiler._default_compilers</code> -- by :user:<code>ManiacDC</code> (<a href="https://redirect.github.com/pypa/setuptools/issues/4876">#4876</a>)</li> <li>Fixed copy pasta in <code>msvc.shared_lib_format</code>. 
(<a href="https://redirect.github.com/pypa/setuptools/issues/4885">#4885</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/setuptools/commit/7c859e017368360ba66c8cc591279d8964c031bc"><code>7c859e0</code></a> Bump version: 77.0.2 → 77.0.3</li> <li><a href="https://github.com/pypa/setuptools/commit/f9b0e50eaeb46eebdd936f0f2f982c4c62ba5866"><code>f9b0e50</code></a> Convert error for ../ in license paths into deprecation warning (<a href="https://redirect.github.com/pypa/setuptools/issues/4896">#4896</a>)</li> <li><a href="https://github.com/pypa/setuptools/commit/5fe9c32c51839ba22c3d0ad5e4e03922aad35766"><code>5fe9c32</code></a> Defer import error for packaging.licenses in environments with packaging<24.2...</li> <li><a href="https://github.com/pypa/setuptools/commit/44303b6cf469fdec5d268efe712f543e874f9677"><code>44303b6</code></a> Avoid raising exception when license-files is defined outside of `pyproject.t...</li> <li><a href="https://github.com/pypa/setuptools/commit/676362d62a9e7b2bb57e0332ec9b4b0f8539a727"><code>676362d</code></a> Refactor fallback for packaging.licenses</li> <li><a href="https://github.com/pypa/setuptools/commit/53fc322820862c83687d5fee9a56c6a14c8e8d84"><code>53fc322</code></a> Silence typechecking in complicated statement</li> <li><a href="https://github.com/pypa/setuptools/commit/64612bf280b97e51b9014a7c55a393b686227518"><code>64612bf</code></a> Remove duplicated dependency</li> <li><a href="https://github.com/pypa/setuptools/commit/627a869d19828c17390e7c6c0a7bdd43922723a5"><code>627a869</code></a> Attempt to avoid typing error</li> <li><a href="https://github.com/pypa/setuptools/commit/4622b5aaf7f607c61de40cdfdca769e48b32f731"><code>4622b5a</code></a> Add news fragment</li> <li><a href="https://github.com/pypa/setuptools/commit/f123312f6cb27f9813491349323ed276b0bc167c"><code>f123312</code></a> Defer import error for packaging.licenses in environments with 
packagin<24.2</li> <li>Additional commits viewable in <a href="https://github.com/pypa/setuptools/compare/v77.0.1...v77.0.3">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=setuptools&package-manager=pip&previous-version=77.0.1&new-version=77.0.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 66ed58c695b..4fb176ea480 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -288,7 +288,7 @@ yarl==1.18.3 # The following packages are considered to be unsafe in a requirements file: pip==25.0.1 # via pip-tools -setuptools==77.0.1 +setuptools==77.0.3 # via # incremental # pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index 10332f99ed7..3fcf744cb32 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -279,7 +279,7 @@ yarl==1.18.3 # The following packages are considered to be unsafe in a requirements file: pip==25.0.1 # via pip-tools -setuptools==77.0.1 +setuptools==77.0.3 # via # incremental # pip-tools diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 9f4b1f71c47..30494820213 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -76,5 +76,5 @@ urllib3==2.3.0 # 
via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==77.0.1 +setuptools==77.0.3 # via incremental diff --git a/requirements/doc.txt b/requirements/doc.txt index 22882176b08..8caaa0eac3f 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -69,5 +69,5 @@ urllib3==2.3.0 # via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==77.0.1 +setuptools==77.0.3 # via incremental From 28429bda7b3d3e478aa0441e63801aea28311947 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 24 Mar 2025 11:26:26 +0000 Subject: [PATCH 1286/1511] Bump coverage from 7.7.0 to 7.7.1 (#10618) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.7.0 to 7.7.1. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst">coverage's changelog</a>.</em></p> <blockquote> <h2>Version 7.7.1 — 2025-03-21</h2> <ul> <li>A few small tweaks to the sys.monitoring support for Python 3.14. Please test!</li> </ul> <p>.. 
_changes_7-7-0:</p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/nedbat/coveragepy/commit/5e0fd514aa9d49d39afc9b1e57008c20c6c45663"><code>5e0fd51</code></a> docs: sample HTML for 7.7.1</li> <li><a href="https://github.com/nedbat/coveragepy/commit/9b82965ff218ea13a63386b585814bda67e542ef"><code>9b82965</code></a> docs: prep for 7.7.1</li> <li><a href="https://github.com/nedbat/coveragepy/commit/1be53a8083146ae80471e72486dbe7da6f1c98ff"><code>1be53a8</code></a> docs: add clarification about missing line numbers in the text report</li> <li><a href="https://github.com/nedbat/coveragepy/commit/87bc26bc1f0148ba9ff746d8b82584ffecdc67f8"><code>87bc26b</code></a> refactor: use f-strings more</li> <li><a href="https://github.com/nedbat/coveragepy/commit/7ea1535f7ed5abc1aea9a65bbd557b8f20b2346f"><code>7ea1535</code></a> refactor: remove some needless checks</li> <li><a href="https://github.com/nedbat/coveragepy/commit/f503dc5285692c6073de6299898738ec2b2d6006"><code>f503dc5</code></a> perf: collect more stats in sysmon</li> <li><a href="https://github.com/nedbat/coveragepy/commit/97e68a0cf89751b1dff74106ce555ce4022c907c"><code>97e68a0</code></a> build: bump version to 7.7.1</li> <li>See full diff in <a href="https://github.com/nedbat/coveragepy/compare/7.7.0...7.7.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=coverage&package-manager=pip&previous-version=7.7.0&new-version=7.7.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 4fb176ea480..3b3392e7fbe 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -54,7 +54,7 @@ click==8.1.8 # slotscheck # towncrier # wait-for-it -coverage==7.7.0 
+coverage==7.7.1 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/dev.txt b/requirements/dev.txt index 3fcf744cb32..7e49b8cff57 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -54,7 +54,7 @@ click==8.1.8 # slotscheck # towncrier # wait-for-it -coverage==7.7.0 +coverage==7.7.1 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/test.txt b/requirements/test.txt index 6b3a91d4e72..f08b2a1568a 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -27,7 +27,7 @@ cffi==1.17.1 # pytest-codspeed click==8.1.8 # via wait-for-it -coverage==7.7.0 +coverage==7.7.1 # via # -r requirements/test.in # pytest-cov From 67a265bc5dcd2e4c2b8b125bf10a1f13ee7a3580 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 25 Mar 2025 11:07:21 +0000 Subject: [PATCH 1287/1511] Bump pypa/cibuildwheel from 2.23.1 to 2.23.2 (#10622) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.23.1 to 2.23.2. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pypa/cibuildwheel/releases">pypa/cibuildwheel's releases</a>.</em></p> <blockquote> <h2>v2.23.2</h2> <ul> <li>🐛 Workaround an issue with pyodide builds when running cibuildwheel with a Python that was installed via UV (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2328">#2328</a> via <a href="https://redirect.github.com/pypa/cibuildwheel/issues/2331">#2331</a>)</li> <li>🛠 Dependency updates, including a manylinux update that fixes an <a href="https://redirect.github.com/pypa/manylinux/issues/1760">'undefined symbol' error</a> in gcc-toolset (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2334">#2334</a>)</li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md">pypa/cibuildwheel's changelog</a>.</em></p> <blockquote> <h3>v2.23.2</h3> <p><em>24 March 2025</em></p> <ul> <li>🐛 Workaround an issue with pyodide builds when running cibuildwheel with a Python that was installed via UV (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2328">#2328</a> via <a href="https://redirect.github.com/pypa/cibuildwheel/issues/2331">#2331</a>)</li> <li>🛠 Dependency updates, including a manylinux update that fixes an <a href="https://redirect.github.com/pypa/manylinux/issues/1760">'undefined symbol' error</a> in gcc-toolset (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2334">#2334</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/cibuildwheel/commit/d04cacbc9866d432033b1d09142936e6a0e2121a"><code>d04cacb</code></a> Bump version: v2.23.2</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/5f4e019684661085adb6558969c7fd389a532174"><code>5f4e019</code></a> [2.x] Update dependencies (<a 
href="https://redirect.github.com/pypa/cibuildwheel/issues/2334">#2334</a>)</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/2efa648f38e83a421aae82bc80002f8cabf92be7"><code>2efa648</code></a> fix: always resolve --python argument (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2328">#2328</a>) (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2331">#2331</a>)</li> <li>See full diff in <a href="https://github.com/pypa/cibuildwheel/compare/v2.23.1...v2.23.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pypa/cibuildwheel&package-manager=github_actions&previous-version=2.23.1&new-version=2.23.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 67c98e21878..b00051b8668 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -414,7 +414,7 @@ jobs: run: | make cythonize - name: Build wheels - uses: pypa/cibuildwheel@v2.23.1 + uses: pypa/cibuildwheel@v2.23.2 env: CIBW_SKIP: pp* ${{ matrix.musl == 'musllinux' && '*manylinux*' || '*musllinux*' }} CIBW_ARCHS_MACOS: x86_64 arm64 universal2 From 9ad688ebfe547ed4549a5bfa9fe03d8f99718b6e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 25 Mar 2025 11:27:04 +0000 Subject: [PATCH 1288/1511] Bump setuptools from 77.0.3 to 78.0.2 (#10623) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [setuptools](https://github.com/pypa/setuptools) from 77.0.3 to 78.0.2. 
<details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/setuptools/blob/main/NEWS.rst">setuptools's changelog</a>.</em></p> <blockquote> <h1>v78.0.2</h1> <h2>Bugfixes</h2> <ul> <li>Postponed removals of deprecated dash-separated and uppercase fields in <code>setup.cfg</code>. All packages with deprecated configurations are advised to move before 2026. (<a href="https://redirect.github.com/pypa/setuptools/issues/4911">#4911</a>)</li> </ul> <h1>v78.0.1</h1> <h2>Misc</h2> <ul> <li><a href="https://redirect.github.com/pypa/setuptools/issues/4909">#4909</a></li> </ul> <h1>v78.0.0</h1> <h2>Bugfixes</h2> <ul> <li>Reverted distutils changes that broke the monkey patching of command classes. (<a href="https://redirect.github.com/pypa/setuptools/issues/4902">#4902</a>)</li> </ul> <h2>Deprecations and Removals</h2> <ul> <li> <p>Setuptools no longer accepts options containing uppercase or dash characters in <code>setup.cfg</code>. Please ensure to write the options in <code>setup.cfg</code> using the :wiki:<code>lower_snake_case <Snake_case></code> convention (e.g. <code>Name => name</code>, <code>install-requires => install_requires</code>). This is a follow-up on deprecations introduced in <code>v54.1.0 <https://setuptools.pypa.io/en/latest/history.html#v54-1-0></code>_ (see <a href="https://redirect.github.com/pypa/setuptools/issues/1608">#1608</a>) and <code>v54.1.1 <https://setuptools.pypa.io/en/latest/history.html#v54-1-1></code>_ (see <a href="https://redirect.github.com/pypa/setuptools/issues/2592">#2592</a>).</p> <p>.. note:: This change <em>does not affect configurations in</em> <code>pyproject.toml</code> (which uses the :wiki:<code>lower-kebab-case <Letter_case#Kebab_case></code> convention following the precedent set in :pep:<code>517</code>/:pep:<code>518</code>). 
(<a href="https://redirect.github.com/pypa/setuptools/issues/4870">#4870</a>)</p> </li> </ul> <h2>Misc</h2> <ul> <li><a href="https://redirect.github.com/pypa/setuptools/issues/4904">#4904</a></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/setuptools/commit/3c88de1c62420c1e0161f48e34af6424ac009aa5"><code>3c88de1</code></a> Bump version: 78.0.1 → 78.0.2</li> <li><a href="https://github.com/pypa/setuptools/commit/a219451f7354816fe479cf2b0fd5bc47de0a57ee"><code>a219451</code></a> Revert removals introduced in <code>v78.0.0</code> (<a href="https://redirect.github.com/pypa/setuptools/issues/4911">#4911</a>)</li> <li><a href="https://github.com/pypa/setuptools/commit/d4326dd6896caa87dbf928610fa160a536b69323"><code>d4326dd</code></a> Allow PyPy to fail on CI.</li> <li><a href="https://github.com/pypa/setuptools/commit/92e22dd5765b54e1ce7c9f25c419371e666b15dc"><code>92e22dd</code></a> Manually remove file to try to stop flaky tests on PyPy</li> <li><a href="https://github.com/pypa/setuptools/commit/caa48ab040420be2885e9d2a14ce6615bd17f992"><code>caa48ab</code></a> Add news fragment</li> <li><a href="https://github.com/pypa/setuptools/commit/4e9b8caec323aba5b2b3764ef97018a8b1596a4b"><code>4e9b8ca</code></a> Revert removals introduced in v78.0.0</li> <li><a href="https://github.com/pypa/setuptools/commit/5450f57f1cefa44c961fb50fc18e9826c95a5d59"><code>5450f57</code></a> Bump version: 78.0.0 → 78.0.1</li> <li><a href="https://github.com/pypa/setuptools/commit/f3d0b10f20356a63d03de2043322a54c84ff7ee1"><code>f3d0b10</code></a> Temporarily remove <code>requests</code> from integration tests (<a href="https://redirect.github.com/pypa/setuptools/issues/4909">#4909</a>)</li> <li><a href="https://github.com/pypa/setuptools/commit/54b4c79bf40099bac3f5005352215acdbcd12e17"><code>54b4c79</code></a> Add news fragment</li> <li><a 
href="https://github.com/pypa/setuptools/commit/e771c64c613f148b9b46f45acf00955093dce4c2"><code>e771c64</code></a> Temporarily remove 'requests' from integration tests</li> <li>Additional commits viewable in <a href="https://github.com/pypa/setuptools/compare/v77.0.3...v78.0.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=setuptools&package-manager=pip&previous-version=77.0.3&new-version=78.0.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 3b3392e7fbe..72c0cf6b280 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -288,7 +288,7 @@ yarl==1.18.3 # The following packages are considered to be unsafe in a requirements file: pip==25.0.1 # via pip-tools -setuptools==77.0.3 +setuptools==78.0.2 # via # incremental # pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index 7e49b8cff57..d11e8ac175f 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -279,7 +279,7 @@ yarl==1.18.3 # The following packages are considered to be unsafe in a requirements file: pip==25.0.1 # via pip-tools -setuptools==77.0.3 +setuptools==78.0.2 # via # incremental # pip-tools diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 30494820213..448e36e6fb9 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -76,5 +76,5 @@ urllib3==2.3.0 # 
via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==77.0.3 +setuptools==78.0.2 # via incremental diff --git a/requirements/doc.txt b/requirements/doc.txt index 8caaa0eac3f..4554d2a1b68 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -69,5 +69,5 @@ urllib3==2.3.0 # via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==77.0.3 +setuptools==78.0.2 # via incremental From b672096f2812821d50ae1fb6db457b3e6363c4ca Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 26 Mar 2025 10:46:40 +0000 Subject: [PATCH 1289/1511] Bump propcache from 0.3.0 to 0.3.1 (#10627) Bumps [propcache](https://github.com/aio-libs/propcache) from 0.3.0 to 0.3.1. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/propcache/releases">propcache's releases</a>.</em></p> <blockquote> <h2>0.3.1</h2> <h2>Bug fixes</h2> <ul> <li> <p>Improved typing annotations, fixing some type errors under correct usage and improving typing robustness generally -- by :user:<code>Dreamsorcerer</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/propcache/issues/103">#103</a>.</p> </li> </ul> <hr /> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/propcache/blob/master/CHANGES.rst">propcache's changelog</a>.</em></p> <blockquote> <h1>0.3.1</h1> <p><em>(2025-03-25)</em></p> <h2>Bug fixes</h2> <ul> <li> <p>Improved typing annotations, fixing some type errors under correct usage and improving typing robustness generally -- by :user:<code>Dreamsorcerer</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>103</code>.</p> </li> </ul> <hr /> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a 
href="https://github.com/aio-libs/propcache/commit/79088e6c9c5c1b5eae2be83dfe0cfc2315f8cc8e"><code>79088e6</code></a> Fix release</li> <li><a href="https://github.com/aio-libs/propcache/commit/f9d877d0a98dea0ec58250e9ab4c1f8970d88a67"><code>f9d877d</code></a> Release v0.3.1 (<a href="https://redirect.github.com/aio-libs/propcache/issues/108">#108</a>)</li> <li><a href="https://github.com/aio-libs/propcache/commit/b884edab8d353c86d050e0cb6f08e527109379e1"><code>b884eda</code></a> Bump pypa/cibuildwheel from 2.23.1 to 2.23.2 (<a href="https://redirect.github.com/aio-libs/propcache/issues/107">#107</a>)</li> <li><a href="https://github.com/aio-libs/propcache/commit/4fdda24c62f15f45fe622fc9eaa273f6eebbffbe"><code>4fdda24</code></a> Allow TypedDict for cache implementation (<a href="https://redirect.github.com/aio-libs/propcache/issues/105">#105</a>)</li> <li><a href="https://github.com/aio-libs/propcache/commit/f89c519abe942f98e558e16e752f0baa62870b94"><code>f89c519</code></a> Bump pre-commit from 4.1.0 to 4.2.0 (<a href="https://redirect.github.com/aio-libs/propcache/issues/106">#106</a>)</li> <li><a href="https://github.com/aio-libs/propcache/commit/7c65ce0b1e6b0cab9488e801f3cad9059ee325cc"><code>7c65ce0</code></a> Bump pypa/cibuildwheel from 2.23.0 to 2.23.1 (<a href="https://redirect.github.com/aio-libs/propcache/issues/104">#104</a>)</li> <li><a href="https://github.com/aio-libs/propcache/commit/92b4baf3510da6a7a86eae6809dceecb8fec8d0d"><code>92b4baf</code></a> Bump pypa/cibuildwheel from 2.22.0 to 2.23.0 (<a href="https://redirect.github.com/aio-libs/propcache/issues/102">#102</a>)</li> <li><a href="https://github.com/aio-libs/propcache/commit/317e5a4b0372e223654761adac66d5bd3de384e7"><code>317e5a4</code></a> Bump pytest from 8.3.4 to 8.3.5 (<a href="https://redirect.github.com/aio-libs/propcache/issues/101">#101</a>)</li> <li><a href="https://github.com/aio-libs/propcache/commit/c5d43c7edf13b8db89189b55308ad3c7677b434d"><code>c5d43c7</code></a> Bump sphinx from 
8.2.1 to 8.2.3 (<a href="https://redirect.github.com/aio-libs/propcache/issues/100">#100</a>)</li> <li><a href="https://github.com/aio-libs/propcache/commit/eb64b6577e4d4a46762b86703f2f707eb7660102"><code>eb64b65</code></a> Strict typing (<a href="https://redirect.github.com/aio-libs/propcache/issues/103">#103</a>)</li> <li>Additional commits viewable in <a href="https://github.com/aio-libs/propcache/compare/v0.3.0...v0.3.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=propcache&package-manager=pip&previous-version=0.3.0&new-version=0.3.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 7b5bc1ea8bd..6c8b21f5aa8 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -32,7 +32,7 @@ multidict==6.2.0 # yarl packaging==24.2 # via gunicorn -propcache==0.3.0 +propcache==0.3.1 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 72c0cf6b280..70903ae315c 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -138,7 +138,7 @@ pluggy==1.5.0 # via pytest pre-commit==4.2.0 # via -r requirements/lint.in -propcache==0.3.0 +propcache==0.3.1 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/dev.txt b/requirements/dev.txt index d11e8ac175f..d38446b4604 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -135,7 +135,7 @@ pluggy==1.5.0 # via pytest pre-commit==4.2.0 # via -r requirements/lint.in -propcache==0.3.0 
+propcache==0.3.1 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 1b515fc1178..0575278acab 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -28,7 +28,7 @@ multidict==6.2.0 # via # -r requirements/runtime-deps.in # yarl -propcache==0.3.0 +propcache==0.3.1 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/test.txt b/requirements/test.txt index f08b2a1568a..0eafec9cdad 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -71,7 +71,7 @@ packaging==24.2 # pytest pluggy==1.5.0 # via pytest -propcache==0.3.0 +propcache==0.3.1 # via # -r requirements/runtime-deps.in # yarl From 3dba55ea21c527204d25bd8ec4f69523b4c09bde Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 26 Mar 2025 10:56:57 +0000 Subject: [PATCH 1290/1511] Bump setuptools from 78.0.2 to 78.1.0 (#10628) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [setuptools](https://github.com/pypa/setuptools) from 78.0.2 to 78.1.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/setuptools/blob/main/NEWS.rst">setuptools's changelog</a>.</em></p> <blockquote> <h1>v78.1.0</h1> <h2>Features</h2> <ul> <li>Restore access to _get_vc_env with a warning. 
(<a href="https://redirect.github.com/pypa/setuptools/issues/4874">#4874</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/setuptools/commit/6ead555c5fb29bc57fe6105b1bffc163f56fd558"><code>6ead555</code></a> Bump version: 78.0.2 → 78.1.0</li> <li><a href="https://github.com/pypa/setuptools/commit/d37cd9ede4c0b529f4239d0c1e26c2a5996131c4"><code>d37cd9e</code></a> Merge <a href="https://github.com/pypa/distutils">https://github.com/pypa/distutils</a></li> <li><a href="https://github.com/pypa/setuptools/commit/efa2eb231c82f6630468ad358cfe4b65a013b690"><code>efa2eb2</code></a> Restore access to _get_vc_env with a warning.</li> <li>See full diff in <a href="https://github.com/pypa/setuptools/compare/v78.0.2...v78.1.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=setuptools&package-manager=pip&previous-version=78.0.2&new-version=78.1.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 70903ae315c..4c18d0e8071 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -288,7 +288,7 @@ yarl==1.18.3 # The following packages are 
considered to be unsafe in a requirements file: pip==25.0.1 # via pip-tools -setuptools==78.0.2 +setuptools==78.1.0 # via # incremental # pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index d38446b4604..66a2b01a266 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -279,7 +279,7 @@ yarl==1.18.3 # The following packages are considered to be unsafe in a requirements file: pip==25.0.1 # via pip-tools -setuptools==78.0.2 +setuptools==78.1.0 # via # incremental # pip-tools diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 448e36e6fb9..dfd7f09765d 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -76,5 +76,5 @@ urllib3==2.3.0 # via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==78.0.2 +setuptools==78.1.0 # via incremental diff --git a/requirements/doc.txt b/requirements/doc.txt index 4554d2a1b68..15356c89a9e 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -69,5 +69,5 @@ urllib3==2.3.0 # via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==78.0.2 +setuptools==78.1.0 # via incremental From b2965de1ad18fc114591d30c482f1d59891b1f64 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 28 Mar 2025 11:10:39 +0000 Subject: [PATCH 1291/1511] Bump python-on-whales from 0.75.1 to 0.76.1 (#10632) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [python-on-whales](https://github.com/gabrieldemarmiesse/python-on-whales) from 0.75.1 to 0.76.1. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/gabrieldemarmiesse/python-on-whales/releases">python-on-whales's releases</a>.</em></p> <blockquote> <h2>v0.76.1</h2> <h2>What's Changed</h2> <ul> <li>Fixed secrets and added tests by <a href="https://github.com/gabrieldemarmiesse"><code>@​gabrieldemarmiesse</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/673">gabrieldemarmiesse/python-on-whales#673</a></li> <li>Add support for multi-nodes on builders by <a href="https://github.com/gabrieldemarmiesse"><code>@​gabrieldemarmiesse</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/674">gabrieldemarmiesse/python-on-whales#674</a></li> <li>Fix secrets specs labels being capitalized by <a href="https://github.com/gabrieldemarmiesse"><code>@​gabrieldemarmiesse</code></a> in <a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/pull/675">gabrieldemarmiesse/python-on-whales#675</a></li> </ul> <h3>Breaking changes:</h3> <p>Some breaking changes were introduced in this version. 
Those are rarely used features but it might impact you.</p> <p><code>python_on_whales.Builder</code>:</p> <ul> <li><code>my_builder.status</code> -> <code>my_builder.nodes[0].status</code></li> <li><code>my_builder.platforms</code> -> <code>my_builder.nodes[0].platforms</code></li> </ul> <p><code>python_on_whales.Secret</code>:</p> <ul> <li><code>my_secret.spec["name"]</code> -> <code>my_secret.spec.name</code></li> <li><code>my_secret.spec["labels"]</code> -> <code>my_secret.spec.labels</code></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/gabrieldemarmiesse/python-on-whales/compare/v0.75.1...v0.76.1">https://github.com/gabrieldemarmiesse/python-on-whales/compare/v0.75.1...v0.76.1</a></p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/de653e6591bb7a306eedd94d659ce373b8b8727d"><code>de653e6</code></a> Bump version to 0.76.1</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/0e0dbafc0e6dba9faec179b054e922e451cd013c"><code>0e0dbaf</code></a> Fix secrets specs labels being capitalized (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/675">#675</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/38370abea3c054d1c17fb89b6769e2e3bffba959"><code>38370ab</code></a> Bump version to 0.76.0</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/9e0d8baa2be9f5cfa082cbd7b7c7ae076ffa7b96"><code>9e0d8ba</code></a> Add support for multi-nodes on builders (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/674">#674</a>)</li> <li><a href="https://github.com/gabrieldemarmiesse/python-on-whales/commit/3e95100df068a0ad97dce67a9a0b0d2c510d818a"><code>3e95100</code></a> Fixed secrets and added tests (<a href="https://redirect.github.com/gabrieldemarmiesse/python-on-whales/issues/673">#673</a>)</li> <li>See full diff in <a 
href="https://github.com/gabrieldemarmiesse/python-on-whales/compare/v0.75.1...v0.76.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=python-on-whales&package-manager=pip&previous-version=0.75.1&new-version=0.76.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 4c18d0e8071..33d561d5063 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -188,7 +188,7 @@ pytest-xdist==3.6.1 # via -r requirements/test.in python-dateutil==2.9.0.post0 # via freezegun -python-on-whales==0.75.1 +python-on-whales==0.76.1 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 66a2b01a266..87ed47811ed 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -183,7 +183,7 @@ pytest-xdist==3.6.1 # via -r requirements/test.in python-dateutil==2.9.0.post0 # via freezegun -python-on-whales==0.75.1 +python-on-whales==0.76.1 # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index c568abeb027..40fde1ab340 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -78,7 +78,7 @@ pytest-mock==3.14.0 # via -r 
requirements/lint.in python-dateutil==2.9.0.post0 # via freezegun -python-on-whales==0.75.1 +python-on-whales==0.76.1 # via -r requirements/lint.in pyyaml==6.0.2 # via pre-commit diff --git a/requirements/test.txt b/requirements/test.txt index 0eafec9cdad..cef97799aee 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -104,7 +104,7 @@ pytest-xdist==3.6.1 # via -r requirements/test.in python-dateutil==2.9.0.post0 # via freezegun -python-on-whales==0.75.1 +python-on-whales==0.76.1 # via -r requirements/test.in re-assert==1.1.0 # via -r requirements/test.in From fca089740872c3ac6894491553dfdc0b60c7b533 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 30 Mar 2025 21:39:53 +0000 Subject: [PATCH 1292/1511] [PR #10625/4599b87f backport][3.11] Improve performance of serializing headers (#10635) **This is a backport of PR #10625 as merged into master (4599b87f44569079942542c99c46779ca6e8bef7).** Improve performance of serializing headers by moving the check for `\r` and `\n` into the write loop instead of making a separate call to check each disallowed character in the Python string. Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/10625.misc.rst | 1 + aiohttp/_http_writer.pyx | 38 ++++++++++++++++++-------------------- tests/test_http_writer.py | 29 ++++++++++++++++++++++++++++- 3 files changed, 47 insertions(+), 21 deletions(-) create mode 100644 CHANGES/10625.misc.rst diff --git a/CHANGES/10625.misc.rst b/CHANGES/10625.misc.rst new file mode 100644 index 00000000000..30cd7f0f3a6 --- /dev/null +++ b/CHANGES/10625.misc.rst @@ -0,0 +1 @@ +Improved performance of serializing headers -- by :user:`bdraco`. 
diff --git a/aiohttp/_http_writer.pyx b/aiohttp/_http_writer.pyx index 287371334f8..4a3ae1f9e68 100644 --- a/aiohttp/_http_writer.pyx +++ b/aiohttp/_http_writer.pyx @@ -97,27 +97,34 @@ cdef inline int _write_str(Writer* writer, str s): return -1 -# --------------- _serialize_headers ---------------------- - -cdef str to_str(object s): +cdef inline int _write_str_raise_on_nlcr(Writer* writer, object s): + cdef Py_UCS4 ch + cdef str out_str if type(s) is str: - return <str>s + out_str = <str>s elif type(s) is _istr: - return PyObject_Str(s) + out_str = PyObject_Str(s) elif not isinstance(s, str): raise TypeError("Cannot serialize non-str key {!r}".format(s)) else: - return str(s) + out_str = str(s) + + for ch in out_str: + if ch == 0x0D or ch == 0x0A: + raise ValueError( + "Newline or carriage return detected in headers. " + "Potential header injection attack." + ) + if _write_utf8(writer, ch) < 0: + return -1 +# --------------- _serialize_headers ---------------------- def _serialize_headers(str status_line, headers): cdef Writer writer cdef object key cdef object val - cdef bytes ret - cdef str key_str - cdef str val_str _init_writer(&writer) @@ -130,22 +137,13 @@ def _serialize_headers(str status_line, headers): raise for key, val in headers.items(): - key_str = to_str(key) - val_str = to_str(val) - - if "\r" in key_str or "\n" in key_str or "\r" in val_str or "\n" in val_str: - raise ValueError( - "Newline or carriage return character detected in HTTP status message or " - "header. This is a potential security issue." 
- ) - - if _write_str(&writer, key_str) < 0: + if _write_str_raise_on_nlcr(&writer, key) < 0: raise if _write_byte(&writer, b':') < 0: raise if _write_byte(&writer, b' ') < 0: raise - if _write_str(&writer, val_str) < 0: + if _write_str_raise_on_nlcr(&writer, val) < 0: raise if _write_byte(&writer, b'\r') < 0: raise diff --git a/tests/test_http_writer.py b/tests/test_http_writer.py index c39fe3c7251..420816b3137 100644 --- a/tests/test_http_writer.py +++ b/tests/test_http_writer.py @@ -8,8 +8,9 @@ import pytest from multidict import CIMultiDict -from aiohttp import ClientConnectionResetError, http +from aiohttp import ClientConnectionResetError, hdrs, http from aiohttp.base_protocol import BaseProtocol +from aiohttp.http_writer import _serialize_headers from aiohttp.test_utils import make_mocked_coro @@ -534,3 +535,29 @@ async def test_set_eof_after_write_headers( msg.set_eof() await msg.write_eof() assert not transport.write.called + + +@pytest.mark.parametrize( + "char", + [ + "\n", + "\r", + ], +) +def test_serialize_headers_raises_on_new_line_or_carriage_return(char: str) -> None: + """Verify serialize_headers raises on cr or nl in the headers.""" + status_line = "HTTP/1.1 200 OK" + headers = CIMultiDict( + { + hdrs.CONTENT_TYPE: f"text/plain{char}", + } + ) + + with pytest.raises( + ValueError, + match=( + "Newline or carriage return detected in headers. " + "Potential header injection attack." 
+ ), + ): + _serialize_headers(status_line, headers) From 64efce269d6c7b04d9a23a4ed775cb168f56f8e5 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 30 Mar 2025 21:58:18 +0000 Subject: [PATCH 1293/1511] [PR #10568/8ac48306 backport][3.12] Docs fixups following implement socket factory (#10534) (#10637) **This is a backport of PR #10568 as merged into master (8ac483068ea24f6a709b3ead51ec87e3660a3b24).** Co-authored-by: Tim Menninger <tmenninger22@gmail.com> --- CHANGES/10474.feature.rst | 1 + docs/client_advanced.rst | 4 ++-- docs/client_reference.rst | 18 +++++++----------- docs/conf.py | 3 ++- 4 files changed, 12 insertions(+), 14 deletions(-) create mode 120000 CHANGES/10474.feature.rst diff --git a/CHANGES/10474.feature.rst b/CHANGES/10474.feature.rst new file mode 120000 index 00000000000..7c4f9a7b83b --- /dev/null +++ b/CHANGES/10474.feature.rst @@ -0,0 +1 @@ +10520.feature.rst \ No newline at end of file diff --git a/docs/client_advanced.rst b/docs/client_advanced.rst index 1116e0bdc45..39cd259dc9e 100644 --- a/docs/client_advanced.rst +++ b/docs/client_advanced.rst @@ -465,8 +465,8 @@ Custom socket creation ^^^^^^^^^^^^^^^^^^^^^^ If the default socket is insufficient for your use case, pass an optional -`socket_factory` to the :class:`~aiohttp.TCPConnector`, which implements -`SocketFactoryType`. This will be used to create all sockets for the +``socket_factory`` to the :class:`~aiohttp.TCPConnector`, which implements +:class:`SocketFactoryType`. This will be used to create all sockets for the lifetime of the class object. For example, we may want to change the conditions under which we consider a connection dead. 
The following would make all sockets respect 9*7200 = 18 hours:: diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 42b45e589ff..a99db06764b 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -1138,9 +1138,7 @@ is controlled by *force_close* constructor's parameter). overridden in subclasses. -.. autodata:: AddrInfoType - -.. note:: +.. py:class:: AddrInfoType Refer to :py:data:`aiohappyeyeballs.AddrInfoType` for more info. @@ -1148,13 +1146,11 @@ is controlled by *force_close* constructor's parameter). Be sure to use ``aiohttp.AddrInfoType`` rather than ``aiohappyeyeballs.AddrInfoType`` to avoid import breakage, as - it is likely to be removed from ``aiohappyeyeballs`` in the + it is likely to be removed from :mod:`aiohappyeyeballs` in the future. -.. autodata:: SocketFactoryType - -.. note:: +.. py:class:: SocketFactoryType Refer to :py:data:`aiohappyeyeballs.SocketFactoryType` for more info. @@ -1162,7 +1158,7 @@ is controlled by *force_close* constructor's parameter). Be sure to use ``aiohttp.SocketFactoryType`` rather than ``aiohappyeyeballs.SocketFactoryType`` to avoid import breakage, - as it is likely to be removed from ``aiohappyeyeballs`` in the + as it is likely to be removed from :mod:`aiohappyeyeballs` in the future. @@ -1294,9 +1290,9 @@ is controlled by *force_close* constructor's parameter). .. versionadded:: 3.10 - :param :py:data:``SocketFactoryType`` socket_factory: This function takes an - :py:data:``AddrInfoType`` and is used in lieu of ``socket.socket()`` when - creating TCP connections. + :param SocketFactoryType socket_factory: This function takes an + :py:data:`AddrInfoType` and is used in lieu of + :py:func:`socket.socket` when creating TCP connections. .. 
versionadded:: 3.12 diff --git a/docs/conf.py b/docs/conf.py index dcab6acf247..595f02efb89 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -84,7 +84,7 @@ "aiohttpsession": ("https://aiohttp-session.readthedocs.io/en/stable/", None), "aiohttpdemos": ("https://aiohttp-demos.readthedocs.io/en/latest/", None), "aiojobs": ("https://aiojobs.readthedocs.io/en/stable/", None), - "aiohappyeyeballs": ("https://aiohappyeyeballs.readthedocs.io/en/stable/", None), + "aiohappyeyeballs": ("https://aiohappyeyeballs.readthedocs.io/en/latest/", None), } # Add any paths that contain templates here, relative to this directory. @@ -420,6 +420,7 @@ ("py:class", "aiohttp.web.MatchedSubAppResource"), # undocumented ("py:attr", "body"), # undocumented ("py:class", "socket.socket"), # undocumented + ("py:func", "socket.socket"), # undocumented ("py:class", "socket.AddressFamily"), # undocumented ("py:obj", "logging.DEBUG"), # undocumented ("py:class", "aiohttp.abc.AbstractAsyncAccessLogger"), # undocumented From 8d54f1f1fe513a72a6c11300416ee4f14ddd0b22 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 30 Mar 2025 22:11:33 +0000 Subject: [PATCH 1294/1511] [PR #10625/4599b87f backport][3.12] Improve performance of serializing headers (#10636) **This is a backport of PR #10625 as merged into master (4599b87f44569079942542c99c46779ca6e8bef7).** Improve performance of serializing headers by moving the check for `\r` and `\n` into the write loop instead of making a separate call to check each disallowed character in the Python string. Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/10625.misc.rst | 1 + aiohttp/_http_writer.pyx | 38 ++++++++++++++++++-------------------- tests/test_http_writer.py | 29 ++++++++++++++++++++++++++++- 3 files changed, 47 insertions(+), 21 deletions(-) create mode 100644 CHANGES/10625.misc.rst diff --git a/CHANGES/10625.misc.rst b/CHANGES/10625.misc.rst new file mode 100644 index 00000000000..30cd7f0f3a6 --- /dev/null +++ b/CHANGES/10625.misc.rst @@ -0,0 +1 @@ +Improved performance of serializing headers -- by :user:`bdraco`. diff --git a/aiohttp/_http_writer.pyx b/aiohttp/_http_writer.pyx index 287371334f8..4a3ae1f9e68 100644 --- a/aiohttp/_http_writer.pyx +++ b/aiohttp/_http_writer.pyx @@ -97,27 +97,34 @@ cdef inline int _write_str(Writer* writer, str s): return -1 -# --------------- _serialize_headers ---------------------- - -cdef str to_str(object s): +cdef inline int _write_str_raise_on_nlcr(Writer* writer, object s): + cdef Py_UCS4 ch + cdef str out_str if type(s) is str: - return <str>s + out_str = <str>s elif type(s) is _istr: - return PyObject_Str(s) + out_str = PyObject_Str(s) elif not isinstance(s, str): raise TypeError("Cannot serialize non-str key {!r}".format(s)) else: - return str(s) + out_str = str(s) + + for ch in out_str: + if ch == 0x0D or ch == 0x0A: + raise ValueError( + "Newline or carriage return detected in headers. " + "Potential header injection attack." 
+ ) + if _write_utf8(writer, ch) < 0: + return -1 +# --------------- _serialize_headers ---------------------- def _serialize_headers(str status_line, headers): cdef Writer writer cdef object key cdef object val - cdef bytes ret - cdef str key_str - cdef str val_str _init_writer(&writer) @@ -130,22 +137,13 @@ def _serialize_headers(str status_line, headers): raise for key, val in headers.items(): - key_str = to_str(key) - val_str = to_str(val) - - if "\r" in key_str or "\n" in key_str or "\r" in val_str or "\n" in val_str: - raise ValueError( - "Newline or carriage return character detected in HTTP status message or " - "header. This is a potential security issue." - ) - - if _write_str(&writer, key_str) < 0: + if _write_str_raise_on_nlcr(&writer, key) < 0: raise if _write_byte(&writer, b':') < 0: raise if _write_byte(&writer, b' ') < 0: raise - if _write_str(&writer, val_str) < 0: + if _write_str_raise_on_nlcr(&writer, val) < 0: raise if _write_byte(&writer, b'\r') < 0: raise diff --git a/tests/test_http_writer.py b/tests/test_http_writer.py index c39fe3c7251..420816b3137 100644 --- a/tests/test_http_writer.py +++ b/tests/test_http_writer.py @@ -8,8 +8,9 @@ import pytest from multidict import CIMultiDict -from aiohttp import ClientConnectionResetError, http +from aiohttp import ClientConnectionResetError, hdrs, http from aiohttp.base_protocol import BaseProtocol +from aiohttp.http_writer import _serialize_headers from aiohttp.test_utils import make_mocked_coro @@ -534,3 +535,29 @@ async def test_set_eof_after_write_headers( msg.set_eof() await msg.write_eof() assert not transport.write.called + + +@pytest.mark.parametrize( + "char", + [ + "\n", + "\r", + ], +) +def test_serialize_headers_raises_on_new_line_or_carriage_return(char: str) -> None: + """Verify serialize_headers raises on cr or nl in the headers.""" + status_line = "HTTP/1.1 200 OK" + headers = CIMultiDict( + { + hdrs.CONTENT_TYPE: f"text/plain{char}", + } + ) + + with pytest.raises( + ValueError, + 
match=( + "Newline or carriage return detected in headers. " + "Potential header injection attack." + ), + ): + _serialize_headers(status_line, headers) From 9d4c09a2e866839466895657c5df2c55e17298fc Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 30 Mar 2025 22:16:09 +0000 Subject: [PATCH 1295/1511] [PR #10601/f7cac7e6 backport][3.12] Reduce WebSocket buffer slicing overhead (#10640) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **This is a backport of PR #10601 as merged into master (f7cac7e63f18691e4261af353e84f9073b16624a).** <!-- Thank you for your contribution! --> ## What do these changes do? Use a `const unsigned char *` for the buffer (Cython will automatically extract is using `__Pyx_PyBytes_AsUString`) as its a lot faster than copying around `PyBytes` objects. We do need to be careful that all slices are bounded and we bound check everything to make sure we do not do an out of bounds read since Cython does not bounds check C strings. I checked that all accesses to `buf_cstr` are proceeded by a bounds check but it would be good to get another set of eyes on that to verify in the `self._state == READ_PAYLOAD` block that we will never try to read out of bounds. <img width="376" alt="Screenshot 2025-03-19 at 10 21 54 AM" src="https://github.com/user-attachments/assets/a340ffa2-f09b-4aff-a4f7-c487dae186c8" /> ## Are there changes in behavior for the user? performance improvement ## Is it a substantial burden for the maintainers to support this? no There is a small risk that someone could remove a bounds check in the future and create a memory safety issue, however in this case its likely we would already be trying to read data that wasn't there if we are missing the bounds checking so the pure python version would throw if we are testing properly. Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/10601.misc.rst | 1 + aiohttp/_websocket/reader_c.pxd | 1 + aiohttp/_websocket/reader_py.py | 20 +++++++++++--------- 3 files changed, 13 insertions(+), 9 deletions(-) create mode 100644 CHANGES/10601.misc.rst diff --git a/CHANGES/10601.misc.rst b/CHANGES/10601.misc.rst new file mode 100644 index 00000000000..c0d21082724 --- /dev/null +++ b/CHANGES/10601.misc.rst @@ -0,0 +1 @@ +Improved performance of WebSocket buffer handling -- by :user:`bdraco`. diff --git a/aiohttp/_websocket/reader_c.pxd b/aiohttp/_websocket/reader_c.pxd index 461e658e116..f156a7ff704 100644 --- a/aiohttp/_websocket/reader_c.pxd +++ b/aiohttp/_websocket/reader_c.pxd @@ -93,6 +93,7 @@ cdef class WebSocketReader: chunk_size="unsigned int", chunk_len="unsigned int", buf_length="unsigned int", + buf_cstr="const unsigned char *", first_byte="unsigned char", second_byte="unsigned char", end_pos="unsigned int", diff --git a/aiohttp/_websocket/reader_py.py b/aiohttp/_websocket/reader_py.py index 1645b3949b1..b2689e86614 100644 --- a/aiohttp/_websocket/reader_py.py +++ b/aiohttp/_websocket/reader_py.py @@ -328,14 +328,15 @@ def parse_frame( start_pos: int = 0 buf_length = len(buf) + buf_cstr = buf while True: # read header if self._state == READ_HEADER: if buf_length - start_pos < 2: break - first_byte = buf[start_pos] - second_byte = buf[start_pos + 1] + first_byte = buf_cstr[start_pos] + second_byte = buf_cstr[start_pos + 1] start_pos += 2 fin = (first_byte >> 7) & 1 @@ -400,14 +401,14 @@ def parse_frame( if length_flag == 126: if buf_length - start_pos < 2: break - first_byte = buf[start_pos] - second_byte = buf[start_pos + 1] + first_byte = buf_cstr[start_pos] + second_byte = buf_cstr[start_pos + 1] start_pos += 2 self._payload_length = first_byte << 8 | second_byte elif length_flag > 126: if buf_length - start_pos < 8: break - data = buf[start_pos : start_pos + 8] + data = buf_cstr[start_pos : start_pos + 8] start_pos += 8 self._payload_length = 
UNPACK_LEN3(data)[0] else: @@ -419,7 +420,7 @@ def parse_frame( if self._state == READ_PAYLOAD_MASK: if buf_length - start_pos < 4: break - self._frame_mask = buf[start_pos : start_pos + 4] + self._frame_mask = buf_cstr[start_pos : start_pos + 4] start_pos += 4 self._state = READ_PAYLOAD @@ -435,10 +436,10 @@ def parse_frame( if self._frame_payload_len: if type(self._frame_payload) is not bytearray: self._frame_payload = bytearray(self._frame_payload) - self._frame_payload += buf[start_pos:end_pos] + self._frame_payload += buf_cstr[start_pos:end_pos] else: # Fast path for the first frame - self._frame_payload = buf[start_pos:end_pos] + self._frame_payload = buf_cstr[start_pos:end_pos] self._frame_payload_len += end_pos - start_pos start_pos = end_pos @@ -464,6 +465,7 @@ def parse_frame( self._frame_payload_len = 0 self._state = READ_HEADER - self._tail = buf[start_pos:] if start_pos < buf_length else b"" + # XXX: Cython needs slices to be bounded, so we can't omit the slice end here. + self._tail = buf_cstr[start_pos:buf_length] if start_pos < buf_length else b"" return frames From 34cb977400c826845fc27b14eb3ff54165c026d7 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 30 Mar 2025 22:49:06 +0000 Subject: [PATCH 1296/1511] [PR #10638/caa5792a backport][3.11] Convert format calls to f-strings in WebSocket reader (#10642) **This is a backport of PR #10638 as merged into master (caa5792a55e6a380cbb27d907d7d09e8785b7312).** Small code cleanup Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/_websocket/reader_py.py | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/aiohttp/_websocket/reader_py.py b/aiohttp/_websocket/reader_py.py index 1645b3949b1..d0708696708 100644 --- a/aiohttp/_websocket/reader_py.py +++ b/aiohttp/_websocket/reader_py.py @@ -194,9 +194,8 @@ def _feed_data(self, data: bytes) -> None: if self._max_msg_size and len(self._partial) >= self._max_msg_size: raise WebSocketError( WSCloseCode.MESSAGE_TOO_BIG, - "Message size {} exceeds limit {}".format( - len(self._partial), self._max_msg_size - ), + f"Message size {len(self._partial)} " + f"exceeds limit {self._max_msg_size}", ) continue @@ -215,7 +214,7 @@ def _feed_data(self, data: bytes) -> None: raise WebSocketError( WSCloseCode.PROTOCOL_ERROR, "The opcode in non-fin frame is expected " - "to be zero, got {!r}".format(opcode), + f"to be zero, got {opcode!r}", ) assembled_payload: Union[bytes, bytearray] @@ -228,9 +227,8 @@ def _feed_data(self, data: bytes) -> None: if self._max_msg_size and len(assembled_payload) >= self._max_msg_size: raise WebSocketError( WSCloseCode.MESSAGE_TOO_BIG, - "Message size {} exceeds limit {}".format( - len(assembled_payload), self._max_msg_size - ), + f"Message size {len(assembled_payload)} " + f"exceeds limit {self._max_msg_size}", ) # Decompress process must to be done after all packets @@ -247,9 +245,8 @@ def _feed_data(self, data: bytes) -> None: left = len(self._decompressobj.unconsumed_tail) raise WebSocketError( WSCloseCode.MESSAGE_TOO_BIG, - "Decompressed message size {} exceeds limit {}".format( - self._max_msg_size + left, self._max_msg_size - ), + f"Decompressed message size {self._max_msg_size + left}" + f" exceeds limit {self._max_msg_size}", ) elif type(assembled_payload) is bytes: payload_merged = assembled_payload From 12925c6b0c1063721cd425d7bad42cec480e3a3c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 
30 Mar 2025 22:52:32 +0000 Subject: [PATCH 1297/1511] [PR #10638/caa5792a backport][3.12] Convert format calls to f-strings in WebSocket reader (#10643) **This is a backport of PR #10638 as merged into master (caa5792a55e6a380cbb27d907d7d09e8785b7312).** Small code cleanup Co-authored-by: J. Nick Koston <nick@koston.org> --- aiohttp/_websocket/reader_py.py | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/aiohttp/_websocket/reader_py.py b/aiohttp/_websocket/reader_py.py index b2689e86614..92ad47a52f0 100644 --- a/aiohttp/_websocket/reader_py.py +++ b/aiohttp/_websocket/reader_py.py @@ -194,9 +194,8 @@ def _feed_data(self, data: bytes) -> None: if self._max_msg_size and len(self._partial) >= self._max_msg_size: raise WebSocketError( WSCloseCode.MESSAGE_TOO_BIG, - "Message size {} exceeds limit {}".format( - len(self._partial), self._max_msg_size - ), + f"Message size {len(self._partial)} " + f"exceeds limit {self._max_msg_size}", ) continue @@ -215,7 +214,7 @@ def _feed_data(self, data: bytes) -> None: raise WebSocketError( WSCloseCode.PROTOCOL_ERROR, "The opcode in non-fin frame is expected " - "to be zero, got {!r}".format(opcode), + f"to be zero, got {opcode!r}", ) assembled_payload: Union[bytes, bytearray] @@ -228,9 +227,8 @@ def _feed_data(self, data: bytes) -> None: if self._max_msg_size and len(assembled_payload) >= self._max_msg_size: raise WebSocketError( WSCloseCode.MESSAGE_TOO_BIG, - "Message size {} exceeds limit {}".format( - len(assembled_payload), self._max_msg_size - ), + f"Message size {len(assembled_payload)} " + f"exceeds limit {self._max_msg_size}", ) # Decompress process must to be done after all packets @@ -247,9 +245,8 @@ def _feed_data(self, data: bytes) -> None: left = len(self._decompressobj.unconsumed_tail) raise WebSocketError( WSCloseCode.MESSAGE_TOO_BIG, - "Decompressed message size {} exceeds limit {}".format( - self._max_msg_size + left, self._max_msg_size - ), + f"Decompressed message size 
{self._max_msg_size + left}" + f" exceeds limit {self._max_msg_size}", ) elif type(assembled_payload) is bytes: payload_merged = assembled_payload From bc813e66238daae2302dcf98d479e8f70f01a79b Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 30 Mar 2025 23:01:42 +0000 Subject: [PATCH 1298/1511] [PR #10601/f7cac7e6 backport][3.11] Reduce WebSocket buffer slicing overhead (#10639) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **This is a backport of PR #10601 as merged into master (f7cac7e63f18691e4261af353e84f9073b16624a).** <!-- Thank you for your contribution! --> ## What do these changes do? Use a `const unsigned char *` for the buffer (Cython will automatically extract is using `__Pyx_PyBytes_AsUString`) as its a lot faster than copying around `PyBytes` objects. We do need to be careful that all slices are bounded and we bound check everything to make sure we do not do an out of bounds read since Cython does not bounds check C strings. I checked that all accesses to `buf_cstr` are proceeded by a bounds check but it would be good to get another set of eyes on that to verify in the `self._state == READ_PAYLOAD` block that we will never try to read out of bounds. <img width="376" alt="Screenshot 2025-03-19 at 10 21 54 AM" src="https://github.com/user-attachments/assets/a340ffa2-f09b-4aff-a4f7-c487dae186c8" /> ## Are there changes in behavior for the user? performance improvement ## Is it a substantial burden for the maintainers to support this? no There is a small risk that someone could remove a bounds check in the future and create a memory safety issue, however in this case its likely we would already be trying to read data that wasn't there if we are missing the bounds checking so the pure python version would throw if we are testing properly. Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/10601.misc.rst | 1 + aiohttp/_websocket/reader_c.pxd | 1 + aiohttp/_websocket/reader_py.py | 20 +++++++++++--------- 3 files changed, 13 insertions(+), 9 deletions(-) create mode 100644 CHANGES/10601.misc.rst diff --git a/CHANGES/10601.misc.rst b/CHANGES/10601.misc.rst new file mode 100644 index 00000000000..c0d21082724 --- /dev/null +++ b/CHANGES/10601.misc.rst @@ -0,0 +1 @@ +Improved performance of WebSocket buffer handling -- by :user:`bdraco`. diff --git a/aiohttp/_websocket/reader_c.pxd b/aiohttp/_websocket/reader_c.pxd index 461e658e116..f156a7ff704 100644 --- a/aiohttp/_websocket/reader_c.pxd +++ b/aiohttp/_websocket/reader_c.pxd @@ -93,6 +93,7 @@ cdef class WebSocketReader: chunk_size="unsigned int", chunk_len="unsigned int", buf_length="unsigned int", + buf_cstr="const unsigned char *", first_byte="unsigned char", second_byte="unsigned char", end_pos="unsigned int", diff --git a/aiohttp/_websocket/reader_py.py b/aiohttp/_websocket/reader_py.py index d0708696708..92ad47a52f0 100644 --- a/aiohttp/_websocket/reader_py.py +++ b/aiohttp/_websocket/reader_py.py @@ -325,14 +325,15 @@ def parse_frame( start_pos: int = 0 buf_length = len(buf) + buf_cstr = buf while True: # read header if self._state == READ_HEADER: if buf_length - start_pos < 2: break - first_byte = buf[start_pos] - second_byte = buf[start_pos + 1] + first_byte = buf_cstr[start_pos] + second_byte = buf_cstr[start_pos + 1] start_pos += 2 fin = (first_byte >> 7) & 1 @@ -397,14 +398,14 @@ def parse_frame( if length_flag == 126: if buf_length - start_pos < 2: break - first_byte = buf[start_pos] - second_byte = buf[start_pos + 1] + first_byte = buf_cstr[start_pos] + second_byte = buf_cstr[start_pos + 1] start_pos += 2 self._payload_length = first_byte << 8 | second_byte elif length_flag > 126: if buf_length - start_pos < 8: break - data = buf[start_pos : start_pos + 8] + data = buf_cstr[start_pos : start_pos + 8] start_pos += 8 self._payload_length = 
UNPACK_LEN3(data)[0] else: @@ -416,7 +417,7 @@ def parse_frame( if self._state == READ_PAYLOAD_MASK: if buf_length - start_pos < 4: break - self._frame_mask = buf[start_pos : start_pos + 4] + self._frame_mask = buf_cstr[start_pos : start_pos + 4] start_pos += 4 self._state = READ_PAYLOAD @@ -432,10 +433,10 @@ def parse_frame( if self._frame_payload_len: if type(self._frame_payload) is not bytearray: self._frame_payload = bytearray(self._frame_payload) - self._frame_payload += buf[start_pos:end_pos] + self._frame_payload += buf_cstr[start_pos:end_pos] else: # Fast path for the first frame - self._frame_payload = buf[start_pos:end_pos] + self._frame_payload = buf_cstr[start_pos:end_pos] self._frame_payload_len += end_pos - start_pos start_pos = end_pos @@ -461,6 +462,7 @@ def parse_frame( self._frame_payload_len = 0 self._state = READ_HEADER - self._tail = buf[start_pos:] if start_pos < buf_length else b"" + # XXX: Cython needs slices to be bounded, so we can't omit the slice end here. + self._tail = buf_cstr[start_pos:buf_length] if start_pos < buf_length else b"" return frames From b93993d8b7ded348ccd042300872fca6bfb02321 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 31 Mar 2025 02:29:06 +0000 Subject: [PATCH 1299/1511] [PR #10644/8c4e60b0 backport][3.11] Add benchmarks for streaming API (#10645) **This is a backport of PR #10644 as merged into master (8c4e60b04a8edea54d6dcbfbd201aa3204ea6c13).** <!-- Thank you for your contribution! --> ## What do these changes do? Add benchmarks for streaming API. I get asked all the time if someone should use `iter_chunked`, `iter_any`, or `iter_chunks`. Its nice to be able to point people at benchmarks, especially ones they can alter with their numbers and run locally. Unsurprisingly `memcpy` is where the performance issues are so avoiding it as much as possible will give better performance. 
In almost all cases `iter_chunks` is going to give the best performance if you can trust the chunk sizes aren't going to be too large. ## Are there changes in behavior for the user? no ## Is it a substantial burden for the maintainers to support this? no Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_benchmarks_client.py | 128 ++++++++++++++++++++++++++++++++ 1 file changed, 128 insertions(+) diff --git a/tests/test_benchmarks_client.py b/tests/test_benchmarks_client.py index ae89bc1f667..aa3536be820 100644 --- a/tests/test_benchmarks_client.py +++ b/tests/test_benchmarks_client.py @@ -346,3 +346,131 @@ async def run_client_benchmark() -> None: @benchmark def _run() -> None: loop.run_until_complete(run_client_benchmark()) + + +def test_ten_streamed_responses_iter_any( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 10 streamed responses using iter_any.""" + message_count = 10 + data = b"x" * 65536 # 64 KiB chunk size + + async def handler(request: web.Request) -> web.StreamResponse: + resp = web.StreamResponse() + await resp.prepare(request) + for _ in range(10): + await resp.write(data) + return resp + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(message_count): + resp = await client.get("/") + async for _ in resp.content.iter_any(): + pass + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) + + +def test_ten_streamed_responses_iter_chunked_4096( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 10 streamed responses using iter_chunked 4096.""" + message_count = 10 + data = b"x" * 65536 # 64 KiB chunk size, 4096 iter_chunked + + async def handler(request: web.Request) -> web.StreamResponse: + resp = 
web.StreamResponse() + await resp.prepare(request) + for _ in range(10): + await resp.write(data) + return resp + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(message_count): + resp = await client.get("/") + async for _ in resp.content.iter_chunked(4096): + pass + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) + + +def test_ten_streamed_responses_iter_chunked_65536( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 10 streamed responses using iter_chunked 65536.""" + message_count = 10 + data = b"x" * 65536 # 64 KiB chunk size, 64 KiB iter_chunked + + async def handler(request: web.Request) -> web.StreamResponse: + resp = web.StreamResponse() + await resp.prepare(request) + for _ in range(10): + await resp.write(data) + return resp + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(message_count): + resp = await client.get("/") + async for _ in resp.content.iter_chunked(65536): + pass + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) + + +def test_ten_streamed_responses_iter_chunks( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 10 streamed responses using iter_chunks.""" + message_count = 10 + data = b"x" * 65536 # 64 KiB chunk size + + async def handler(request: web.Request) -> web.StreamResponse: + resp = web.StreamResponse() + await resp.prepare(request) + for _ in range(10): + await resp.write(data) + return resp + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_client_benchmark() -> None: + client = await 
aiohttp_client(app) + for _ in range(message_count): + resp = await client.get("/") + async for _ in resp.content.iter_chunks(): + pass + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) From 91d3d76d96ba79740c09b7b09591edf7ea60b5eb Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 31 Mar 2025 02:36:22 +0000 Subject: [PATCH 1300/1511] [PR #10644/8c4e60b0 backport][3.12] Add benchmarks for streaming API (#10646) **This is a backport of PR #10644 as merged into master (8c4e60b04a8edea54d6dcbfbd201aa3204ea6c13).** <!-- Thank you for your contribution! --> ## What do these changes do? Add benchmarks for streaming API. I get asked all the time if someone should use `iter_chunked`, `iter_any`, or `iter_chunks`. Its nice to be able to point people at benchmarks, especially ones they can alter with their numbers and run locally. Unsurprisingly `memcpy` is where the performance issues are so avoiding it as much as possible will give better performance. In almost all cases `iter_chunks` is going to give the best performance if you can trust the chunk sizes aren't going to be too large. ## Are there changes in behavior for the user? no ## Is it a substantial burden for the maintainers to support this? no Co-authored-by: J. 
Nick Koston <nick@koston.org> --- tests/test_benchmarks_client.py | 128 ++++++++++++++++++++++++++++++++ 1 file changed, 128 insertions(+) diff --git a/tests/test_benchmarks_client.py b/tests/test_benchmarks_client.py index ae89bc1f667..aa3536be820 100644 --- a/tests/test_benchmarks_client.py +++ b/tests/test_benchmarks_client.py @@ -346,3 +346,131 @@ async def run_client_benchmark() -> None: @benchmark def _run() -> None: loop.run_until_complete(run_client_benchmark()) + + +def test_ten_streamed_responses_iter_any( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 10 streamed responses using iter_any.""" + message_count = 10 + data = b"x" * 65536 # 64 KiB chunk size + + async def handler(request: web.Request) -> web.StreamResponse: + resp = web.StreamResponse() + await resp.prepare(request) + for _ in range(10): + await resp.write(data) + return resp + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(message_count): + resp = await client.get("/") + async for _ in resp.content.iter_any(): + pass + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) + + +def test_ten_streamed_responses_iter_chunked_4096( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 10 streamed responses using iter_chunked 4096.""" + message_count = 10 + data = b"x" * 65536 # 64 KiB chunk size, 4096 iter_chunked + + async def handler(request: web.Request) -> web.StreamResponse: + resp = web.StreamResponse() + await resp.prepare(request) + for _ in range(10): + await resp.write(data) + return resp + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in 
range(message_count): + resp = await client.get("/") + async for _ in resp.content.iter_chunked(4096): + pass + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) + + +def test_ten_streamed_responses_iter_chunked_65536( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 10 streamed responses using iter_chunked 65536.""" + message_count = 10 + data = b"x" * 65536 # 64 KiB chunk size, 64 KiB iter_chunked + + async def handler(request: web.Request) -> web.StreamResponse: + resp = web.StreamResponse() + await resp.prepare(request) + for _ in range(10): + await resp.write(data) + return resp + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(message_count): + resp = await client.get("/") + async for _ in resp.content.iter_chunked(65536): + pass + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) + + +def test_ten_streamed_responses_iter_chunks( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 10 streamed responses using iter_chunks.""" + message_count = 10 + data = b"x" * 65536 # 64 KiB chunk size + + async def handler(request: web.Request) -> web.StreamResponse: + resp = web.StreamResponse() + await resp.prepare(request) + for _ in range(10): + await resp.write(data) + return resp + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_client_benchmark() -> None: + client = await aiohttp_client(app) + for _ in range(message_count): + resp = await client.get("/") + async for _ in resp.content.iter_chunks(): + pass + await client.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) From 
e93214704fab8184477e72d25f5c9336629f2c67 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 31 Mar 2025 11:20:12 +0000 Subject: [PATCH 1301/1511] Bump coverage from 7.7.1 to 7.8.0 (#10651) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.7.1 to 7.8.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst">coverage's changelog</a>.</em></p> <blockquote> <h2>Version 7.8.0 — 2025-03-30</h2> <ul> <li> <p>Added a new <code>source_dirs</code> setting for symmetry with the existing <code>source_pkgs</code> setting. It's preferable to the existing <code>source</code> setting, because you'll get a clear error when directories don't exist. Fixes <code>issue 1942</code><em>. Thanks, <code>Jeremy Fleischman <pull 1943_></code></em>.</p> </li> <li> <p>Fix: the PYTHONSAFEPATH environment variable new in Python 3.11 is properly supported, closing <code>issue 1696</code><em>. Thanks, <code>Philipp A. <pull 1700_></code></em>. This works properly except for a detail when using the <code>coverage</code> command on Windows. There you can use <code>python -m coverage</code> instead if you need exact emulation.</p> </li> </ul> <p>.. _issue 1696: <a href="https://redirect.github.com/nedbat/coveragepy/issues/1696">nedbat/coveragepy#1696</a> .. _pull 1700: <a href="https://redirect.github.com/nedbat/coveragepy/pull/1700">nedbat/coveragepy#1700</a> .. _issue 1942: <a href="https://redirect.github.com/nedbat/coveragepy/issues/1942">nedbat/coveragepy#1942</a> .. _pull 1943: <a href="https://redirect.github.com/nedbat/coveragepy/pull/1943">nedbat/coveragepy#1943</a></p> <p>.. 
_changes_7-7-1:</p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/nedbat/coveragepy/commit/6d5ced933f116d6ced5497ffbe7616db05b63e12"><code>6d5ced9</code></a> docs: sample HTML for 7.8.0</li> <li><a href="https://github.com/nedbat/coveragepy/commit/49c194fbb225039f3c2c029faecbc187aba37a9c"><code>49c194f</code></a> docs: prep for 7.8.0</li> <li><a href="https://github.com/nedbat/coveragepy/commit/38782cb5e481e24e139bd6cf08ec06e0438be4cd"><code>38782cb</code></a> docs: finish up source_dirs. bump to 7.8.0</li> <li><a href="https://github.com/nedbat/coveragepy/commit/7aea2f311eb073a74b0efb26065933f8572b1a2a"><code>7aea2f3</code></a> feat: add new <code>source_dirs</code> option (<a href="https://redirect.github.com/nedbat/coveragepy/issues/1943">#1943</a>)</li> <li><a href="https://github.com/nedbat/coveragepy/commit/f464155a3e43b4640c2ead9fb06674f33f61858a"><code>f464155</code></a> test: some simple bytecode tests</li> <li><a href="https://github.com/nedbat/coveragepy/commit/cf1dec0f05aaf581e9e6f7c707c7fa77ba77ade9"><code>cf1dec0</code></a> refactor: these pypy modules are available in all our versions</li> <li><a href="https://github.com/nedbat/coveragepy/commit/a87605265039b46570ae617f06941cfdbb95cba6"><code>a876052</code></a> test: a general helper for iterating over our own source files</li> <li><a href="https://github.com/nedbat/coveragepy/commit/82cff3e34836ff7248f4fb2e348c5f954e82b78e"><code>82cff3e</code></a> perf: sets are better than lists</li> <li><a href="https://github.com/nedbat/coveragepy/commit/a66bd61be0a01874dacf4238c1de5ef67ef325fe"><code>a66bd61</code></a> refactor: move bytecode code into bytecode.py</li> <li><a href="https://github.com/nedbat/coveragepy/commit/d64ce5f95473ec2c24485bb0261c536f55d0cb4a"><code>d64ce5f</code></a> chore: bump the action-dependencies group with 3 updates (<a href="https://redirect.github.com/nedbat/coveragepy/issues/1940">#1940</a>)</li> <li>Additional commits 
viewable in <a href="https://github.com/nedbat/coveragepy/compare/7.7.1...7.8.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=coverage&package-manager=pip&previous-version=7.7.1&new-version=7.8.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 33d561d5063..91b57adb86b 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -54,7 +54,7 @@ click==8.1.8 # slotscheck # towncrier # wait-for-it -coverage==7.7.1 +coverage==7.8.0 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/dev.txt b/requirements/dev.txt index 87ed47811ed..64782d4714d 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -54,7 +54,7 @@ click==8.1.8 # slotscheck # towncrier # wait-for-it -coverage==7.7.1 +coverage==7.8.0 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/test.txt b/requirements/test.txt index cef97799aee..73c30f6e728 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -27,7 +27,7 @@ cffi==1.17.1 # pytest-codspeed click==8.1.8 # via wait-for-it -coverage==7.7.1 +coverage==7.8.0 # via # -r requirements/test.in # pytest-cov From d042abdcda4b57fb61f3e824daa32fe257f0718f Mon Sep 17 00:00:00 
2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 31 Mar 2025 21:38:16 +0000 Subject: [PATCH 1302/1511] Bump rich from 13.9.4 to 14.0.0 (#10652) Bumps [rich](https://github.com/Textualize/rich) from 13.9.4 to 14.0.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/Textualize/rich/releases">rich's releases</a>.</em></p> <blockquote> <h2>The ENVy of all other releases</h2> <p>Mostly updates to Traceback rendering, to add support for features introduced in Python3.11</p> <p>We also have a new env var that I am proposing to become a standard. <code>TTY_COMPATIBLE=1</code> tells Rich to write ansi-escape sequences even if it detects it is not writing to a terminal. This is intended for use with GitHub Actions / CI, which can interpret escape sequences, but aren't a terminal.</p> <p>There is also a change to how NO_COLOR and FORCE_COLOR are interpreted, which is the reason for the major version bump.</p> <h2>[14.0.0] - 2025-03-30</h2> <h3>Added</h3> <ul> <li>Added env var <code>TTY_COMPATIBLE</code> to override auto-detection of TTY support (See console.rst for details). <a href="https://redirect.github.com/Textualize/rich/pull/3675">Textualize/rich#3675</a></li> </ul> <h3>Changed</h3> <ul> <li>An empty <code>NO_COLOR</code> env var is now considered disabled. <a href="https://redirect.github.com/Textualize/rich/pull/3675">Textualize/rich#3675</a></li> <li>An empty <code>FORCE_COLOR</code> env var is now considered disabled. 
<a href="https://redirect.github.com/Textualize/rich/pull/3675">Textualize/rich#3675</a></li> <li>Rich tracebacks will now render notes on Python 3.11 onwards (added with <code>Exception.add_note</code>) <a href="https://redirect.github.com/Textualize/rich/pull/3676">Textualize/rich#3676</a></li> <li>Indentation in exceptions won't be underlined <a href="https://redirect.github.com/Textualize/rich/pull/3678">Textualize/rich#3678</a></li> <li>Rich tracebacks will now render Exception Groups <a href="https://redirect.github.com/Textualize/rich/pull/3677">Textualize/rich#3677</a></li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/Textualize/rich/blob/master/CHANGELOG.md">rich's changelog</a>.</em></p> <blockquote> <h2>[14.0.0] - 2025-03-30</h2> <h3>Added</h3> <ul> <li>Added env var <code>TTY_COMPATIBLE</code> to override auto-detection of TTY support (See console.rst for details). <a href="https://redirect.github.com/Textualize/rich/pull/3675">Textualize/rich#3675</a></li> </ul> <h3>Changed</h3> <ul> <li>An empty <code>NO_COLOR</code> env var is now considered disabled. <a href="https://redirect.github.com/Textualize/rich/pull/3675">Textualize/rich#3675</a></li> <li>An empty <code>FORCE_COLOR</code> env var is now considered disabled. 
<a href="https://redirect.github.com/Textualize/rich/pull/3675">Textualize/rich#3675</a></li> <li>Rich tracebacks will now render notes on Python 3.11 onwards (added with <code>Exception.add_note</code>) <a href="https://redirect.github.com/Textualize/rich/pull/3676">Textualize/rich#3676</a></li> <li>Indentation in exceptions won't be underlined <a href="https://redirect.github.com/Textualize/rich/pull/3678">Textualize/rich#3678</a></li> <li>Rich tracebacks will now render Exception Groups <a href="https://redirect.github.com/Textualize/rich/pull/3677">Textualize/rich#3677</a></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/Textualize/rich/commit/72e3bb33d44fd96881f7742b77137983907a942f"><code>72e3bb3</code></a> Merge pull request <a href="https://redirect.github.com/Textualize/rich/issues/3681">#3681</a> from Textualize/bump14.0.0</li> <li><a href="https://github.com/Textualize/rich/commit/859d77bd6fbe67a72ddeacc300c85deed3b26598"><code>859d77b</code></a> bump to 13.0.0</li> <li><a href="https://github.com/Textualize/rich/commit/2bae2fe3d861ef83d778c289f6e1e17733225b76"><code>2bae2fe</code></a> Update feature_request.md</li> <li><a href="https://github.com/Textualize/rich/commit/07b738a23005076c75ecaeae1202e3a7772905b6"><code>07b738a</code></a> Merge pull request <a href="https://redirect.github.com/Textualize/rich/issues/3610">#3610</a> from kotfu/master</li> <li><a href="https://github.com/Textualize/rich/commit/e6673492e52a2290d765438c33547df0a8b3e290"><code>e667349</code></a> Merge pull request <a href="https://redirect.github.com/Textualize/rich/issues/3624">#3624</a> from itamaro/patch-1</li> <li><a href="https://github.com/Textualize/rich/commit/a48a5b309f9273ffba85fb9491b974634c8f7ab9"><code>a48a5b3</code></a> Merge pull request <a href="https://redirect.github.com/Textualize/rich/issues/3677">#3677</a> from Textualize/exception-groups</li> <li><a 
href="https://github.com/Textualize/rich/commit/4de139ef0ec2581a6e1fe045949a86c183b734ac"><code>4de139e</code></a> Merge pull request <a href="https://redirect.github.com/Textualize/rich/issues/3679">#3679</a> from bcapener/remove-leftover-code</li> <li><a href="https://github.com/Textualize/rich/commit/8f68c848bf458a1bf371c073d1e366da574eccde"><code>8f68c84</code></a> changelog</li> <li><a href="https://github.com/Textualize/rich/commit/ec5d2f1589c4c20e0f51f600f7a67f196948bcd4"><code>ec5d2f1</code></a> Merge branch 'master' into exception-groups</li> <li><a href="https://github.com/Textualize/rich/commit/13f9b4f874985cc2aeafc7954251597acdaa8544"><code>13f9b4f</code></a> style tweak</li> <li>Additional commits viewable in <a href="https://github.com/Textualize/rich/compare/v13.9.4...v14.0.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=rich&package-manager=pip&previous-version=13.9.4&new-version=14.0.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 91b57adb86b..42f2ae68211 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -203,7 +203,7 @@ requests==2.32.3 # cherry-picker # sphinx # 
sphinxcontrib-spelling -rich==13.9.4 +rich==14.0.0 # via pytest-codspeed setuptools-git==1.2 # via -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 64782d4714d..b4fd71ff1f1 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -197,7 +197,7 @@ requests==2.32.3 # via # cherry-picker # sphinx -rich==13.9.4 +rich==14.0.0 # via pytest-codspeed setuptools-git==1.2 # via -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 40fde1ab340..8ac5bce2df6 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -82,7 +82,7 @@ python-on-whales==0.76.1 # via -r requirements/lint.in pyyaml==6.0.2 # via pre-commit -rich==13.9.4 +rich==14.0.0 # via pytest-codspeed six==1.17.0 # via python-dateutil diff --git a/requirements/test.txt b/requirements/test.txt index 73c30f6e728..e43b57e9bc1 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -110,7 +110,7 @@ re-assert==1.1.0 # via -r requirements/test.in regex==2024.11.6 # via re-assert -rich==13.9.4 +rich==14.0.0 # via pytest-codspeed setuptools-git==1.2 # via -r requirements/test.in From 7c3c536c8224e591c4e02b26374b09da264db1ce Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 31 Mar 2025 21:46:16 +0000 Subject: [PATCH 1303/1511] Bump pydantic from 2.10.6 to 2.11.1 (#10650) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pydantic](https://github.com/pydantic/pydantic) from 2.10.6 to 2.11.1. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pydantic/pydantic/releases">pydantic's releases</a>.</em></p> <blockquote> <h2>v2.11.1 2025-03-28</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <h3>Fixes</h3> <ul> <li>Do not override <code>'definitions-ref'</code> schemas containing serialization schemas or metadata by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11644">pydantic/pydantic#11644</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pydantic/pydantic/compare/v2.11.0...v2.11.1">https://github.com/pydantic/pydantic/compare/v2.11.0...v2.11.1</a></p> <h2>v2.11.0 2025-03-27</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <h3>Packaging</h3> <ul> <li>Re-enable memray related tests on Python 3.12+ by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11191">pydantic/pydantic#11191</a></li> <li>Bump astral-sh/setup-uv from 4 to 5 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11205">pydantic/pydantic#11205</a></li> <li>Add a <code>check_pydantic_core_version()</code> function by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11324">pydantic/pydantic#11324</a></li> <li>Remove <code>greenlet</code> development dependency by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11351">pydantic/pydantic#11351</a></li> <li>Bump ruff from 0.9.2 to 0.9.5 by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11407">pydantic/pydantic#11407</a></li> <li>Improve release automation process by <a 
href="https://github.com/austinyu"><code>@​austinyu</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11427">pydantic/pydantic#11427</a></li> <li>Bump dawidd6/action-download-artifact from 8 to 9 by <a href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11513">pydantic/pydantic#11513</a></li> <li>Bump <code>pydantic-core</code> to v2.32.0 by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11567">pydantic/pydantic#11567</a></li> </ul> <h3>New Features</h3> <ul> <li>Support unsubstituted type variables with both a default and a bound or constraints by <a href="https://github.com/FyZzyss"><code>@​FyZzyss</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10789">pydantic/pydantic#10789</a></li> <li>Add a <code>default_factory_takes_validated_data</code> property to <code>FieldInfo</code> by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11034">pydantic/pydantic#11034</a></li> <li>Raise a better error when a generic alias is used inside <code>type[]</code> by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11088">pydantic/pydantic#11088</a></li> <li>Properly support PEP 695 generics syntax by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11189">pydantic/pydantic#11189</a></li> <li>Properly support type variable defaults by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11332">pydantic/pydantic#11332</a></li> <li>Add support for validating v6, v7, v8 UUIDs by <a href="https://github.com/astei"><code>@​astei</code></a> in <a 
href="https://redirect.github.com/pydantic/pydantic/pull/11436">pydantic/pydantic#11436</a></li> <li>Improve alias configuration APIs by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11468">pydantic/pydantic#11468</a></li> <li>Add experimental support for free threading by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11516">pydantic/pydantic#11516</a></li> <li>Add <code>encoded_string()</code> method to the URL types by <a href="https://github.com/YassinNouh21"><code>@​YassinNouh21</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11580">pydantic/pydantic#11580</a></li> <li>Add support for <code>defer_build</code> with <code>@validate_call</code> decorator by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11584">pydantic/pydantic#11584</a></li> <li>Allow <code>@with_config</code> decorator to be used with keyword arguments by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11608">pydantic/pydantic#11608</a></li> <li>Simplify customization of default value inclusion in JSON Schema generation by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11634">pydantic/pydantic#11634</a></li> <li>Add <code>generate_arguments_schema()</code> function by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11572">pydantic/pydantic#11572</a></li> </ul> <h3>Changes</h3> <ul> <li>Rework <code>create_model</code> field definitions format by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a 
href="https://redirect.github.com/pydantic/pydantic/pull/11032">pydantic/pydantic#11032</a></li> <li>Raise a deprecation warning when a field is annotated as final with a default value by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11168">pydantic/pydantic#11168</a></li> <li>Deprecate accessing <code>model_fields</code> and <code>model_computed_fields</code> on instances by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11169">pydantic/pydantic#11169</a></li> <li>Move core schema generation logic for path types inside the <code>GenerateSchema</code> class by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10846">pydantic/pydantic#10846</a></li> <li>Move <code>Mapping</code> schema gen to <code>GenerateSchema</code> to complete removal of <code>prepare_annotations_for_known_type</code> workaround by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11247">pydantic/pydantic#11247</a></li> <li>Remove Python 3.8 Support by <a href="https://github.com/sydney-runkle"><code>@​sydney-runkle</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11258">pydantic/pydantic#11258</a></li> <li>Optimize calls to <code>get_type_ref</code> by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/10863">pydantic/pydantic#10863</a></li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pydantic/pydantic/blob/main/HISTORY.md">pydantic's changelog</a>.</em></p> <blockquote> <h2>v2.11.1 (2025-03-28)</h2> <p><a href="https://github.com/pydantic/pydantic/releases/tag/v2.11.1">GitHub release</a></p> <h3>What's Changed</h3> <h4>Fixes</h4> <ul> <li>Do not override <code>'definitions-ref'</code> schemas containing serialization schemas or metadata by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11644">#11644</a></li> </ul> <h2>v2.11.0 (2025-03-27)</h2> <p><a href="https://github.com/pydantic/pydantic/releases/tag/v2.11.0">GitHub release</a></p> <h3>What's Changed</h3> <p>Pydantic v2.11 is a version strongly focused on build time performance of Pydantic models (and core schema generation in general). See the <a href="https://pydantic.dev/articles/pydantic-v2-11-release">blog post</a> for more details.</p> <h4>Packaging</h4> <ul> <li>Bump <code>pydantic-core</code> to v2.33.0 by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11631">#11631</a></li> </ul> <h4>New Features</h4> <ul> <li>Add <code>encoded_string()</code> method to the URL types by <a href="https://github.com/YassinNouh21"><code>@​YassinNouh21</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11580">#11580</a></li> <li>Add support for <code>defer_build</code> with <code>@validate_call</code> decorator by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11584">#11584</a></li> <li>Allow <code>@with_config</code> decorator to be used with keyword arguments by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11608">#11608</a></li> <li>Simplify customization of 
default value inclusion in JSON Schema generation by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11634">#11634</a></li> <li>Add <code>generate_arguments_schema()</code> function by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11572">#11572</a></li> </ul> <h4>Fixes</h4> <ul> <li>Allow generic typed dictionaries to be used for unpacked variadic keyword parameters by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11571">#11571</a></li> <li>Fix runtime error when computing model string representation involving cached properties and self-referenced models by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11579">#11579</a></li> <li>Preserve other steps when using the ellipsis in the pipeline API by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11626">#11626</a></li> <li>Fix deferred discriminator application logic by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11591">#11591</a></li> </ul> <h3>New Contributors</h3> <ul> <li><a href="https://github.com/cmenon12"><code>@​cmenon12</code></a> made their first contribution in <a href="https://redirect.github.com/pydantic/pydantic/pull/11562">#11562</a></li> <li><a href="https://github.com/Jeukoh"><code>@​Jeukoh</code></a> made their first contribution in <a href="https://redirect.github.com/pydantic/pydantic/pull/11611">#11611</a></li> </ul> <h2>v2.11.0b2 (2025-03-17)</h2> <p><a href="https://github.com/pydantic/pydantic/releases/tag/v2.11.0b2">GitHub release</a></p> <h3>What's Changed</h3> <h4>Packaging</h4> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pydantic/pydantic/commit/6c38dc93f40a47f4d1350adca9ec0d72502e223f"><code>6c38dc9</code></a> Prepare release v2.11.1 (<a href="https://redirect.github.com/pydantic/pydantic/issues/11648">#11648</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/1dcddac2c5e1ac2361fc897f804f44338a1d8067"><code>1dcddac</code></a> Do not override <code>'definitions-ref'</code> schemas containing serialization schemas ...</li> <li><a href="https://github.com/pydantic/pydantic/commit/024fdae2b55bd41866418586d48009956cfa9e1b"><code>024fdae</code></a> Fix small typos (<a href="https://redirect.github.com/pydantic/pydantic/issues/11643">#11643</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/58e61fa3c60ffb8140d01ca9f74ff7528326a0c6"><code>58e61fa</code></a> Prepare release v2.11.0 (<a href="https://redirect.github.com/pydantic/pydantic/issues/11635">#11635</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/e2c2e811e3cafb35d376c22e8830f2773d65ee58"><code>e2c2e81</code></a> Add <code>generate_arguments_schema()</code> experimental function (<a href="https://redirect.github.com/pydantic/pydantic/issues/11572">#11572</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/72bea3f22f8d5380cb10af017deae4a0e16709c0"><code>72bea3f</code></a> Add <code>mkdocs-llmstxt</code> documentation plugin (<a href="https://redirect.github.com/pydantic/pydantic/issues/11632">#11632</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/fcba83291a8fe7e1dcfde9bbcc8ea57f8ef322c0"><code>fcba832</code></a> Simplify customization of default value inclusion in JSON Schema generation (...</li> <li><a href="https://github.com/pydantic/pydantic/commit/6f11161524e495f6ed7597abcd4006f19a7cd2c1"><code>6f11161</code></a> Add support for extra keys validation for models (<a href="https://redirect.github.com/pydantic/pydantic/issues/11578">#11578</a>)</li> 
<li><a href="https://github.com/pydantic/pydantic/commit/7917b11bd28706d77a5d0180381bc96b6b61b044"><code>7917b11</code></a> Disable third-party workflow issue report (<a href="https://redirect.github.com/pydantic/pydantic/issues/11629">#11629</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/f5226d294664788d1fbea13bec2dbc1ce6305c8e"><code>f5226d2</code></a> Bump <code>pydantic-core</code> to v2.33.0 (<a href="https://redirect.github.com/pydantic/pydantic/issues/11631">#11631</a>)</li> <li>Additional commits viewable in <a href="https://github.com/pydantic/pydantic/compare/v2.10.6...v2.11.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pydantic&package-manager=pip&previous-version=2.10.6&new-version=2.11.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 7 +++++-- requirements/dev.txt | 7 +++++-- requirements/lint.txt | 7 +++++-- requirements/test.txt | 7 +++++-- 4 files changed, 20 insertions(+), 8 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 42f2ae68211..a88d3895a16 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -148,9 +148,9 @@ pycares==4.5.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.10.6 +pydantic==2.11.1 # via python-on-whales -pydantic-core==2.27.2 +pydantic-core==2.33.0 # via pydantic pyenchant==3.2.2 # via sphinxcontrib-spelling @@ -266,6 +266,9 @@ typing-extensions==4.12.2 # pydantic-core # python-on-whales # rich + # typing-inspection +typing-inspection==0.4.0 + # via pydantic uritemplate==4.1.1 # via gidgethub urllib3==2.3.0 diff --git a/requirements/dev.txt b/requirements/dev.txt index b4fd71ff1f1..4414468a31f 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -145,9 +145,9 @@ pycares==4.5.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.10.6 +pydantic==2.11.1 # via python-on-whales -pydantic-core==2.27.2 +pydantic-core==2.33.0 # 
via pydantic pygments==2.19.1 # via @@ -257,6 +257,9 @@ typing-extensions==4.12.2 # pydantic-core # python-on-whales # rich + # typing-inspection +typing-inspection==0.4.0 + # via pydantic uritemplate==4.1.1 # via gidgethub urllib3==2.3.0 diff --git a/requirements/lint.txt b/requirements/lint.txt index 8ac5bce2df6..ff910da444b 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -61,9 +61,9 @@ pycares==4.5.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.10.6 +pydantic==2.11.1 # via python-on-whales -pydantic-core==2.27.2 +pydantic-core==2.33.0 # via pydantic pygments==2.19.1 # via rich @@ -102,6 +102,9 @@ typing-extensions==4.12.2 # pydantic-core # python-on-whales # rich + # typing-inspection +typing-inspection==0.4.0 + # via pydantic uvloop==0.21.0 ; platform_system != "Windows" # via -r requirements/lint.in valkey==6.1.0 diff --git a/requirements/test.txt b/requirements/test.txt index e43b57e9bc1..b2ea7bfff70 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -81,9 +81,9 @@ pycares==4.5.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.10.6 +pydantic==2.11.1 # via python-on-whales -pydantic-core==2.27.2 +pydantic-core==2.33.0 # via pydantic pygments==2.19.1 # via rich @@ -131,6 +131,9 @@ typing-extensions==4.12.2 # pydantic-core # python-on-whales # rich + # typing-inspection +typing-inspection==0.4.0 + # via pydantic uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in wait-for-it==2.3.0 From 7f7d68143a47daa4cff1f2abe94e8bf2c2c8d9b7 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 03:19:46 +0000 Subject: [PATCH 1304/1511] [PR #10656/06db052e backport][3.11] Revert: Close the socket if there's a failure in start_connection() #10464 (#10657) **This is a backport of PR #10656 as merged into master (06db052eae399de1c7c34c0122d736e06c045ec7).** Reverts #10464 While this change improved the 
situation for uvloop users, it caused a regression with `SelectorEventLoop` (issue #10617) The alternative fix is https://github.com/MagicStack/uvloop/pull/646 (not merged at the time of this PR) issue #10617 appears to be very similar to https://github.com/python/cpython/commit/d5aeccf9767c1619faa29e8ed61c93bde7bc5e3f If someone can come up with a working reproducer for #10617 we can revisit this. cc @top-oai Minimal implementation that shows on cancellation the socket is cleaned up without the explicit `close` https://github.com/aio-libs/aiohttp/issues/10617#issuecomment-2767890703 so this should be unneeded unless I've missed something (very possible with all the moving parts here) ## Related issue number fixes #10617 Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/10464.bugfix.rst | 1 + CHANGES/10617.bugfix.rst | 1 + CHANGES/10656.bugfix.rst | 3 +++ aiohttp/connector.py | 16 +------------ tests/test_connector.py | 50 ---------------------------------------- 5 files changed, 6 insertions(+), 65 deletions(-) create mode 120000 CHANGES/10464.bugfix.rst create mode 120000 CHANGES/10617.bugfix.rst create mode 100644 CHANGES/10656.bugfix.rst diff --git a/CHANGES/10464.bugfix.rst b/CHANGES/10464.bugfix.rst new file mode 120000 index 00000000000..18996eb3cac --- /dev/null +++ b/CHANGES/10464.bugfix.rst @@ -0,0 +1 @@ +10656.bugfix.rst \ No newline at end of file diff --git a/CHANGES/10617.bugfix.rst b/CHANGES/10617.bugfix.rst new file mode 120000 index 00000000000..18996eb3cac --- /dev/null +++ b/CHANGES/10617.bugfix.rst @@ -0,0 +1 @@ +10656.bugfix.rst \ No newline at end of file diff --git a/CHANGES/10656.bugfix.rst b/CHANGES/10656.bugfix.rst new file mode 100644 index 00000000000..ec3853107ad --- /dev/null +++ b/CHANGES/10656.bugfix.rst @@ -0,0 +1,3 @@ +Reverted explicitly closing sockets if an exception is raised during ``create_connection`` -- by :user:`bdraco`. 
+ +This change originally appeared in aiohttp 3.11.13 diff --git a/aiohttp/connector.py b/aiohttp/connector.py index e5cf3674cba..7420bd6070a 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -1108,7 +1108,6 @@ async def _wrap_create_connection( client_error: Type[Exception] = ClientConnectorError, **kwargs: Any, ) -> Tuple[asyncio.Transport, ResponseHandler]: - sock: Union[socket.socket, None] = None try: async with ceil_timeout( timeout.sock_connect, ceil_threshold=timeout.ceil_threshold @@ -1120,11 +1119,7 @@ async def _wrap_create_connection( interleave=self._interleave, loop=self._loop, ) - connection = await self._loop.create_connection( - *args, **kwargs, sock=sock - ) - sock = None - return connection + return await self._loop.create_connection(*args, **kwargs, sock=sock) except cert_errors as exc: raise ClientConnectorCertificateError(req.connection_key, exc) from exc except ssl_errors as exc: @@ -1133,15 +1128,6 @@ async def _wrap_create_connection( if exc.errno is None and isinstance(exc, asyncio.TimeoutError): raise raise client_error(req.connection_key, exc) from exc - finally: - if sock is not None: - # Will be hit if an exception is thrown before the event loop takes the socket. - # In that case, proactively close the socket to guard against event loop leaks. - # For example, see https://github.com/MagicStack/uvloop/issues/653. 
- try: - sock.close() - except OSError as exc: - raise client_error(req.connection_key, exc) from exc async def _wrap_existing_connection( self, diff --git a/tests/test_connector.py b/tests/test_connector.py index a86a2417423..a3fffc447ae 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -617,56 +617,6 @@ async def certificate_error(*args, **kwargs): await conn.close() -async def test_tcp_connector_closes_socket_on_error( - loop: asyncio.AbstractEventLoop, start_connection: mock.AsyncMock -) -> None: - req = ClientRequest("GET", URL("https://127.0.0.1:443"), loop=loop) - - conn = aiohttp.TCPConnector() - with ( - mock.patch.object( - conn._loop, - "create_connection", - autospec=True, - spec_set=True, - side_effect=ValueError, - ), - pytest.raises(ValueError), - ): - await conn.connect(req, [], ClientTimeout()) - - assert start_connection.return_value.close.called - - await conn.close() - - -async def test_tcp_connector_closes_socket_on_error_results_in_another_error( - loop: asyncio.AbstractEventLoop, start_connection: mock.AsyncMock -) -> None: - """Test that when error occurs while closing the socket.""" - req = ClientRequest("GET", URL("https://127.0.0.1:443"), loop=loop) - start_connection.return_value.close.side_effect = OSError( - 1, "error from closing socket" - ) - - conn = aiohttp.TCPConnector() - with ( - mock.patch.object( - conn._loop, - "create_connection", - autospec=True, - spec_set=True, - side_effect=ValueError, - ), - pytest.raises(aiohttp.ClientConnectionError, match="error from closing socket"), - ): - await conn.connect(req, [], ClientTimeout()) - - assert start_connection.return_value.close.called - - await conn.close() - - async def test_tcp_connector_server_hostname_default( loop: Any, start_connection: mock.AsyncMock ) -> None: From 79958318aad1aa6caa7b2ce793968e883a1465d8 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 03:30:03 +0000 Subject: 
[PATCH 1305/1511] [PR #10656/06db052e backport][3.12] Revert: Close the socket if there's a failure in start_connection() #10464 (#10658) --- CHANGES/10464.bugfix.rst | 1 + CHANGES/10617.bugfix.rst | 1 + CHANGES/10656.bugfix.rst | 3 +++ aiohttp/connector.py | 16 +------------ tests/test_connector.py | 50 ---------------------------------------- 5 files changed, 6 insertions(+), 65 deletions(-) create mode 120000 CHANGES/10464.bugfix.rst create mode 120000 CHANGES/10617.bugfix.rst create mode 100644 CHANGES/10656.bugfix.rst diff --git a/CHANGES/10464.bugfix.rst b/CHANGES/10464.bugfix.rst new file mode 120000 index 00000000000..18996eb3cac --- /dev/null +++ b/CHANGES/10464.bugfix.rst @@ -0,0 +1 @@ +10656.bugfix.rst \ No newline at end of file diff --git a/CHANGES/10617.bugfix.rst b/CHANGES/10617.bugfix.rst new file mode 120000 index 00000000000..18996eb3cac --- /dev/null +++ b/CHANGES/10617.bugfix.rst @@ -0,0 +1 @@ +10656.bugfix.rst \ No newline at end of file diff --git a/CHANGES/10656.bugfix.rst b/CHANGES/10656.bugfix.rst new file mode 100644 index 00000000000..ec3853107ad --- /dev/null +++ b/CHANGES/10656.bugfix.rst @@ -0,0 +1,3 @@ +Reverted explicitly closing sockets if an exception is raised during ``create_connection`` -- by :user:`bdraco`. 
+ +This change originally appeared in aiohttp 3.11.13 diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 1c2d8d73e07..2a41438ab6a 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -1126,7 +1126,6 @@ async def _wrap_create_connection( client_error: Type[Exception] = ClientConnectorError, **kwargs: Any, ) -> Tuple[asyncio.Transport, ResponseHandler]: - sock: Union[socket.socket, None] = None try: async with ceil_timeout( timeout.sock_connect, ceil_threshold=timeout.ceil_threshold @@ -1139,11 +1138,7 @@ async def _wrap_create_connection( loop=self._loop, socket_factory=self._socket_factory, ) - connection = await self._loop.create_connection( - *args, **kwargs, sock=sock - ) - sock = None - return connection + return await self._loop.create_connection(*args, **kwargs, sock=sock) except cert_errors as exc: raise ClientConnectorCertificateError(req.connection_key, exc) from exc except ssl_errors as exc: @@ -1152,15 +1147,6 @@ async def _wrap_create_connection( if exc.errno is None and isinstance(exc, asyncio.TimeoutError): raise raise client_error(req.connection_key, exc) from exc - finally: - if sock is not None: - # Will be hit if an exception is thrown before the event loop takes the socket. - # In that case, proactively close the socket to guard against event loop leaks. - # For example, see https://github.com/MagicStack/uvloop/issues/653. 
- try: - sock.close() - except OSError as exc: - raise client_error(req.connection_key, exc) from exc async def _wrap_existing_connection( self, diff --git a/tests/test_connector.py b/tests/test_connector.py index f148fdf0bbe..28a2ae1d1d2 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -627,56 +627,6 @@ async def certificate_error(*args, **kwargs): await conn.close() -async def test_tcp_connector_closes_socket_on_error( - loop: asyncio.AbstractEventLoop, start_connection: mock.AsyncMock -) -> None: - req = ClientRequest("GET", URL("https://127.0.0.1:443"), loop=loop) - - conn = aiohttp.TCPConnector() - with ( - mock.patch.object( - conn._loop, - "create_connection", - autospec=True, - spec_set=True, - side_effect=ValueError, - ), - pytest.raises(ValueError), - ): - await conn.connect(req, [], ClientTimeout()) - - assert start_connection.return_value.close.called - - await conn.close() - - -async def test_tcp_connector_closes_socket_on_error_results_in_another_error( - loop: asyncio.AbstractEventLoop, start_connection: mock.AsyncMock -) -> None: - """Test that when error occurs while closing the socket.""" - req = ClientRequest("GET", URL("https://127.0.0.1:443"), loop=loop) - start_connection.return_value.close.side_effect = OSError( - 1, "error from closing socket" - ) - - conn = aiohttp.TCPConnector() - with ( - mock.patch.object( - conn._loop, - "create_connection", - autospec=True, - spec_set=True, - side_effect=ValueError, - ), - pytest.raises(aiohttp.ClientConnectionError, match="error from closing socket"), - ): - await conn.connect(req, [], ClientTimeout()) - - assert start_connection.return_value.close.called - - await conn.close() - - async def test_tcp_connector_server_hostname_default( loop: Any, start_connection: mock.AsyncMock ) -> None: From d2d3621211348aecb3d568e634ba0abb016b6009 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Mon, 31 Mar 2025 17:54:45 -1000 Subject: [PATCH 1306/1511] Release 3.11.15 (#10659) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit We yanked 3.11.13 and 3.11.14 and reverted #10464 because of #10617 so we are doing another release to make sure nobody has to go without the other fixes in .13 and .14 <img width="643" alt="Screenshot 2025-03-31 at 5 42 58 PM" src="https://github.com/user-attachments/assets/08317aa3-27f8-4400-87c1-15eeec0c3682" /> --- CHANGES.rst | 40 ++++++++++++++++++++++++++++++++++++++++ CHANGES/10464.bugfix.rst | 1 - CHANGES/10601.misc.rst | 1 - CHANGES/10617.bugfix.rst | 1 - CHANGES/10625.misc.rst | 1 - CHANGES/10656.bugfix.rst | 3 --- aiohttp/__init__.py | 2 +- 7 files changed, 41 insertions(+), 8 deletions(-) delete mode 120000 CHANGES/10464.bugfix.rst delete mode 100644 CHANGES/10601.misc.rst delete mode 120000 CHANGES/10617.bugfix.rst delete mode 100644 CHANGES/10625.misc.rst delete mode 100644 CHANGES/10656.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index 3c8c12b8d95..c2654b99214 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,46 @@ .. towncrier release notes start +3.11.15 (2025-03-31) +==================== + +Bug fixes +--------- + +- Reverted explicitly closing sockets if an exception is raised during ``create_connection`` -- by :user:`bdraco`. + + This change originally appeared in aiohttp 3.11.13 + + + *Related issues and pull requests on GitHub:* + :issue:`10464`, :issue:`10617`, :issue:`10656`. + + + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of WebSocket buffer handling -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10601`. + + + +- Improved performance of serializing headers -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10625`. 
+ + + + +---- + + 3.11.14 (2025-03-16) ==================== diff --git a/CHANGES/10464.bugfix.rst b/CHANGES/10464.bugfix.rst deleted file mode 120000 index 18996eb3cac..00000000000 --- a/CHANGES/10464.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -10656.bugfix.rst \ No newline at end of file diff --git a/CHANGES/10601.misc.rst b/CHANGES/10601.misc.rst deleted file mode 100644 index c0d21082724..00000000000 --- a/CHANGES/10601.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performance of WebSocket buffer handling -- by :user:`bdraco`. diff --git a/CHANGES/10617.bugfix.rst b/CHANGES/10617.bugfix.rst deleted file mode 120000 index 18996eb3cac..00000000000 --- a/CHANGES/10617.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -10656.bugfix.rst \ No newline at end of file diff --git a/CHANGES/10625.misc.rst b/CHANGES/10625.misc.rst deleted file mode 100644 index 30cd7f0f3a6..00000000000 --- a/CHANGES/10625.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performance of serializing headers -- by :user:`bdraco`. diff --git a/CHANGES/10656.bugfix.rst b/CHANGES/10656.bugfix.rst deleted file mode 100644 index ec3853107ad..00000000000 --- a/CHANGES/10656.bugfix.rst +++ /dev/null @@ -1,3 +0,0 @@ -Reverted explicitly closing sockets if an exception is raised during ``create_connection`` -- by :user:`bdraco`. - -This change originally appeared in aiohttp 3.11.13 diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 4ff7bbbc759..aba86dc3a32 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.15.dev0" +__version__ = "3.11.15" from typing import TYPE_CHECKING, Tuple From 2082fbad0b52b7c612c1ccb9875ac04f1469b3dc Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Mon, 31 Mar 2025 20:15:15 -1000 Subject: [PATCH 1307/1511] Increment version to 3.11.16.dev0 (#10661) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index aba86dc3a32..acfef952d61 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.15" +__version__ = "3.11.16.dev0" from typing import TYPE_CHECKING, Tuple From 83a9df574187b432589637a2596e029b0bcf6e33 Mon Sep 17 00:00:00 2001 From: layday <layday@protonmail.com> Date: Tue, 1 Apr 2025 12:25:50 +0200 Subject: [PATCH 1308/1511] =?UTF-8?q?Replace=20deprecated=20`asyncio.iscor?= =?UTF-8?q?outinefunction`=20with=20its=20counterpart=E2=80=A6=20(#10664)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit … from `inspect` (#10634) (cherry picked from commit 77ad7d7ea173eda1306297d275b2d5f7348f9f60) --- CHANGES/10634.bugfix.rst | 2 ++ aiohttp/pytest_plugin.py | 6 +++--- aiohttp/web_urldispatcher.py | 8 +++++--- aiohttp/worker.py | 5 ++++- 4 files changed, 14 insertions(+), 7 deletions(-) create mode 100644 CHANGES/10634.bugfix.rst diff --git a/CHANGES/10634.bugfix.rst b/CHANGES/10634.bugfix.rst new file mode 100644 index 00000000000..d6ec64a607e --- /dev/null +++ b/CHANGES/10634.bugfix.rst @@ -0,0 +1,2 @@ +Replaced deprecated ``asyncio.iscoroutinefunction`` with its counterpart from ``inspect`` +-- by :user:`layday`. 
diff --git a/aiohttp/pytest_plugin.py b/aiohttp/pytest_plugin.py index 158fd684b7a..128dc46081d 100644 --- a/aiohttp/pytest_plugin.py +++ b/aiohttp/pytest_plugin.py @@ -98,7 +98,7 @@ def pytest_fixture_setup(fixturedef): # type: ignore[no-untyped-def] if inspect.isasyncgenfunction(func): # async generator fixture is_async_gen = True - elif asyncio.iscoroutinefunction(func): + elif inspect.iscoroutinefunction(func): # regular async fixture is_async_gen = False else: @@ -200,14 +200,14 @@ def _passthrough_loop_context(loop, fast=False): # type: ignore[no-untyped-def] def pytest_pycollect_makeitem(collector, name, obj): # type: ignore[no-untyped-def] """Fix pytest collecting for coroutines.""" - if collector.funcnamefilter(name) and asyncio.iscoroutinefunction(obj): + if collector.funcnamefilter(name) and inspect.iscoroutinefunction(obj): return list(collector._genfunctions(name, obj)) def pytest_pyfunc_call(pyfuncitem): # type: ignore[no-untyped-def] """Run coroutines in an event loop instead of a normal function call.""" fast = pyfuncitem.config.getoption("--aiohttp-fast") - if asyncio.iscoroutinefunction(pyfuncitem.function): + if inspect.iscoroutinefunction(pyfuncitem.function): existing_loop = pyfuncitem.funcargs.get( "proactor_loop" ) or pyfuncitem.funcargs.get("loop", None) diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 6443c500a33..28ae2518fec 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -180,8 +180,8 @@ def __init__( if expect_handler is None: expect_handler = _default_expect_handler - assert asyncio.iscoroutinefunction( - expect_handler + assert inspect.iscoroutinefunction(expect_handler) or ( + sys.version_info < (3, 14) and asyncio.iscoroutinefunction(expect_handler) ), f"Coroutine is expected, got {expect_handler!r}" method = method.upper() @@ -189,7 +189,9 @@ def __init__( raise ValueError(f"{method} is not allowed HTTP method") assert callable(handler), handler - if 
asyncio.iscoroutinefunction(handler): + if inspect.iscoroutinefunction(handler) or ( + sys.version_info < (3, 14) and asyncio.iscoroutinefunction(handler) + ): pass elif inspect.isgeneratorfunction(handler): warnings.warn( diff --git a/aiohttp/worker.py b/aiohttp/worker.py index 8ed121ac955..f7281bfde75 100644 --- a/aiohttp/worker.py +++ b/aiohttp/worker.py @@ -1,6 +1,7 @@ """Async gunicorn worker for aiohttp.web""" import asyncio +import inspect import os import re import signal @@ -71,7 +72,9 @@ async def _run(self) -> None: runner = None if isinstance(self.wsgi, Application): app = self.wsgi - elif asyncio.iscoroutinefunction(self.wsgi): + elif inspect.iscoroutinefunction(self.wsgi) or ( + sys.version_info < (3, 14) and asyncio.iscoroutinefunction(self.wsgi) + ): wsgi = await self.wsgi() if isinstance(wsgi, web.AppRunner): runner = wsgi From a60d447aa0e518425b889eb16a2ed54f286e41fb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 10:38:56 +0000 Subject: [PATCH 1309/1511] Bump virtualenv from 20.29.3 to 20.30.0 (#10666) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [virtualenv](https://github.com/pypa/virtualenv) from 20.29.3 to 20.30.0. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pypa/virtualenv/releases">virtualenv's releases</a>.</em></p> <blockquote> <h2>20.30.0</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>release 20.29.3 by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2855">pypa/virtualenv#2855</a></li> <li>Add GraalPy support by <a href="https://github.com/timfel"><code>@​timfel</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2859">pypa/virtualenv#2859</a></li> <li>Upgrade setuptools by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2863">pypa/virtualenv#2863</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/timfel"><code>@​timfel</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/virtualenv/pull/2859">pypa/virtualenv#2859</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pypa/virtualenv/compare/20.29.3...20.30.0">https://github.com/pypa/virtualenv/compare/20.29.3...20.30.0</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/virtualenv/blob/main/docs/changelog.rst">virtualenv's changelog</a>.</em></p> <blockquote> <h2>v20.30.0 (2025-03-31)</h2> <p>Features - 20.30.0</p> <pre><code>- Add support for `GraalPy <https://github.com/oracle/graalpython>`_. 
(:issue:`2832`) <p>Bugfixes - 20.30.0 </code></pre></p> <ul> <li> <p>Upgrade embedded wheels:</p> <ul> <li>setuptools to <code>78.1.0</code> from <code>75.3.2</code> (:issue:<code>2863</code>)</li> </ul> </li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/virtualenv/commit/04163aed5477b7f81acb30390e1ecc6d740b54ea"><code>04163ae</code></a> release 20.30.0</li> <li><a href="https://github.com/pypa/virtualenv/commit/71adc15917e65d333e80e274a7f16f2192e3e16f"><code>71adc15</code></a> Upgrade setuptools (<a href="https://redirect.github.com/pypa/virtualenv/issues/2863">#2863</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/970b829037baa57f6dd79949f93d7f88e907d546"><code>970b829</code></a> Add GraalPy support (<a href="https://redirect.github.com/pypa/virtualenv/issues/2859">#2859</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/60a6956fea1d16ae1243e88417e1b4e56f4d1cde"><code>60a6956</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/pypa/virtualenv/issues/2858">#2858</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/0fe9b4c4f37dd3b32677942afb777eb14e61531b"><code>0fe9b4c</code></a> Merge pull request <a href="https://redirect.github.com/pypa/virtualenv/issues/2857">#2857</a> from pypa/pre-commit-ci-update-config</li> <li><a href="https://github.com/pypa/virtualenv/commit/4a6a1ff907331bb63b843ba6aa8bddecc51da665"><code>4a6a1ff</code></a> [pre-commit.ci] pre-commit autoupdate</li> <li><a href="https://github.com/pypa/virtualenv/commit/773a18552cb4179b840311bba4bb19edcefacbdb"><code>773a185</code></a> Merge pull request <a href="https://redirect.github.com/pypa/virtualenv/issues/2855">#2855</a> from pypa/release-20.29.3</li> <li>See full diff in <a href="https://github.com/pypa/virtualenv/compare/20.29.3...20.30.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=virtualenv&package-manager=pip&previous-version=20.29.3&new-version=20.30.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index a88d3895a16..142c09092b0 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -279,7 +279,7 @@ uvloop==0.21.0 ; platform_system != "Windows" # -r requirements/lint.in valkey==6.1.0 # via -r requirements/lint.in -virtualenv==20.29.3 +virtualenv==20.30.0 # via pre-commit wait-for-it==2.3.0 # via -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 4414468a31f..b9e52c24751 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -270,7 +270,7 @@ uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpytho # -r requirements/lint.in valkey==6.1.0 # via -r requirements/lint.in -virtualenv==20.29.3 +virtualenv==20.30.0 # via pre-commit wait-for-it==2.3.0 # via -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index ff910da444b..c400e12cea0 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ 
-109,5 +109,5 @@ uvloop==0.21.0 ; platform_system != "Windows" # via -r requirements/lint.in valkey==6.1.0 # via -r requirements/lint.in -virtualenv==20.29.3 +virtualenv==20.30.0 # via pre-commit From b65000da599aeb2c9f340387b499a83b3d4ea598 Mon Sep 17 00:00:00 2001 From: layday <layday@protonmail.com> Date: Tue, 1 Apr 2025 14:18:39 +0200 Subject: [PATCH 1310/1511] =?UTF-8?q?Replace=20deprecated=20`asyncio.iscor?= =?UTF-8?q?outinefunction`=20with=20its=20counterpart=E2=80=A6=20(#10663)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit … from `inspect` (#10634) (cherry picked from commit 77ad7d7ea173eda1306297d275b2d5f7348f9f60) --- CHANGES/10634.bugfix.rst | 2 ++ aiohttp/pytest_plugin.py | 6 +++--- aiohttp/web_urldispatcher.py | 8 +++++--- aiohttp/worker.py | 5 ++++- 4 files changed, 14 insertions(+), 7 deletions(-) create mode 100644 CHANGES/10634.bugfix.rst diff --git a/CHANGES/10634.bugfix.rst b/CHANGES/10634.bugfix.rst new file mode 100644 index 00000000000..d6ec64a607e --- /dev/null +++ b/CHANGES/10634.bugfix.rst @@ -0,0 +1,2 @@ +Replaced deprecated ``asyncio.iscoroutinefunction`` with its counterpart from ``inspect`` +-- by :user:`layday`. 
diff --git a/aiohttp/pytest_plugin.py b/aiohttp/pytest_plugin.py index 7ce60faa4a4..21d6ea7bbcd 100644 --- a/aiohttp/pytest_plugin.py +++ b/aiohttp/pytest_plugin.py @@ -98,7 +98,7 @@ def pytest_fixture_setup(fixturedef): # type: ignore[no-untyped-def] if inspect.isasyncgenfunction(func): # async generator fixture is_async_gen = True - elif asyncio.iscoroutinefunction(func): + elif inspect.iscoroutinefunction(func): # regular async fixture is_async_gen = False else: @@ -200,14 +200,14 @@ def _passthrough_loop_context(loop, fast=False): # type: ignore[no-untyped-def] def pytest_pycollect_makeitem(collector, name, obj): # type: ignore[no-untyped-def] """Fix pytest collecting for coroutines.""" - if collector.funcnamefilter(name) and asyncio.iscoroutinefunction(obj): + if collector.funcnamefilter(name) and inspect.iscoroutinefunction(obj): return list(collector._genfunctions(name, obj)) def pytest_pyfunc_call(pyfuncitem): # type: ignore[no-untyped-def] """Run coroutines in an event loop instead of a normal function call.""" fast = pyfuncitem.config.getoption("--aiohttp-fast") - if asyncio.iscoroutinefunction(pyfuncitem.function): + if inspect.iscoroutinefunction(pyfuncitem.function): existing_loop = pyfuncitem.funcargs.get( "proactor_loop" ) or pyfuncitem.funcargs.get("loop", None) diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 6443c500a33..28ae2518fec 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -180,8 +180,8 @@ def __init__( if expect_handler is None: expect_handler = _default_expect_handler - assert asyncio.iscoroutinefunction( - expect_handler + assert inspect.iscoroutinefunction(expect_handler) or ( + sys.version_info < (3, 14) and asyncio.iscoroutinefunction(expect_handler) ), f"Coroutine is expected, got {expect_handler!r}" method = method.upper() @@ -189,7 +189,9 @@ def __init__( raise ValueError(f"{method} is not allowed HTTP method") assert callable(handler), handler - if 
asyncio.iscoroutinefunction(handler): + if inspect.iscoroutinefunction(handler) or ( + sys.version_info < (3, 14) and asyncio.iscoroutinefunction(handler) + ): pass elif inspect.isgeneratorfunction(handler): warnings.warn( diff --git a/aiohttp/worker.py b/aiohttp/worker.py index 8ed121ac955..f7281bfde75 100644 --- a/aiohttp/worker.py +++ b/aiohttp/worker.py @@ -1,6 +1,7 @@ """Async gunicorn worker for aiohttp.web""" import asyncio +import inspect import os import re import signal @@ -71,7 +72,9 @@ async def _run(self) -> None: runner = None if isinstance(self.wsgi, Application): app = self.wsgi - elif asyncio.iscoroutinefunction(self.wsgi): + elif inspect.iscoroutinefunction(self.wsgi) or ( + sys.version_info < (3, 14) and asyncio.iscoroutinefunction(self.wsgi) + ): wsgi = await self.wsgi() if isinstance(wsgi, web.AppRunner): runner = wsgi From be154728df86462aa5b4bb74a508dd2025ec8bfd Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 1 Apr 2025 11:41:52 -1000 Subject: [PATCH 1311/1511] [PR #10672/35c979b backport][3.11] Fix headers being mutated if passed to web.Response as a CIMultiDict (#10673) If `CIMultiDict` is passed in we need to make a copy to avoid mutating it. In some cases we used to copy these twice which was fixed in #10045 but for this case that was the only copy being made and the source of this regression. fixes #10670 (cherry picked from commit 35c979be79dcc75ecbf7270ccc9fde264e5e2948) --- CHANGES/10672.bugfix.rst | 1 + aiohttp/web_response.py | 4 +--- tests/test_web_response.py | 14 +++++++++++++- 3 files changed, 15 insertions(+), 4 deletions(-) create mode 100644 CHANGES/10672.bugfix.rst diff --git a/CHANGES/10672.bugfix.rst b/CHANGES/10672.bugfix.rst new file mode 100644 index 00000000000..a4434f8c87a --- /dev/null +++ b/CHANGES/10672.bugfix.rst @@ -0,0 +1 @@ +Fixed :class:`multidict.CIMultiDict` being mutated when passed to :class:`aiohttp.web.Response` -- by :user:`bdraco`. 
diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index e498a905caf..367ac6e8c0a 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -629,10 +629,8 @@ def __init__( if headers is None: real_headers: CIMultiDict[str] = CIMultiDict() - elif not isinstance(headers, CIMultiDict): - real_headers = CIMultiDict(headers) else: - real_headers = headers # = cast('CIMultiDict[str]', headers) + real_headers = CIMultiDict(headers) if content_type is not None and "charset" in content_type: raise ValueError("charset must not be in content_type argument") diff --git a/tests/test_web_response.py b/tests/test_web_response.py index 0591426c57b..95769161804 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -10,7 +10,7 @@ import aiosignal import pytest -from multidict import CIMultiDict, CIMultiDictProxy +from multidict import CIMultiDict, CIMultiDictProxy, MultiDict from re_assert import Matches from aiohttp import HttpVersion, HttpVersion10, HttpVersion11, hdrs @@ -1479,3 +1479,15 @@ def test_text_is_json_encoded(self) -> None: def test_content_type_is_overrideable(self) -> None: resp = json_response({"foo": 42}, content_type="application/vnd.json+api") assert "application/vnd.json+api" == resp.content_type + + +@pytest.mark.parametrize("loose_header_type", (MultiDict, CIMultiDict, dict)) +async def test_passing_cimultidict_to_web_response_not_mutated( + loose_header_type: type, +) -> None: + req = make_request("GET", "/") + headers = loose_header_type({}) + resp = Response(body=b"answer", headers=headers) + await resp.prepare(req) + assert resp.content_length == 6 + assert not headers From 6a3d83579923271acd33749be2a9c5c81c175dc4 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Tue, 1 Apr 2025 12:10:22 -1000 Subject: [PATCH 1312/1511] [PR #10675/048ef4c backport][3.11] Remove useless nonlocal statements in tests (#10677) discovered by new flake8 in https://github.com/aio-libs/aiohttp/pull/10653 (cherry picked from commit 048ef4c617130ef5a29a70ad67e34d9395891ac1) --- tests/test_client_ws_functional.py | 3 +-- tests/test_helpers.py | 1 - tests/test_web_server.py | 3 +-- 3 files changed, 2 insertions(+), 5 deletions(-) diff --git a/tests/test_client_ws_functional.py b/tests/test_client_ws_functional.py index 54cd5e92f80..0ca57ab3ab2 100644 --- a/tests/test_client_ws_functional.py +++ b/tests/test_client_ws_functional.py @@ -315,7 +315,6 @@ async def test_concurrent_close(aiohttp_client) -> None: client_ws = None async def handler(request): - nonlocal client_ws ws = web.WebSocketResponse() await ws.prepare(request) @@ -936,7 +935,7 @@ async def delayed_send_frame( message: bytes, opcode: int, compress: Optional[int] = None ) -> None: assert opcode == WSMsgType.PING - nonlocal cancelled, ping_started + nonlocal cancelled ping_started.set_result(None) try: await asyncio.sleep(1) diff --git a/tests/test_helpers.py b/tests/test_helpers.py index 2a83032e557..a343cbdfedf 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -351,7 +351,6 @@ async def test_timer_context_timeout_does_swallow_cancellation() -> None: ctx = helpers.TimerContext(loop) async def task_with_timeout() -> None: - nonlocal ctx new_task = asyncio.current_task() assert new_task is not None with pytest.raises(asyncio.TimeoutError): diff --git a/tests/test_web_server.py b/tests/test_web_server.py index 9098ef9e7bf..d2f1341afe0 100644 --- a/tests/test_web_server.py +++ b/tests/test_web_server.py @@ -347,7 +347,6 @@ async def test_handler_cancellation(unused_port_socket: socket.socket) -> None: port = sock.getsockname()[1] async def on_request(_: web.Request) -> web.Response: - nonlocal event try: await asyncio.sleep(10) except 
asyncio.CancelledError: @@ -389,7 +388,7 @@ async def test_no_handler_cancellation(unused_port_socket: socket.socket) -> Non started = False async def on_request(_: web.Request) -> web.Response: - nonlocal done_event, started, timeout_event + nonlocal started started = True await asyncio.wait_for(timeout_event.wait(), timeout=5) done_event.set() From d3c4543fb4ccd3bfb6f03d4d7939bad48ac59970 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 1 Apr 2025 13:05:24 -1000 Subject: [PATCH 1313/1511] [PR #10672/35c979b backport][3.12] Fix headers being mutated if passed to web.Response as a CIMultiDict (#10674) --- CHANGES/10672.bugfix.rst | 1 + aiohttp/web_response.py | 4 +--- tests/test_web_response.py | 14 +++++++++++++- 3 files changed, 15 insertions(+), 4 deletions(-) create mode 100644 CHANGES/10672.bugfix.rst diff --git a/CHANGES/10672.bugfix.rst b/CHANGES/10672.bugfix.rst new file mode 100644 index 00000000000..a4434f8c87a --- /dev/null +++ b/CHANGES/10672.bugfix.rst @@ -0,0 +1 @@ +Fixed :class:`multidict.CIMultiDict` being mutated when passed to :class:`aiohttp.web.Response` -- by :user:`bdraco`. 
diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index a1955ca0d9e..151fbea3473 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -639,10 +639,8 @@ def __init__( if headers is None: real_headers: CIMultiDict[str] = CIMultiDict() - elif not isinstance(headers, CIMultiDict): - real_headers = CIMultiDict(headers) else: - real_headers = headers # = cast('CIMultiDict[str]', headers) + real_headers = CIMultiDict(headers) if content_type is not None and "charset" in content_type: raise ValueError("charset must not be in content_type argument") diff --git a/tests/test_web_response.py b/tests/test_web_response.py index 0a2c5273080..54176ea661b 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -11,7 +11,7 @@ import aiosignal import pytest -from multidict import CIMultiDict, CIMultiDictProxy +from multidict import CIMultiDict, CIMultiDictProxy, MultiDict from re_assert import Matches from aiohttp import HttpVersion, HttpVersion10, HttpVersion11, hdrs @@ -1501,3 +1501,15 @@ def test_text_is_json_encoded(self) -> None: def test_content_type_is_overrideable(self) -> None: resp = json_response({"foo": 42}, content_type="application/vnd.json+api") assert "application/vnd.json+api" == resp.content_type + + +@pytest.mark.parametrize("loose_header_type", (MultiDict, CIMultiDict, dict)) +async def test_passing_cimultidict_to_web_response_not_mutated( + loose_header_type: type, +) -> None: + req = make_request("GET", "/") + headers = loose_header_type({}) + resp = Response(body=b"answer", headers=headers) + await resp.prepare(req) + assert resp.content_length == 6 + assert not headers From cca85c46027d7172d9aa7b022098a036aef906b8 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Tue, 1 Apr 2025 13:33:26 -1000 Subject: [PATCH 1314/1511] [PR #10675/048ef4c backport][3.12] Remove useless nonlocal statements in tests (#10678) discovered by new flake8 in https://github.com/aio-libs/aiohttp/pull/10653 (cherry picked from commit 048ef4c617130ef5a29a70ad67e34d9395891ac1) --- tests/test_client_ws_functional.py | 3 +-- tests/test_helpers.py | 1 - tests/test_web_server.py | 3 +-- 3 files changed, 2 insertions(+), 5 deletions(-) diff --git a/tests/test_client_ws_functional.py b/tests/test_client_ws_functional.py index 54cd5e92f80..0ca57ab3ab2 100644 --- a/tests/test_client_ws_functional.py +++ b/tests/test_client_ws_functional.py @@ -315,7 +315,6 @@ async def test_concurrent_close(aiohttp_client) -> None: client_ws = None async def handler(request): - nonlocal client_ws ws = web.WebSocketResponse() await ws.prepare(request) @@ -936,7 +935,7 @@ async def delayed_send_frame( message: bytes, opcode: int, compress: Optional[int] = None ) -> None: assert opcode == WSMsgType.PING - nonlocal cancelled, ping_started + nonlocal cancelled ping_started.set_result(None) try: await asyncio.sleep(1) diff --git a/tests/test_helpers.py b/tests/test_helpers.py index 2a83032e557..a343cbdfedf 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -351,7 +351,6 @@ async def test_timer_context_timeout_does_swallow_cancellation() -> None: ctx = helpers.TimerContext(loop) async def task_with_timeout() -> None: - nonlocal ctx new_task = asyncio.current_task() assert new_task is not None with pytest.raises(asyncio.TimeoutError): diff --git a/tests/test_web_server.py b/tests/test_web_server.py index 9098ef9e7bf..d2f1341afe0 100644 --- a/tests/test_web_server.py +++ b/tests/test_web_server.py @@ -347,7 +347,6 @@ async def test_handler_cancellation(unused_port_socket: socket.socket) -> None: port = sock.getsockname()[1] async def on_request(_: web.Request) -> web.Response: - nonlocal event try: await asyncio.sleep(10) except 
asyncio.CancelledError: @@ -389,7 +388,7 @@ async def test_no_handler_cancellation(unused_port_socket: socket.socket) -> Non started = False async def on_request(_: web.Request) -> web.Response: - nonlocal done_event, started, timeout_event + nonlocal started started = True await asyncio.wait_for(timeout_event.wait(), timeout=5) done_event.set() From c723b3ce07b614d0c5fc7806d17f64b2f8ef268a Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 1 Apr 2025 14:36:56 -1000 Subject: [PATCH 1315/1511] Release 3.11.16 (#10679) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit <img width="700" alt="Screenshot 2025-04-01 at 1 42 52 PM" src="https://github.com/user-attachments/assets/5ec2c6b3-389f-417d-8bed-369f7a2fefda" /> --- CHANGES.rst | 27 +++++++++++++++++++++++++++ CHANGES/10634.bugfix.rst | 2 -- CHANGES/10672.bugfix.rst | 1 - aiohttp/__init__.py | 2 +- 4 files changed, 28 insertions(+), 4 deletions(-) delete mode 100644 CHANGES/10634.bugfix.rst delete mode 100644 CHANGES/10672.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index c2654b99214..00d728e775d 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,33 @@ .. towncrier release notes start +3.11.16 (2025-04-01) +==================== + +Bug fixes +--------- + +- Replaced deprecated ``asyncio.iscoroutinefunction`` with its counterpart from ``inspect`` + -- by :user:`layday`. + + + *Related issues and pull requests on GitHub:* + :issue:`10634`. + + + +- Fixed :class:`multidict.CIMultiDict` being mutated when passed to :class:`aiohttp.web.Response` -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10672`. 
+ + + + +---- + + 3.11.15 (2025-03-31) ==================== diff --git a/CHANGES/10634.bugfix.rst b/CHANGES/10634.bugfix.rst deleted file mode 100644 index d6ec64a607e..00000000000 --- a/CHANGES/10634.bugfix.rst +++ /dev/null @@ -1,2 +0,0 @@ -Replaced deprecated ``asyncio.iscoroutinefunction`` with its counterpart from ``inspect`` --- by :user:`layday`. diff --git a/CHANGES/10672.bugfix.rst b/CHANGES/10672.bugfix.rst deleted file mode 100644 index a4434f8c87a..00000000000 --- a/CHANGES/10672.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed :class:`multidict.CIMultiDict` being mutated when passed to :class:`aiohttp.web.Response` -- by :user:`bdraco`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index acfef952d61..93b06c7367a 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.16.dev0" +__version__ = "3.11.16" from typing import TYPE_CHECKING, Tuple From cba40bf1a4a8db67e4c96ec82cfc0d6699ace8f4 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 1 Apr 2025 16:37:13 -1000 Subject: [PATCH 1316/1511] Increment version to 3.11.17.dev0 (#10681) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 93b06c7367a..8a3d34a4f87 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.16" +__version__ = "3.11.17.dev0" from typing import TYPE_CHECKING, Tuple From 8541a7ca342a2316e60e3fe3e12a07c1e73d33d1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 17:16:07 -1000 Subject: [PATCH 1317/1511] Bump multidict from 6.2.0 to 6.3.1 (#10667) Bumps [multidict](https://github.com/aio-libs/multidict) from 6.2.0 to 6.3.0. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/multidict/releases">multidict's releases</a>.</em></p> <blockquote> <h2>6.3.0</h2> <h2>Bug fixes</h2> <ul> <li> <p>Set operations for <code>KeysView</code> and <code>ItemsView</code> of case-insensitive multidicts and their proxies are processed in case-insensitive manner.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/multidict/issues/965">#965</a>.</p> </li> <li> <p>Rewrote :class:<code>multidict.CIMultiDict</code> and it proxy to always return :class:<code>multidict.istr</code> keys. <code>istr</code> is derived from :class:<code>str</code>, thus the change is backward compatible.</p> <p>The performance boost is about 15% for some operations for C Extension, pure Python implementation have got a visible (15% - 230%) speedup as well.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/multidict/issues/1097">#1097</a>.</p> </li> <li> <p>Fixed a crash when extending a multidict from multidict proxy if C Extensions were used.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/multidict/issues/1100">#1100</a>.</p> </li> </ul> <h2>Features</h2> <ul> <li> <p>Implemented a custom parser for <code>METH_FASTCALL | METH_KEYWORDS</code> protocol -- by :user:<code>asvetlov</code>.</p> <p>The patch re-enables fast call protocol in the :py:mod:<code>multidict</code> C Extension.</p> <p>Speedup is about 25%-30% for the library benchmarks for Python 3.12+.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/multidict/issues/1070">#1070</a>.</p> </li> <li> <p>The C-extension no longer pre-allocates a Python exception object in lookup-related methods of :py:class:<code>~multidict.MultiDict</code> when the passed-in <em>key</em> is not found but <em>default</em> 
value is provided.</p> <p>Namely, this affects :py:meth:<code>MultiDict.getone() <multidict.MultiDict.getone></code>, :py:meth:<code>MultiDict.getall() <multidict.MultiDict.getall></code>, :py:meth:<code>MultiDict.get() <multidict.MultiDict.get></code>, :py:meth:<code>MultiDict.pop() <multidict.MultiDict.pop></code>, :py:meth:<code>MultiDict.popone() <multidict.MultiDict.popone></code>, and :py:meth:<code>MultiDict.popall() <multidict.MultiDict.popall></code>.</p> </li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/multidict/blob/master/CHANGES.rst">multidict's changelog</a>.</em></p> <blockquote> <h1>6.3.0</h1> <p><em>(2025-03-31)</em></p> <h2>Bug fixes</h2> <ul> <li> <p>Set operations for <code>KeysView</code> and <code>ItemsView</code> of case-insensitive multidicts and their proxies are processed in case-insensitive manner.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>965</code>.</p> </li> <li> <p>Rewrote :class:<code>multidict.CIMultiDict</code> and it proxy to always return :class:<code>multidict.istr</code> keys. 
<code>istr</code> is derived from :class:<code>str</code>, thus the change is backward compatible.</p> <p>The performance boost is about 15% for some operations for C Extension, pure Python implementation have got a visible (15% - 230%) speedup as well.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1097</code>.</p> </li> <li> <p>Fixed a crash when extending a multidict from multidict proxy if C Extensions were used.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1100</code>.</p> </li> </ul> <h2>Features</h2> <ul> <li> <p>Implemented a custom parser for <code>METH_FASTCALL | METH_KEYWORDS</code> protocol -- by :user:<code>asvetlov</code>.</p> <p>The patch re-enables fast call protocol in the :py:mod:<code>multidict</code> C Extension.</p> <p>Speedup is about 25%-30% for the library benchmarks for Python 3.12+.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1070</code>.</p> </li> <li> <p>The C-extension no longer pre-allocates a Python exception object in lookup-related methods of :py:class:<code>~multidict.MultiDict</code> when the passed-in <em>key</em> is not found but <em>default</em> value is provided.</p> <p>Namely, this affects :py:meth:<code>MultiDict.getone() <multidict.MultiDict.getone></code>, :py:meth:<code>MultiDict.getall() <multidict.MultiDict.getall></code>, :py:meth:`MultiDict.get()</p> </li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/multidict/commit/93482a83d2c0b9246992b34b95e4748aaa417151"><code>93482a8</code></a> Release 6.3.0 (<a href="https://redirect.github.com/aio-libs/multidict/issues/1109">#1109</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/5d022e9957a5e9ec888adf736fdd563783d92c9b"><code>5d022e9</code></a> Properly support set operations for case insensitive multidict views (<a href="https://redirect.github.com/aio-libs/multidict/issues/1038">#1038</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/675c4ae3a8ce52d55e0f62ee7f77fca6442db93a"><code>675c4ae</code></a> Benchmarks for ItemsView (<a href="https://redirect.github.com/aio-libs/multidict/issues/1108">#1108</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/b5d9bd1a0d50afc94c892423b1c9782d5ff6ed15"><code>b5d9bd1</code></a> Add benchmarks for iterating multidict (<a href="https://redirect.github.com/aio-libs/multidict/issues/1107">#1107</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/1ba64e739de1c41cd51ce2e4ef6f246420a638a9"><code>1ba64e7</code></a> Run benchmark against all multidict implementations (<a href="https://redirect.github.com/aio-libs/multidict/issues/1106">#1106</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/389758e0b4ad7c5037b56a156106c7e82ad54201"><code>389758e</code></a> Use istr as keys in CIMultiDict (<a href="https://redirect.github.com/aio-libs/multidict/issues/1097">#1097</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/10a97e1340d80a7f22adcd69ffd575345f0d9143"><code>10a97e1</code></a> Remove the last element by popitem() (<a href="https://redirect.github.com/aio-libs/multidict/issues/1105">#1105</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/6097b60832b3a9c841f821a41527606bd9dff3bc"><code>6097b60</code></a> Optimize multidict extending (<a 
href="https://redirect.github.com/aio-libs/multidict/issues/1101">#1101</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/20515ba388e903646bb8bffb2f62c8f64e9af443"><code>20515ba</code></a> Add benchmarks for creating multidict from dict (<a href="https://redirect.github.com/aio-libs/multidict/issues/1104">#1104</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/f6b4db155898a71da5e915767b285aa2f1424765"><code>f6b4db1</code></a> Add benchmarks for updating/extending multidict with kwargs (<a href="https://redirect.github.com/aio-libs/multidict/issues/1103">#1103</a>)</li> <li>Additional commits viewable in <a href="https://github.com/aio-libs/multidict/compare/v6.2.0...v6.3.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=multidict&package-manager=pip&previous-version=6.2.0&new-version=6.3.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/cython.txt | 2 +- requirements/dev.txt | 2 +- requirements/multidict.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 6c8b21f5aa8..ffee818cdbb 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -26,7 +26,7 @@ gunicorn==23.0.0 # via -r requirements/base.in idna==3.4 # via yarl -multidict==6.2.0 +multidict==6.3.1 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 142c09092b0..f24d2f8919a 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -111,7 +111,7 @@ markupsafe==3.0.2 # via jinja2 mdurl==0.1.2 # via markdown-it-py -multidict==6.2.0 +multidict==6.3.1 # via # -r requirements/multidict.in # -r requirements/runtime-deps.in diff --git a/requirements/cython.txt b/requirements/cython.txt index fc290ab6688..5f2bbcb7c1f 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -6,7 +6,7 @@ # 
cython==3.0.12 # via -r requirements/cython.in -multidict==6.2.0 +multidict==6.3.1 # via -r requirements/multidict.in typing-extensions==4.12.2 # via multidict diff --git a/requirements/dev.txt b/requirements/dev.txt index b9e52c24751..93d89ba3daf 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -109,7 +109,7 @@ markupsafe==3.0.2 # via jinja2 mdurl==0.1.2 # via markdown-it-py -multidict==6.2.0 +multidict==6.3.1 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/multidict.txt b/requirements/multidict.txt index be4d86595fc..4ee354b5aa0 100644 --- a/requirements/multidict.txt +++ b/requirements/multidict.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/multidict.txt --resolver=backtracking --strip-extras requirements/multidict.in # -multidict==6.2.0 +multidict==6.3.1 # via -r requirements/multidict.in typing-extensions==4.12.2 # via multidict diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 0575278acab..7504cfc629b 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -24,7 +24,7 @@ frozenlist==1.5.0 # aiosignal idna==3.4 # via yarl -multidict==6.2.0 +multidict==6.3.1 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/test.txt b/requirements/test.txt index b2ea7bfff70..d0a87da4001 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -57,7 +57,7 @@ markdown-it-py==3.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -multidict==6.2.0 +multidict==6.3.1 # via # -r requirements/runtime-deps.in # yarl From a0fcf320fa8335c13f3e7d3db41f7662f362482d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 2 Apr 2025 11:02:45 +0000 Subject: [PATCH 1318/1511] Bump pytest-cov from 6.0.0 to 6.1.0 (#10687) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pytest-cov](https://github.com/pytest-dev/pytest-cov) from 6.0.0 to 
6.1.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pytest-dev/pytest-cov/blob/master/CHANGELOG.rst">pytest-cov's changelog</a>.</em></p> <blockquote> <h2>6.1.0 (2025-04-01)</h2> <ul> <li>Change terminal output to use full width lines for the coverage header. Contributed by Tsvika Shapira in <code>[#678](https://github.com/pytest-dev/pytest-cov/issues/678) <https://github.com/pytest-dev/pytest-cov/pull/678></code>_.</li> <li>Removed unnecessary CovFailUnderWarning. Fixes <code>[#675](https://github.com/pytest-dev/pytest-cov/issues/675) <https://github.com/pytest-dev/pytest-cov/issues/675></code>_.</li> <li>Fixed the term report not using the precision specified via <code>--cov-precision</code>.</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pytest-dev/pytest-cov/commit/10f8cde38c3b0aaf2c75d9ed62d4f333d8809d96"><code>10f8cde</code></a> Bump version: 6.0.0 → 6.1.0</li> <li><a href="https://github.com/pytest-dev/pytest-cov/commit/10b14afffcd53b19967785c0b3e8b35ebac70b6f"><code>10b14af</code></a> Update changelog.</li> <li><a href="https://github.com/pytest-dev/pytest-cov/commit/aa57aed273475b4f9975cc9a8a1662b336718662"><code>aa57aed</code></a> Refactor a bit the internals to be a bit less boilerplatey and have more clar...</li> <li><a href="https://github.com/pytest-dev/pytest-cov/commit/e760099a7fd5f49c235dc798bf7f222c0372b7e3"><code>e760099</code></a> Make sure the CLI precision is used when creating report. Fixes <a href="https://redirect.github.com/pytest-dev/pytest-cov/issues/674">#674</a>.</li> <li><a href="https://github.com/pytest-dev/pytest-cov/commit/44540e1e9f02f3b69b62834636cf3057edc960d6"><code>44540e1</code></a> Remove unnecessary CovFailUnderWarning. 
Closes <a href="https://redirect.github.com/pytest-dev/pytest-cov/issues/675">#675</a>.</li> <li><a href="https://github.com/pytest-dev/pytest-cov/commit/204af146f8f4ff03076825a693ee6aef587deb6b"><code>204af14</code></a> Update changelog.</li> <li><a href="https://github.com/pytest-dev/pytest-cov/commit/089e7bb5c16dcfdedd54f27fda094ccb3eeaae2c"><code>089e7bb</code></a> Upgrade ruff.</li> <li><a href="https://github.com/pytest-dev/pytest-cov/commit/ab2cd263ee46ae0777516ac681984c692feff3cf"><code>ab2cd26</code></a> Add py 3.13 to test grid and update some deps.</li> <li><a href="https://github.com/pytest-dev/pytest-cov/commit/2de0c6c4bd0d4f421d2d599a60ac9826662b22f2"><code>2de0c6c</code></a> add reference to code source</li> <li><a href="https://github.com/pytest-dev/pytest-cov/commit/362a35966723a7a51e1cbefb57035b10f12bc65a"><code>362a359</code></a> move section between functions</li> <li>Additional commits viewable in <a href="https://github.com/pytest-dev/pytest-cov/compare/v6.0.0...v6.1.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pytest-cov&package-manager=pip&previous-version=6.0.0&new-version=6.1.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index f24d2f8919a..fd1bb4a78a8 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -178,7 +178,7 @@ pytest-codspeed==3.2.0 # via # -r requirements/lint.in # -r 
requirements/test.in -pytest-cov==6.0.0 +pytest-cov==6.1.0 # via -r requirements/test.in pytest-mock==3.14.0 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 93d89ba3daf..e028b4545cc 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -173,7 +173,7 @@ pytest-codspeed==3.2.0 # via # -r requirements/lint.in # -r requirements/test.in -pytest-cov==6.0.0 +pytest-cov==6.1.0 # via -r requirements/test.in pytest-mock==3.14.0 # via diff --git a/requirements/test.txt b/requirements/test.txt index d0a87da4001..21af44f7b75 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -96,7 +96,7 @@ pytest==8.3.5 # pytest-xdist pytest-codspeed==3.2.0 # via -r requirements/test.in -pytest-cov==6.0.0 +pytest-cov==6.1.0 # via -r requirements/test.in pytest-mock==3.14.0 # via -r requirements/test.in From 39b1afa9bed968dad488017ec1f1d1e43d056ffd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 4 Apr 2025 11:15:23 +0000 Subject: [PATCH 1319/1511] Bump typing-extensions from 4.12.2 to 4.13.1 (#10693) Bumps [typing-extensions](https://github.com/python/typing_extensions) from 4.12.2 to 4.13.1. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/python/typing_extensions/releases">typing-extensions's releases</a>.</em></p> <blockquote> <h2>4.13.1</h2> <p>This is a bugfix release fixing two edge cases that appear on old bugfix releases of CPython.</p> <p>Bugfixes:</p> <ul> <li>Fix regression in 4.13.0 on Python 3.10.2 causing a <code>TypeError</code> when using <code>Concatenate</code>. Patch by <a href="https://github.com/Daraan">Daraan</a>.</li> <li>Fix <code>TypeError</code> when using <code>evaluate_forward_ref</code> on Python 3.10.1-2 and 3.9.8-10. Patch by <a href="https://github.com/Daraan">Daraan</a>.</li> </ul> <h2>4.13.0</h2> <p>New features:</p> <ul> <li>Add <code>typing_extensions.TypeForm</code> from PEP 747. 
Patch by Jelle Zijlstra.</li> <li>Add <code>typing_extensions.get_annotations</code>, a backport of <code>inspect.get_annotations</code> that adds features specified by PEP 649. Patches by Jelle Zijlstra and Alex Waygood.</li> <li>Backport <code>evaluate_forward_ref</code> from CPython PR <a href="https://redirect.github.com/python/cpython/pull/119891">#119891</a> to evaluate <code>ForwardRef</code>s. Patch by <a href="https://github.com/Daraan">Daraan</a>, backporting a CPython PR by Jelle Zijlstra.</li> </ul> <p>Bugfixes and changed features:</p> <ul> <li>Update PEP 728 implementation to a newer version of the PEP. Patch by Jelle Zijlstra.</li> <li>Copy the coroutine status of functions and methods wrapped with <code>@typing_extensions.deprecated</code>. Patch by Sebastian Rittau.</li> <li>Fix bug where <code>TypeAliasType</code> instances could be subscripted even where they were not generic. Patch by <a href="https://github.com/Daraan">Daraan</a>.</li> <li>Fix bug where a subscripted <code>TypeAliasType</code> instance did not have all attributes of the original <code>TypeAliasType</code> instance on older Python versions. Patch by <a href="https://github.com/Daraan">Daraan</a> and Alex Waygood.</li> <li>Fix bug where subscripted <code>TypeAliasType</code> instances (and some other subscripted objects) had wrong parameters if they were directly subscripted with an <code>Unpack</code> object. Patch by <a href="https://github.com/Daraan">Daraan</a>.</li> <li>Backport to Python 3.10 the ability to substitute <code>...</code> in generic <code>Callable</code> aliases that have a <code>Concatenate</code> special form as their argument. Patch by <a href="https://github.com/Daraan">Daraan</a>.</li> <li>Extended the <code>Concatenate</code> backport for Python 3.8-3.10 to now accept <code>Ellipsis</code> as an argument. 
Patch by <a href="https://github.com/Daraan">Daraan</a>.</li> <li>Fix backport of <code>get_type_hints</code> to reflect Python 3.11+ behavior which does not add <code>Union[..., NoneType]</code> to annotations that have a <code>None</code> default value anymore. This fixes wrapping of <code>Annotated</code> in an unwanted <code>Optional</code> in such cases. Patch by <a href="https://github.com/Daraan">Daraan</a>.</li> <li>Fix error in subscription of <code>Unpack</code> aliases causing nested Unpacks to not be resolved correctly. Patch by <a href="https://github.com/Daraan">Daraan</a>.</li> <li>Backport CPython PR <a href="https://redirect.github.com/python/cpython/pull/124795">#124795</a>: fix <code>TypeAliasType</code> not raising an error on non-tuple inputs for <code>type_params</code>. Patch by <a href="https://github.com/Daraan">Daraan</a>.</li> <li>Fix that lists and <code>...</code> could not be used for parameter expressions for <code>TypeAliasType</code></li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/python/typing_extensions/blob/main/CHANGELOG.md">typing-extensions's changelog</a>.</em></p> <blockquote> <h1>Release 4.13.1 (April 3, 2025)</h1> <p>Bugfixes:</p> <ul> <li>Fix regression in 4.13.0 on Python 3.10.2 causing a <code>TypeError</code> when using <code>Concatenate</code>. Patch by <a href="https://github.com/Daraan">Daraan</a>.</li> <li>Fix <code>TypeError</code> when using <code>evaluate_forward_ref</code> on Python 3.10.1-2 and 3.9.8-10. Patch by <a href="https://github.com/Daraan">Daraan</a>.</li> </ul> <h1>Release 4.13.0 (March 25, 2025)</h1> <p>No user-facing changes since 4.13.0rc1.</p> <h1>Release 4.13.0rc1 (March 18, 2025)</h1> <p>New features:</p> <ul> <li>Add <code>typing_extensions.TypeForm</code> from PEP 747. 
Patch by Jelle Zijlstra.</li> <li>Add <code>typing_extensions.get_annotations</code>, a backport of <code>inspect.get_annotations</code> that adds features specified by PEP 649. Patches by Jelle Zijlstra and Alex Waygood.</li> <li>Backport <code>evaluate_forward_ref</code> from CPython PR <a href="https://redirect.github.com/python/cpython/pull/119891">#119891</a> to evaluate <code>ForwardRef</code>s. Patch by <a href="https://github.com/Daraan">Daraan</a>, backporting a CPython PR by Jelle Zijlstra.</li> </ul> <p>Bugfixes and changed features:</p> <ul> <li>Update PEP 728 implementation to a newer version of the PEP. Patch by Jelle Zijlstra.</li> <li>Copy the coroutine status of functions and methods wrapped with <code>@typing_extensions.deprecated</code>. Patch by Sebastian Rittau.</li> <li>Fix bug where <code>TypeAliasType</code> instances could be subscripted even where they were not generic. Patch by <a href="https://github.com/Daraan">Daraan</a>.</li> <li>Fix bug where a subscripted <code>TypeAliasType</code> instance did not have all attributes of the original <code>TypeAliasType</code> instance on older Python versions. Patch by <a href="https://github.com/Daraan">Daraan</a> and Alex Waygood.</li> <li>Fix bug where subscripted <code>TypeAliasType</code> instances (and some other subscripted objects) had wrong parameters if they were directly subscripted with an <code>Unpack</code> object. Patch by <a href="https://github.com/Daraan">Daraan</a>.</li> <li>Backport to Python 3.10 the ability to substitute <code>...</code> in generic <code>Callable</code> aliases that have a <code>Concatenate</code> special form as their argument. Patch by <a href="https://github.com/Daraan">Daraan</a>.</li> <li>Extended the <code>Concatenate</code> backport for Python 3.8-3.10 to now accept <code>Ellipsis</code> as an argument. 
Patch by <a href="https://github.com/Daraan">Daraan</a>.</li> <li>Fix backport of <code>get_type_hints</code> to reflect Python 3.11+ behavior which does not add <code>Union[..., NoneType]</code> to annotations that have a <code>None</code> default value anymore. This fixes wrapping of <code>Annotated</code> in an unwanted <code>Optional</code> in such cases. Patch by <a href="https://github.com/Daraan">Daraan</a>.</li> <li>Fix error in subscription of <code>Unpack</code> aliases causing nested Unpacks to not be resolved correctly. Patch by <a href="https://github.com/Daraan">Daraan</a>.</li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/python/typing_extensions/commit/45a8847aad979d2f1f7dff075ac52df5df7b7adb"><code>45a8847</code></a> Prepare release 4.13.1 (<a href="https://redirect.github.com/python/typing_extensions/issues/573">#573</a>)</li> <li><a href="https://github.com/python/typing_extensions/commit/f264e58146479d2d8456dd6e660d785dc07d6f26"><code>f264e58</code></a> Move CI to "ubuntu-latest" (round 2) (<a href="https://redirect.github.com/python/typing_extensions/issues/570">#570</a>)</li> <li><a href="https://github.com/python/typing_extensions/commit/5ce0e69b20992f8bf410849a31381cd656e3eb6b"><code>5ce0e69</code></a> Fix TypeError with evaluate_forward_ref on some 3.10 and 3.9 versions (<a href="https://redirect.github.com/python/typing_extensions/issues/558">#558</a>)</li> <li><a href="https://github.com/python/typing_extensions/commit/304f5cb17d709950ece3e9c84a76174bf7405b90"><code>304f5cb</code></a> Add SQLAlchemy to third-party daily tests (<a href="https://redirect.github.com/python/typing_extensions/issues/561">#561</a>)</li> <li><a href="https://github.com/python/typing_extensions/commit/ebe2b9405c493749429de6c82c8daddd1107c9e2"><code>ebe2b94</code></a> Fix duplicated keywords for typing._ConcatenateGenericAlias in 3.10.2 (<a 
href="https://redirect.github.com/python/typing_extensions/issues/557">#557</a>)</li> <li><a href="https://github.com/python/typing_extensions/commit/9f93d6fb752698504d80b1ed0c73b0a2a9d0cff6"><code>9f93d6f</code></a> Add intersphinx links for 3.13 typing features (<a href="https://redirect.github.com/python/typing_extensions/issues/550">#550</a>)</li> <li><a href="https://github.com/python/typing_extensions/commit/c8934015b7e2feb65dc461fef202ef69611d7d0e"><code>c893401</code></a> Prepare release 4.13.0 (<a href="https://redirect.github.com/python/typing_extensions/issues/555">#555</a>)</li> <li><a href="https://github.com/python/typing_extensions/commit/6239d868113cbf60c3db359775bb5f5c948c6978"><code>6239d86</code></a> Use latest Python docs as intersphinx base rather than 3.12 docs (<a href="https://redirect.github.com/python/typing_extensions/issues/549">#549</a>)</li> <li><a href="https://github.com/python/typing_extensions/commit/671a337a3231b90f7cd979300f9af9fa25cdd35f"><code>671a337</code></a> Fix 'Test and lint' workflow running on forks (<a href="https://redirect.github.com/python/typing_extensions/issues/551">#551</a>)</li> <li><a href="https://github.com/python/typing_extensions/commit/e77e8e2dbdab9d7edf3d88c9493c26f759a25978"><code>e77e8e2</code></a> Disable pyanalyze tests for now (<a href="https://redirect.github.com/python/typing_extensions/issues/554">#554</a>)</li> <li>Additional commits viewable in <a href="https://github.com/python/typing_extensions/compare/4.12.2...4.13.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=typing-extensions&package-manager=pip&previous-version=4.12.2&new-version=4.13.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. 
You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/cython.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/multidict.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git a/requirements/base.txt 
b/requirements/base.txt index ffee818cdbb..efbdf3d5436 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -40,7 +40,7 @@ pycares==4.5.0 # via aiodns pycparser==2.22 # via cffi -typing-extensions==4.12.2 +typing-extensions==4.13.1 # via multidict uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index fd1bb4a78a8..70f4833e38f 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -258,7 +258,7 @@ trustme==1.2.1 ; platform_machine != "i686" # via # -r requirements/lint.in # -r requirements/test.in -typing-extensions==4.12.2 +typing-extensions==4.13.1 # via # multidict # mypy diff --git a/requirements/cython.txt b/requirements/cython.txt index 5f2bbcb7c1f..31272de30f3 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -8,5 +8,5 @@ cython==3.0.12 # via -r requirements/cython.in multidict==6.3.1 # via -r requirements/multidict.in -typing-extensions==4.12.2 +typing-extensions==4.13.1 # via multidict diff --git a/requirements/dev.txt b/requirements/dev.txt index e028b4545cc..f9a3c08c6bd 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -249,7 +249,7 @@ trustme==1.2.1 ; platform_machine != "i686" # via # -r requirements/lint.in # -r requirements/test.in -typing-extensions==4.12.2 +typing-extensions==4.13.1 # via # multidict # mypy diff --git a/requirements/lint.txt b/requirements/lint.txt index c400e12cea0..fb4680014e1 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -95,7 +95,7 @@ tomli==2.2.1 # slotscheck trustme==1.2.1 # via -r requirements/lint.in -typing-extensions==4.12.2 +typing-extensions==4.13.1 # via # mypy # pydantic diff --git a/requirements/multidict.txt b/requirements/multidict.txt index 4ee354b5aa0..405d04c027b 100644 --- a/requirements/multidict.txt +++ b/requirements/multidict.txt @@ -6,5 +6,5 @@ # multidict==6.3.1 # via -r 
requirements/multidict.in -typing-extensions==4.12.2 +typing-extensions==4.13.1 # via multidict diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 7504cfc629b..9dbcc807684 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -36,7 +36,7 @@ pycares==4.5.0 # via aiodns pycparser==2.22 # via cffi -typing-extensions==4.12.2 +typing-extensions==4.13.1 # via multidict yarl==1.18.3 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 21af44f7b75..b3a8368d3cf 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -123,7 +123,7 @@ tomli==2.2.1 # pytest trustme==1.2.1 ; platform_machine != "i686" # via -r requirements/test.in -typing-extensions==4.12.2 +typing-extensions==4.13.1 # via # multidict # mypy From afacd1ba8ca4ee97ca12259efdfe7b4bfefc5e6b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 4 Apr 2025 11:49:42 +0000 Subject: [PATCH 1320/1511] Bump multidict from 6.3.1 to 6.3.2 (#10695) Bumps [multidict](https://github.com/aio-libs/multidict) from 6.3.1 to 6.3.2. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/multidict/releases">multidict's releases</a>.</em></p> <blockquote> <h2>6.3.2</h2> <h2>Bug fixes</h2> <ul> <li> <p>Resolved a memory leak by ensuring proper reference count decrementation -- by :user:<code>asvetlov</code> and :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/multidict/issues/1121">#1121</a>.</p> </li> </ul> <hr /> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/multidict/blob/master/CHANGES.rst">multidict's changelog</a>.</em></p> <blockquote> <h1>6.3.2</h1> <p><em>(2025-04-03)</em></p> <h2>Bug fixes</h2> <ul> <li> <p>Resolved a memory leak by ensuring proper reference count decrementation -- by :user:<code>asvetlov</code> and :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1121</code>.</p> </li> </ul> <hr /> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/multidict/commit/0d667589cd02c8ce8251226f8f64fd86afd7ffa3"><code>0d66758</code></a> Release 6.3.2 (<a href="https://redirect.github.com/aio-libs/multidict/issues/1124">#1124</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/64d2dc994c97c3d30680735302ec9504847580ef"><code>64d2dc9</code></a> Fix memory leak on objects passed to <code>init</code>, <code>update</code>, or <code>extend</code> (<a href="https://redirect.github.com/aio-libs/multidict/issues/1121">#1121</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/9a32040e331109291d453bac1903cd834d1e782d"><code>9a32040</code></a> Add leak tests for <a href="https://redirect.github.com/aio-libs/multidict/issues/1117">#1117</a> (<a href="https://redirect.github.com/aio-libs/multidict/issues/1120">#1120</a>)</li> <li>See full diff in <a 
href="https://github.com/aio-libs/multidict/compare/v6.3.1...v6.3.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=multidict&package-manager=pip&previous-version=6.3.1&new-version=6.3.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/cython.txt | 2 +- requirements/dev.txt | 2 +- requirements/multidict.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index efbdf3d5436..3fe85928b13 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -26,7 +26,7 @@ gunicorn==23.0.0 # via -r requirements/base.in idna==3.4 # via yarl -multidict==6.3.1 +multidict==6.3.2 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 70f4833e38f..f96d0bf236e 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -111,7 +111,7 @@ markupsafe==3.0.2 # via jinja2 mdurl==0.1.2 # via markdown-it-py -multidict==6.3.1 +multidict==6.3.2 # via # -r requirements/multidict.in # -r requirements/runtime-deps.in diff --git a/requirements/cython.txt b/requirements/cython.txt index 31272de30f3..7a3b4737f54 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -6,7 +6,7 @@ # 
cython==3.0.12 # via -r requirements/cython.in -multidict==6.3.1 +multidict==6.3.2 # via -r requirements/multidict.in typing-extensions==4.13.1 # via multidict diff --git a/requirements/dev.txt b/requirements/dev.txt index f9a3c08c6bd..0888d764aa0 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -109,7 +109,7 @@ markupsafe==3.0.2 # via jinja2 mdurl==0.1.2 # via markdown-it-py -multidict==6.3.1 +multidict==6.3.2 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/multidict.txt b/requirements/multidict.txt index 405d04c027b..3237ab6c359 100644 --- a/requirements/multidict.txt +++ b/requirements/multidict.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/multidict.txt --resolver=backtracking --strip-extras requirements/multidict.in # -multidict==6.3.1 +multidict==6.3.2 # via -r requirements/multidict.in typing-extensions==4.13.1 # via multidict diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 9dbcc807684..fddb7252229 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -24,7 +24,7 @@ frozenlist==1.5.0 # aiosignal idna==3.4 # via yarl -multidict==6.3.1 +multidict==6.3.2 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/test.txt b/requirements/test.txt index b3a8368d3cf..28159c29844 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -57,7 +57,7 @@ markdown-it-py==3.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -multidict==6.3.1 +multidict==6.3.2 # via # -r requirements/runtime-deps.in # yarl From 89f17288da5aa614ae55a4a4c4544fc499a5cc0f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 4 Apr 2025 11:55:20 +0000 Subject: [PATCH 1321/1511] Bump pydantic from 2.11.1 to 2.11.2 (#10694) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pydantic](https://github.com/pydantic/pydantic) from 2.11.1 to 2.11.2. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pydantic/pydantic/releases">pydantic's releases</a>.</em></p> <blockquote> <h2>v2.11.2 2025-04-03</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <h3>Fixes</h3> <ul> <li>Bump <code>pydantic-core</code> to v2.33.1 by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11678">pydantic/pydantic#11678</a></li> <li>Make sure <code>__pydantic_private__</code> exists before setting private attributes by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11666">pydantic/pydantic#11666</a></li> <li>Do not override <code>FieldInfo._complete</code> when using field from parent class by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11668">pydantic/pydantic#11668</a></li> <li>Provide the available definitions when applying discriminated unions by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11670">pydantic/pydantic#11670</a></li> <li>Do not expand root type in the mypy plugin for variables by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11676">pydantic/pydantic#11676</a></li> <li>Mention the attribute name in model fields deprecation message by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11674">pydantic/pydantic#11674</a></li> <li>Properly validate parameterized mappings by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11658">pydantic/pydantic#11658</a></li> <li>Prepare release v2.11.2 by <a 
href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11684">pydantic/pydantic#11684</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pydantic/pydantic/compare/v2.11.1...v2.11.2">https://github.com/pydantic/pydantic/compare/v2.11.1...v2.11.2</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pydantic/pydantic/blob/main/HISTORY.md">pydantic's changelog</a>.</em></p> <blockquote> <h2>v2.11.2 (2025-04-03)</h2> <p><a href="https://github.com/pydantic/pydantic/releases/tag/v2.11.2">GitHub release</a></p> <h3>What's Changed</h3> <h4>Fixes</h4> <ul> <li>Bump <code>pydantic-core</code> to v2.33.1 by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11678">#11678</a></li> <li>Make sure <code>__pydantic_private__</code> exists before setting private attributes by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11666">#11666</a></li> <li>Do not override <code>FieldInfo._complete</code> when using field from parent class by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11668">#11668</a></li> <li>Provide the available definitions when applying discriminated unions by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11670">#11670</a></li> <li>Do not expand root type in the mypy plugin for variables by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11676">#11676</a></li> <li>Mention the attribute name in model fields deprecation message by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a 
href="https://redirect.github.com/pydantic/pydantic/pull/11674">#11674</a></li> <li>Properly validate parameterized mappings by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11658">#11658</a></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pydantic/pydantic/commit/bd1f8cf44a271e7313026faab318f3c37f23b3f4"><code>bd1f8cf</code></a> Prepare release v2.11.2 (<a href="https://redirect.github.com/pydantic/pydantic/issues/11684">#11684</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/f70f2917913e53adfb82700f348bf8e3fae21357"><code>f70f291</code></a> Add integration documentation for llms.txt (<a href="https://redirect.github.com/pydantic/pydantic/issues/11677">#11677</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/34095c7523371e04b6b8fd9b55680845dd7dd279"><code>34095c7</code></a> Properly validate parameterized mappings (<a href="https://redirect.github.com/pydantic/pydantic/issues/11658">#11658</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/dfa6c6715df831c2eba3c4c96c231b66e0fc3d4a"><code>dfa6c67</code></a> Mention the attribute name in model fields deprecation message (<a href="https://redirect.github.com/pydantic/pydantic/issues/11674">#11674</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/cbf4202637a56fc5325ab4b18452e3f3ba910a84"><code>cbf4202</code></a> Do not expand root type in the mypy plugin for variables (<a href="https://redirect.github.com/pydantic/pydantic/issues/11676">#11676</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/8b0825add81e53daefba83e5a147cd13ca57a63b"><code>8b0825a</code></a> Provide the available definitions when applying discriminated unions (<a href="https://redirect.github.com/pydantic/pydantic/issues/11670">#11670</a>)</li> <li><a 
href="https://github.com/pydantic/pydantic/commit/86c5703a2d4afe43f2adea9b2c5450b178ce5121"><code>86c5703</code></a> Do not override <code>FieldInfo._complete</code> when using field from parent class (<a href="https://redirect.github.com/pydantic/pydantic/issues/11">#11</a>...</li> <li><a href="https://github.com/pydantic/pydantic/commit/da841493831c86229ce5d5dd4dfbc482bfea3ddb"><code>da84149</code></a> Make sure <code>__pydantic_private__</code> exists before setting private attributes (<a href="https://redirect.github.com/pydantic/pydantic/issues/1">#1</a>...</li> <li><a href="https://github.com/pydantic/pydantic/commit/0cfe85396a9aa9f96113a7c3cba1feedb518a364"><code>0cfe853</code></a> Bump <code>pydantic-core</code> to v2.33.1 (<a href="https://redirect.github.com/pydantic/pydantic/issues/11678">#11678</a>)</li> <li>See full diff in <a href="https://github.com/pydantic/pydantic/compare/v2.11.1...v2.11.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pydantic&package-manager=pip&previous-version=2.11.1&new-version=2.11.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 4 ++-- requirements/dev.txt | 4 ++-- requirements/lint.txt | 4 ++-- requirements/test.txt | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index f96d0bf236e..df736e5c481 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -148,9 +148,9 @@ pycares==4.5.0 # via aiodns 
pycparser==2.22 # via cffi -pydantic==2.11.1 +pydantic==2.11.2 # via python-on-whales -pydantic-core==2.33.0 +pydantic-core==2.33.1 # via pydantic pyenchant==3.2.2 # via sphinxcontrib-spelling diff --git a/requirements/dev.txt b/requirements/dev.txt index 0888d764aa0..45ca59e2fa0 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -145,9 +145,9 @@ pycares==4.5.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.11.1 +pydantic==2.11.2 # via python-on-whales -pydantic-core==2.33.0 +pydantic-core==2.33.1 # via pydantic pygments==2.19.1 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index fb4680014e1..fd67ca91910 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -61,9 +61,9 @@ pycares==4.5.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.11.1 +pydantic==2.11.2 # via python-on-whales -pydantic-core==2.33.0 +pydantic-core==2.33.1 # via pydantic pygments==2.19.1 # via rich diff --git a/requirements/test.txt b/requirements/test.txt index 28159c29844..c67ee7a1f7f 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -81,9 +81,9 @@ pycares==4.5.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.11.1 +pydantic==2.11.2 # via python-on-whales -pydantic-core==2.33.0 +pydantic-core==2.33.1 # via pydantic pygments==2.19.1 # via rich From eb1abe4982ef9088efbb06ab6343796f44560754 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 7 Apr 2025 03:19:52 +0000 Subject: [PATCH 1322/1511] [PR #10698/25693469 backport][3.12] Bump yarl to 1.19.0 (#10699) --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 3fe85928b13..a2e94325437 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -44,5 +44,5 @@ typing-extensions==4.13.1 # via 
multidict uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in -yarl==1.18.3 +yarl==1.19.0 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index df736e5c481..a18b548d9ea 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -285,7 +285,7 @@ wait-for-it==2.3.0 # via -r requirements/test.in wheel==0.45.1 # via pip-tools -yarl==1.18.3 +yarl==1.19.0 # via -r requirements/runtime-deps.in # The following packages are considered to be unsafe in a requirements file: diff --git a/requirements/dev.txt b/requirements/dev.txt index 45ca59e2fa0..f8df5ff8903 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -276,7 +276,7 @@ wait-for-it==2.3.0 # via -r requirements/test.in wheel==0.45.1 # via pip-tools -yarl==1.18.3 +yarl==1.19.0 # via -r requirements/runtime-deps.in # The following packages are considered to be unsafe in a requirements file: diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index fddb7252229..69190e8f6b8 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -38,5 +38,5 @@ pycparser==2.22 # via cffi typing-extensions==4.13.1 # via multidict -yarl==1.18.3 +yarl==1.19.0 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index c67ee7a1f7f..e4bcbbae36b 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -138,5 +138,5 @@ uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpytho # via -r requirements/base.in wait-for-it==2.3.0 # via -r requirements/test.in -yarl==1.18.3 +yarl==1.19.0 # via -r requirements/runtime-deps.in From a2569f9366ea62016f90733fbc7a5e1cddbb8cce Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Apr 2025 11:30:43 +0000 Subject: [PATCH 1323/1511] Bump pytest-cov from 6.1.0 to 6.1.1 (#10703) 
MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pytest-cov](https://github.com/pytest-dev/pytest-cov) from 6.1.0 to 6.1.1. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pytest-dev/pytest-cov/blob/master/CHANGELOG.rst">pytest-cov's changelog</a>.</em></p> <blockquote> <h2>6.1.1 (2025-04-05)</h2> <ul> <li>Fixed breakage that occurs when <code>--cov-context</code> and the <code>no_cover</code> marker are used together.</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pytest-dev/pytest-cov/commit/9463242e3a7bc18a56b8f18c01b4dfb50087e5ed"><code>9463242</code></a> Bump version: 6.1.0 → 6.1.1</li> <li><a href="https://github.com/pytest-dev/pytest-cov/commit/7f2781b47fc9bd4a8e94ff86b4f69f5959c3d907"><code>7f2781b</code></a> Update changelog.</li> <li><a href="https://github.com/pytest-dev/pytest-cov/commit/a59548f3adcb757ea8afcb40d8d849af49f6e925"><code>a59548f</code></a> Allow the context plugin to check if the controller is running or not. Fixes ...</li> <li>See full diff in <a href="https://github.com/pytest-dev/pytest-cov/compare/v6.1.0...v6.1.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pytest-cov&package-manager=pip&previous-version=6.1.0&new-version=6.1.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index a18b548d9ea..ae114cd4abd 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -178,7 +178,7 @@ pytest-codspeed==3.2.0 # via # -r requirements/lint.in # -r 
requirements/test.in -pytest-cov==6.1.0 +pytest-cov==6.1.1 # via -r requirements/test.in pytest-mock==3.14.0 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index f8df5ff8903..265a6cb1f98 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -173,7 +173,7 @@ pytest-codspeed==3.2.0 # via # -r requirements/lint.in # -r requirements/test.in -pytest-cov==6.1.0 +pytest-cov==6.1.1 # via -r requirements/test.in pytest-mock==3.14.0 # via diff --git a/requirements/test.txt b/requirements/test.txt index e4bcbbae36b..4114bcc2b7c 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -96,7 +96,7 @@ pytest==8.3.5 # pytest-xdist pytest-codspeed==3.2.0 # via -r requirements/test.in -pytest-cov==6.1.0 +pytest-cov==6.1.1 # via -r requirements/test.in pytest-mock==3.14.0 # via -r requirements/test.in From da9e8e8bbf59388dbbc5b4c9b4f5f933bb4fe540 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 9 Apr 2025 11:28:13 +0000 Subject: [PATCH 1324/1511] Bump pydantic from 2.11.2 to 2.11.3 (#10710) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pydantic](https://github.com/pydantic/pydantic) from 2.11.2 to 2.11.3. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pydantic/pydantic/releases">pydantic's releases</a>.</em></p> <blockquote> <h2>v2.11.3 2025-04-08</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <h3>Packaging</h3> <ul> <li>Update V1 copy to v1.10.21 by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11706">pydantic/pydantic#11706</a></li> </ul> <h3>Fixes</h3> <ul> <li>Preserve field description when rebuilding model fields by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11698">pydantic/pydantic#11698</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pydantic/pydantic/compare/v2.11.2...v2.11.3">https://github.com/pydantic/pydantic/compare/v2.11.2...v2.11.3</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pydantic/pydantic/blob/main/HISTORY.md">pydantic's changelog</a>.</em></p> <blockquote> <h2>v2.11.3 (2025-04-08)</h2> <p><a href="https://github.com/pydantic/pydantic/releases/tag/v2.11.3">GitHub release</a></p> <h3>What's Changed</h3> <h4>Packaging</h4> <ul> <li>Update V1 copy to v1.10.21 by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11706">#11706</a></li> </ul> <h4>Fixes</h4> <ul> <li>Preserve field description when rebuilding model fields by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11698">#11698</a></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pydantic/pydantic/commit/876bf76f34f9ab313923a3428a4d4aaf00144f43"><code>876bf76</code></a> Prepare release v2.11.3 (<a 
href="https://redirect.github.com/pydantic/pydantic/issues/11716">#11716</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/4a8c9297052534c33411f0268527ab9bcb55bc1b"><code>4a8c929</code></a> Fix code annotation in <code>@computed_field</code> documentation (<a href="https://redirect.github.com/pydantic/pydantic/issues/11693">#11693</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/a8b8b5f6f34f27ef721d4d1f85a3747847762db9"><code>a8b8b5f</code></a> Fix source locations in documentation for external packages (<a href="https://redirect.github.com/pydantic/pydantic/issues/11710">#11710</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/f14b7da9cf7f5f802ca5b8076ba7d9fcde1352cf"><code>f14b7da</code></a> Preserve field description when rebuilding model fields (<a href="https://redirect.github.com/pydantic/pydantic/issues/11698">#11698</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/04fd6395c7165232785992a6ad0e817e9db6e590"><code>04fd639</code></a> Update V1 copy to v1.10.21 (<a href="https://redirect.github.com/pydantic/pydantic/issues/11706">#11706</a>)</li> <li>See full diff in <a href="https://github.com/pydantic/pydantic/compare/v2.11.2...v2.11.3">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pydantic&package-manager=pip&previous-version=2.11.2&new-version=2.11.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index ae114cd4abd..ea91f5c1b3d 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -148,7 +148,7 @@ pycares==4.5.0 # via aiodns pycparser==2.22 # via 
cffi -pydantic==2.11.2 +pydantic==2.11.3 # via python-on-whales pydantic-core==2.33.1 # via pydantic diff --git a/requirements/dev.txt b/requirements/dev.txt index 265a6cb1f98..7278bf42682 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -145,7 +145,7 @@ pycares==4.5.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.11.2 +pydantic==2.11.3 # via python-on-whales pydantic-core==2.33.1 # via pydantic diff --git a/requirements/lint.txt b/requirements/lint.txt index fd67ca91910..303d2756904 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -61,7 +61,7 @@ pycares==4.5.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.11.2 +pydantic==2.11.3 # via python-on-whales pydantic-core==2.33.1 # via pydantic diff --git a/requirements/test.txt b/requirements/test.txt index 4114bcc2b7c..8eb141f314c 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -81,7 +81,7 @@ pycares==4.5.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.11.2 +pydantic==2.11.3 # via python-on-whales pydantic-core==2.33.1 # via pydantic From 38373d6be8087001b4f6682b0568d2a03e149d6b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 10 Apr 2025 01:00:11 +0000 Subject: [PATCH 1325/1511] Bump multidict from 6.3.2 to 6.4.2 (#10712) Bumps [multidict](https://github.com/aio-libs/multidict) from 6.3.2 to 6.4.2. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/multidict/releases">multidict's releases</a>.</em></p> <blockquote> <h2>6.4.2</h2> <h2>Bug fixes</h2> <ul> <li> <p>Fixed a segmentation fault when creating subclassed :py:class:<code>~multidict.MultiDict</code> objects on Python < 3.11 -- by :user:<code>bdraco</code>.</p> <p>The problem first appeared in 6.4.0</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/multidict/issues/1141">#1141</a>.</p> </li> </ul> <hr /> <h2>6.4.1</h2> <p>No change release of 6.4.0 since the attestations failed to upload to GitHub</p> <hr /> <h2>6.4.0</h2> <h2>Bug fixes</h2> <ul> <li> <p>Fixed a memory leak creating new :class:<code>~multidict.istr</code> objects -- by :user:<code>bdraco</code>.</p> <p>The leak was introduced in 6.3.0</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/multidict/issues/1133">#1133</a>.</p> </li> <li> <p>Fixed reference counting when calling :py:meth:<code>multidict.MultiDict.update</code> -- by :user:<code>bdraco</code>.</p> <p>The leak was introduced in 4.4.0</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/multidict/issues/1135">#1135</a>.</p> </li> </ul> <h2>Features</h2> <ul> <li> <p>Switched C Extension to use heap types and the module state.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/multidict/issues/1125">#1125</a>.</p> </li> <li> <p>Started building armv7l wheels -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em></p> </li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/multidict/blob/master/CHANGES.rst">multidict's changelog</a>.</em></p> <blockquote> <h1>6.4.2</h1> <p><em>(2025-04-09)</em></p> <h2>Bug fixes</h2> <ul> <li> <p>Fixed a segmentation fault when creating subclassed :py:class:<code>~multidict.MultiDict</code> objects on Python < 3.11 -- by :user:<code>bdraco</code>.</p> <p>The problem first appeared in 6.4.0</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1141</code>.</p> </li> </ul> <hr /> <h1>6.4.1</h1> <p><em>(2025-04-09)</em></p> <p>No significant changes.</p> <hr /> <h1>6.4.0</h1> <p><em>(2025-04-09)</em></p> <h2>Bug fixes</h2> <ul> <li> <p>Fixed a memory leak creating new :class:<code>~multidict.istr</code> objects -- by :user:<code>bdraco</code>.</p> <p>The leak was introduced in 6.3.0</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1133</code>.</p> </li> <li> <p>Fixed reference counting when calling :py:meth:<code>multidict.MultiDict.update</code> -- by :user:<code>bdraco</code>.</p> </li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/multidict/commit/fe79f9747450f7e52451c29cff8fb93d91ea92ee"><code>fe79f97</code></a> Release 6.4.2 (<a href="https://redirect.github.com/aio-libs/multidict/issues/1142">#1142</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/21a6e9302a793c6e5df3c3372b7a1867caf23657"><code>21a6e93</code></a> Fix segfault in PyType_GetModuleByDef for Python < 3.11 (<a href="https://redirect.github.com/aio-libs/multidict/issues/1141">#1141</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/2c0d4584ceb61c891ecf2409ea5c5c80672a7850"><code>2c0d458</code></a> Release 6.4.1 (<a href="https://redirect.github.com/aio-libs/multidict/issues/1139">#1139</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/caf98ba99d850501a196151b04141f326d05bded"><code>caf98ba</code></a> Increase deploy timeout to 15 minutes (<a href="https://redirect.github.com/aio-libs/multidict/issues/1138">#1138</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/c707925ef77386ee6bbf2e57dae600e1a92a1b8f"><code>c707925</code></a> Release 6.4.0 (<a href="https://redirect.github.com/aio-libs/multidict/issues/1137">#1137</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/78761cf0efdbd69d1caa72e37fc997f67aef0809"><code>78761cf</code></a> Fix memory leak creating new istr objects (<a href="https://redirect.github.com/aio-libs/multidict/issues/1133">#1133</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/e1dee30d95c222fdc3ac768926256f6e1a620fbf"><code>e1dee30</code></a> Start building armv7l wheels (<a href="https://redirect.github.com/aio-libs/multidict/issues/1127">#1127</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/f7febaab5cbd586796d375db588334e0e80a5d48"><code>f7febaa</code></a> Fix ref leak when calling update (<a href="https://redirect.github.com/aio-libs/multidict/issues/1135">#1135</a>)</li> 
<li><a href="https://github.com/aio-libs/multidict/commit/9850454cd1a8e569cf44eab98e50a66f107e6096"><code>9850454</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/aio-libs/multidict/issues/1129">#1129</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/910a498a359d28ddffd30a041230e754317190cc"><code>910a498</code></a> Refactor test_leaks to drop dead code (<a href="https://redirect.github.com/aio-libs/multidict/issues/1136">#1136</a>)</li> <li>Additional commits viewable in <a href="https://github.com/aio-libs/multidict/compare/v6.3.2...v6.4.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=multidict&package-manager=pip&previous-version=6.3.2&new-version=6.4.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/cython.txt | 2 +- requirements/dev.txt | 2 +- requirements/multidict.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index a2e94325437..1c553cd3875 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -26,7 +26,7 @@ gunicorn==23.0.0 # via -r requirements/base.in idna==3.4 # via yarl -multidict==6.3.2 +multidict==6.4.2 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/constraints.txt b/requirements/constraints.txt index ea91f5c1b3d..acfa6facf51 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -111,7 +111,7 @@ markupsafe==3.0.2 # via jinja2 mdurl==0.1.2 # via markdown-it-py -multidict==6.3.2 +multidict==6.4.2 # via # -r requirements/multidict.in # -r requirements/runtime-deps.in diff --git a/requirements/cython.txt b/requirements/cython.txt index 7a3b4737f54..e472d1de6dc 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -6,7 +6,7 @@ # 
cython==3.0.12 # via -r requirements/cython.in -multidict==6.3.2 +multidict==6.4.2 # via -r requirements/multidict.in typing-extensions==4.13.1 # via multidict diff --git a/requirements/dev.txt b/requirements/dev.txt index 7278bf42682..ca4ad7751a1 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -109,7 +109,7 @@ markupsafe==3.0.2 # via jinja2 mdurl==0.1.2 # via markdown-it-py -multidict==6.3.2 +multidict==6.4.2 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/multidict.txt b/requirements/multidict.txt index 3237ab6c359..70a4468156f 100644 --- a/requirements/multidict.txt +++ b/requirements/multidict.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/multidict.txt --resolver=backtracking --strip-extras requirements/multidict.in # -multidict==6.3.2 +multidict==6.4.2 # via -r requirements/multidict.in typing-extensions==4.13.1 # via multidict diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 69190e8f6b8..227515f3f7a 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -24,7 +24,7 @@ frozenlist==1.5.0 # aiosignal idna==3.4 # via yarl -multidict==6.3.2 +multidict==6.4.2 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/test.txt b/requirements/test.txt index 8eb141f314c..13cb2904eb1 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -57,7 +57,7 @@ markdown-it-py==3.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -multidict==6.3.2 +multidict==6.4.2 # via # -r requirements/runtime-deps.in # yarl From 87ada6b878c2f05acf85d9940ac4c1d7c91c3d88 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 10 Apr 2025 23:26:25 +0000 Subject: [PATCH 1326/1511] Bump urllib3 from 2.3.0 to 2.4.0 (#10715) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [urllib3](https://github.com/urllib3/urllib3) from 2.3.0 to 2.4.0. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/urllib3/urllib3/releases">urllib3's releases</a>.</em></p> <blockquote> <h2>2.4.0</h2> <h2>🚀 urllib3 is fundraising for HTTP/2 support</h2> <p><a href="https://sethmlarson.dev/urllib3-is-fundraising-for-http2-support">urllib3 is raising ~$40,000 USD</a> to release HTTP/2 support and ensure long-term sustainable maintenance of the project after a sharp decline in financial support. If your company or organization uses Python and would benefit from HTTP/2 support in Requests, pip, cloud SDKs, and thousands of other projects <a href="https://opencollective.com/urllib3">please consider contributing financially</a> to ensure HTTP/2 support is developed sustainably and maintained for the long-haul.</p> <p>Thank you for your support.</p> <h1>Features</h1> <ul> <li>Applied PEP 639 by specifying the license fields in pyproject.toml. (<a href="https://redirect.github.com/urllib3/urllib3/issues/3522">#3522</a>)</li> <li>Updated exceptions to save and restore more properties during the pickle/serialization process. (<a href="https://redirect.github.com/urllib3/urllib3/issues/3567">#3567</a>)</li> <li>Added <code>verify_flags</code> option to <code>create_urllib3_context</code> with a default of <code>VERIFY_X509_PARTIAL_CHAIN</code> and <code>VERIFY_X509_STRICT</code> for Python 3.13+. (<a href="https://redirect.github.com/urllib3/urllib3/issues/3571">#3571</a>)</li> </ul> <h1>Bugfixes</h1> <ul> <li>Fixed a bug with partial reads of streaming data in Emscripten. (<a href="https://redirect.github.com/urllib3/urllib3/issues/3555">#3555</a>)</li> </ul> <h1>Misc</h1> <ul> <li>Switched to uv for installing development dependecies. (<a href="https://redirect.github.com/urllib3/urllib3/issues/3550">#3550</a>)</li> <li>Removed the <code>multiple.intoto.jsonl</code> asset from GitHub releases. Attestation of release files since v2.3.0 can be found on PyPI. 
(<a href="https://redirect.github.com/urllib3/urllib3/issues/3566">#3566</a>)</li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/urllib3/urllib3/blob/main/CHANGES.rst">urllib3's changelog</a>.</em></p> <blockquote> <h1>2.4.0 (2025-04-10)</h1> <h2>Features</h2> <ul> <li>Applied PEP 639 by specifying the license fields in pyproject.toml. (<code>[#3522](https://github.com/urllib3/urllib3/issues/3522) <https://github.com/urllib3/urllib3/issues/3522></code>__)</li> <li>Updated exceptions to save and restore more properties during the pickle/serialization process. (<code>[#3567](https://github.com/urllib3/urllib3/issues/3567) <https://github.com/urllib3/urllib3/issues/3567></code>__)</li> <li>Added <code>verify_flags</code> option to <code>create_urllib3_context</code> with a default of <code>VERIFY_X509_PARTIAL_CHAIN</code> and <code>VERIFY_X509_STRICT</code> for Python 3.13+. (<code>[#3571](https://github.com/urllib3/urllib3/issues/3571) <https://github.com/urllib3/urllib3/issues/3571></code>__)</li> </ul> <h2>Bugfixes</h2> <ul> <li>Fixed a bug with partial reads of streaming data in Emscripten. (<code>[#3555](https://github.com/urllib3/urllib3/issues/3555) <https://github.com/urllib3/urllib3/issues/3555></code>__)</li> </ul> <h2>Misc</h2> <ul> <li>Switched to uv for installing development dependecies. (<code>[#3550](https://github.com/urllib3/urllib3/issues/3550) <https://github.com/urllib3/urllib3/issues/3550></code>__)</li> <li>Removed the <code>multiple.intoto.jsonl</code> asset from GitHub releases. Attestation of release files since v2.3.0 can be found on PyPI. 
(<code>[#3566](https://github.com/urllib3/urllib3/issues/3566) <https://github.com/urllib3/urllib3/issues/3566></code>__)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/urllib3/urllib3/commit/a5ff7ac3bbb8659e2ec3ed41dd43889f06a7d7bc"><code>a5ff7ac</code></a> Release 2.4.0</li> <li><a href="https://github.com/urllib3/urllib3/commit/a135db29f72f828b0ef7314b856d19696a6f48ba"><code>a135db2</code></a> Upgrade memray and coverage to fix macOS tests (<a href="https://redirect.github.com/urllib3/urllib3/issues/3589">#3589</a>)</li> <li><a href="https://github.com/urllib3/urllib3/commit/8f40e71612505a9985b0a58ad793cd84ec97614a"><code>8f40e71</code></a> Upgrade the publishing action to get correct licensing info on PyPI (<a href="https://redirect.github.com/urllib3/urllib3/issues/3585">#3585</a>)</li> <li><a href="https://github.com/urllib3/urllib3/commit/3ff4e49ddf889554cf295b4a2e1189d066b60c71"><code>3ff4e49</code></a> Add a link to the 2024 annual report (<a href="https://redirect.github.com/urllib3/urllib3/issues/3586">#3586</a>)</li> <li><a href="https://github.com/urllib3/urllib3/commit/75709c1dbd6770618f061fd0b8a6950c7741f17b"><code>75709c1</code></a> Set verify flags in <code>create_urllib3_context</code> (<a href="https://redirect.github.com/urllib3/urllib3/issues/3577">#3577</a>)</li> <li><a href="https://github.com/urllib3/urllib3/commit/5c8f82a2f1c1b7a8360f0c84b5a88f25df070811"><code>5c8f82a</code></a> Bump astral-sh/setup-uv from 5.3.0 to 5.4.1 (<a href="https://redirect.github.com/urllib3/urllib3/issues/3580">#3580</a>)</li> <li><a href="https://github.com/urllib3/urllib3/commit/42e90d894b30ef8b897708eb1cdfa24e83cf5067"><code>42e90d8</code></a> Bump actions/setup-python from 5.4.0 to 5.5.0 (<a href="https://redirect.github.com/urllib3/urllib3/issues/3579">#3579</a>)</li> <li><a href="https://github.com/urllib3/urllib3/commit/3e8f2db735dcaced6a3b777aa1966f40c018af7c"><code>3e8f2db</code></a> Stop 
using Ubuntu 20.04 and 22.04 in CI (<a href="https://redirect.github.com/urllib3/urllib3/issues/3570">#3570</a>)</li> <li><a href="https://github.com/urllib3/urllib3/commit/e29db82a6df5f9a9acbb6997899f93bda79bb61e"><code>e29db82</code></a> Update exceptions to have more of their attributes pickled (<a href="https://redirect.github.com/urllib3/urllib3/issues/3572">#3572</a>)</li> <li><a href="https://github.com/urllib3/urllib3/commit/f8a0c4360ad67aefd79317279ee90c72d5d18697"><code>f8a0c43</code></a> Add PyPy 3.11 to CI</li> <li>Additional commits viewable in <a href="https://github.com/urllib3/urllib3/compare/2.3.0...2.4.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=urllib3&package-manager=pip&previous-version=2.3.0&new-version=2.4.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index acfa6facf51..7f0243cc1d0 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -271,7 +271,7 @@ typing-inspection==0.4.0 # via pydantic uritemplate==4.1.1 # via gidgethub -urllib3==2.3.0 +urllib3==2.4.0 # via requests uvloop==0.21.0 ; platform_system != "Windows" # via diff --git a/requirements/dev.txt b/requirements/dev.txt index ca4ad7751a1..d501d2dfcbd 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -262,7 +262,7 @@ typing-inspection==0.4.0 # via pydantic uritemplate==4.1.1 # via gidgethub -urllib3==2.3.0 +urllib3==2.4.0 # via requests uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpython" # via diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index dfd7f09765d..fe5d7e5708d 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -72,7 +72,7 @@ 
towncrier==23.11.0 # via # -r requirements/doc.in # sphinxcontrib-towncrier -urllib3==2.3.0 +urllib3==2.4.0 # via requests # The following packages are considered to be unsafe in a requirements file: diff --git a/requirements/doc.txt b/requirements/doc.txt index 15356c89a9e..086c945725e 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -65,7 +65,7 @@ towncrier==23.11.0 # via # -r requirements/doc.in # sphinxcontrib-towncrier -urllib3==2.3.0 +urllib3==2.4.0 # via requests # The following packages are considered to be unsafe in a requirements file: From abb3e87f3430f3d4e95a33f62f79882fb065325b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 10 Apr 2025 23:51:56 +0000 Subject: [PATCH 1327/1511] Bump multidict from 6.4.2 to 6.4.3 (#10716) Bumps [multidict](https://github.com/aio-libs/multidict) from 6.4.2 to 6.4.3. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/multidict/releases">multidict's releases</a>.</em></p> <blockquote> <h2>6.4.3</h2> <h2>Bug fixes</h2> <ul> <li> <p>Fixed building the library in debug mode.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/multidict/issues/1144">#1144</a>.</p> </li> <li> <p>Fixed custom <code>PyType_GetModuleByDef()</code> when non-heap type object was passed.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/multidict/issues/1147">#1147</a>.</p> </li> </ul> <h2>Packaging updates and notes for downstreams</h2> <ul> <li> <p>Added the ability to build in debug mode by setting :envvar:<code>MULTIDICT_DEBUG_BUILD</code> in the environment -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/multidict/issues/1145">#1145</a>.</p> </li> </ul> <hr /> </blockquote> </details> <details> <summary>Changelog</summary> 
<p><em>Sourced from <a href="https://github.com/aio-libs/multidict/blob/master/CHANGES.rst">multidict's changelog</a>.</em></p> <blockquote> <h1>6.4.3</h1> <p><em>(2025-04-10)</em></p> <h2>Bug fixes</h2> <ul> <li> <p>Fixed building the library in debug mode.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1144</code>.</p> </li> <li> <p>Fixed custom <code>PyType_GetModuleByDef()</code> when non-heap type object was passed.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1147</code>.</p> </li> </ul> <h2>Packaging updates and notes for downstreams</h2> <ul> <li> <p>Added the ability to build in debug mode by setting :envvar:<code>MULTIDICT_DEBUG_BUILD</code> in the environment -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1145</code>.</p> </li> </ul> <hr /> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/multidict/commit/5f64e68142df6b4b3075b9df6d7b80b50e4a26eb"><code>5f64e68</code></a> Release 6.4.3 (<a href="https://redirect.github.com/aio-libs/multidict/issues/1148">#1148</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/28b7b4b3361b828ada967a5c045abb289c62ee1a"><code>28b7b4b</code></a> Run tests in debug mode as well (<a href="https://redirect.github.com/aio-libs/multidict/issues/1146">#1146</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/cf690d5e377b16eef63ec96f81a1b536152928d3"><code>cf690d5</code></a> Fix custom PyType_GetModuleByDef when non-heap type object was passed (<a href="https://redirect.github.com/aio-libs/multidict/issues/1147">#1147</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/57c09c058b04558620a864d78ded2a591cafc66e"><code>57c09c0</code></a> Fix the library building in debug mode (<a href="https://redirect.github.com/aio-libs/multidict/issues/1144">#1144</a>)</li> <li>See full diff in <a 
href="https://github.com/aio-libs/multidict/compare/v6.4.2...v6.4.3">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=multidict&package-manager=pip&previous-version=6.4.2&new-version=6.4.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/cython.txt | 2 +- requirements/dev.txt | 2 +- requirements/multidict.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 1c553cd3875..08beaa66522 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -26,7 +26,7 @@ gunicorn==23.0.0 # via -r requirements/base.in idna==3.4 # via yarl -multidict==6.4.2 +multidict==6.4.3 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 7f0243cc1d0..e8a2d85b2bb 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -111,7 +111,7 @@ markupsafe==3.0.2 # via jinja2 mdurl==0.1.2 # via markdown-it-py -multidict==6.4.2 +multidict==6.4.3 # via # -r requirements/multidict.in # -r requirements/runtime-deps.in diff --git a/requirements/cython.txt b/requirements/cython.txt index e472d1de6dc..d5661f8fff3 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -6,7 +6,7 @@ # 
cython==3.0.12 # via -r requirements/cython.in -multidict==6.4.2 +multidict==6.4.3 # via -r requirements/multidict.in typing-extensions==4.13.1 # via multidict diff --git a/requirements/dev.txt b/requirements/dev.txt index d501d2dfcbd..5aa7fd7c174 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -109,7 +109,7 @@ markupsafe==3.0.2 # via jinja2 mdurl==0.1.2 # via markdown-it-py -multidict==6.4.2 +multidict==6.4.3 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/multidict.txt b/requirements/multidict.txt index 70a4468156f..64a6ea16b87 100644 --- a/requirements/multidict.txt +++ b/requirements/multidict.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/multidict.txt --resolver=backtracking --strip-extras requirements/multidict.in # -multidict==6.4.2 +multidict==6.4.3 # via -r requirements/multidict.in typing-extensions==4.13.1 # via multidict diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 227515f3f7a..3fcc493e191 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -24,7 +24,7 @@ frozenlist==1.5.0 # aiosignal idna==3.4 # via yarl -multidict==6.4.2 +multidict==6.4.3 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/test.txt b/requirements/test.txt index 13cb2904eb1..3b16120500c 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -57,7 +57,7 @@ markdown-it-py==3.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -multidict==6.4.2 +multidict==6.4.3 # via # -r requirements/runtime-deps.in # yarl From 5268479e62179542539d968222081bef1569956c Mon Sep 17 00:00:00 2001 From: Tim Menninger <tmenninger22@gmail.com> Date: Mon, 14 Apr 2025 14:06:54 -0700 Subject: [PATCH 1328/1511] [PR #10700/ceeca6a backport][3.12] Add support for switching the zlib implementation (#10723) --- CHANGES/9798.feature.rst | 5 + aiohttp/__init__.py | 2 + aiohttp/_websocket/reader_py.py | 18 +- aiohttp/_websocket/writer.py | 9 +- 
aiohttp/abc.py | 3 +- aiohttp/compression_utils.py | 147 +++++++++++-- aiohttp/http_writer.py | 3 +- aiohttp/multipart.py | 3 +- aiohttp/web_response.py | 5 +- docs/client_reference.rst | 24 +++ docs/conf.py | 2 + docs/spelling_wordlist.txt | 1 + docs/web_reference.rst | 7 +- requirements/dev.txt | 4 + requirements/lint.in | 2 + requirements/lint.txt | 4 + requirements/test.in | 2 + requirements/test.txt | 4 + tests/conftest.py | 16 ++ tests/test_client_functional.py | 5 + tests/test_client_request.py | 7 +- tests/test_client_ws_functional.py | 2 + tests/test_compression_utils.py | 18 +- tests/test_http_writer.py | 291 +++++++++++++++++++++++++- tests/test_multipart.py | 5 +- tests/test_web_functional.py | 40 ++-- tests/test_web_response.py | 32 ++- tests/test_web_sendfile_functional.py | 6 +- tests/test_websocket_parser.py | 15 +- tests/test_websocket_writer.py | 1 + 30 files changed, 606 insertions(+), 77 deletions(-) create mode 100644 CHANGES/9798.feature.rst diff --git a/CHANGES/9798.feature.rst b/CHANGES/9798.feature.rst new file mode 100644 index 00000000000..c1584b04491 --- /dev/null +++ b/CHANGES/9798.feature.rst @@ -0,0 +1,5 @@ +Allow user setting zlib compression backend -- by :user:`TimMenninger` + +This change allows the user to call :func:`aiohttp.set_zlib_backend()` with the +zlib compression module of their choice. Default behavior continues to use +the builtin ``zlib`` library. 
diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 66645143fc9..6321e713ed4 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -47,6 +47,7 @@ WSServerHandshakeError, request, ) +from .compression_utils import set_zlib_backend from .connector import ( AddrInfoType as AddrInfoType, SocketFactoryType as SocketFactoryType, @@ -183,6 +184,7 @@ "BasicAuth", "ChainMapProxy", "ETag", + "set_zlib_backend", # http "HttpVersion", "HttpVersion10", diff --git a/aiohttp/_websocket/reader_py.py b/aiohttp/_websocket/reader_py.py index 92ad47a52f0..19579bd39a8 100644 --- a/aiohttp/_websocket/reader_py.py +++ b/aiohttp/_websocket/reader_py.py @@ -238,15 +238,23 @@ def _feed_data(self, data: bytes) -> None: self._decompressobj = ZLibDecompressor( suppress_deflate_header=True ) + # XXX: It's possible that the zlib backend (isal is known to + # do this, maybe others too?) will return max_length bytes, + # but internally buffer more data such that the payload is + # >max_length, so we return one extra byte and if we're able + # to do that, then the message is too big. 
payload_merged = self._decompressobj.decompress_sync( - assembled_payload + WS_DEFLATE_TRAILING, self._max_msg_size + assembled_payload + WS_DEFLATE_TRAILING, + ( + self._max_msg_size + 1 + if self._max_msg_size + else self._max_msg_size + ), ) - if self._decompressobj.unconsumed_tail: - left = len(self._decompressobj.unconsumed_tail) + if self._max_msg_size and len(payload_merged) > self._max_msg_size: raise WebSocketError( WSCloseCode.MESSAGE_TOO_BIG, - f"Decompressed message size {self._max_msg_size + left}" - f" exceeds limit {self._max_msg_size}", + f"Decompressed message exceeds size limit {self._max_msg_size}", ) elif type(assembled_payload) is bytes: payload_merged = assembled_payload diff --git a/aiohttp/_websocket/writer.py b/aiohttp/_websocket/writer.py index fc2cf32b934..19163f9afdf 100644 --- a/aiohttp/_websocket/writer.py +++ b/aiohttp/_websocket/writer.py @@ -2,13 +2,12 @@ import asyncio import random -import zlib from functools import partial from typing import Any, Final, Optional, Union from ..base_protocol import BaseProtocol from ..client_exceptions import ClientConnectionResetError -from ..compression_utils import ZLibCompressor +from ..compression_utils import ZLibBackend, ZLibCompressor from .helpers import ( MASK_LEN, MSG_SIZE, @@ -95,7 +94,9 @@ async def send_frame( message = ( await compressobj.compress(message) + compressobj.flush( - zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH + ZLibBackend.Z_FULL_FLUSH + if self.notakeover + else ZLibBackend.Z_SYNC_FLUSH ) ).removesuffix(WS_DEFLATE_TRAILING) # Its critical that we do not return control to the event @@ -160,7 +161,7 @@ async def send_frame( def _make_compress_obj(self, compress: int) -> ZLibCompressor: return ZLibCompressor( - level=zlib.Z_BEST_SPEED, + level=ZLibBackend.Z_BEST_SPEED, wbits=-compress, max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE, ) diff --git a/aiohttp/abc.py b/aiohttp/abc.py index 5794a9108b0..3c4f8c61b00 100644 --- a/aiohttp/abc.py +++ 
b/aiohttp/abc.py @@ -1,7 +1,6 @@ import asyncio import logging import socket -import zlib from abc import ABC, abstractmethod from collections.abc import Sized from http.cookies import BaseCookie, Morsel @@ -219,7 +218,7 @@ async def drain(self) -> None: @abstractmethod def enable_compression( - self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY + self, encoding: str = "deflate", strategy: Optional[int] = None ) -> None: """Enable HTTP body compression""" diff --git a/aiohttp/compression_utils.py b/aiohttp/compression_utils.py index ebe8857f487..f08c3d9cdff 100644 --- a/aiohttp/compression_utils.py +++ b/aiohttp/compression_utils.py @@ -1,7 +1,15 @@ import asyncio +import sys import zlib from concurrent.futures import Executor -from typing import Optional, cast +from typing import Any, Final, Optional, Protocol, TypedDict, cast + +if sys.version_info >= (3, 12): + from collections.abc import Buffer +else: + from typing import Union + + Buffer = Union[bytes, bytearray, "memoryview[int]", "memoryview[bytes]"] try: try: @@ -16,14 +24,113 @@ MAX_SYNC_CHUNK_SIZE = 1024 +class ZLibCompressObjProtocol(Protocol): + def compress(self, data: Buffer) -> bytes: ... + def flush(self, mode: int = ..., /) -> bytes: ... + + +class ZLibDecompressObjProtocol(Protocol): + def decompress(self, data: Buffer, max_length: int = ...) -> bytes: ... + def flush(self, length: int = ..., /) -> bytes: ... + + @property + def eof(self) -> bool: ... + + +class ZLibBackendProtocol(Protocol): + MAX_WBITS: int + Z_FULL_FLUSH: int + Z_SYNC_FLUSH: int + Z_BEST_SPEED: int + Z_FINISH: int + + def compressobj( + self, + level: int = ..., + method: int = ..., + wbits: int = ..., + memLevel: int = ..., + strategy: int = ..., + zdict: Optional[Buffer] = ..., + ) -> ZLibCompressObjProtocol: ... + def decompressobj( + self, wbits: int = ..., zdict: Buffer = ... + ) -> ZLibDecompressObjProtocol: ... + + def compress( + self, data: Buffer, /, level: int = ..., wbits: int = ... 
+ ) -> bytes: ... + def decompress( + self, data: Buffer, /, wbits: int = ..., bufsize: int = ... + ) -> bytes: ... + + +class CompressObjArgs(TypedDict, total=False): + wbits: int + strategy: int + level: int + + +class ZLibBackendWrapper: + def __init__(self, _zlib_backend: ZLibBackendProtocol): + self._zlib_backend: ZLibBackendProtocol = _zlib_backend + + @property + def name(self) -> str: + return getattr(self._zlib_backend, "__name__", "undefined") + + @property + def MAX_WBITS(self) -> int: + return self._zlib_backend.MAX_WBITS + + @property + def Z_FULL_FLUSH(self) -> int: + return self._zlib_backend.Z_FULL_FLUSH + + @property + def Z_SYNC_FLUSH(self) -> int: + return self._zlib_backend.Z_SYNC_FLUSH + + @property + def Z_BEST_SPEED(self) -> int: + return self._zlib_backend.Z_BEST_SPEED + + @property + def Z_FINISH(self) -> int: + return self._zlib_backend.Z_FINISH + + def compressobj(self, *args: Any, **kwargs: Any) -> ZLibCompressObjProtocol: + return self._zlib_backend.compressobj(*args, **kwargs) + + def decompressobj(self, *args: Any, **kwargs: Any) -> ZLibDecompressObjProtocol: + return self._zlib_backend.decompressobj(*args, **kwargs) + + def compress(self, data: Buffer, *args: Any, **kwargs: Any) -> bytes: + return self._zlib_backend.compress(data, *args, **kwargs) + + def decompress(self, data: Buffer, *args: Any, **kwargs: Any) -> bytes: + return self._zlib_backend.decompress(data, *args, **kwargs) + + # Everything not explicitly listed in the Protocol we just pass through + def __getattr__(self, attrname: str) -> Any: + return getattr(self._zlib_backend, attrname) + + +ZLibBackend: ZLibBackendWrapper = ZLibBackendWrapper(zlib) + + +def set_zlib_backend(new_zlib_backend: ZLibBackendProtocol) -> None: + ZLibBackend._zlib_backend = new_zlib_backend + + def encoding_to_mode( encoding: Optional[str] = None, suppress_deflate_header: bool = False, ) -> int: if encoding == "gzip": - return 16 + zlib.MAX_WBITS + return 16 + ZLibBackend.MAX_WBITS - return 
-zlib.MAX_WBITS if suppress_deflate_header else zlib.MAX_WBITS + return -ZLibBackend.MAX_WBITS if suppress_deflate_header else ZLibBackend.MAX_WBITS class ZlibBaseHandler: @@ -45,7 +152,7 @@ def __init__( suppress_deflate_header: bool = False, level: Optional[int] = None, wbits: Optional[int] = None, - strategy: int = zlib.Z_DEFAULT_STRATEGY, + strategy: Optional[int] = None, executor: Optional[Executor] = None, max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE, ): @@ -58,12 +165,15 @@ def __init__( executor=executor, max_sync_chunk_size=max_sync_chunk_size, ) - if level is None: - self._compressor = zlib.compressobj(wbits=self._mode, strategy=strategy) - else: - self._compressor = zlib.compressobj( - wbits=self._mode, strategy=strategy, level=level - ) + self._zlib_backend: Final = ZLibBackendWrapper(ZLibBackend._zlib_backend) + + kwargs: CompressObjArgs = {} + kwargs["wbits"] = self._mode + if strategy is not None: + kwargs["strategy"] = strategy + if level is not None: + kwargs["level"] = level + self._compressor = self._zlib_backend.compressobj(**kwargs) self._compress_lock = asyncio.Lock() def compress_sync(self, data: bytes) -> bytes: @@ -92,8 +202,10 @@ async def compress(self, data: bytes) -> bytes: ) return self.compress_sync(data) - def flush(self, mode: int = zlib.Z_FINISH) -> bytes: - return self._compressor.flush(mode) + def flush(self, mode: Optional[int] = None) -> bytes: + return self._compressor.flush( + mode if mode is not None else self._zlib_backend.Z_FINISH + ) class ZLibDecompressor(ZlibBaseHandler): @@ -109,7 +221,8 @@ def __init__( executor=executor, max_sync_chunk_size=max_sync_chunk_size, ) - self._decompressor = zlib.decompressobj(wbits=self._mode) + self._zlib_backend: Final = ZLibBackendWrapper(ZLibBackend._zlib_backend) + self._decompressor = self._zlib_backend.decompressobj(wbits=self._mode) def decompress_sync(self, data: bytes, max_length: int = 0) -> bytes: return self._decompressor.decompress(data, max_length) @@ -141,14 
+254,6 @@ def flush(self, length: int = 0) -> bytes: def eof(self) -> bool: return self._decompressor.eof - @property - def unconsumed_tail(self) -> bytes: - return self._decompressor.unconsumed_tail - - @property - def unused_data(self) -> bytes: - return self._decompressor.unused_data - class BrotliDecompressor: # Supports both 'brotlipy' and 'Brotli' packages diff --git a/aiohttp/http_writer.py b/aiohttp/http_writer.py index e031a97708d..3e05628238d 100644 --- a/aiohttp/http_writer.py +++ b/aiohttp/http_writer.py @@ -2,7 +2,6 @@ import asyncio import sys -import zlib from typing import ( # noqa Any, Awaitable, @@ -80,7 +79,7 @@ def enable_chunking(self) -> None: self.chunked = True def enable_compression( - self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY + self, encoding: str = "deflate", strategy: Optional[int] = None ) -> None: self._compress = ZLibCompressor(encoding=encoding, strategy=strategy) diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index bd4d8ae1ddf..459cc321a1d 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -5,7 +5,6 @@ import sys import uuid import warnings -import zlib from collections import deque from types import TracebackType from typing import ( @@ -1028,7 +1027,7 @@ def enable_encoding(self, encoding: str) -> None: self._encoding = "quoted-printable" def enable_compression( - self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY + self, encoding: str = "deflate", strategy: Optional[int] = None ) -> None: self._compress = ZLibCompressor( encoding=encoding, diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 151fbea3473..8a940ef43bf 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -6,7 +6,6 @@ import math import time import warnings -import zlib from concurrent.futures import Executor from http import HTTPStatus from http.cookies import SimpleCookie @@ -82,7 +81,7 @@ class StreamResponse(BaseClass, HeadersMixin): _keep_alive: 
Optional[bool] = None _chunked: bool = False _compression: bool = False - _compression_strategy: int = zlib.Z_DEFAULT_STRATEGY + _compression_strategy: Optional[int] = None _compression_force: Optional[ContentCoding] = None _req: Optional["BaseRequest"] = None _payload_writer: Optional[AbstractStreamWriter] = None @@ -192,7 +191,7 @@ def enable_chunked_encoding(self, chunk_size: Optional[int] = None) -> None: def enable_compression( self, force: Optional[Union[bool, ContentCoding]] = None, - strategy: int = zlib.Z_DEFAULT_STRATEGY, + strategy: Optional[int] = None, ) -> None: """Enables response compression encoding.""" # Backwards compatibility for when force was a bool <0.17. diff --git a/docs/client_reference.rst b/docs/client_reference.rst index a99db06764b..8d01d726e1c 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -2163,6 +2163,30 @@ Utilities .. versionadded:: 3.0 +.. function:: set_zlib_backend(lib) + + Sets the compression backend for zlib-based operations. + + This function allows you to override the default zlib backend + used internally by passing a module that implements the standard + compression interface. + + The module should implement at minimum the exact interface offered by the + latest version of zlib. + + :param types.ModuleType lib: A module that implements the zlib-compatible compression API. + + Example usage:: + + import zlib_ng.zlib_ng as zng + import aiohttp + + aiohttp.set_zlib_backend(zng) + + .. note:: aiohttp has been tested internally with :mod:`zlib`, :mod:`zlib_ng.zlib_ng`, and :mod:`isal.isal_zlib`. + + .. 
versionadded:: 3.12 + FormData ^^^^^^^^ diff --git a/docs/conf.py b/docs/conf.py index 595f02efb89..84dadfc8442 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -85,6 +85,8 @@ "aiohttpdemos": ("https://aiohttp-demos.readthedocs.io/en/latest/", None), "aiojobs": ("https://aiojobs.readthedocs.io/en/stable/", None), "aiohappyeyeballs": ("https://aiohappyeyeballs.readthedocs.io/en/latest/", None), + "isal": ("https://python-isal.readthedocs.io/en/stable/", None), + "zlib_ng": ("https://python-zlib-ng.readthedocs.io/en/stable/", None), } # Add any paths that contain templates here, relative to this directory. diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index 59ea99c40bb..f2321adb708 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -375,3 +375,4 @@ wss www xxx yarl +zlib diff --git a/docs/web_reference.rst b/docs/web_reference.rst index 62edd4c24aa..f2954b06b51 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -669,7 +669,7 @@ and :ref:`aiohttp-web-signals` handlers:: .. seealso:: :meth:`enable_compression` - .. method:: enable_compression(force=None, strategy=zlib.Z_DEFAULT_STRATEGY) + .. method:: enable_compression(force=None, strategy=None) Enable compression. @@ -680,7 +680,10 @@ and :ref:`aiohttp-web-signals` handlers:: :class:`ContentCoding`. *strategy* accepts a :mod:`zlib` compression strategy. - See :func:`zlib.compressobj` for possible values. + See :func:`zlib.compressobj` for possible values, or refer to the + docs for the zlib of your using, should you use :func:`aiohttp.set_zlib_backend` + to change zlib backend. If ``None``, the default value adopted by + your zlib backend will be used where applicable. .. 
seealso:: :attr:`compression` diff --git a/requirements/dev.txt b/requirements/dev.txt index 5aa7fd7c174..90d5c88acb5 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -99,6 +99,8 @@ incremental==24.7.2 # via towncrier iniconfig==2.1.0 # via pytest +isal==1.7.2 + # via -r requirements/test.in jinja2==3.1.6 # via # sphinx @@ -278,6 +280,8 @@ wheel==0.45.1 # via pip-tools yarl==1.19.0 # via -r requirements/runtime-deps.in +zlib_ng==0.5.1 + # via -r requirements/test.in # The following packages are considered to be unsafe in a requirements file: pip==25.0.1 diff --git a/requirements/lint.in b/requirements/lint.in index 4759dadc6a9..fe996d00176 100644 --- a/requirements/lint.in +++ b/requirements/lint.in @@ -1,6 +1,7 @@ aiodns blockbuster freezegun +isal mypy; implementation_name == "cpython" pre-commit pytest @@ -11,3 +12,4 @@ slotscheck trustme uvloop; platform_system != "Windows" valkey +zlib_ng diff --git a/requirements/lint.txt b/requirements/lint.txt index 303d2756904..b53cccca9f6 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -39,6 +39,8 @@ idna==3.7 # via trustme iniconfig==2.1.0 # via pytest +isal==1.7.2 + # via -r requirements/lint.in markdown-it-py==3.0.0 # via rich mdurl==0.1.2 @@ -111,3 +113,5 @@ valkey==6.1.0 # via -r requirements/lint.in virtualenv==20.30.0 # via pre-commit +zlib-ng==0.5.1 + # via -r requirements/lint.in diff --git a/requirements/test.in b/requirements/test.in index c6547bee5e5..91b5e115952 100644 --- a/requirements/test.in +++ b/requirements/test.in @@ -3,6 +3,7 @@ blockbuster coverage freezegun +isal mypy; implementation_name == "cpython" proxy.py >= 2.4.4rc5 pytest @@ -15,3 +16,4 @@ re-assert setuptools-git trustme; platform_machine != "i686" # no 32-bit wheels wait-for-it +zlib_ng diff --git a/requirements/test.txt b/requirements/test.txt index 3b16120500c..4953cdbd09a 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -53,6 +53,8 @@ idna==3.4 # yarl iniconfig==2.1.0 # via pytest 
+isal==1.7.2 + # via -r requirements/test.in markdown-it-py==3.0.0 # via rich mdurl==0.1.2 @@ -140,3 +142,5 @@ wait-for-it==2.3.0 # via -r requirements/test.in yarl==1.19.0 # via -r requirements/runtime-deps.in +zlib_ng==0.5.1 + # via -r requirements/test.in diff --git a/tests/conftest.py b/tests/conftest.py index 5bca52fe67c..be763400f45 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -4,6 +4,7 @@ import socket import ssl import sys +import zlib from hashlib import md5, sha1, sha256 from pathlib import Path from tempfile import TemporaryDirectory @@ -11,10 +12,13 @@ from unittest import mock from uuid import uuid4 +import isal.isal_zlib import pytest +import zlib_ng.zlib_ng from blockbuster import blockbuster_ctx from aiohttp.client_proto import ResponseHandler +from aiohttp.compression_utils import ZLibBackend, ZLibBackendProtocol, set_zlib_backend from aiohttp.http import WS_KEY from aiohttp.test_utils import get_unused_port_socket, loop_context @@ -295,3 +299,15 @@ def unused_port_socket() -> Generator[socket.socket, None, None]: yield s finally: s.close() + + +@pytest.fixture(params=[zlib, zlib_ng.zlib_ng, isal.isal_zlib]) +def parametrize_zlib_backend( + request: pytest.FixtureRequest, +) -> Generator[None, None, None]: + original_backend: ZLibBackendProtocol = ZLibBackend._zlib_backend + set_zlib_backend(request.param) + + yield + + set_zlib_backend(original_backend) diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 9ffe5f523f3..0ea3ce1619a 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -2040,6 +2040,7 @@ async def expect_handler(request): assert expect_called +@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_encoding_deflate(aiohttp_client) -> None: async def handler(request): resp = web.Response(text="text") @@ -2058,6 +2059,7 @@ async def handler(request): resp.close() +@pytest.mark.usefixtures("parametrize_zlib_backend") async def 
test_encoding_deflate_nochunk(aiohttp_client) -> None: async def handler(request): resp = web.Response(text="text") @@ -2075,6 +2077,7 @@ async def handler(request): resp.close() +@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_encoding_gzip(aiohttp_client) -> None: async def handler(request): resp = web.Response(text="text") @@ -2093,6 +2096,7 @@ async def handler(request): resp.close() +@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_encoding_gzip_write_by_chunks(aiohttp_client) -> None: async def handler(request): resp = web.StreamResponse() @@ -2113,6 +2117,7 @@ async def handler(request): resp.close() +@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_encoding_gzip_nochunk(aiohttp_client) -> None: async def handler(request): resp = web.Response(text="text") diff --git a/tests/test_client_request.py b/tests/test_client_request.py index f86ff5d7587..6454b42c89b 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -4,7 +4,6 @@ import pathlib import sys import urllib.parse -import zlib from http.cookies import BaseCookie, Morsel, SimpleCookie from typing import Any, Callable, Dict, Iterable, Optional from unittest import mock @@ -23,6 +22,7 @@ _gen_default_accept_encoding, _merge_ssl_params, ) +from aiohttp.compression_utils import ZLibBackend from aiohttp.http import HttpVersion10, HttpVersion11 from aiohttp.test_utils import make_mocked_coro @@ -800,6 +800,7 @@ async def test_bytes_data(loop, conn) -> None: resp.close() +@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_content_encoding(loop, conn) -> None: req = ClientRequest( "post", URL("http://python.org/"), data="foo", compress="deflate", loop=loop @@ -826,6 +827,7 @@ async def test_content_encoding_dont_set_headers_if_no_body(loop, conn) -> None: resp.close() +@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_content_encoding_header(loop, conn) -> None: req = ClientRequest( "post", @@ 
-925,8 +927,9 @@ async def test_file_upload_not_chunked(loop) -> None: await req.close() +@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_precompressed_data_stays_intact(loop) -> None: - data = zlib.compress(b"foobar") + data = ZLibBackend.compress(b"foobar") req = ClientRequest( "post", URL("http://python.org/"), diff --git a/tests/test_client_ws_functional.py b/tests/test_client_ws_functional.py index 0ca57ab3ab2..7b6bd032244 100644 --- a/tests/test_client_ws_functional.py +++ b/tests/test_client_ws_functional.py @@ -953,6 +953,7 @@ async def delayed_send_frame( assert cancelled is True +@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_send_recv_compress(aiohttp_client: AiohttpClient) -> None: async def handler(request: web.Request) -> web.WebSocketResponse: ws = web.WebSocketResponse() @@ -978,6 +979,7 @@ async def handler(request: web.Request) -> web.WebSocketResponse: assert resp.get_extra_info("socket") is None +@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_send_recv_compress_wbits(aiohttp_client) -> None: async def handler(request): ws = web.WebSocketResponse() diff --git a/tests/test_compression_utils.py b/tests/test_compression_utils.py index 047a4ff7cf0..fdaf91b36a0 100644 --- a/tests/test_compression_utils.py +++ b/tests/test_compression_utils.py @@ -1,22 +1,34 @@ """Tests for compression utils.""" -from aiohttp.compression_utils import ZLibCompressor, ZLibDecompressor +import pytest +from aiohttp.compression_utils import ZLibBackend, ZLibCompressor, ZLibDecompressor + +@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_compression_round_trip_in_executor() -> None: """Ensure that compression and decompression work correctly in the executor.""" - compressor = ZLibCompressor(max_sync_chunk_size=1) + compressor = ZLibCompressor( + strategy=ZLibBackend.Z_DEFAULT_STRATEGY, max_sync_chunk_size=1 + ) + assert type(compressor._compressor) is type(ZLibBackend.compressobj()) decompressor 
= ZLibDecompressor(max_sync_chunk_size=1) + assert type(decompressor._decompressor) is type(ZLibBackend.decompressobj()) data = b"Hi" * 100 compressed_data = await compressor.compress(data) + compressor.flush() decompressed_data = await decompressor.decompress(compressed_data) assert data == decompressed_data +@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_compression_round_trip_in_event_loop() -> None: """Ensure that compression and decompression work correctly in the event loop.""" - compressor = ZLibCompressor(max_sync_chunk_size=10000) + compressor = ZLibCompressor( + strategy=ZLibBackend.Z_DEFAULT_STRATEGY, max_sync_chunk_size=10000 + ) + assert type(compressor._compressor) is type(ZLibBackend.compressobj()) decompressor = ZLibDecompressor(max_sync_chunk_size=10000) + assert type(decompressor._decompressor) is type(ZLibBackend.decompressobj()) data = b"Hi" * 100 compressed_data = await compressor.compress(data) + compressor.flush() decompressed_data = await decompressor.decompress(compressed_data) diff --git a/tests/test_http_writer.py b/tests/test_http_writer.py index 420816b3137..7f813692571 100644 --- a/tests/test_http_writer.py +++ b/tests/test_http_writer.py @@ -2,7 +2,7 @@ import array import asyncio import zlib -from typing import Generator, Iterable +from typing import Generator, Iterable, Union from unittest import mock import pytest @@ -10,6 +10,7 @@ from aiohttp import ClientConnectionResetError, hdrs, http from aiohttp.base_protocol import BaseProtocol +from aiohttp.compression_utils import ZLibBackend from aiohttp.http_writer import _serialize_headers from aiohttp.test_utils import make_mocked_coro @@ -61,6 +62,26 @@ def protocol(loop, transport): return protocol +def decompress(data: bytes) -> bytes: + d = ZLibBackend.decompressobj() + return d.decompress(data) + + +def decode_chunked(chunked: Union[bytes, bytearray]) -> bytes: + i = 0 + out = b"" + while i < len(chunked): + j = chunked.find(b"\r\n", i) + assert j != -1, 
"Malformed chunk" + size = int(chunked[i:j], 16) + if size == 0: + break + i = j + 2 + out += chunked[i : i + size] + i += size + 2 # skip \r\n after the chunk + return out + + def test_payloadwriter_properties(transport, protocol, loop) -> None: writer = http.StreamWriter(protocol, loop) assert writer.protocol == protocol @@ -112,6 +133,7 @@ async def test_write_payload_length(protocol, transport, loop) -> None: @pytest.mark.usefixtures("disable_writelines") +@pytest.mark.internal # Used for performance benchmarking async def test_write_large_payload_deflate_compression_data_in_eof( protocol: BaseProtocol, transport: asyncio.Transport, @@ -137,7 +159,42 @@ async def test_write_large_payload_deflate_compression_data_in_eof( assert zlib.decompress(content) == (b"data" * 4096) + payload +@pytest.mark.usefixtures("disable_writelines") +@pytest.mark.usefixtures("parametrize_zlib_backend") +async def test_write_large_payload_deflate_compression_data_in_eof_all_zlib( + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + msg = http.StreamWriter(protocol, loop) + msg.enable_compression("deflate") + + await msg.write(b"data" * 4096) + # Behavior depends on zlib backend, isal compress() returns b'' initially + # and the entire compressed bytes at flush() for this data + backend_to_write_called = { + "isal.isal_zlib": False, + "zlib": True, + "zlib_ng.zlib_ng": True, + } + assert transport.write.called == backend_to_write_called[ZLibBackend.name] # type: ignore[attr-defined] + chunks = [c[1][0] for c in list(transport.write.mock_calls)] # type: ignore[attr-defined] + transport.write.reset_mock() # type: ignore[attr-defined] + + # This payload compresses to 20447 bytes + payload = b"".join( + [bytes((*range(0, i), *range(i, 0, -1))) for i in range(255) for _ in range(64)] + ) + await msg.write_eof(payload) + chunks.extend([c[1][0] for c in list(transport.write.mock_calls)]) # type: ignore[attr-defined] + + assert all(chunks) 
+ content = b"".join(chunks) + assert ZLibBackend.decompress(content) == (b"data" * 4096) + payload + + @pytest.mark.usefixtures("enable_writelines") +@pytest.mark.internal # Used for performance benchmarking async def test_write_large_payload_deflate_compression_data_in_eof_writelines( protocol: BaseProtocol, transport: asyncio.Transport, @@ -164,6 +221,43 @@ async def test_write_large_payload_deflate_compression_data_in_eof_writelines( assert zlib.decompress(content) == (b"data" * 4096) + payload +@pytest.mark.usefixtures("enable_writelines") +@pytest.mark.usefixtures("parametrize_zlib_backend") +async def test_write_large_payload_deflate_compression_data_in_eof_writelines_all_zlib( + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + msg = http.StreamWriter(protocol, loop) + msg.enable_compression("deflate") + + await msg.write(b"data" * 4096) + # Behavior depends on zlib backend, isal compress() returns b'' initially + # and the entire compressed bytes at flush() for this data + backend_to_write_called = { + "isal.isal_zlib": False, + "zlib": True, + "zlib_ng.zlib_ng": True, + } + assert transport.write.called == backend_to_write_called[ZLibBackend.name] # type: ignore[attr-defined] + chunks = [c[1][0] for c in list(transport.write.mock_calls)] # type: ignore[attr-defined] + transport.write.reset_mock() # type: ignore[attr-defined] + assert not transport.writelines.called # type: ignore[attr-defined] + + # This payload compresses to 20447 bytes + payload = b"".join( + [bytes((*range(0, i), *range(i, 0, -1))) for i in range(255) for _ in range(64)] + ) + await msg.write_eof(payload) + assert transport.writelines.called != transport.write.called # type: ignore[attr-defined] + if transport.writelines.called: # type: ignore[attr-defined] + chunks.extend(transport.writelines.mock_calls[0][1][0]) # type: ignore[attr-defined] + else: # transport.write.called: # type: ignore[attr-defined] + chunks.extend([c[1][0] 
for c in list(transport.write.mock_calls)]) # type: ignore[attr-defined] + content = b"".join(chunks) + assert ZLibBackend.decompress(content) == (b"data" * 4096) + payload + + async def test_write_payload_chunked_filter( protocol: BaseProtocol, transport: asyncio.Transport, @@ -200,6 +294,7 @@ async def test_write_payload_chunked_filter_multiple_chunks( ) +@pytest.mark.internal # Used for performance benchmarking async def test_write_payload_deflate_compression(protocol, transport, loop) -> None: COMPRESSED = b"x\x9cKI,I\x04\x00\x04\x00\x01\x9b" write = transport.write = mock.Mock() @@ -214,6 +309,24 @@ async def test_write_payload_deflate_compression(protocol, transport, loop) -> N assert COMPRESSED == content.split(b"\r\n\r\n", 1)[-1] +@pytest.mark.usefixtures("parametrize_zlib_backend") +async def test_write_payload_deflate_compression_all_zlib( + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + msg = http.StreamWriter(protocol, loop) + msg.enable_compression("deflate") + await msg.write(b"data") + await msg.write_eof() + + chunks = [c[1][0] for c in list(transport.write.mock_calls)] # type: ignore[attr-defined] + assert all(chunks) + content = b"".join(chunks) + assert b"data" == decompress(content) + + +@pytest.mark.internal # Used for performance benchmarking async def test_write_payload_deflate_compression_chunked( protocol: BaseProtocol, transport: asyncio.Transport, @@ -232,8 +345,27 @@ async def test_write_payload_deflate_compression_chunked( assert content == expected +@pytest.mark.usefixtures("parametrize_zlib_backend") +async def test_write_payload_deflate_compression_chunked_all_zlib( + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + msg = http.StreamWriter(protocol, loop) + msg.enable_compression("deflate") + msg.enable_chunking() + await msg.write(b"data") + await msg.write_eof() + + chunks = [c[1][0] for c in 
list(transport.write.mock_calls)] # type: ignore[attr-defined] + assert all(chunks) + content = b"".join(chunks) + assert b"data" == decompress(decode_chunked(content)) + + @pytest.mark.usefixtures("enable_writelines") @pytest.mark.usefixtures("force_writelines_small_payloads") +@pytest.mark.internal # Used for performance benchmarking async def test_write_payload_deflate_compression_chunked_writelines( protocol: BaseProtocol, transport: asyncio.Transport, @@ -252,6 +384,27 @@ async def test_write_payload_deflate_compression_chunked_writelines( assert content == expected +@pytest.mark.usefixtures("enable_writelines") +@pytest.mark.usefixtures("force_writelines_small_payloads") +@pytest.mark.usefixtures("parametrize_zlib_backend") +async def test_write_payload_deflate_compression_chunked_writelines_all_zlib( + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + msg = http.StreamWriter(protocol, loop) + msg.enable_compression("deflate") + msg.enable_chunking() + await msg.write(b"data") + await msg.write_eof() + + chunks = [b"".join(c[1][0]) for c in list(transport.writelines.mock_calls)] # type: ignore[attr-defined] + assert all(chunks) + content = b"".join(chunks) + assert b"data" == decompress(decode_chunked(content)) + + +@pytest.mark.internal # Used for performance benchmarking async def test_write_payload_deflate_and_chunked( buf: bytearray, protocol: BaseProtocol, @@ -270,6 +423,25 @@ async def test_write_payload_deflate_and_chunked( assert thing == buf +@pytest.mark.usefixtures("parametrize_zlib_backend") +async def test_write_payload_deflate_and_chunked_all_zlib( + buf: bytearray, + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + msg = http.StreamWriter(protocol, loop) + msg.enable_compression("deflate") + msg.enable_chunking() + + await msg.write(b"da") + await msg.write(b"ta") + await msg.write_eof() + + assert b"data" == 
decompress(decode_chunked(buf)) + + +@pytest.mark.internal # Used for performance benchmarking async def test_write_payload_deflate_compression_chunked_data_in_eof( protocol: BaseProtocol, transport: asyncio.Transport, @@ -288,8 +460,27 @@ async def test_write_payload_deflate_compression_chunked_data_in_eof( assert content == expected +@pytest.mark.usefixtures("parametrize_zlib_backend") +async def test_write_payload_deflate_compression_chunked_data_in_eof_all_zlib( + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + msg = http.StreamWriter(protocol, loop) + msg.enable_compression("deflate") + msg.enable_chunking() + await msg.write(b"data") + await msg.write_eof(b"end") + + chunks = [c[1][0] for c in list(transport.write.mock_calls)] # type: ignore[attr-defined] + assert all(chunks) + content = b"".join(chunks) + assert b"dataend" == decompress(decode_chunked(content)) + + @pytest.mark.usefixtures("enable_writelines") @pytest.mark.usefixtures("force_writelines_small_payloads") +@pytest.mark.internal # Used for performance benchmarking async def test_write_payload_deflate_compression_chunked_data_in_eof_writelines( protocol: BaseProtocol, transport: asyncio.Transport, @@ -308,6 +499,27 @@ async def test_write_payload_deflate_compression_chunked_data_in_eof_writelines( assert content == expected +@pytest.mark.usefixtures("enable_writelines") +@pytest.mark.usefixtures("force_writelines_small_payloads") +@pytest.mark.usefixtures("parametrize_zlib_backend") +async def test_write_payload_deflate_compression_chunked_data_in_eof_writelines_all_zlib( + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + msg = http.StreamWriter(protocol, loop) + msg.enable_compression("deflate") + msg.enable_chunking() + await msg.write(b"data") + await msg.write_eof(b"end") + + chunks = [b"".join(c[1][0]) for c in list(transport.writelines.mock_calls)] # type: ignore[attr-defined] + 
assert all(chunks) + content = b"".join(chunks) + assert b"dataend" == decompress(decode_chunked(content)) + + +@pytest.mark.internal # Used for performance benchmarking async def test_write_large_payload_deflate_compression_chunked_data_in_eof( protocol: BaseProtocol, transport: asyncio.Transport, @@ -334,8 +546,36 @@ async def test_write_large_payload_deflate_compression_chunked_data_in_eof( assert zlib.decompress(content) == (b"data" * 4096) + payload +@pytest.mark.usefixtures("parametrize_zlib_backend") +async def test_write_large_payload_deflate_compression_chunked_data_in_eof_all_zlib( + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + msg = http.StreamWriter(protocol, loop) + msg.enable_compression("deflate") + msg.enable_chunking() + + await msg.write(b"data" * 4096) + # This payload compresses to 1111 bytes + payload = b"".join([bytes((*range(0, i), *range(i, 0, -1))) for i in range(255)]) + await msg.write_eof(payload) + + compressed = [] + chunks = [c[1][0] for c in list(transport.write.mock_calls)] # type: ignore[attr-defined] + chunked_body = b"".join(chunks) + split_body = chunked_body.split(b"\r\n") + while split_body: + if split_body.pop(0): + compressed.append(split_body.pop(0)) + + content = b"".join(compressed) + assert ZLibBackend.decompress(content) == (b"data" * 4096) + payload + + @pytest.mark.usefixtures("enable_writelines") @pytest.mark.usefixtures("force_writelines_small_payloads") +@pytest.mark.internal # Used for performance benchmarking async def test_write_large_payload_deflate_compression_chunked_data_in_eof_writelines( protocol: BaseProtocol, transport: asyncio.Transport, @@ -362,6 +602,36 @@ async def test_write_large_payload_deflate_compression_chunked_data_in_eof_write assert zlib.decompress(content) == (b"data" * 4096) + payload +@pytest.mark.usefixtures("enable_writelines") +@pytest.mark.usefixtures("force_writelines_small_payloads") 
+@pytest.mark.usefixtures("parametrize_zlib_backend") +async def test_write_large_payload_deflate_compression_chunked_data_in_eof_writelines_all_zlib( + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + msg = http.StreamWriter(protocol, loop) + msg.enable_compression("deflate") + msg.enable_chunking() + + await msg.write(b"data" * 4096) + # This payload compresses to 1111 bytes + payload = b"".join([bytes((*range(0, i), *range(i, 0, -1))) for i in range(255)]) + await msg.write_eof(payload) + assert not transport.write.called # type: ignore[attr-defined] + + chunks = [] + for write_lines_call in transport.writelines.mock_calls: # type: ignore[attr-defined] + chunked_payload = list(write_lines_call[1][0])[1:] + chunked_payload.pop() + chunks.extend(chunked_payload) + + assert all(chunks) + content = b"".join(chunks) + assert ZLibBackend.decompress(content) == (b"data" * 4096) + payload + + +@pytest.mark.internal # Used for performance benchmarking async def test_write_payload_deflate_compression_chunked_connection_lost( protocol: BaseProtocol, transport: asyncio.Transport, @@ -380,6 +650,25 @@ async def test_write_payload_deflate_compression_chunked_connection_lost( await msg.write_eof(b"end") +@pytest.mark.usefixtures("parametrize_zlib_backend") +async def test_write_payload_deflate_compression_chunked_connection_lost_all_zlib( + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + msg = http.StreamWriter(protocol, loop) + msg.enable_compression("deflate") + msg.enable_chunking() + await msg.write(b"data") + with ( + pytest.raises( + ClientConnectionResetError, match="Cannot write to closing transport" + ), + mock.patch.object(transport, "is_closing", return_value=True), + ): + await msg.write_eof(b"end") + + async def test_write_payload_bytes_memoryview( buf: bytearray, protocol: BaseProtocol, diff --git a/tests/test_multipart.py b/tests/test_multipart.py 
index 8576998962e..b0ca92fde9e 100644 --- a/tests/test_multipart.py +++ b/tests/test_multipart.py @@ -3,13 +3,13 @@ import json import pathlib import sys -import zlib from unittest import mock import pytest import aiohttp from aiohttp import payload +from aiohttp.compression_utils import ZLibBackend from aiohttp.hdrs import ( CONTENT_DISPOSITION, CONTENT_ENCODING, @@ -1190,6 +1190,7 @@ async def test_writer_write_no_parts(buf, stream, writer) -> None: assert b"--:--\r\n" == bytes(buf) +@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_writer_serialize_with_content_encoding_gzip(buf, stream, writer): writer.append("Time to Relax!", {CONTENT_ENCODING: "gzip"}) await writer.write(stream) @@ -1200,7 +1201,7 @@ async def test_writer_serialize_with_content_encoding_gzip(buf, stream, writer): b"Content-Encoding: gzip" == headers ) - decompressor = zlib.decompressobj(wbits=16 + zlib.MAX_WBITS) + decompressor = ZLibBackend.decompressobj(wbits=16 + ZLibBackend.MAX_WBITS) data = decompressor.decompress(message.split(b"\r\n")[0]) data += decompressor.flush() assert b"Time to Relax!" 
== data diff --git a/tests/test_web_functional.py b/tests/test_web_functional.py index 47189f7460b..9cc05a08426 100644 --- a/tests/test_web_functional.py +++ b/tests/test_web_functional.py @@ -4,8 +4,7 @@ import pathlib import socket import sys -import zlib -from typing import Any, NoReturn, Optional +from typing import Any, Dict, Generator, NoReturn, Optional, Tuple from unittest import mock import pytest @@ -22,6 +21,7 @@ multipart, web, ) +from aiohttp.compression_utils import ZLibBackend, ZLibCompressObjProtocol from aiohttp.hdrs import CONTENT_LENGTH, CONTENT_TYPE, TRANSFER_ENCODING from aiohttp.pytest_plugin import AiohttpClient from aiohttp.test_utils import make_mocked_coro @@ -1134,19 +1134,30 @@ async def handler(request): await resp.release() -@pytest.mark.parametrize( - "compressor,encoding", - [ - (zlib.compressobj(wbits=16 + zlib.MAX_WBITS), "gzip"), - (zlib.compressobj(wbits=zlib.MAX_WBITS), "deflate"), - # Actually, wrong compression format, but - # should be supported for some legacy cases. 
- (zlib.compressobj(wbits=-zlib.MAX_WBITS), "deflate"), - ], -) +@pytest.fixture(params=["gzip", "deflate", "deflate-raw"]) +def compressor_case( + request: pytest.FixtureRequest, + parametrize_zlib_backend: None, +) -> Generator[Tuple[ZLibCompressObjProtocol, str], None, None]: + encoding: str = request.param + max_wbits: int = ZLibBackend.MAX_WBITS + + encoding_to_wbits: Dict[str, int] = { + "deflate": max_wbits, + "deflate-raw": -max_wbits, + "gzip": 16 + max_wbits, + } + + compressor = ZLibBackend.compressobj(wbits=encoding_to_wbits[encoding]) + yield (compressor, "deflate" if encoding.startswith("deflate") else encoding) + + async def test_response_with_precompressed_body( - aiohttp_client, compressor, encoding + aiohttp_client: AiohttpClient, + compressor_case: Tuple[ZLibCompressObjProtocol, str], ) -> None: + compressor, encoding = compressor_case + async def handler(request): headers = {"Content-Encoding": encoding} data = compressor.compress(b"mydata") + compressor.flush() @@ -2189,6 +2200,7 @@ async def handler(request): @pytest.mark.parametrize( "auto_decompress,len_of", [(True, "uncompressed"), (False, "compressed")] ) +@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_auto_decompress( aiohttp_client, auto_decompress, @@ -2203,7 +2215,7 @@ async def handler(request): client = await aiohttp_client(app) uncompressed = b"dataaaaaaaaaaaaaaaaaaaaaaaaa" - compressor = zlib.compressobj(wbits=16 + zlib.MAX_WBITS) + compressor = ZLibBackend.compressobj(wbits=16 + ZLibBackend.MAX_WBITS) compressed = compressor.compress(uncompressed) + compressor.flush() assert len(compressed) != len(uncompressed) headers = {"content-encoding": "gzip"} diff --git a/tests/test_web_response.py b/tests/test_web_response.py index 54176ea661b..b7758f46baa 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -4,7 +4,6 @@ import io import json import sys -import zlib from concurrent.futures import ThreadPoolExecutor from typing import 
AsyncIterator, Optional from unittest import mock @@ -417,6 +416,7 @@ async def test_chunked_encoding_forbidden_for_http_10() -> None: assert Matches("Using chunked encoding is forbidden for HTTP/1.0") == str(ctx.value) +@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_compression_no_accept() -> None: req = make_request("GET", "/") resp = StreamResponse() @@ -458,6 +458,7 @@ async def test_force_compression_false_backwards_compat() -> None: assert not msg.enable_compression.called +@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_compression_default_coding() -> None: req = make_request( "GET", "/", headers=CIMultiDict({hdrs.ACCEPT_ENCODING: "gzip, deflate"}) @@ -471,11 +472,12 @@ async def test_compression_default_coding() -> None: msg = await resp.prepare(req) - msg.enable_compression.assert_called_with("deflate", zlib.Z_DEFAULT_STRATEGY) + msg.enable_compression.assert_called_with("deflate", None) assert "deflate" == resp.headers.get(hdrs.CONTENT_ENCODING) assert msg.filter is not None +@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_force_compression_deflate() -> None: req = make_request( "GET", "/", headers=CIMultiDict({hdrs.ACCEPT_ENCODING: "gzip, deflate"}) @@ -486,10 +488,12 @@ async def test_force_compression_deflate() -> None: assert resp.compression msg = await resp.prepare(req) - msg.enable_compression.assert_called_with("deflate", zlib.Z_DEFAULT_STRATEGY) + assert msg is not None + msg.enable_compression.assert_called_with("deflate", None) assert "deflate" == resp.headers.get(hdrs.CONTENT_ENCODING) +@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_force_compression_deflate_large_payload() -> None: """Make sure a warning is thrown for large payloads compressed in the event loop.""" req = make_request( @@ -509,6 +513,7 @@ async def test_force_compression_deflate_large_payload() -> None: assert "deflate" == resp.headers.get(hdrs.CONTENT_ENCODING) 
+@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_force_compression_no_accept_deflate() -> None: req = make_request("GET", "/") resp = StreamResponse() @@ -517,10 +522,12 @@ async def test_force_compression_no_accept_deflate() -> None: assert resp.compression msg = await resp.prepare(req) - msg.enable_compression.assert_called_with("deflate", zlib.Z_DEFAULT_STRATEGY) + assert msg is not None + msg.enable_compression.assert_called_with("deflate", None) assert "deflate" == resp.headers.get(hdrs.CONTENT_ENCODING) +@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_force_compression_gzip() -> None: req = make_request( "GET", "/", headers=CIMultiDict({hdrs.ACCEPT_ENCODING: "gzip, deflate"}) @@ -531,10 +538,12 @@ async def test_force_compression_gzip() -> None: assert resp.compression msg = await resp.prepare(req) - msg.enable_compression.assert_called_with("gzip", zlib.Z_DEFAULT_STRATEGY) + assert msg is not None + msg.enable_compression.assert_called_with("gzip", None) assert "gzip" == resp.headers.get(hdrs.CONTENT_ENCODING) +@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_force_compression_no_accept_gzip() -> None: req = make_request("GET", "/") resp = StreamResponse() @@ -543,10 +552,12 @@ async def test_force_compression_no_accept_gzip() -> None: assert resp.compression msg = await resp.prepare(req) - msg.enable_compression.assert_called_with("gzip", zlib.Z_DEFAULT_STRATEGY) + assert msg is not None + msg.enable_compression.assert_called_with("gzip", None) assert "gzip" == resp.headers.get(hdrs.CONTENT_ENCODING) +@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_change_content_threaded_compression_enabled() -> None: req = make_request("GET", "/") body_thread_size = 1024 @@ -558,6 +569,7 @@ async def test_change_content_threaded_compression_enabled() -> None: assert gzip.decompress(resp._compressed_body) == body +@pytest.mark.usefixtures("parametrize_zlib_backend") async def 
test_change_content_threaded_compression_enabled_explicit() -> None: req = make_request("GET", "/") body_thread_size = 1024 @@ -572,6 +584,7 @@ async def test_change_content_threaded_compression_enabled_explicit() -> None: assert gzip.decompress(resp._compressed_body) == body +@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_change_content_length_if_compression_enabled() -> None: req = make_request("GET", "/") resp = Response(body=b"answer") @@ -581,6 +594,7 @@ async def test_change_content_length_if_compression_enabled() -> None: assert resp.content_length is not None and resp.content_length != len(b"answer") +@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_set_content_length_if_compression_enabled() -> None: writer = mock.Mock() @@ -600,6 +614,7 @@ async def write_headers(status_line, headers): assert resp.content_length == 26 +@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_remove_content_length_if_compression_enabled_http11() -> None: writer = mock.Mock() @@ -616,6 +631,7 @@ async def write_headers(status_line, headers): assert resp.content_length is None +@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_remove_content_length_if_compression_enabled_http10() -> None: writer = mock.Mock() @@ -632,6 +648,7 @@ async def write_headers(status_line, headers): assert resp.content_length is None +@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_force_compression_identity() -> None: writer = mock.Mock() @@ -648,6 +665,7 @@ async def write_headers(status_line, headers): assert resp.content_length == 123 +@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_force_compression_identity_response() -> None: writer = mock.Mock() @@ -663,6 +681,7 @@ async def write_headers(status_line, headers): assert resp.content_length == 6 +@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_rm_content_length_if_compression_http11() -> None: writer = mock.Mock() @@ 
-680,6 +699,7 @@ async def write_headers(status_line, headers): assert resp.content_length is None +@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_rm_content_length_if_compression_http10() -> None: writer = mock.Mock() diff --git a/tests/test_web_sendfile_functional.py b/tests/test_web_sendfile_functional.py index 256cf4d243a..fc4db06a307 100644 --- a/tests/test_web_sendfile_functional.py +++ b/tests/test_web_sendfile_functional.py @@ -3,7 +3,6 @@ import gzip import pathlib import socket -import zlib from typing import Any, Iterable, Optional from unittest import mock @@ -11,6 +10,7 @@ import aiohttp from aiohttp import web +from aiohttp.compression_utils import ZLibBackend try: import brotlicffi as brotli @@ -300,6 +300,7 @@ async def handler(request): [("gzip, deflate", "gzip"), ("gzip, deflate, br", "br")], ) @pytest.mark.parametrize("forced_compression", [None, web.ContentCoding.gzip]) +@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_static_file_with_encoding_and_enable_compression( hello_txt: pathlib.Path, aiohttp_client: Any, @@ -1047,6 +1048,7 @@ async def test_static_file_if_range_invalid_date( await client.close() +@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_static_file_compression(aiohttp_client, sender) -> None: filepath = pathlib.Path(__file__).parent / "data.unknown_mime_type" @@ -1061,7 +1063,7 @@ async def handler(request): resp = await client.get("/") assert resp.status == 200 - zcomp = zlib.compressobj(wbits=zlib.MAX_WBITS) + zcomp = ZLibBackend.compressobj(wbits=ZLibBackend.MAX_WBITS) expected_body = zcomp.compress(b"file content\n") + zcomp.flush() assert expected_body == await resp.read() assert "application/octet-stream" == resp.headers["Content-Type"] diff --git a/tests/test_websocket_parser.py b/tests/test_websocket_parser.py index 7f8b98d4566..2cac4cf6b87 100644 --- a/tests/test_websocket_parser.py +++ b/tests/test_websocket_parser.py @@ -2,7 +2,6 @@ import pickle import random 
import struct -import zlib from typing import Union from unittest import mock @@ -20,6 +19,7 @@ from aiohttp._websocket.models import WS_DEFLATE_TRAILING from aiohttp._websocket.reader import WebSocketDataQueue from aiohttp.base_protocol import BaseProtocol +from aiohttp.compression_utils import ZLibBackend from aiohttp.http import WebSocketError, WSCloseCode, WSMessage, WSMsgType from aiohttp.http_websocket import WebSocketReader @@ -29,13 +29,15 @@ class PatchableWebSocketReader(WebSocketReader): def build_frame( - message, opcode, use_mask=False, noheader=False, is_fin=True, compress=False + message, opcode, use_mask=False, noheader=False, is_fin=True, ZLibBackend=None ): # Send a frame over the websocket with message as its payload. - if compress: - compressobj = zlib.compressobj(wbits=-9) + compress = False + if ZLibBackend: + compress = True + compressobj = ZLibBackend.compressobj(wbits=-9) message = compressobj.compress(message) - message = message + compressobj.flush(zlib.Z_SYNC_FLUSH) + message = message + compressobj.flush(ZLibBackend.Z_SYNC_FLUSH) if message.endswith(WS_DEFLATE_TRAILING): message = message[:-4] msg_length = len(message) @@ -545,6 +547,7 @@ def test_parse_compress_error_frame(parser) -> None: assert ctx.value.code == WSCloseCode.PROTOCOL_ERROR +@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_parse_no_compress_frame_single( loop: asyncio.AbstractEventLoop, out: WebSocketDataQueue ) -> None: @@ -574,7 +577,7 @@ def test_msg_too_large_not_fin(out) -> None: def test_compressed_msg_too_large(out) -> None: parser = WebSocketReader(out, 256, compress=True) - data = build_frame(b"aaa" * 256, WSMsgType.TEXT, compress=True) + data = build_frame(b"aaa" * 256, WSMsgType.TEXT, ZLibBackend=ZLibBackend) with pytest.raises(WebSocketError) as ctx: parser._feed_data(data) assert ctx.value.code == WSCloseCode.MESSAGE_TOO_BIG diff --git a/tests/test_websocket_writer.py b/tests/test_websocket_writer.py index 77eaa2f74b8..b39e411f90d 100644 
--- a/tests/test_websocket_writer.py +++ b/tests/test_websocket_writer.py @@ -118,6 +118,7 @@ async def test_send_compress_text_per_message(protocol, transport) -> None: (32, lambda count: 64 + count if count % 2 else count), ), ) +@pytest.mark.usefixtures("parametrize_zlib_backend") async def test_concurrent_messages( protocol: Any, transport: Any, From 798648a408961a995bb8ee009495e4dccd6b181d Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 14 Apr 2025 11:09:22 -1000 Subject: [PATCH 1329/1511] =?UTF-8?q?[PR=20#10706/db6faf75=20backport][3.1?= =?UTF-8?q?2]=20docs/client=5Freference.rst,=20attribute=20name=20from=20t?= =?UTF-8?q?race=5Fconfig=20to=20trace=5F=E2=80=A6=20(#10708)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: alegtk <115006266+alegtk@users.noreply.github.com> --- docs/client_reference.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 8d01d726e1c..aa664b24ff4 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -368,7 +368,7 @@ The client session supports the context manager protocol for self closing. .. versionadded:: 3.7 - .. attribute:: trace_config + .. attribute:: trace_configs A list of :class:`TraceConfig` instances used for client tracing. 
``None`` (default) is used for request tracing From 98add82d7b9eddd88b8ff60e3783413750db9274 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 14 Apr 2025 11:09:34 -1000 Subject: [PATCH 1330/1511] =?UTF-8?q?[PR=20#10706/db6faf75=20backport][3.1?= =?UTF-8?q?1]=20docs/client=5Freference.rst,=20attribute=20name=20from=20t?= =?UTF-8?q?race=5Fconfig=20to=20trace=5F=E2=80=A6=20(#10707)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: alegtk <115006266+alegtk@users.noreply.github.com> --- docs/client_reference.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 26537161971..130ba6cc336 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -364,7 +364,7 @@ The client session supports the context manager protocol for self closing. .. versionadded:: 3.7 - .. attribute:: trace_config + .. attribute:: trace_configs A list of :class:`TraceConfig` instances used for client tracing. ``None`` (default) is used for request tracing From ce76d11151fa7453e3a8a456631b7525cdd5cee6 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 14 Apr 2025 23:19:16 +0000 Subject: [PATCH 1331/1511] [PR #10721/d912123c backport][3.12] Update zlib benchmarks for multiple zlib backends (#10724) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- tests/test_benchmarks_client.py | 2 ++ tests/test_benchmarks_http_websocket.py | 2 ++ 2 files changed, 4 insertions(+) diff --git a/tests/test_benchmarks_client.py b/tests/test_benchmarks_client.py index aa3536be820..ef2a4d88c92 100644 --- a/tests/test_benchmarks_client.py +++ b/tests/test_benchmarks_client.py @@ -2,6 +2,7 @@ import asyncio +import pytest from pytest_codspeed import BenchmarkFixture from aiohttp import hdrs, web @@ -178,6 +179,7 @@ def _run() -> None: loop.run_until_complete(run_client_benchmark()) +@pytest.mark.usefixtures("parametrize_zlib_backend") def test_get_request_with_251308_compressed_chunked_payload( loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, diff --git a/tests/test_benchmarks_http_websocket.py b/tests/test_benchmarks_http_websocket.py index 7ff04199d24..8e6a8bb7bb9 100644 --- a/tests/test_benchmarks_http_websocket.py +++ b/tests/test_benchmarks_http_websocket.py @@ -3,6 +3,7 @@ import asyncio from typing import Union +import pytest from pytest_codspeed import BenchmarkFixture from aiohttp._websocket.helpers import MSG_SIZE, PACK_LEN3 @@ -117,6 +118,7 @@ def _run() -> None: loop.run_until_complete(_send_one_hundred_websocket_text_messages()) +@pytest.mark.usefixtures("parametrize_zlib_backend") def test_send_one_hundred_websocket_compressed_messages( loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture ) -> None: From 538938d0b1c220350c10d435e4e4a20e97a2fed4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 17 Apr 2025 10:50:30 +0000 Subject: [PATCH 1332/1511] Bump yarl from 1.19.0 to 1.20.0 (#10727) Bumps [yarl](https://github.com/aio-libs/yarl) from 1.19.0 to 1.20.0. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/yarl/releases">yarl's releases</a>.</em></p> <blockquote> <h2>1.20.0</h2> <h2>Features</h2> <ul> <li> <p>Implemented support for the free-threaded build of CPython 3.13 -- by :user:<code>lysnikolaou</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/yarl/issues/1456">#1456</a>.</p> </li> </ul> <h2>Packaging updates and notes for downstreams</h2> <ul> <li> <p>Started building wheels for the free-threaded build of CPython 3.13 -- by :user:<code>lysnikolaou</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/yarl/issues/1456">#1456</a>.</p> </li> </ul> <hr /> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/yarl/blob/master/CHANGES.rst">yarl's changelog</a>.</em></p> <blockquote> <h1>1.20.0</h1> <p><em>(2025-04-16)</em></p> <h2>Features</h2> <ul> <li> <p>Implemented support for the free-threaded build of CPython 3.13 -- by :user:<code>lysnikolaou</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1456</code>.</p> </li> </ul> <h2>Packaging updates and notes for downstreams</h2> <ul> <li> <p>Started building wheels for the free-threaded build of CPython 3.13 -- by :user:<code>lysnikolaou</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1456</code>.</p> </li> </ul> <hr /> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/yarl/commit/2127e8dee07ecc92d2d98b708ff23fb80f121ecb"><code>2127e8d</code></a> Release 1.20.0 (<a href="https://redirect.github.com/aio-libs/yarl/issues/1504">#1504</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/50b561e6da945da97ca6f7db8222c0ec2811da1b"><code>50b561e</code></a> [pre-commit.ci] pre-commit autoupdate (<a 
href="https://redirect.github.com/aio-libs/yarl/issues/1501">#1501</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/7ad86e83b0a3a7b58c0acc6ba3d27bfd1288c0b7"><code>7ad86e8</code></a> Support the free-threaded build of CPython 3.13 (<a href="https://redirect.github.com/aio-libs/yarl/issues/1456">#1456</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/85793031205d480d8ffb3336173acc4f3dbbd914"><code>8579303</code></a> Bump multidict from 6.4.2 to 6.4.3 (<a href="https://redirect.github.com/aio-libs/yarl/issues/1503">#1503</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/b9e519330a1a8de0ca6e5e51ca50293cc18ef133"><code>b9e5193</code></a> Bump multidict from 6.3.2 to 6.4.2 (<a href="https://redirect.github.com/aio-libs/yarl/issues/1502">#1502</a>)</li> <li><a href="https://github.com/aio-libs/yarl/commit/937f030e19ea7d3c25f6e1a5297a0ad51cb401be"><code>937f030</code></a> Increment version to 1.19.1.dev0 (<a href="https://redirect.github.com/aio-libs/yarl/issues/1500">#1500</a>)</li> <li>See full diff in <a href="https://github.com/aio-libs/yarl/compare/v1.19.0...v1.20.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=yarl&package-manager=pip&previous-version=1.19.0&new-version=1.20.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 10 +++++++++- requirements/dev.txt | 12 ++++++++---- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 4 ++-- 5 files changed, 21 insertions(+), 9 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 08beaa66522..7f8e2a2a20a 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -44,5 +44,5 @@ 
typing-extensions==4.13.1 # via multidict uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in -yarl==1.19.0 +yarl==1.20.0 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index e8a2d85b2bb..79097789543 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -101,6 +101,10 @@ incremental==24.7.2 # via towncrier iniconfig==2.1.0 # via pytest +isal==1.7.2 + # via + # -r requirements/lint.in + # -r requirements/test.in jinja2==3.1.6 # via # sphinx @@ -285,8 +289,12 @@ wait-for-it==2.3.0 # via -r requirements/test.in wheel==0.45.1 # via pip-tools -yarl==1.19.0 +yarl==1.20.0 # via -r requirements/runtime-deps.in +zlib-ng==0.5.1 + # via + # -r requirements/lint.in + # -r requirements/test.in # The following packages are considered to be unsafe in a requirements file: pip==25.0.1 diff --git a/requirements/dev.txt b/requirements/dev.txt index 90d5c88acb5..2f6ce6afcb6 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -100,7 +100,9 @@ incremental==24.7.2 iniconfig==2.1.0 # via pytest isal==1.7.2 - # via -r requirements/test.in + # via + # -r requirements/lint.in + # -r requirements/test.in jinja2==3.1.6 # via # sphinx @@ -278,10 +280,12 @@ wait-for-it==2.3.0 # via -r requirements/test.in wheel==0.45.1 # via pip-tools -yarl==1.19.0 +yarl==1.20.0 # via -r requirements/runtime-deps.in -zlib_ng==0.5.1 - # via -r requirements/test.in +zlib-ng==0.5.1 + # via + # -r requirements/lint.in + # -r requirements/test.in # The following packages are considered to be unsafe in a requirements file: pip==25.0.1 diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 3fcc493e191..1d68b4cdc19 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -38,5 +38,5 @@ pycparser==2.22 # via cffi typing-extensions==4.13.1 # via multidict -yarl==1.19.0 +yarl==1.20.0 # via -r 
requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 4953cdbd09a..7196f9bb4db 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -140,7 +140,7 @@ uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpytho # via -r requirements/base.in wait-for-it==2.3.0 # via -r requirements/test.in -yarl==1.19.0 +yarl==1.20.0 # via -r requirements/runtime-deps.in -zlib_ng==0.5.1 +zlib-ng==0.5.1 # via -r requirements/test.in From 98a44cccebb2ba841b6dbc4d957fcf5ef33d662f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 17 Apr 2025 11:08:14 +0000 Subject: [PATCH 1333/1511] Bump typing-extensions from 4.13.1 to 4.13.2 (#10718) Bumps [typing-extensions](https://github.com/python/typing_extensions) from 4.13.1 to 4.13.2. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/python/typing_extensions/releases">typing-extensions's releases</a>.</em></p> <blockquote> <h2>4.13.2</h2> <ul> <li>Fix <code>TypeError</code> when taking the union of <code>typing_extensions.TypeAliasType</code> and a <code>typing.TypeAliasType</code> on Python 3.12 and 3.13. Patch by <a href="https://github.com/jorenham">Joren Hammudoglu</a>.</li> <li>Backport from CPython PR <a href="https://redirect.github.com/python/cpython/pull/132160">#132160</a> to avoid having user arguments shadowed in generated <code>__new__</code> by <code>@typing_extensions.deprecated</code>. 
Patch by <a href="https://github.com/Viicos">Victorien Plot</a>.</li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/python/typing_extensions/blob/main/CHANGELOG.md">typing-extensions's changelog</a>.</em></p> <blockquote> <h1>Release 4.13.2 (April 10, 2025)</h1> <ul> <li>Fix <code>TypeError</code> when taking the union of <code>typing_extensions.TypeAliasType</code> and a <code>typing.TypeAliasType</code> on Python 3.12 and 3.13. Patch by <a href="https://github.com/jorenham">Joren Hammudoglu</a>.</li> <li>Backport from CPython PR <a href="https://redirect.github.com/python/cpython/pull/132160">#132160</a> to avoid having user arguments shadowed in generated <code>__new__</code> by <code>@typing_extensions.deprecated</code>. Patch by <a href="https://github.com/Viicos">Victorien Plot</a>.</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/python/typing_extensions/commit/4525e9dbbd177b4ef8a84f55ff5fe127582a071d"><code>4525e9d</code></a> Prepare release 4.13.2 (<a href="https://redirect.github.com/python/typing_extensions/issues/583">#583</a>)</li> <li><a href="https://github.com/python/typing_extensions/commit/88a0c200ceb0ccfe4329d3db8a1a863a2381e44c"><code>88a0c20</code></a> Do not shadow user arguments in generated <code>__new__</code> by <code>@deprecated</code> (<a href="https://redirect.github.com/python/typing_extensions/issues/581">#581</a>)</li> <li><a href="https://github.com/python/typing_extensions/commit/281d7b0ca6edad384e641d1066b759c280602919"><code>281d7b0</code></a> Add 3rd party tests for litestar (<a href="https://redirect.github.com/python/typing_extensions/issues/578">#578</a>)</li> <li><a href="https://github.com/python/typing_extensions/commit/8092c3996f4902ad9c74ac2d1d8dd19371ecbaa3"><code>8092c39</code></a> fix <code>TypeAliasType</code> union with <code>typing.TypeAliasType</code> (<a 
href="https://redirect.github.com/python/typing_extensions/issues/575">#575</a>)</li> <li>See full diff in <a href="https://github.com/python/typing_extensions/compare/4.13.1...4.13.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=typing-extensions&package-manager=pip&previous-version=4.13.1&new-version=4.13.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/cython.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/multidict.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 7f8e2a2a20a..5c59f913f4e 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -40,7 +40,7 @@ pycares==4.5.0 # via aiodns pycparser==2.22 # via cffi -typing-extensions==4.13.1 +typing-extensions==4.13.2 # via multidict uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 79097789543..21021de9034 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -262,7 +262,7 @@ trustme==1.2.1 ; platform_machine != "i686" # via # -r requirements/lint.in # -r requirements/test.in -typing-extensions==4.13.1 +typing-extensions==4.13.2 # via # multidict # mypy diff --git a/requirements/cython.txt 
b/requirements/cython.txt index d5661f8fff3..8686651881b 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -8,5 +8,5 @@ cython==3.0.12 # via -r requirements/cython.in multidict==6.4.3 # via -r requirements/multidict.in -typing-extensions==4.13.1 +typing-extensions==4.13.2 # via multidict diff --git a/requirements/dev.txt b/requirements/dev.txt index 2f6ce6afcb6..a1b87b493ae 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -253,7 +253,7 @@ trustme==1.2.1 ; platform_machine != "i686" # via # -r requirements/lint.in # -r requirements/test.in -typing-extensions==4.13.1 +typing-extensions==4.13.2 # via # multidict # mypy diff --git a/requirements/lint.txt b/requirements/lint.txt index b53cccca9f6..4e9689f1d5e 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -97,7 +97,7 @@ tomli==2.2.1 # slotscheck trustme==1.2.1 # via -r requirements/lint.in -typing-extensions==4.13.1 +typing-extensions==4.13.2 # via # mypy # pydantic diff --git a/requirements/multidict.txt b/requirements/multidict.txt index 64a6ea16b87..41435a67142 100644 --- a/requirements/multidict.txt +++ b/requirements/multidict.txt @@ -6,5 +6,5 @@ # multidict==6.4.3 # via -r requirements/multidict.in -typing-extensions==4.13.1 +typing-extensions==4.13.2 # via multidict diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 1d68b4cdc19..f8fab0f177a 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -36,7 +36,7 @@ pycares==4.5.0 # via aiodns pycparser==2.22 # via cffi -typing-extensions==4.13.1 +typing-extensions==4.13.2 # via multidict yarl==1.20.0 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 7196f9bb4db..be63bafac53 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -125,7 +125,7 @@ tomli==2.2.1 # pytest trustme==1.2.1 ; platform_machine != "i686" # via -r requirements/test.in -typing-extensions==4.13.1 +typing-extensions==4.13.2 # via 
# multidict # mypy From d612e83e6a2607522d3f342f852dbc8d80ae58bd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 17 Apr 2025 11:26:04 +0000 Subject: [PATCH 1334/1511] Bump pycares from 4.5.0 to 4.6.0 (#10702) Bumps [pycares](https://github.com/saghul/pycares) from 4.5.0 to 4.6.0. <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/saghul/pycares/commit/0d4ba7a624e382e5a37d80164f23f6c235438a75"><code>0d4ba7a</code></a> Bump version to 4.6.0</li> <li><a href="https://github.com/saghul/pycares/commit/68e301a5cae4f855ea4500b6df9d503318cc52f2"><code>68e301a</code></a> Fix module has no attribute type errors</li> <li><a href="https://github.com/saghul/pycares/commit/8fb9c4bdc6feb3921c81afa87925a684e076b706"><code>8fb9c4b</code></a> Add initial type annotations (<a href="https://redirect.github.com/saghul/pycares/issues/212">#212</a>)</li> <li><a href="https://github.com/saghul/pycares/commit/dd881b5459c18da40864740700796a89815e9ed3"><code>dd881b5</code></a> Fixup tests (<a href="https://redirect.github.com/saghul/pycares/issues/214">#214</a>)</li> <li><a href="https://github.com/saghul/pycares/commit/dc9a052b18f9a545d1c6b25fe7f2fbd21992d7b3"><code>dc9a052</code></a> Swap out is_all_ascii for built-in isascii (<a href="https://redirect.github.com/saghul/pycares/issues/209">#209</a>)</li> <li>See full diff in <a href="https://github.com/saghul/pycares/compare/v4.5.0...v4.6.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pycares&package-manager=pip&previous-version=4.5.0&new-version=4.6.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. 
You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 5c59f913f4e..e7dfdd67a62 100644 --- 
a/requirements/base.txt +++ b/requirements/base.txt @@ -36,7 +36,7 @@ propcache==0.3.1 # via # -r requirements/runtime-deps.in # yarl -pycares==4.5.0 +pycares==4.6.0 # via aiodns pycparser==2.22 # via cffi diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 21021de9034..4332afac2e6 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -148,7 +148,7 @@ propcache==0.3.1 # yarl proxy-py==2.4.10 # via -r requirements/test.in -pycares==4.5.0 +pycares==4.6.0 # via aiodns pycparser==2.22 # via cffi diff --git a/requirements/dev.txt b/requirements/dev.txt index a1b87b493ae..ba62db63f1d 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -145,7 +145,7 @@ propcache==0.3.1 # yarl proxy-py==2.4.10 # via -r requirements/test.in -pycares==4.5.0 +pycares==4.6.0 # via aiodns pycparser==2.22 # via cffi diff --git a/requirements/lint.txt b/requirements/lint.txt index 4e9689f1d5e..ab419411f50 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -59,7 +59,7 @@ pluggy==1.5.0 # via pytest pre-commit==4.2.0 # via -r requirements/lint.in -pycares==4.5.0 +pycares==4.6.0 # via aiodns pycparser==2.22 # via cffi diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index f8fab0f177a..da7a66e9a38 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -32,7 +32,7 @@ propcache==0.3.1 # via # -r requirements/runtime-deps.in # yarl -pycares==4.5.0 +pycares==4.6.0 # via aiodns pycparser==2.22 # via cffi diff --git a/requirements/test.txt b/requirements/test.txt index be63bafac53..ea0360d111d 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -79,7 +79,7 @@ propcache==0.3.1 # yarl proxy-py==2.4.10 # via -r requirements/test.in -pycares==4.5.0 +pycares==4.6.0 # via aiodns pycparser==2.22 # via cffi From e55937532f764b85a4e3df94ed62068101bdd177 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 17 Apr 
2025 23:07:21 +0000 Subject: [PATCH 1335/1511] [PR #10730/0b9d3571 backport][3.12] Add benchmarks for large binary WebSocket message roundtrips (#10732) Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_benchmarks_client_ws.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/test_benchmarks_client_ws.py b/tests/test_benchmarks_client_ws.py index 6d4cf309cad..c244d33f6bd 100644 --- a/tests/test_benchmarks_client_ws.py +++ b/tests/test_benchmarks_client_ws.py @@ -2,6 +2,7 @@ import asyncio +import pytest from pytest_codspeed import BenchmarkFixture from aiohttp import web @@ -40,19 +41,22 @@ def _run() -> None: loop.run_until_complete(run_websocket_benchmark()) +@pytest.mark.parametrize("msg_size", [6, MSG_SIZE * 4], ids=["small", "large"]) def test_one_thousand_round_trip_websocket_binary_messages( loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, benchmark: BenchmarkFixture, + msg_size: int, ) -> None: """Benchmark round trip of 1000 WebSocket binary messages.""" message_count = 1000 + raw_message = b"x" * msg_size async def handler(request: web.Request) -> web.WebSocketResponse: ws = web.WebSocketResponse() await ws.prepare(request) for _ in range(message_count): - await ws.send_bytes(b"answer") + await ws.send_bytes(raw_message) await ws.close() return ws From 377a1f89551b29c85b795a57b9e8891c519f4507 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 17 Apr 2025 23:13:46 +0000 Subject: [PATCH 1336/1511] [PR #10730/0b9d3571 backport][3.11] Add benchmarks for large binary WebSocket message roundtrips (#10731) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- tests/test_benchmarks_client_ws.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/test_benchmarks_client_ws.py b/tests/test_benchmarks_client_ws.py index 6d4cf309cad..c244d33f6bd 100644 --- a/tests/test_benchmarks_client_ws.py +++ b/tests/test_benchmarks_client_ws.py @@ -2,6 +2,7 @@ import asyncio +import pytest from pytest_codspeed import BenchmarkFixture from aiohttp import web @@ -40,19 +41,22 @@ def _run() -> None: loop.run_until_complete(run_websocket_benchmark()) +@pytest.mark.parametrize("msg_size", [6, MSG_SIZE * 4], ids=["small", "large"]) def test_one_thousand_round_trip_websocket_binary_messages( loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, benchmark: BenchmarkFixture, + msg_size: int, ) -> None: """Benchmark round trip of 1000 WebSocket binary messages.""" message_count = 1000 + raw_message = b"x" * msg_size async def handler(request: web.Request) -> web.WebSocketResponse: ws = web.WebSocketResponse() await ws.prepare(request) for _ in range(message_count): - await ws.send_bytes(b"answer") + await ws.send_bytes(raw_message) await ws.close() return ws From 07c437218022fd5130591c44e4fcc04478948aee Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 18 Apr 2025 11:03:21 +0000 Subject: [PATCH 1337/1511] [PR #10714/75bbc03e backport][3.11] Only fetch SSLContext and peername once per connection (#10734) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/10714.misc.rst | 1 + aiohttp/test_utils.py | 4 ++++ aiohttp/web_protocol.py | 25 +++++++++++++++++++++++++ aiohttp/web_request.py | 6 ++---- 4 files changed, 32 insertions(+), 4 deletions(-) create mode 100644 CHANGES/10714.misc.rst diff --git a/CHANGES/10714.misc.rst b/CHANGES/10714.misc.rst new file mode 100644 index 00000000000..a36a80872f5 --- /dev/null +++ b/CHANGES/10714.misc.rst @@ -0,0 +1 @@ +Improved web server performance when connection can be reused -- by :user:`bdraco`. diff --git a/aiohttp/test_utils.py b/aiohttp/test_utils.py index be6e9b3353e..87c31427867 100644 --- a/aiohttp/test_utils.py +++ b/aiohttp/test_utils.py @@ -730,6 +730,10 @@ def make_mocked_request( if protocol is sentinel: protocol = mock.Mock() protocol.transport = transport + type(protocol).peername = mock.PropertyMock( + return_value=transport.get_extra_info("peername") + ) + type(protocol).ssl_context = mock.PropertyMock(return_value=sslcontext) if writer is sentinel: writer = mock.Mock() diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index 1dba9606ea0..a7d50195828 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -24,6 +24,7 @@ import attr import yarl +from propcache import under_cached_property from .abc import AbstractAccessLogger, AbstractStreamWriter from .base_protocol import BaseProtocol @@ -47,6 +48,8 @@ __all__ = ("RequestHandler", "RequestPayloadError", "PayloadAccessError") if TYPE_CHECKING: + import ssl + from .web_server import Server @@ -167,6 +170,7 @@ class RequestHandler(BaseProtocol): "_current_request", "_timeout_ceil_threshold", "_request_in_progress", + "_cache", ) def __init__( @@ -246,6 +250,7 @@ def __init__( self._close = False self._force_close = False self._request_in_progress = False + self._cache: dict[str, Any] = {} def __repr__(self) -> str: return "<{} {}>".format( @@ -253,6 +258,26 @@ def __repr__(self) -> str: "connected" if self.transport is not None else "disconnected", 
) + @under_cached_property + def ssl_context(self) -> Optional["ssl.SSLContext"]: + """Return SSLContext if available.""" + return ( + None + if self.transport is None + else self.transport.get_extra_info("sslcontext") + ) + + @under_cached_property + def peername( + self, + ) -> Optional[Union[str, Tuple[str, int, int, int], Tuple[str, int]]]: + """Return peername if available.""" + return ( + None + if self.transport is None + else self.transport.get_extra_info("peername") + ) + @property def keepalive_timeout(self) -> float: return self._keepalive_timeout diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index f11d49020a0..6bf5a9dea74 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -198,10 +198,8 @@ def __init__( self._client_max_size = client_max_size self._loop = loop - transport = protocol.transport - assert transport is not None - self._transport_sslcontext = transport.get_extra_info("sslcontext") - self._transport_peername = transport.get_extra_info("peername") + self._transport_sslcontext = protocol.ssl_context + self._transport_peername = protocol.peername if remote is not None: self._cache["remote"] = remote From cc4b8c5e671ca0fd07cbcd3313e44cc45d22076c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 18 Apr 2025 11:08:56 +0000 Subject: [PATCH 1338/1511] [PR #10714/75bbc03e backport][3.12] Only fetch SSLContext and peername once per connection (#10735) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/10714.misc.rst | 1 + aiohttp/test_utils.py | 4 ++++ aiohttp/web_protocol.py | 25 +++++++++++++++++++++++++ aiohttp/web_request.py | 6 ++---- 4 files changed, 32 insertions(+), 4 deletions(-) create mode 100644 CHANGES/10714.misc.rst diff --git a/CHANGES/10714.misc.rst b/CHANGES/10714.misc.rst new file mode 100644 index 00000000000..a36a80872f5 --- /dev/null +++ b/CHANGES/10714.misc.rst @@ -0,0 +1 @@ +Improved web server performance when connection can be reused -- by :user:`bdraco`. diff --git a/aiohttp/test_utils.py b/aiohttp/test_utils.py index be6e9b3353e..87c31427867 100644 --- a/aiohttp/test_utils.py +++ b/aiohttp/test_utils.py @@ -730,6 +730,10 @@ def make_mocked_request( if protocol is sentinel: protocol = mock.Mock() protocol.transport = transport + type(protocol).peername = mock.PropertyMock( + return_value=transport.get_extra_info("peername") + ) + type(protocol).ssl_context = mock.PropertyMock(return_value=sslcontext) if writer is sentinel: writer = mock.Mock() diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index 1dba9606ea0..a7d50195828 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -24,6 +24,7 @@ import attr import yarl +from propcache import under_cached_property from .abc import AbstractAccessLogger, AbstractStreamWriter from .base_protocol import BaseProtocol @@ -47,6 +48,8 @@ __all__ = ("RequestHandler", "RequestPayloadError", "PayloadAccessError") if TYPE_CHECKING: + import ssl + from .web_server import Server @@ -167,6 +170,7 @@ class RequestHandler(BaseProtocol): "_current_request", "_timeout_ceil_threshold", "_request_in_progress", + "_cache", ) def __init__( @@ -246,6 +250,7 @@ def __init__( self._close = False self._force_close = False self._request_in_progress = False + self._cache: dict[str, Any] = {} def __repr__(self) -> str: return "<{} {}>".format( @@ -253,6 +258,26 @@ def __repr__(self) -> str: "connected" if self.transport is not None else "disconnected", 
) + @under_cached_property + def ssl_context(self) -> Optional["ssl.SSLContext"]: + """Return SSLContext if available.""" + return ( + None + if self.transport is None + else self.transport.get_extra_info("sslcontext") + ) + + @under_cached_property + def peername( + self, + ) -> Optional[Union[str, Tuple[str, int, int, int], Tuple[str, int]]]: + """Return peername if available.""" + return ( + None + if self.transport is None + else self.transport.get_extra_info("peername") + ) + @property def keepalive_timeout(self) -> float: return self._keepalive_timeout diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index f11d49020a0..6bf5a9dea74 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -198,10 +198,8 @@ def __init__( self._client_max_size = client_max_size self._loop = loop - transport = protocol.transport - assert transport is not None - self._transport_sslcontext = transport.get_extra_info("sslcontext") - self._transport_peername = transport.get_extra_info("peername") + self._transport_sslcontext = protocol.ssl_context + self._transport_peername = protocol.peername if remote is not None: self._cache["remote"] = remote From eadcd28528b3ff3450ab0ea5a11b10d5a7e660bf Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Fri, 18 Apr 2025 01:40:35 -1000 Subject: [PATCH 1339/1511] [3.11] Bump multidict to 6.4.3 (#10736) --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/cython.txt | 2 +- requirements/dev.txt | 2 +- requirements/multidict.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index d79bdab3893..f279c187ebc 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -26,7 +26,7 @@ gunicorn==23.0.0 # via -r requirements/base.in idna==3.4 # via yarl -multidict==6.1.0 +multidict==6.4.3 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 041a3737ab0..16816dcd426 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -116,7 +116,7 @@ markupsafe==2.1.5 # via jinja2 mdurl==0.1.2 # via markdown-it-py -multidict==6.1.0 +multidict==6.4.3 # via # -r requirements/multidict.in # -r requirements/runtime-deps.in diff --git a/requirements/cython.txt b/requirements/cython.txt index f67cc903a0b..b2ff3e71d39 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -6,7 +6,7 @@ # cython==3.0.11 # via -r requirements/cython.in -multidict==6.1.0 +multidict==6.4.3 # via -r requirements/multidict.in typing-extensions==4.12.2 # via multidict diff --git a/requirements/dev.txt b/requirements/dev.txt index a99644dff81..6ab9baf6b59 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -110,7 +110,7 @@ markupsafe==2.1.5 # via jinja2 mdurl==0.1.2 # via markdown-it-py -multidict==6.1.0 +multidict==6.4.3 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/multidict.txt b/requirements/multidict.txt index b8b44428920..a83b5029c3f 100644 --- a/requirements/multidict.txt +++ b/requirements/multidict.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe 
--output-file=requirements/multidict.txt --resolver=backtracking --strip-extras requirements/multidict.in # -multidict==6.1.0 +multidict==6.4.3 # via -r requirements/multidict.in typing-extensions==4.12.2 # via multidict diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index cf7f0e396f6..6c9fcc5ccd0 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -24,7 +24,7 @@ frozenlist==1.5.0 # aiosignal idna==3.4 # via yarl -multidict==6.1.0 +multidict==6.4.3 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/test.txt b/requirements/test.txt index cf81a7bf257..025940dcf50 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -62,7 +62,7 @@ markdown-it-py==3.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -multidict==6.1.0 +multidict==6.4.3 # via # -r requirements/runtime-deps.in # yarl From eacbe957b5164b253d4f98db9498ef49dfe0aa09 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 18 Apr 2025 11:46:56 +0000 Subject: [PATCH 1340/1511] Bump frozenlist from 1.5.0 to 1.6.0 (#10737) Bumps [frozenlist](https://github.com/aio-libs/frozenlist) from 1.5.0 to 1.6.0. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/frozenlist/releases">frozenlist's releases</a>.</em></p> <blockquote> <h2>1.6.0</h2> <h2>Bug fixes</h2> <ul> <li> <p>Stopped implicitly allowing the use of Cython pre-release versions when building the distribution package -- by :user:<code>ajsanchezsanz</code> and :user:<code>markgreene74</code>.</p> <p><em>Related commits on GitHub:</em> :commit:<code>41591f2</code>.</p> </li> </ul> <h2>Features</h2> <ul> <li> <p>Implemented support for the free-threaded build of CPython 3.13 -- by :user:<code>lysnikolaou</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/frozenlist/issues/618">#618</a>.</p> </li> <li> <p>Started building armv7l wheels -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/frozenlist/issues/642">#642</a>.</p> </li> </ul> <h2>Packaging updates and notes for downstreams</h2> <ul> <li> <p>Stopped implicitly allowing the use of Cython pre-release versions when building the distribution package -- by :user:<code>ajsanchezsanz</code> and :user:<code>markgreene74</code>.</p> <p><em>Related commits on GitHub:</em> :commit:<code>41591f2</code>.</p> </li> <li> <p>Started building wheels for the free-threaded build of CPython 3.13 -- by :user:<code>lysnikolaou</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/frozenlist/issues/618">#618</a>.</p> </li> <li> <p>The packaging metadata switched to including an SPDX license identifier introduced in :pep:<code>639</code> -- by :user:<code>cdce8p</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/frozenlist/issues/639">#639</a>.</p> </li> </ul> <h2>Contributor-facing changes</h2> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/frozenlist/blob/master/CHANGES.rst">frozenlist's changelog</a>.</em></p> <blockquote> <h1>v1.6.0</h1> <p><em>(2025-04-17)</em></p> <h2>Bug fixes</h2> <ul> <li> <p>Stopped implicitly allowing the use of Cython pre-release versions when building the distribution package -- by :user:<code>ajsanchezsanz</code> and :user:<code>markgreene74</code>.</p> <p><em>Related commits on GitHub:</em> :commit:<code>41591f2</code>.</p> </li> </ul> <h2>Features</h2> <ul> <li> <p>Implemented support for the free-threaded build of CPython 3.13 -- by :user:<code>lysnikolaou</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>618</code>.</p> </li> <li> <p>Started building armv7l wheels -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>642</code>.</p> </li> </ul> <h2>Packaging updates and notes for downstreams</h2> <ul> <li> <p>Stopped implicitly allowing the use of Cython pre-release versions when building the distribution package -- by :user:<code>ajsanchezsanz</code> and :user:<code>markgreene74</code>.</p> <p><em>Related commits on GitHub:</em> :commit:<code>41591f2</code>.</p> </li> <li> <p>Started building wheels for the free-threaded build of CPython 3.13 -- by :user:<code>lysnikolaou</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>618</code>.</p> </li> <li> <p>The packaging metadata switched to including an SPDX license identifier introduced in :pep:<code>639</code> -- by :user:<code>cdce8p</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>639</code>.</p> </li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/frozenlist/commit/9f4253c6a959af6a81ffc93350861e8a375355bc"><code>9f4253c</code></a> Fix towncrier head_line missing the leading v (<a href="https://redirect.github.com/aio-libs/frozenlist/issues/645">#645</a>)</li> <li><a href="https://github.com/aio-libs/frozenlist/commit/4c8207aac1a9f8865c444eb0622f4b0a6151a571"><code>4c8207a</code></a> Release 1.6.0 (<a href="https://redirect.github.com/aio-libs/frozenlist/issues/643">#643</a>)</li> <li><a href="https://github.com/aio-libs/frozenlist/commit/58aef99a5c91dc3a3d7bdc44068c35ec2986e65f"><code>58aef99</code></a> Start building wheels on armv7l (<a href="https://redirect.github.com/aio-libs/frozenlist/issues/642">#642</a>)</li> <li><a href="https://github.com/aio-libs/frozenlist/commit/d8e4a822ebefb0ffcc64c744006ec60ff74ab344"><code>d8e4a82</code></a> Use SPDX license expression (<a href="https://redirect.github.com/aio-libs/frozenlist/issues/639">#639</a>)</li> <li><a href="https://github.com/aio-libs/frozenlist/commit/57ce23807c0d08651106c9de0b78e525838f3fac"><code>57ce238</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/aio-libs/frozenlist/issues/641">#641</a>)</li> <li><a href="https://github.com/aio-libs/frozenlist/commit/f545c232865bcb260be716921add26bd7b941d16"><code>f545c23</code></a> Implement support for the free-threaded build of CPython 3.13 (<a href="https://redirect.github.com/aio-libs/frozenlist/issues/618">#618</a>)</li> <li><a href="https://github.com/aio-libs/frozenlist/commit/4ee4583a792a970a748872cc33a2f45bb984188f"><code>4ee4583</code></a> Build(deps): Bump pypa/cibuildwheel from 2.23.1 to 2.23.2 (<a href="https://redirect.github.com/aio-libs/frozenlist/issues/640">#640</a>)</li> <li><a href="https://github.com/aio-libs/frozenlist/commit/c28f32d6816ca0fa56a5876e84831c46084bb85d"><code>c28f32d</code></a> Better organize lint and test dependencies (<a 
href="https://redirect.github.com/aio-libs/frozenlist/issues/636">#636</a>)</li> <li><a href="https://github.com/aio-libs/frozenlist/commit/a611cc2042463db3dceba5d759439ca0f6b65179"><code>a611cc2</code></a> Build(deps): Bump pypa/cibuildwheel from 2.23.0 to 2.23.1 (<a href="https://redirect.github.com/aio-libs/frozenlist/issues/638">#638</a>)</li> <li><a href="https://github.com/aio-libs/frozenlist/commit/bfa0cb194ffb3859e7652b348bdaf35d7adf608b"><code>bfa0cb1</code></a> Reduce number of coverage uploads needed (<a href="https://redirect.github.com/aio-libs/frozenlist/issues/637">#637</a>)</li> <li>Additional commits viewable in <a href="https://github.com/aio-libs/frozenlist/compare/v1.5.0...v1.6.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=frozenlist&package-manager=pip&previous-version=1.5.0&new-version=1.6.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index e7dfdd67a62..b4366c8fa26 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -18,7 +18,7 @@ brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in cffi==1.17.1 # via pycares -frozenlist==1.5.0 +frozenlist==1.6.0 # via # -r requirements/runtime-deps.in # aiosignal diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 4332afac2e6..9cf1615af28 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -80,7 +80,7 @@ freezegun==1.5.1 # via # -r requirements/lint.in # -r requirements/test.in -frozenlist==1.5.0 +frozenlist==1.6.0 # via # -r requirements/runtime-deps.in # aiosignal diff --git a/requirements/dev.txt b/requirements/dev.txt index ba62db63f1d..fb26879cabc 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -78,7 +78,7 @@ freezegun==1.5.1 # via # 
-r requirements/lint.in # -r requirements/test.in -frozenlist==1.5.0 +frozenlist==1.6.0 # via # -r requirements/runtime-deps.in # aiosignal diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index da7a66e9a38..a1d1a47cf00 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -18,7 +18,7 @@ brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in cffi==1.17.1 # via pycares -frozenlist==1.5.0 +frozenlist==1.6.0 # via # -r requirements/runtime-deps.in # aiosignal diff --git a/requirements/test.txt b/requirements/test.txt index ea0360d111d..ab2185d9ee7 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -41,7 +41,7 @@ forbiddenfruit==0.1.4 # via blockbuster freezegun==1.5.1 # via -r requirements/test.in -frozenlist==1.5.0 +frozenlist==1.6.0 # via # -r requirements/runtime-deps.in # aiosignal From 2c3b885dbd217518d030f2b0cc6343c2da77cf49 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Fri, 18 Apr 2025 08:39:55 -1000 Subject: [PATCH 1341/1511] [PR #10713/8d74e26 backport][3.11] Avoid fetching loop time on each request unless logging is enabled (#10738) Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/10713.misc.rst | 1 + aiohttp/web_protocol.py | 15 +++++++++++---- tests/test_web_app.py | 14 ++++++++++++++ tests/test_web_log.py | 26 ++++++++++++++++++++++++++ 4 files changed, 52 insertions(+), 4 deletions(-) create mode 100644 CHANGES/10713.misc.rst diff --git a/CHANGES/10713.misc.rst b/CHANGES/10713.misc.rst new file mode 100644 index 00000000000..a556d11e1e0 --- /dev/null +++ b/CHANGES/10713.misc.rst @@ -0,0 +1 @@ +Optimized web server performance when access logging is disabled by reducing time syscalls -- by :user:`bdraco`. 
diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index a7d50195828..e1923aac24b 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -170,6 +170,7 @@ class RequestHandler(BaseProtocol): "_current_request", "_timeout_ceil_threshold", "_request_in_progress", + "_logging_enabled", "_cache", ) @@ -244,8 +245,10 @@ def __init__( self.access_logger: Optional[AbstractAccessLogger] = access_log_class( access_log, access_log_format ) + self._logging_enabled = self.access_logger.enabled else: self.access_logger = None + self._logging_enabled = False self._close = False self._force_close = False @@ -463,9 +466,11 @@ def force_close(self) -> None: self.transport = None def log_access( - self, request: BaseRequest, response: StreamResponse, time: float + self, request: BaseRequest, response: StreamResponse, time: Optional[float] ) -> None: if self.access_logger is not None and self.access_logger.enabled: + if TYPE_CHECKING: + assert time is not None self.access_logger.log(request, response, self._loop.time() - time) def log_debug(self, *args: Any, **kw: Any) -> None: @@ -495,7 +500,7 @@ def _process_keepalive(self) -> None: async def _handle_request( self, request: BaseRequest, - start_time: float, + start_time: Optional[float], request_handler: Callable[[BaseRequest], Awaitable[StreamResponse]], ) -> Tuple[StreamResponse, bool]: self._request_in_progress = True @@ -563,7 +568,9 @@ async def start(self) -> None: message, payload = self._messages.popleft() - start = loop.time() + # time is only fetched if logging is enabled as otherwise + # its thrown away and never used. 
+ start = loop.time() if self._logging_enabled else None manager.requests_count += 1 writer = StreamWriter(self, loop) @@ -671,7 +678,7 @@ async def start(self) -> None: self.transport.close() async def finish_response( - self, request: BaseRequest, resp: StreamResponse, start_time: float + self, request: BaseRequest, resp: StreamResponse, start_time: Optional[float] ) -> Tuple[StreamResponse, bool]: """Prepare the response and write_eof, then log access. diff --git a/tests/test_web_app.py b/tests/test_web_app.py index 6a86a3458a3..8c03a6041b2 100644 --- a/tests/test_web_app.py +++ b/tests/test_web_app.py @@ -144,6 +144,20 @@ def log(self, request, response, time): ) +async def test_app_make_handler_no_access_log_class(mocker) -> None: + srv = mocker.patch("aiohttp.web_app.Server") + app = web.Application(handler_args={"access_log": None}) + app._make_handler(access_log=None) + srv.assert_called_with( + app._handle, + request_factory=app._make_request, + loop=asyncio.get_event_loop(), + access_log=None, + debug=mock.ANY, + access_log_class=mock.ANY, + ) + + async def test_app_make_handler_raises_deprecation_warning() -> None: app = web.Application() diff --git a/tests/test_web_log.py b/tests/test_web_log.py index 0896c41c9e1..16c4b976daa 100644 --- a/tests/test_web_log.py +++ b/tests/test_web_log.py @@ -255,3 +255,29 @@ def enabled(self) -> bool: resp = await client.get("/") assert 200 == resp.status assert "This should not be logged" not in caplog.text + + +async def test_logger_set_to_none( + aiohttp_server: AiohttpServer, + aiohttp_client: AiohttpClient, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test logger does nothing when access_log is set to None.""" + + async def handler(request: web.Request) -> web.Response: + return web.Response() + + class Logger(AbstractAccessLogger): + + def log( + self, request: web.BaseRequest, response: web.StreamResponse, time: float + ) -> None: + self.logger.critical("This should not be logged") # pragma: no cover + + 
app = web.Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app, access_log=None, access_log_class=Logger) + client = await aiohttp_client(server) + resp = await client.get("/") + assert 200 == resp.status + assert "This should not be logged" not in caplog.text From 47511fc620376c8758237abf69486d6edaabeedb Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Fri, 18 Apr 2025 08:43:11 -1000 Subject: [PATCH 1342/1511] [PR #10713/8d74e26 backport][3.12] Avoid fetching loop time on each request unless logging is enabled (#10739) Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/10713.misc.rst | 1 + aiohttp/web_protocol.py | 15 +++++++++++---- tests/test_web_app.py | 14 ++++++++++++++ tests/test_web_log.py | 26 ++++++++++++++++++++++++++ 4 files changed, 52 insertions(+), 4 deletions(-) create mode 100644 CHANGES/10713.misc.rst diff --git a/CHANGES/10713.misc.rst b/CHANGES/10713.misc.rst new file mode 100644 index 00000000000..a556d11e1e0 --- /dev/null +++ b/CHANGES/10713.misc.rst @@ -0,0 +1 @@ +Optimized web server performance when access logging is disabled by reducing time syscalls -- by :user:`bdraco`. 
diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index a7d50195828..e1923aac24b 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -170,6 +170,7 @@ class RequestHandler(BaseProtocol): "_current_request", "_timeout_ceil_threshold", "_request_in_progress", + "_logging_enabled", "_cache", ) @@ -244,8 +245,10 @@ def __init__( self.access_logger: Optional[AbstractAccessLogger] = access_log_class( access_log, access_log_format ) + self._logging_enabled = self.access_logger.enabled else: self.access_logger = None + self._logging_enabled = False self._close = False self._force_close = False @@ -463,9 +466,11 @@ def force_close(self) -> None: self.transport = None def log_access( - self, request: BaseRequest, response: StreamResponse, time: float + self, request: BaseRequest, response: StreamResponse, time: Optional[float] ) -> None: if self.access_logger is not None and self.access_logger.enabled: + if TYPE_CHECKING: + assert time is not None self.access_logger.log(request, response, self._loop.time() - time) def log_debug(self, *args: Any, **kw: Any) -> None: @@ -495,7 +500,7 @@ def _process_keepalive(self) -> None: async def _handle_request( self, request: BaseRequest, - start_time: float, + start_time: Optional[float], request_handler: Callable[[BaseRequest], Awaitable[StreamResponse]], ) -> Tuple[StreamResponse, bool]: self._request_in_progress = True @@ -563,7 +568,9 @@ async def start(self) -> None: message, payload = self._messages.popleft() - start = loop.time() + # time is only fetched if logging is enabled as otherwise + # its thrown away and never used. 
+ start = loop.time() if self._logging_enabled else None manager.requests_count += 1 writer = StreamWriter(self, loop) @@ -671,7 +678,7 @@ async def start(self) -> None: self.transport.close() async def finish_response( - self, request: BaseRequest, resp: StreamResponse, start_time: float + self, request: BaseRequest, resp: StreamResponse, start_time: Optional[float] ) -> Tuple[StreamResponse, bool]: """Prepare the response and write_eof, then log access. diff --git a/tests/test_web_app.py b/tests/test_web_app.py index 6a86a3458a3..8c03a6041b2 100644 --- a/tests/test_web_app.py +++ b/tests/test_web_app.py @@ -144,6 +144,20 @@ def log(self, request, response, time): ) +async def test_app_make_handler_no_access_log_class(mocker) -> None: + srv = mocker.patch("aiohttp.web_app.Server") + app = web.Application(handler_args={"access_log": None}) + app._make_handler(access_log=None) + srv.assert_called_with( + app._handle, + request_factory=app._make_request, + loop=asyncio.get_event_loop(), + access_log=None, + debug=mock.ANY, + access_log_class=mock.ANY, + ) + + async def test_app_make_handler_raises_deprecation_warning() -> None: app = web.Application() diff --git a/tests/test_web_log.py b/tests/test_web_log.py index 0896c41c9e1..16c4b976daa 100644 --- a/tests/test_web_log.py +++ b/tests/test_web_log.py @@ -255,3 +255,29 @@ def enabled(self) -> bool: resp = await client.get("/") assert 200 == resp.status assert "This should not be logged" not in caplog.text + + +async def test_logger_set_to_none( + aiohttp_server: AiohttpServer, + aiohttp_client: AiohttpClient, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test logger does nothing when access_log is set to None.""" + + async def handler(request: web.Request) -> web.Response: + return web.Response() + + class Logger(AbstractAccessLogger): + + def log( + self, request: web.BaseRequest, response: web.StreamResponse, time: float + ) -> None: + self.logger.critical("This should not be logged") # pragma: no cover + + 
app = web.Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app, access_log=None, access_log_class=Logger) + client = await aiohttp_client(server) + resp = await client.get("/") + assert 200 == resp.status + assert "This should not be logged" not in caplog.text From 099cc0c9f8943d41586055a6825aec31bc70bbd3 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Fri, 18 Apr 2025 21:16:35 -1000 Subject: [PATCH 1343/1511] [PR #10740/0d21d8d backport][3.11] Refactor WebSocket reader to avoid creating lists (#10746) --- CHANGES/10740.misc.rst | 1 + aiohttp/_websocket/reader_c.pxd | 36 ++-- aiohttp/_websocket/reader_py.py | 318 ++++++++++++++++---------------- tests/test_websocket_parser.py | 312 ++++++++++++++++--------------- 4 files changed, 330 insertions(+), 337 deletions(-) create mode 100644 CHANGES/10740.misc.rst diff --git a/CHANGES/10740.misc.rst b/CHANGES/10740.misc.rst new file mode 100644 index 00000000000..34ed19aebba --- /dev/null +++ b/CHANGES/10740.misc.rst @@ -0,0 +1 @@ +Improved performance of the WebSocket reader -- by :user:`bdraco`. 
diff --git a/aiohttp/_websocket/reader_c.pxd b/aiohttp/_websocket/reader_c.pxd index f156a7ff704..3efebeb81dc 100644 --- a/aiohttp/_websocket/reader_c.pxd +++ b/aiohttp/_websocket/reader_c.pxd @@ -8,12 +8,17 @@ cdef unsigned int READ_PAYLOAD_LENGTH cdef unsigned int READ_PAYLOAD_MASK cdef unsigned int READ_PAYLOAD -cdef unsigned int OP_CODE_CONTINUATION -cdef unsigned int OP_CODE_TEXT -cdef unsigned int OP_CODE_BINARY -cdef unsigned int OP_CODE_CLOSE -cdef unsigned int OP_CODE_PING -cdef unsigned int OP_CODE_PONG +cdef int OP_CODE_NOT_SET +cdef int OP_CODE_CONTINUATION +cdef int OP_CODE_TEXT +cdef int OP_CODE_BINARY +cdef int OP_CODE_CLOSE +cdef int OP_CODE_PING +cdef int OP_CODE_PONG + +cdef int COMPRESSED_NOT_SET +cdef int COMPRESSED_FALSE +cdef int COMPRESSED_TRUE cdef object UNPACK_LEN3 cdef object UNPACK_CLOSE_CODE @@ -60,9 +65,9 @@ cdef class WebSocketReader: cdef bytearray _partial cdef unsigned int _state - cdef object _opcode - cdef object _frame_fin - cdef object _frame_opcode + cdef int _opcode + cdef bint _frame_fin + cdef int _frame_opcode cdef object _frame_payload cdef unsigned long long _frame_payload_len @@ -71,7 +76,7 @@ cdef class WebSocketReader: cdef bytes _frame_mask cdef unsigned long long _payload_length cdef unsigned int _payload_length_flag - cdef object _compressed + cdef int _compressed cdef object _decompressobj cdef bint _compress @@ -82,22 +87,21 @@ cdef class WebSocketReader: fin=bint, has_partial=bint, payload_merged=bytes, - opcode="unsigned int", ) - cpdef void _feed_data(self, bytes data) + cpdef void _handle_frame(self, bint fin, int opcode, object payload, int compressed) except * @cython.locals( start_pos="unsigned int", - buf_len="unsigned int", + data_len="unsigned int", length="unsigned int", chunk_size="unsigned int", chunk_len="unsigned int", - buf_length="unsigned int", - buf_cstr="const unsigned char *", + data_length="unsigned int", + data_cstr="const unsigned char *", first_byte="unsigned char", second_byte="unsigned 
char", end_pos="unsigned int", has_mask=bint, fin=bint, ) - cpdef list parse_frame(self, bytes buf) + cpdef void _feed_data(self, bytes data) except * diff --git a/aiohttp/_websocket/reader_py.py b/aiohttp/_websocket/reader_py.py index 92ad47a52f0..5c5dbc3b0c4 100644 --- a/aiohttp/_websocket/reader_py.py +++ b/aiohttp/_websocket/reader_py.py @@ -3,7 +3,7 @@ import asyncio import builtins from collections import deque -from typing import Deque, Final, List, Optional, Set, Tuple, Union +from typing import Deque, Final, Optional, Set, Tuple, Union from ..base_protocol import BaseProtocol from ..compression_utils import ZLibDecompressor @@ -31,6 +31,7 @@ WS_MSG_TYPE_TEXT = WSMsgType.TEXT # WSMsgType values unpacked so they can by cythonized to ints +OP_CODE_NOT_SET = -1 OP_CODE_CONTINUATION = WSMsgType.CONTINUATION.value OP_CODE_TEXT = WSMsgType.TEXT.value OP_CODE_BINARY = WSMsgType.BINARY.value @@ -41,9 +42,13 @@ EMPTY_FRAME_ERROR = (True, b"") EMPTY_FRAME = (False, b"") +COMPRESSED_NOT_SET = -1 +COMPRESSED_FALSE = 0 +COMPRESSED_TRUE = 1 + TUPLE_NEW = tuple.__new__ -int_ = int # Prevent Cython from converting to PyInt +cython_int = int # Typed to int in Python, but cython with use a signed int in the pxd class WebSocketDataQueue: @@ -95,7 +100,7 @@ def feed_eof(self) -> None: self._release_waiter() self._exception = None # Break cyclic references - def feed_data(self, data: "WSMessage", size: "int_") -> None: + def feed_data(self, data: "WSMessage", size: "cython_int") -> None: self._size += size self._put_buffer((data, size)) self._release_waiter() @@ -136,9 +141,9 @@ def __init__( self._partial = bytearray() self._state = READ_HEADER - self._opcode: Optional[int] = None + self._opcode: int = OP_CODE_NOT_SET self._frame_fin = False - self._frame_opcode: Optional[int] = None + self._frame_opcode: int = OP_CODE_NOT_SET self._frame_payload: Union[bytes, bytearray] = b"" self._frame_payload_len = 0 @@ -147,7 +152,7 @@ def __init__( self._frame_mask: Optional[bytes] = 
None self._payload_length = 0 self._payload_length_flag = 0 - self._compressed: Optional[bool] = None + self._compressed: int = COMPRESSED_NOT_SET self._decompressobj: Optional[ZLibDecompressor] = None self._compress = compress @@ -175,165 +180,153 @@ def feed_data( return EMPTY_FRAME - def _feed_data(self, data: bytes) -> None: + def _handle_frame( + self, + fin: bool, + opcode: Union[int, cython_int], # Union intended: Cython pxd uses C int + payload: Union[bytes, bytearray], + compressed: Union[int, cython_int], # Union intended: Cython pxd uses C int + ) -> None: msg: WSMessage - for frame in self.parse_frame(data): - fin = frame[0] - opcode = frame[1] - payload = frame[2] - compressed = frame[3] - - is_continuation = opcode == OP_CODE_CONTINUATION - if opcode == OP_CODE_TEXT or opcode == OP_CODE_BINARY or is_continuation: - # load text/binary - if not fin: - # got partial frame payload - if not is_continuation: - self._opcode = opcode - self._partial += payload - if self._max_msg_size and len(self._partial) >= self._max_msg_size: - raise WebSocketError( - WSCloseCode.MESSAGE_TOO_BIG, - f"Message size {len(self._partial)} " - f"exceeds limit {self._max_msg_size}", - ) - continue - - has_partial = bool(self._partial) - if is_continuation: - if self._opcode is None: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Continuation frame for non started message", - ) - opcode = self._opcode - self._opcode = None - # previous frame was non finished - # we should get continuation opcode - elif has_partial: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "The opcode in non-fin frame is expected " - f"to be zero, got {opcode!r}", - ) - - assembled_payload: Union[bytes, bytearray] - if has_partial: - assembled_payload = self._partial + payload - self._partial.clear() - else: - assembled_payload = payload - - if self._max_msg_size and len(assembled_payload) >= self._max_msg_size: + if opcode in {OP_CODE_TEXT, OP_CODE_BINARY, OP_CODE_CONTINUATION}: + # load 
text/binary + if not fin: + # got partial frame payload + if opcode != OP_CODE_CONTINUATION: + self._opcode = opcode + self._partial += payload + if self._max_msg_size and len(self._partial) >= self._max_msg_size: raise WebSocketError( WSCloseCode.MESSAGE_TOO_BIG, - f"Message size {len(assembled_payload)} " + f"Message size {len(self._partial)} " f"exceeds limit {self._max_msg_size}", ) + return - # Decompress process must to be done after all packets - # received. - if compressed: - if not self._decompressobj: - self._decompressobj = ZLibDecompressor( - suppress_deflate_header=True - ) - payload_merged = self._decompressobj.decompress_sync( - assembled_payload + WS_DEFLATE_TRAILING, self._max_msg_size - ) - if self._decompressobj.unconsumed_tail: - left = len(self._decompressobj.unconsumed_tail) - raise WebSocketError( - WSCloseCode.MESSAGE_TOO_BIG, - f"Decompressed message size {self._max_msg_size + left}" - f" exceeds limit {self._max_msg_size}", - ) - elif type(assembled_payload) is bytes: - payload_merged = assembled_payload - else: - payload_merged = bytes(assembled_payload) - - if opcode == OP_CODE_TEXT: - try: - text = payload_merged.decode("utf-8") - except UnicodeDecodeError as exc: - raise WebSocketError( - WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" - ) from exc - - # XXX: The Text and Binary messages here can be a performance - # bottleneck, so we use tuple.__new__ to improve performance. - # This is not type safe, but many tests should fail in - # test_client_ws_functional.py if this is wrong. 
- self.queue.feed_data( - TUPLE_NEW(WSMessage, (WS_MSG_TYPE_TEXT, text, "")), - len(payload_merged), - ) - else: - self.queue.feed_data( - TUPLE_NEW(WSMessage, (WS_MSG_TYPE_BINARY, payload_merged, "")), - len(payload_merged), - ) - elif opcode == OP_CODE_CLOSE: - if len(payload) >= 2: - close_code = UNPACK_CLOSE_CODE(payload[:2])[0] - if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - f"Invalid close code: {close_code}", - ) - try: - close_message = payload[2:].decode("utf-8") - except UnicodeDecodeError as exc: - raise WebSocketError( - WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" - ) from exc - msg = TUPLE_NEW( - WSMessage, (WSMsgType.CLOSE, close_code, close_message) - ) - elif payload: + has_partial = bool(self._partial) + if opcode == OP_CODE_CONTINUATION: + if self._opcode == OP_CODE_NOT_SET: raise WebSocketError( WSCloseCode.PROTOCOL_ERROR, - f"Invalid close frame: {fin} {opcode} {payload!r}", + "Continuation frame for non started message", ) - else: - msg = TUPLE_NEW(WSMessage, (WSMsgType.CLOSE, 0, "")) + opcode = self._opcode + self._opcode = OP_CODE_NOT_SET + # previous frame was non finished + # we should get continuation opcode + elif has_partial: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "The opcode in non-fin frame is expected " + f"to be zero, got {opcode!r}", + ) - self.queue.feed_data(msg, 0) - elif opcode == OP_CODE_PING: - msg = TUPLE_NEW(WSMessage, (WSMsgType.PING, payload, "")) - self.queue.feed_data(msg, len(payload)) + assembled_payload: Union[bytes, bytearray] + if has_partial: + assembled_payload = self._partial + payload + self._partial.clear() + else: + assembled_payload = payload - elif opcode == OP_CODE_PONG: - msg = TUPLE_NEW(WSMessage, (WSMsgType.PONG, payload, "")) - self.queue.feed_data(msg, len(payload)) + if self._max_msg_size and len(assembled_payload) >= self._max_msg_size: + raise WebSocketError( + WSCloseCode.MESSAGE_TOO_BIG, + 
f"Message size {len(assembled_payload)} " + f"exceeds limit {self._max_msg_size}", + ) + + # Decompress process must to be done after all packets + # received. + if compressed: + if not self._decompressobj: + self._decompressobj = ZLibDecompressor(suppress_deflate_header=True) + payload_merged = self._decompressobj.decompress_sync( + assembled_payload + WS_DEFLATE_TRAILING, self._max_msg_size + ) + if self._decompressobj.unconsumed_tail: + left = len(self._decompressobj.unconsumed_tail) + raise WebSocketError( + WSCloseCode.MESSAGE_TOO_BIG, + f"Decompressed message size {self._max_msg_size + left}" + f" exceeds limit {self._max_msg_size}", + ) + elif type(assembled_payload) is bytes: + payload_merged = assembled_payload + else: + payload_merged = bytes(assembled_payload) + if opcode == OP_CODE_TEXT: + try: + text = payload_merged.decode("utf-8") + except UnicodeDecodeError as exc: + raise WebSocketError( + WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" + ) from exc + + # XXX: The Text and Binary messages here can be a performance + # bottleneck, so we use tuple.__new__ to improve performance. + # This is not type safe, but many tests should fail in + # test_client_ws_functional.py if this is wrong. 
+ self.queue.feed_data( + TUPLE_NEW(WSMessage, (WS_MSG_TYPE_TEXT, text, "")), + len(payload_merged), + ) else: + self.queue.feed_data( + TUPLE_NEW(WSMessage, (WS_MSG_TYPE_BINARY, payload_merged, "")), + len(payload_merged), + ) + elif opcode == OP_CODE_CLOSE: + if len(payload) >= 2: + close_code = UNPACK_CLOSE_CODE(payload[:2])[0] + if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + f"Invalid close code: {close_code}", + ) + try: + close_message = payload[2:].decode("utf-8") + except UnicodeDecodeError as exc: + raise WebSocketError( + WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" + ) from exc + msg = TUPLE_NEW(WSMessage, (WSMsgType.CLOSE, close_code, close_message)) + elif payload: raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}" + WSCloseCode.PROTOCOL_ERROR, + f"Invalid close frame: {fin} {opcode} {payload!r}", ) + else: + msg = TUPLE_NEW(WSMessage, (WSMsgType.CLOSE, 0, "")) + + self.queue.feed_data(msg, 0) + elif opcode == OP_CODE_PING: + msg = TUPLE_NEW(WSMessage, (WSMsgType.PING, payload, "")) + self.queue.feed_data(msg, len(payload)) + elif opcode == OP_CODE_PONG: + msg = TUPLE_NEW(WSMessage, (WSMsgType.PONG, payload, "")) + self.queue.feed_data(msg, len(payload)) + else: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}" + ) - def parse_frame( - self, buf: bytes - ) -> List[Tuple[bool, Optional[int], Union[bytes, bytearray], Optional[bool]]]: + def _feed_data(self, data: bytes) -> None: """Return the next frame from the socket.""" - frames: List[ - Tuple[bool, Optional[int], Union[bytes, bytearray], Optional[bool]] - ] = [] if self._tail: - buf, self._tail = self._tail + buf, b"" + data, self._tail = self._tail + data, b"" start_pos: int = 0 - buf_length = len(buf) - buf_cstr = buf + data_length = len(data) + data_cstr = data while True: # read header if self._state == READ_HEADER: - if buf_length - start_pos < 
2: + if data_length - start_pos < 2: break - first_byte = buf_cstr[start_pos] - second_byte = buf_cstr[start_pos + 1] + first_byte = data_cstr[start_pos] + second_byte = data_cstr[start_pos + 1] start_pos += 2 fin = (first_byte >> 7) & 1 @@ -378,8 +371,8 @@ def parse_frame( # Set compress status if last package is FIN # OR set compress status if this is first fragment # Raise error if not first fragment with rsv1 = 0x1 - if self._frame_fin or self._compressed is None: - self._compressed = True if rsv1 else False + if self._frame_fin or self._compressed == COMPRESSED_NOT_SET: + self._compressed = COMPRESSED_TRUE if rsv1 else COMPRESSED_FALSE elif rsv1: raise WebSocketError( WSCloseCode.PROTOCOL_ERROR, @@ -396,18 +389,17 @@ def parse_frame( if self._state == READ_PAYLOAD_LENGTH: length_flag = self._payload_length_flag if length_flag == 126: - if buf_length - start_pos < 2: + if data_length - start_pos < 2: break - first_byte = buf_cstr[start_pos] - second_byte = buf_cstr[start_pos + 1] + first_byte = data_cstr[start_pos] + second_byte = data_cstr[start_pos + 1] start_pos += 2 self._payload_length = first_byte << 8 | second_byte elif length_flag > 126: - if buf_length - start_pos < 8: + if data_length - start_pos < 8: break - data = buf_cstr[start_pos : start_pos + 8] + self._payload_length = UNPACK_LEN3(data, start_pos)[0] start_pos += 8 - self._payload_length = UNPACK_LEN3(data)[0] else: self._payload_length = length_flag @@ -415,16 +407,16 @@ def parse_frame( # read payload mask if self._state == READ_PAYLOAD_MASK: - if buf_length - start_pos < 4: + if data_length - start_pos < 4: break - self._frame_mask = buf_cstr[start_pos : start_pos + 4] + self._frame_mask = data_cstr[start_pos : start_pos + 4] start_pos += 4 self._state = READ_PAYLOAD if self._state == READ_PAYLOAD: - chunk_len = buf_length - start_pos + chunk_len = data_length - start_pos if self._payload_length >= chunk_len: - end_pos = buf_length + end_pos = data_length self._payload_length -= chunk_len 
else: end_pos = start_pos + self._payload_length @@ -433,10 +425,10 @@ def parse_frame( if self._frame_payload_len: if type(self._frame_payload) is not bytearray: self._frame_payload = bytearray(self._frame_payload) - self._frame_payload += buf_cstr[start_pos:end_pos] + self._frame_payload += data_cstr[start_pos:end_pos] else: # Fast path for the first frame - self._frame_payload = buf_cstr[start_pos:end_pos] + self._frame_payload = data_cstr[start_pos:end_pos] self._frame_payload_len += end_pos - start_pos start_pos = end_pos @@ -450,19 +442,17 @@ def parse_frame( self._frame_payload = bytearray(self._frame_payload) websocket_mask(self._frame_mask, self._frame_payload) - frames.append( - ( - self._frame_fin, - self._frame_opcode, - self._frame_payload, - self._compressed, - ) + self._handle_frame( + self._frame_fin, + self._frame_opcode, + self._frame_payload, + self._compressed, ) self._frame_payload = b"" self._frame_payload_len = 0 self._state = READ_HEADER # XXX: Cython needs slices to be bounded, so we can't omit the slice end here. - self._tail = buf_cstr[start_pos:buf_length] if start_pos < buf_length else b"" - - return frames + self._tail = ( + data_cstr[start_pos:data_length] if start_pos < data_length else b"" + ) diff --git a/tests/test_websocket_parser.py b/tests/test_websocket_parser.py index 7f8b98d4566..8a65ac11d50 100644 --- a/tests/test_websocket_parser.py +++ b/tests/test_websocket_parser.py @@ -27,6 +27,25 @@ class PatchableWebSocketReader(WebSocketReader): """WebSocketReader subclass that allows for patching parse_frame.""" + def parse_frame( + self, data: bytes + ) -> list[tuple[bool, int, Union[bytes, bytearray], int]]: + # This method is overridden to allow for patching in tests. + frames: list[tuple[bool, int, Union[bytes, bytearray], int]] = [] + + def _handle_frame( + fin: bool, + opcode: int, + payload: Union[bytes, bytearray], + compressed: int, + ) -> None: + # This method is overridden to allow for patching in tests. 
+ frames.append((fin, opcode, payload, compressed)) + + with mock.patch.object(self, "_handle_frame", _handle_frame): + self._feed_data(data) + return frames + def build_frame( message, opcode, use_mask=False, noheader=False, is_fin=True, compress=False @@ -127,32 +146,32 @@ def test_feed_data_remembers_exception(parser: WebSocketReader) -> None: assert data == b"" -def test_parse_frame(parser) -> None: +def test_parse_frame(parser: PatchableWebSocketReader) -> None: parser.parse_frame(struct.pack("!BB", 0b00000001, 0b00000001)) res = parser.parse_frame(b"1") fin, opcode, payload, compress = res[0] - assert (0, 1, b"1", False) == (fin, opcode, payload, not not compress) + assert (0, 1, b"1", 0) == (fin, opcode, payload, not not compress) -def test_parse_frame_length0(parser) -> None: +def test_parse_frame_length0(parser: PatchableWebSocketReader) -> None: fin, opcode, payload, compress = parser.parse_frame( struct.pack("!BB", 0b00000001, 0b00000000) )[0] - assert (0, 1, b"", False) == (fin, opcode, payload, not not compress) + assert (0, 1, b"", 0) == (fin, opcode, payload, not not compress) -def test_parse_frame_length2(parser) -> None: +def test_parse_frame_length2(parser: PatchableWebSocketReader) -> None: parser.parse_frame(struct.pack("!BB", 0b00000001, 126)) parser.parse_frame(struct.pack("!H", 4)) res = parser.parse_frame(b"1234") fin, opcode, payload, compress = res[0] - assert (0, 1, b"1234", False) == (fin, opcode, payload, not not compress) + assert (0, 1, b"1234", 0) == (fin, opcode, payload, not not compress) -def test_parse_frame_length2_multi_byte(parser: WebSocketReader) -> None: +def test_parse_frame_length2_multi_byte(parser: PatchableWebSocketReader) -> None: """Ensure a multi-byte length is parsed correctly.""" expected_payload = b"1" * 32768 parser.parse_frame(struct.pack("!BB", 0b00000001, 126)) @@ -160,10 +179,12 @@ def test_parse_frame_length2_multi_byte(parser: WebSocketReader) -> None: res = parser.parse_frame(b"1" * 32768) fin, opcode, 
payload, compress = res[0] - assert (0, 1, expected_payload, False) == (fin, opcode, payload, not not compress) + assert (0, 1, expected_payload, 0) == (fin, opcode, payload, not not compress) -def test_parse_frame_length2_multi_byte_multi_packet(parser: WebSocketReader) -> None: +def test_parse_frame_length2_multi_byte_multi_packet( + parser: PatchableWebSocketReader, +) -> None: """Ensure a multi-byte length with multiple packets is parsed correctly.""" expected_payload = b"1" * 32768 assert parser.parse_frame(struct.pack("!BB", 0b00000001, 126)) == [] @@ -174,44 +195,53 @@ def test_parse_frame_length2_multi_byte_multi_packet(parser: WebSocketReader) -> res = parser.parse_frame(b"1" * 8192) fin, opcode, payload, compress = res[0] assert len(payload) == 32768 - assert (0, 1, expected_payload, False) == (fin, opcode, payload, not not compress) + assert (0, 1, expected_payload, 0) == (fin, opcode, payload, not not compress) -def test_parse_frame_length4(parser: WebSocketReader) -> None: +def test_parse_frame_length4(parser: PatchableWebSocketReader) -> None: parser.parse_frame(struct.pack("!BB", 0b00000001, 127)) parser.parse_frame(struct.pack("!Q", 4)) fin, opcode, payload, compress = parser.parse_frame(b"1234")[0] - assert (0, 1, b"1234", False) == (fin, opcode, payload, not not compress) + assert (0, 1, b"1234", 0) == (fin, opcode, payload, compress) -def test_parse_frame_mask(parser) -> None: +def test_parse_frame_mask(parser: PatchableWebSocketReader) -> None: parser.parse_frame(struct.pack("!BB", 0b00000001, 0b10000001)) parser.parse_frame(b"0001") fin, opcode, payload, compress = parser.parse_frame(b"1")[0] - assert (0, 1, b"\x01", False) == (fin, opcode, payload, not not compress) + assert (0, 1, b"\x01", 0) == (fin, opcode, payload, compress) -def test_parse_frame_header_reversed_bits(out, parser) -> None: +def test_parse_frame_header_reversed_bits( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: with pytest.raises(WebSocketError): 
parser.parse_frame(struct.pack("!BB", 0b01100000, 0b00000000)) raise out.exception() -def test_parse_frame_header_control_frame(out, parser) -> None: +def test_parse_frame_header_control_frame( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: with pytest.raises(WebSocketError): parser.parse_frame(struct.pack("!BB", 0b00001000, 0b00000000)) raise out.exception() -def _test_parse_frame_header_new_data_err(out, parser): +@pytest.mark.xfail() +def test_parse_frame_header_new_data_err( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: with pytest.raises(WebSocketError): parser.parse_frame(struct.pack("!BB", 0b000000000, 0b00000000)) raise out.exception() -def test_parse_frame_header_payload_size(out, parser) -> None: +def test_parse_frame_header_payload_size( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: with pytest.raises(WebSocketError): parser.parse_frame(struct.pack("!BB", 0b10001000, 0b01111110)) raise out.exception() @@ -226,54 +256,45 @@ def test_parse_frame_header_payload_size(out, parser) -> None: ) def test_ping_frame( out: WebSocketDataQueue, - parser: WebSocketReader, + parser: PatchableWebSocketReader, data: Union[bytes, bytearray, memoryview], ) -> None: - with mock.patch.object(parser, "parse_frame", autospec=True) as m: - m.return_value = [(1, WSMsgType.PING, b"data", False)] - - parser.feed_data(data) - res = out._buffer[0] - assert res == ((WSMsgType.PING, b"data", ""), 4) - + parser._handle_frame(True, WSMsgType.PING, b"data", 0) + res = out._buffer[0] + assert res == ((WSMsgType.PING, b"data", ""), 4) -def test_pong_frame(out, parser) -> None: - parser.parse_frame = mock.Mock() - parser.parse_frame.return_value = [(1, WSMsgType.PONG, b"data", False)] - parser.feed_data(b"") +def test_pong_frame(out: WebSocketDataQueue, parser: PatchableWebSocketReader) -> None: + parser._handle_frame(True, WSMsgType.PONG, b"data", 0) res = out._buffer[0] assert res == ((WSMsgType.PONG, b"data", 
""), 4) -def test_close_frame(out, parser) -> None: - parser.parse_frame = mock.Mock() - parser.parse_frame.return_value = [(1, WSMsgType.CLOSE, b"", False)] - - parser.feed_data(b"") +def test_close_frame(out: WebSocketDataQueue, parser: PatchableWebSocketReader) -> None: + parser._handle_frame(True, WSMsgType.CLOSE, b"", 0) res = out._buffer[0] assert res == ((WSMsgType.CLOSE, 0, ""), 0) -def test_close_frame_info(out, parser) -> None: - parser.parse_frame = mock.Mock() - parser.parse_frame.return_value = [(1, WSMsgType.CLOSE, b"0112345", False)] - - parser.feed_data(b"") +def test_close_frame_info( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: + parser._handle_frame(True, WSMsgType.CLOSE, b"0112345", 0) res = out._buffer[0] assert res == (WSMessage(WSMsgType.CLOSE, 12337, "12345"), 0) -def test_close_frame_invalid(out, parser) -> None: - parser.parse_frame = mock.Mock() - parser.parse_frame.return_value = [(1, WSMsgType.CLOSE, b"1", False)] - parser.feed_data(b"") - - assert isinstance(out.exception(), WebSocketError) - assert out.exception().code == WSCloseCode.PROTOCOL_ERROR +def test_close_frame_invalid( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: + with pytest.raises(WebSocketError) as ctx: + parser._handle_frame(True, WSMsgType.CLOSE, b"1", 0) + assert ctx.value.code == WSCloseCode.PROTOCOL_ERROR -def test_close_frame_invalid_2(out, parser) -> None: +def test_close_frame_invalid_2( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: data = build_close_frame(code=1) with pytest.raises(WebSocketError) as ctx: @@ -282,7 +303,7 @@ def test_close_frame_invalid_2(out, parser) -> None: assert ctx.value.code == WSCloseCode.PROTOCOL_ERROR -def test_close_frame_unicode_err(parser) -> None: +def test_close_frame_unicode_err(parser: PatchableWebSocketReader) -> None: data = build_close_frame(code=1000, message=b"\xf4\x90\x80\x80") with pytest.raises(WebSocketError) as ctx: @@ -291,23 +312,21 @@ 
def test_close_frame_unicode_err(parser) -> None: assert ctx.value.code == WSCloseCode.INVALID_TEXT -def test_unknown_frame(out, parser) -> None: - parser.parse_frame = mock.Mock() - parser.parse_frame.return_value = [(1, WSMsgType.CONTINUATION, b"", False)] - +def test_unknown_frame( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: with pytest.raises(WebSocketError): - parser.feed_data(b"") - raise out.exception() + parser._handle_frame(True, WSMsgType.CONTINUATION, b"", 0) -def test_simple_text(out, parser) -> None: +def test_simple_text(out: WebSocketDataQueue, parser: PatchableWebSocketReader) -> None: data = build_frame(b"text", WSMsgType.TEXT) parser._feed_data(data) res = out._buffer[0] assert res == ((WSMsgType.TEXT, "text", ""), 4) -def test_simple_text_unicode_err(parser) -> None: +def test_simple_text_unicode_err(parser: PatchableWebSocketReader) -> None: data = build_frame(b"\xf4\x90\x80\x80", WSMsgType.TEXT) with pytest.raises(WebSocketError) as ctx: @@ -316,16 +335,18 @@ def test_simple_text_unicode_err(parser) -> None: assert ctx.value.code == WSCloseCode.INVALID_TEXT -def test_simple_binary(out, parser) -> None: - parser.parse_frame = mock.Mock() - parser.parse_frame.return_value = [(1, WSMsgType.BINARY, b"binary", False)] - - parser.feed_data(b"") +def test_simple_binary( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: + data = build_frame(b"binary", WSMsgType.BINARY) + parser._feed_data(data) res = out._buffer[0] assert res == ((WSMsgType.BINARY, b"binary", ""), 6) -def test_fragmentation_header(out, parser) -> None: +def test_fragmentation_header( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: data = build_frame(b"a", WSMsgType.TEXT) parser._feed_data(data[:1]) parser._feed_data(data[1:]) @@ -334,7 +355,9 @@ def test_fragmentation_header(out, parser) -> None: assert res == (WSMessage(WSMsgType.TEXT, "a", ""), 1) -def test_continuation(out, parser) -> None: +def 
test_continuation( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: data1 = build_frame(b"line1", WSMsgType.TEXT, is_fin=False) parser._feed_data(data1) @@ -345,14 +368,9 @@ def test_continuation(out, parser) -> None: assert res == (WSMessage(WSMsgType.TEXT, "line1line2", ""), 10) -def test_continuation_with_ping(out, parser) -> None: - parser.parse_frame = mock.Mock() - parser.parse_frame.return_value = [ - (0, WSMsgType.TEXT, b"line1", False), - (0, WSMsgType.PING, b"", False), - (1, WSMsgType.CONTINUATION, b"line2", False), - ] - +def test_continuation_with_ping( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: data1 = build_frame(b"line1", WSMsgType.TEXT, is_fin=False) parser._feed_data(data1) @@ -368,90 +386,78 @@ def test_continuation_with_ping(out, parser) -> None: assert res == (WSMessage(WSMsgType.TEXT, "line1line2", ""), 10) -def test_continuation_err(out, parser) -> None: - parser.parse_frame = mock.Mock() - parser.parse_frame.return_value = [ - (0, WSMsgType.TEXT, b"line1", False), - (1, WSMsgType.TEXT, b"line2", False), - ] - +def test_continuation_err( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: + parser._handle_frame(False, WSMsgType.TEXT, b"line1", 0) with pytest.raises(WebSocketError): - parser._feed_data(b"") - + parser._handle_frame(True, WSMsgType.TEXT, b"line2", 0) -def test_continuation_with_close(out, parser) -> None: - parser.parse_frame = mock.Mock() - parser.parse_frame.return_value = [ - (0, WSMsgType.TEXT, b"line1", False), - (0, WSMsgType.CLOSE, build_close_frame(1002, b"test", noheader=True), False), - (1, WSMsgType.CONTINUATION, b"line2", False), - ] - parser.feed_data(b"") +def test_continuation_with_close( + out: WebSocketDataQueue, parser: WebSocketReader +) -> None: + parser._handle_frame(False, WSMsgType.TEXT, b"line1", 0) + parser._handle_frame( + False, + WSMsgType.CLOSE, + build_close_frame(1002, b"test", noheader=True), + False, + ) + 
parser._handle_frame(True, WSMsgType.CONTINUATION, b"line2", 0) res = out._buffer[0] - assert res, (WSMessage(WSMsgType.CLOSE, 1002, "test"), 0) + assert res == (WSMessage(WSMsgType.CLOSE, 1002, "test"), 0) res = out._buffer[1] assert res == (WSMessage(WSMsgType.TEXT, "line1line2", ""), 10) -def test_continuation_with_close_unicode_err(out, parser) -> None: - parser.parse_frame = mock.Mock() - parser.parse_frame.return_value = [ - (0, WSMsgType.TEXT, b"line1", False), - ( - 0, +def test_continuation_with_close_unicode_err( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: + parser._handle_frame(False, WSMsgType.TEXT, b"line1", 0) + with pytest.raises(WebSocketError) as ctx: + parser._handle_frame( + False, WSMsgType.CLOSE, build_close_frame(1000, b"\xf4\x90\x80\x80", noheader=True), - False, - ), - (1, WSMsgType.CONTINUATION, b"line2", False), - ] - - with pytest.raises(WebSocketError) as ctx: - parser._feed_data(b"") - + 0, + ) + parser._handle_frame(True, WSMsgType.CONTINUATION, b"line2", 0) assert ctx.value.code == WSCloseCode.INVALID_TEXT -def test_continuation_with_close_bad_code(out, parser) -> None: - parser.parse_frame = mock.Mock() - parser.parse_frame.return_value = [ - (0, WSMsgType.TEXT, b"line1", False), - (0, WSMsgType.CLOSE, build_close_frame(1, b"test", noheader=True), False), - (1, WSMsgType.CONTINUATION, b"line2", False), - ] - +def test_continuation_with_close_bad_code( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: + parser._handle_frame(False, WSMsgType.TEXT, b"line1", 0) with pytest.raises(WebSocketError) as ctx: - parser._feed_data(b"") + parser._handle_frame( + False, WSMsgType.CLOSE, build_close_frame(1, b"test", noheader=True), 0 + ) assert ctx.value.code == WSCloseCode.PROTOCOL_ERROR + parser._handle_frame(True, WSMsgType.CONTINUATION, b"line2", 0) -def test_continuation_with_close_bad_payload(out, parser) -> None: - parser.parse_frame = mock.Mock() - parser.parse_frame.return_value = [ - (0, 
WSMsgType.TEXT, b"line1", False), - (0, WSMsgType.CLOSE, b"1", False), - (1, WSMsgType.CONTINUATION, b"line2", False), - ] - +def test_continuation_with_close_bad_payload( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: + parser._handle_frame(False, WSMsgType.TEXT, b"line1", 0) with pytest.raises(WebSocketError) as ctx: - parser._feed_data(b"") - - assert ctx.value.code, WSCloseCode.PROTOCOL_ERROR + parser._handle_frame(False, WSMsgType.CLOSE, b"1", 0) + assert ctx.value.code == WSCloseCode.PROTOCOL_ERROR + parser._handle_frame(True, WSMsgType.CONTINUATION, b"line2", 0) -def test_continuation_with_close_empty(out, parser) -> None: - parser.parse_frame = mock.Mock() - parser.parse_frame.return_value = [ - (0, WSMsgType.TEXT, b"line1", False), - (0, WSMsgType.CLOSE, b"", False), - (1, WSMsgType.CONTINUATION, b"line2", False), - ] +def test_continuation_with_close_empty( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: + parser._handle_frame(False, WSMsgType.TEXT, b"line1", 0) + parser._handle_frame(False, WSMsgType.CLOSE, b"", 0) + parser._handle_frame(True, WSMsgType.CONTINUATION, b"line2", 0) - parser.feed_data(b"") res = out._buffer[0] - assert res, (WSMessage(WSMsgType.CLOSE, 0, ""), 0) + assert res == (WSMessage(WSMsgType.CLOSE, 0, ""), 0) res = out._buffer[1] assert res == (WSMessage(WSMsgType.TEXT, "line1line2", ""), 10) @@ -506,7 +512,7 @@ def test_msgtype_aliases() -> None: assert aiohttp.WSMsgType.ERROR == aiohttp.WSMsgType.error -def test_parse_compress_frame_single(parser) -> None: +def test_parse_compress_frame_single(parser: PatchableWebSocketReader) -> None: parser.parse_frame(struct.pack("!BB", 0b11000001, 0b00000001)) res = parser.parse_frame(b"1") fin, opcode, payload, compress = res[0] @@ -514,7 +520,7 @@ def test_parse_compress_frame_single(parser) -> None: assert (1, 1, b"1", True) == (fin, opcode, payload, not not compress) -def test_parse_compress_frame_multi(parser) -> None: +def 
test_parse_compress_frame_multi(parser: PatchableWebSocketReader) -> None: parser.parse_frame(struct.pack("!BB", 0b01000001, 126)) parser.parse_frame(struct.pack("!H", 4)) res = parser.parse_frame(b"1234") @@ -534,7 +540,7 @@ def test_parse_compress_frame_multi(parser) -> None: assert (1, 1, b"1234", False) == (fin, opcode, payload, not not compress) -def test_parse_compress_error_frame(parser) -> None: +def test_parse_compress_error_frame(parser: PatchableWebSocketReader) -> None: parser.parse_frame(struct.pack("!BB", 0b01000001, 0b00000001)) parser.parse_frame(b"1") @@ -545,10 +551,8 @@ def test_parse_compress_error_frame(parser) -> None: assert ctx.value.code == WSCloseCode.PROTOCOL_ERROR -async def test_parse_no_compress_frame_single( - loop: asyncio.AbstractEventLoop, out: WebSocketDataQueue -) -> None: - parser_no_compress = WebSocketReader(out, 0, compress=False) +def test_parse_no_compress_frame_single(out: WebSocketDataQueue) -> None: + parser_no_compress = PatchableWebSocketReader(out, 0, compress=False) with pytest.raises(WebSocketError) as ctx: parser_no_compress.parse_frame(struct.pack("!BB", 0b11000001, 0b00000001)) parser_no_compress.parse_frame(b"1") @@ -600,34 +604,28 @@ def test_pickle(self) -> None: def test_flow_control_binary( protocol: BaseProtocol, out_low_limit: WebSocketDataQueue, - parser_low_limit: WebSocketReader, + parser_low_limit: PatchableWebSocketReader, ) -> None: large_payload = b"b" * (1 + 16 * 2) - large_payload_len = len(large_payload) - with mock.patch.object(parser_low_limit, "parse_frame", autospec=True) as m: - m.return_value = [(1, WSMsgType.BINARY, large_payload, False)] - - parser_low_limit.feed_data(b"") - + large_payload_size = len(large_payload) + parser_low_limit._handle_frame(True, WSMsgType.BINARY, large_payload, 0) res = out_low_limit._buffer[0] - assert res == (WSMessage(WSMsgType.BINARY, large_payload, ""), large_payload_len) + assert res == (WSMessage(WSMsgType.BINARY, large_payload, ""), large_payload_size) 
assert protocol._reading_paused is True def test_flow_control_multi_byte_text( protocol: BaseProtocol, out_low_limit: WebSocketDataQueue, - parser_low_limit: WebSocketReader, + parser_low_limit: PatchableWebSocketReader, ) -> None: large_payload_text = "𒀁" * (1 + 16 * 2) large_payload = large_payload_text.encode("utf-8") - large_payload_len = len(large_payload) - - with mock.patch.object(parser_low_limit, "parse_frame", autospec=True) as m: - m.return_value = [(1, WSMsgType.TEXT, large_payload, False)] - - parser_low_limit.feed_data(b"") - + large_payload_size = len(large_payload) + parser_low_limit._handle_frame(True, WSMsgType.TEXT, large_payload, 0) res = out_low_limit._buffer[0] - assert res == (WSMessage(WSMsgType.TEXT, large_payload_text, ""), large_payload_len) + assert res == ( + WSMessage(WSMsgType.TEXT, large_payload_text, ""), + large_payload_size, + ) assert protocol._reading_paused is True From 2c8575c1b27c78505d670c9e0708e0b8ca544e4e Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Fri, 18 Apr 2025 21:16:44 -1000 Subject: [PATCH 1344/1511] [PR #10740/0d21d8d backport][3.12] Refactor WebSocket reader to avoid creating lists (#10745) --- CHANGES/10740.misc.rst | 1 + aiohttp/_websocket/reader_c.pxd | 36 ++-- aiohttp/_websocket/reader_py.py | 334 ++++++++++++++++---------------- tests/test_websocket_parser.py | 308 ++++++++++++++--------------- 4 files changed, 337 insertions(+), 342 deletions(-) create mode 100644 CHANGES/10740.misc.rst diff --git a/CHANGES/10740.misc.rst b/CHANGES/10740.misc.rst new file mode 100644 index 00000000000..34ed19aebba --- /dev/null +++ b/CHANGES/10740.misc.rst @@ -0,0 +1 @@ +Improved performance of the WebSocket reader -- by :user:`bdraco`. 
diff --git a/aiohttp/_websocket/reader_c.pxd b/aiohttp/_websocket/reader_c.pxd index f156a7ff704..3efebeb81dc 100644 --- a/aiohttp/_websocket/reader_c.pxd +++ b/aiohttp/_websocket/reader_c.pxd @@ -8,12 +8,17 @@ cdef unsigned int READ_PAYLOAD_LENGTH cdef unsigned int READ_PAYLOAD_MASK cdef unsigned int READ_PAYLOAD -cdef unsigned int OP_CODE_CONTINUATION -cdef unsigned int OP_CODE_TEXT -cdef unsigned int OP_CODE_BINARY -cdef unsigned int OP_CODE_CLOSE -cdef unsigned int OP_CODE_PING -cdef unsigned int OP_CODE_PONG +cdef int OP_CODE_NOT_SET +cdef int OP_CODE_CONTINUATION +cdef int OP_CODE_TEXT +cdef int OP_CODE_BINARY +cdef int OP_CODE_CLOSE +cdef int OP_CODE_PING +cdef int OP_CODE_PONG + +cdef int COMPRESSED_NOT_SET +cdef int COMPRESSED_FALSE +cdef int COMPRESSED_TRUE cdef object UNPACK_LEN3 cdef object UNPACK_CLOSE_CODE @@ -60,9 +65,9 @@ cdef class WebSocketReader: cdef bytearray _partial cdef unsigned int _state - cdef object _opcode - cdef object _frame_fin - cdef object _frame_opcode + cdef int _opcode + cdef bint _frame_fin + cdef int _frame_opcode cdef object _frame_payload cdef unsigned long long _frame_payload_len @@ -71,7 +76,7 @@ cdef class WebSocketReader: cdef bytes _frame_mask cdef unsigned long long _payload_length cdef unsigned int _payload_length_flag - cdef object _compressed + cdef int _compressed cdef object _decompressobj cdef bint _compress @@ -82,22 +87,21 @@ cdef class WebSocketReader: fin=bint, has_partial=bint, payload_merged=bytes, - opcode="unsigned int", ) - cpdef void _feed_data(self, bytes data) + cpdef void _handle_frame(self, bint fin, int opcode, object payload, int compressed) except * @cython.locals( start_pos="unsigned int", - buf_len="unsigned int", + data_len="unsigned int", length="unsigned int", chunk_size="unsigned int", chunk_len="unsigned int", - buf_length="unsigned int", - buf_cstr="const unsigned char *", + data_length="unsigned int", + data_cstr="const unsigned char *", first_byte="unsigned char", second_byte="unsigned 
char", end_pos="unsigned int", has_mask=bint, fin=bint, ) - cpdef list parse_frame(self, bytes buf) + cpdef void _feed_data(self, bytes data) except * diff --git a/aiohttp/_websocket/reader_py.py b/aiohttp/_websocket/reader_py.py index 19579bd39a8..a8a8eb7eb01 100644 --- a/aiohttp/_websocket/reader_py.py +++ b/aiohttp/_websocket/reader_py.py @@ -3,7 +3,7 @@ import asyncio import builtins from collections import deque -from typing import Deque, Final, List, Optional, Set, Tuple, Union +from typing import Deque, Final, Optional, Set, Tuple, Union from ..base_protocol import BaseProtocol from ..compression_utils import ZLibDecompressor @@ -31,6 +31,7 @@ WS_MSG_TYPE_TEXT = WSMsgType.TEXT # WSMsgType values unpacked so they can by cythonized to ints +OP_CODE_NOT_SET = -1 OP_CODE_CONTINUATION = WSMsgType.CONTINUATION.value OP_CODE_TEXT = WSMsgType.TEXT.value OP_CODE_BINARY = WSMsgType.BINARY.value @@ -41,9 +42,13 @@ EMPTY_FRAME_ERROR = (True, b"") EMPTY_FRAME = (False, b"") +COMPRESSED_NOT_SET = -1 +COMPRESSED_FALSE = 0 +COMPRESSED_TRUE = 1 + TUPLE_NEW = tuple.__new__ -int_ = int # Prevent Cython from converting to PyInt +cython_int = int # Typed to int in Python, but cython with use a signed int in the pxd class WebSocketDataQueue: @@ -95,7 +100,7 @@ def feed_eof(self) -> None: self._release_waiter() self._exception = None # Break cyclic references - def feed_data(self, data: "WSMessage", size: "int_") -> None: + def feed_data(self, data: "WSMessage", size: "cython_int") -> None: self._size += size self._put_buffer((data, size)) self._release_waiter() @@ -136,9 +141,9 @@ def __init__( self._partial = bytearray() self._state = READ_HEADER - self._opcode: Optional[int] = None + self._opcode: int = OP_CODE_NOT_SET self._frame_fin = False - self._frame_opcode: Optional[int] = None + self._frame_opcode: int = OP_CODE_NOT_SET self._frame_payload: Union[bytes, bytearray] = b"" self._frame_payload_len = 0 @@ -147,7 +152,7 @@ def __init__( self._frame_mask: Optional[bytes] = 
None self._payload_length = 0 self._payload_length_flag = 0 - self._compressed: Optional[bool] = None + self._compressed: int = COMPRESSED_NOT_SET self._decompressobj: Optional[ZLibDecompressor] = None self._compress = compress @@ -175,173 +180,161 @@ def feed_data( return EMPTY_FRAME - def _feed_data(self, data: bytes) -> None: + def _handle_frame( + self, + fin: bool, + opcode: Union[int, cython_int], # Union intended: Cython pxd uses C int + payload: Union[bytes, bytearray], + compressed: Union[int, cython_int], # Union intended: Cython pxd uses C int + ) -> None: msg: WSMessage - for frame in self.parse_frame(data): - fin = frame[0] - opcode = frame[1] - payload = frame[2] - compressed = frame[3] - - is_continuation = opcode == OP_CODE_CONTINUATION - if opcode == OP_CODE_TEXT or opcode == OP_CODE_BINARY or is_continuation: - # load text/binary - if not fin: - # got partial frame payload - if not is_continuation: - self._opcode = opcode - self._partial += payload - if self._max_msg_size and len(self._partial) >= self._max_msg_size: - raise WebSocketError( - WSCloseCode.MESSAGE_TOO_BIG, - f"Message size {len(self._partial)} " - f"exceeds limit {self._max_msg_size}", - ) - continue - - has_partial = bool(self._partial) - if is_continuation: - if self._opcode is None: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Continuation frame for non started message", - ) - opcode = self._opcode - self._opcode = None - # previous frame was non finished - # we should get continuation opcode - elif has_partial: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "The opcode in non-fin frame is expected " - f"to be zero, got {opcode!r}", - ) - - assembled_payload: Union[bytes, bytearray] - if has_partial: - assembled_payload = self._partial + payload - self._partial.clear() - else: - assembled_payload = payload - - if self._max_msg_size and len(assembled_payload) >= self._max_msg_size: + if opcode in {OP_CODE_TEXT, OP_CODE_BINARY, OP_CODE_CONTINUATION}: + # load 
text/binary + if not fin: + # got partial frame payload + if opcode != OP_CODE_CONTINUATION: + self._opcode = opcode + self._partial += payload + if self._max_msg_size and len(self._partial) >= self._max_msg_size: raise WebSocketError( WSCloseCode.MESSAGE_TOO_BIG, - f"Message size {len(assembled_payload)} " + f"Message size {len(self._partial)} " f"exceeds limit {self._max_msg_size}", ) + return - # Decompress process must to be done after all packets - # received. - if compressed: - if not self._decompressobj: - self._decompressobj = ZLibDecompressor( - suppress_deflate_header=True - ) - # XXX: It's possible that the zlib backend (isal is known to - # do this, maybe others too?) will return max_length bytes, - # but internally buffer more data such that the payload is - # >max_length, so we return one extra byte and if we're able - # to do that, then the message is too big. - payload_merged = self._decompressobj.decompress_sync( - assembled_payload + WS_DEFLATE_TRAILING, - ( - self._max_msg_size + 1 - if self._max_msg_size - else self._max_msg_size - ), - ) - if self._max_msg_size and len(payload_merged) > self._max_msg_size: - raise WebSocketError( - WSCloseCode.MESSAGE_TOO_BIG, - f"Decompressed message exceeds size limit {self._max_msg_size}", - ) - elif type(assembled_payload) is bytes: - payload_merged = assembled_payload - else: - payload_merged = bytes(assembled_payload) - - if opcode == OP_CODE_TEXT: - try: - text = payload_merged.decode("utf-8") - except UnicodeDecodeError as exc: - raise WebSocketError( - WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" - ) from exc - - # XXX: The Text and Binary messages here can be a performance - # bottleneck, so we use tuple.__new__ to improve performance. - # This is not type safe, but many tests should fail in - # test_client_ws_functional.py if this is wrong. 
- self.queue.feed_data( - TUPLE_NEW(WSMessage, (WS_MSG_TYPE_TEXT, text, "")), - len(payload_merged), - ) - else: - self.queue.feed_data( - TUPLE_NEW(WSMessage, (WS_MSG_TYPE_BINARY, payload_merged, "")), - len(payload_merged), - ) - elif opcode == OP_CODE_CLOSE: - if len(payload) >= 2: - close_code = UNPACK_CLOSE_CODE(payload[:2])[0] - if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - f"Invalid close code: {close_code}", - ) - try: - close_message = payload[2:].decode("utf-8") - except UnicodeDecodeError as exc: - raise WebSocketError( - WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" - ) from exc - msg = TUPLE_NEW( - WSMessage, (WSMsgType.CLOSE, close_code, close_message) - ) - elif payload: + has_partial = bool(self._partial) + if opcode == OP_CODE_CONTINUATION: + if self._opcode == OP_CODE_NOT_SET: raise WebSocketError( WSCloseCode.PROTOCOL_ERROR, - f"Invalid close frame: {fin} {opcode} {payload!r}", + "Continuation frame for non started message", ) - else: - msg = TUPLE_NEW(WSMessage, (WSMsgType.CLOSE, 0, "")) + opcode = self._opcode + self._opcode = OP_CODE_NOT_SET + # previous frame was non finished + # we should get continuation opcode + elif has_partial: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "The opcode in non-fin frame is expected " + f"to be zero, got {opcode!r}", + ) - self.queue.feed_data(msg, 0) - elif opcode == OP_CODE_PING: - msg = TUPLE_NEW(WSMessage, (WSMsgType.PING, payload, "")) - self.queue.feed_data(msg, len(payload)) + assembled_payload: Union[bytes, bytearray] + if has_partial: + assembled_payload = self._partial + payload + self._partial.clear() + else: + assembled_payload = payload - elif opcode == OP_CODE_PONG: - msg = TUPLE_NEW(WSMessage, (WSMsgType.PONG, payload, "")) - self.queue.feed_data(msg, len(payload)) + if self._max_msg_size and len(assembled_payload) >= self._max_msg_size: + raise WebSocketError( + WSCloseCode.MESSAGE_TOO_BIG, + 
f"Message size {len(assembled_payload)} " + f"exceeds limit {self._max_msg_size}", + ) + + # Decompress process must to be done after all packets + # received. + if compressed: + if not self._decompressobj: + self._decompressobj = ZLibDecompressor(suppress_deflate_header=True) + # XXX: It's possible that the zlib backend (isal is known to + # do this, maybe others too?) will return max_length bytes, + # but internally buffer more data such that the payload is + # >max_length, so we return one extra byte and if we're able + # to do that, then the message is too big. + payload_merged = self._decompressobj.decompress_sync( + assembled_payload + WS_DEFLATE_TRAILING, + ( + self._max_msg_size + 1 + if self._max_msg_size + else self._max_msg_size + ), + ) + if self._max_msg_size and len(payload_merged) > self._max_msg_size: + raise WebSocketError( + WSCloseCode.MESSAGE_TOO_BIG, + f"Decompressed message exceeds size limit {self._max_msg_size}", + ) + elif type(assembled_payload) is bytes: + payload_merged = assembled_payload + else: + payload_merged = bytes(assembled_payload) + if opcode == OP_CODE_TEXT: + try: + text = payload_merged.decode("utf-8") + except UnicodeDecodeError as exc: + raise WebSocketError( + WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" + ) from exc + + # XXX: The Text and Binary messages here can be a performance + # bottleneck, so we use tuple.__new__ to improve performance. + # This is not type safe, but many tests should fail in + # test_client_ws_functional.py if this is wrong. 
+ self.queue.feed_data( + TUPLE_NEW(WSMessage, (WS_MSG_TYPE_TEXT, text, "")), + len(payload_merged), + ) else: + self.queue.feed_data( + TUPLE_NEW(WSMessage, (WS_MSG_TYPE_BINARY, payload_merged, "")), + len(payload_merged), + ) + elif opcode == OP_CODE_CLOSE: + if len(payload) >= 2: + close_code = UNPACK_CLOSE_CODE(payload[:2])[0] + if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + f"Invalid close code: {close_code}", + ) + try: + close_message = payload[2:].decode("utf-8") + except UnicodeDecodeError as exc: + raise WebSocketError( + WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" + ) from exc + msg = TUPLE_NEW(WSMessage, (WSMsgType.CLOSE, close_code, close_message)) + elif payload: raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}" + WSCloseCode.PROTOCOL_ERROR, + f"Invalid close frame: {fin} {opcode} {payload!r}", ) + else: + msg = TUPLE_NEW(WSMessage, (WSMsgType.CLOSE, 0, "")) + + self.queue.feed_data(msg, 0) + elif opcode == OP_CODE_PING: + msg = TUPLE_NEW(WSMessage, (WSMsgType.PING, payload, "")) + self.queue.feed_data(msg, len(payload)) + elif opcode == OP_CODE_PONG: + msg = TUPLE_NEW(WSMessage, (WSMsgType.PONG, payload, "")) + self.queue.feed_data(msg, len(payload)) + else: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}" + ) - def parse_frame( - self, buf: bytes - ) -> List[Tuple[bool, Optional[int], Union[bytes, bytearray], Optional[bool]]]: + def _feed_data(self, data: bytes) -> None: """Return the next frame from the socket.""" - frames: List[ - Tuple[bool, Optional[int], Union[bytes, bytearray], Optional[bool]] - ] = [] if self._tail: - buf, self._tail = self._tail + buf, b"" + data, self._tail = self._tail + data, b"" start_pos: int = 0 - buf_length = len(buf) - buf_cstr = buf + data_length = len(data) + data_cstr = data while True: # read header if self._state == READ_HEADER: - if buf_length - start_pos < 
2: + if data_length - start_pos < 2: break - first_byte = buf_cstr[start_pos] - second_byte = buf_cstr[start_pos + 1] + first_byte = data_cstr[start_pos] + second_byte = data_cstr[start_pos + 1] start_pos += 2 fin = (first_byte >> 7) & 1 @@ -386,8 +379,8 @@ def parse_frame( # Set compress status if last package is FIN # OR set compress status if this is first fragment # Raise error if not first fragment with rsv1 = 0x1 - if self._frame_fin or self._compressed is None: - self._compressed = True if rsv1 else False + if self._frame_fin or self._compressed == COMPRESSED_NOT_SET: + self._compressed = COMPRESSED_TRUE if rsv1 else COMPRESSED_FALSE elif rsv1: raise WebSocketError( WSCloseCode.PROTOCOL_ERROR, @@ -404,18 +397,17 @@ def parse_frame( if self._state == READ_PAYLOAD_LENGTH: length_flag = self._payload_length_flag if length_flag == 126: - if buf_length - start_pos < 2: + if data_length - start_pos < 2: break - first_byte = buf_cstr[start_pos] - second_byte = buf_cstr[start_pos + 1] + first_byte = data_cstr[start_pos] + second_byte = data_cstr[start_pos + 1] start_pos += 2 self._payload_length = first_byte << 8 | second_byte elif length_flag > 126: - if buf_length - start_pos < 8: + if data_length - start_pos < 8: break - data = buf_cstr[start_pos : start_pos + 8] + self._payload_length = UNPACK_LEN3(data, start_pos)[0] start_pos += 8 - self._payload_length = UNPACK_LEN3(data)[0] else: self._payload_length = length_flag @@ -423,16 +415,16 @@ def parse_frame( # read payload mask if self._state == READ_PAYLOAD_MASK: - if buf_length - start_pos < 4: + if data_length - start_pos < 4: break - self._frame_mask = buf_cstr[start_pos : start_pos + 4] + self._frame_mask = data_cstr[start_pos : start_pos + 4] start_pos += 4 self._state = READ_PAYLOAD if self._state == READ_PAYLOAD: - chunk_len = buf_length - start_pos + chunk_len = data_length - start_pos if self._payload_length >= chunk_len: - end_pos = buf_length + end_pos = data_length self._payload_length -= chunk_len 
else: end_pos = start_pos + self._payload_length @@ -441,10 +433,10 @@ def parse_frame( if self._frame_payload_len: if type(self._frame_payload) is not bytearray: self._frame_payload = bytearray(self._frame_payload) - self._frame_payload += buf_cstr[start_pos:end_pos] + self._frame_payload += data_cstr[start_pos:end_pos] else: # Fast path for the first frame - self._frame_payload = buf_cstr[start_pos:end_pos] + self._frame_payload = data_cstr[start_pos:end_pos] self._frame_payload_len += end_pos - start_pos start_pos = end_pos @@ -458,19 +450,17 @@ def parse_frame( self._frame_payload = bytearray(self._frame_payload) websocket_mask(self._frame_mask, self._frame_payload) - frames.append( - ( - self._frame_fin, - self._frame_opcode, - self._frame_payload, - self._compressed, - ) + self._handle_frame( + self._frame_fin, + self._frame_opcode, + self._frame_payload, + self._compressed, ) self._frame_payload = b"" self._frame_payload_len = 0 self._state = READ_HEADER # XXX: Cython needs slices to be bounded, so we can't omit the slice end here. - self._tail = buf_cstr[start_pos:buf_length] if start_pos < buf_length else b"" - - return frames + self._tail = ( + data_cstr[start_pos:data_length] if start_pos < data_length else b"" + ) diff --git a/tests/test_websocket_parser.py b/tests/test_websocket_parser.py index 2cac4cf6b87..04c83f19610 100644 --- a/tests/test_websocket_parser.py +++ b/tests/test_websocket_parser.py @@ -27,6 +27,25 @@ class PatchableWebSocketReader(WebSocketReader): """WebSocketReader subclass that allows for patching parse_frame.""" + def parse_frame( + self, data: bytes + ) -> list[tuple[bool, int, Union[bytes, bytearray], int]]: + # This method is overridden to allow for patching in tests. + frames: list[tuple[bool, int, Union[bytes, bytearray], int]] = [] + + def _handle_frame( + fin: bool, + opcode: int, + payload: Union[bytes, bytearray], + compressed: int, + ) -> None: + # This method is overridden to allow for patching in tests. 
+ frames.append((fin, opcode, payload, compressed)) + + with mock.patch.object(self, "_handle_frame", _handle_frame): + self._feed_data(data) + return frames + def build_frame( message, opcode, use_mask=False, noheader=False, is_fin=True, ZLibBackend=None @@ -129,32 +148,32 @@ def test_feed_data_remembers_exception(parser: WebSocketReader) -> None: assert data == b"" -def test_parse_frame(parser) -> None: +def test_parse_frame(parser: PatchableWebSocketReader) -> None: parser.parse_frame(struct.pack("!BB", 0b00000001, 0b00000001)) res = parser.parse_frame(b"1") fin, opcode, payload, compress = res[0] - assert (0, 1, b"1", False) == (fin, opcode, payload, not not compress) + assert (0, 1, b"1", 0) == (fin, opcode, payload, not not compress) -def test_parse_frame_length0(parser) -> None: +def test_parse_frame_length0(parser: PatchableWebSocketReader) -> None: fin, opcode, payload, compress = parser.parse_frame( struct.pack("!BB", 0b00000001, 0b00000000) )[0] - assert (0, 1, b"", False) == (fin, opcode, payload, not not compress) + assert (0, 1, b"", 0) == (fin, opcode, payload, not not compress) -def test_parse_frame_length2(parser) -> None: +def test_parse_frame_length2(parser: PatchableWebSocketReader) -> None: parser.parse_frame(struct.pack("!BB", 0b00000001, 126)) parser.parse_frame(struct.pack("!H", 4)) res = parser.parse_frame(b"1234") fin, opcode, payload, compress = res[0] - assert (0, 1, b"1234", False) == (fin, opcode, payload, not not compress) + assert (0, 1, b"1234", 0) == (fin, opcode, payload, not not compress) -def test_parse_frame_length2_multi_byte(parser: WebSocketReader) -> None: +def test_parse_frame_length2_multi_byte(parser: PatchableWebSocketReader) -> None: """Ensure a multi-byte length is parsed correctly.""" expected_payload = b"1" * 32768 parser.parse_frame(struct.pack("!BB", 0b00000001, 126)) @@ -162,10 +181,12 @@ def test_parse_frame_length2_multi_byte(parser: WebSocketReader) -> None: res = parser.parse_frame(b"1" * 32768) fin, opcode, 
payload, compress = res[0] - assert (0, 1, expected_payload, False) == (fin, opcode, payload, not not compress) + assert (0, 1, expected_payload, 0) == (fin, opcode, payload, not not compress) -def test_parse_frame_length2_multi_byte_multi_packet(parser: WebSocketReader) -> None: +def test_parse_frame_length2_multi_byte_multi_packet( + parser: PatchableWebSocketReader, +) -> None: """Ensure a multi-byte length with multiple packets is parsed correctly.""" expected_payload = b"1" * 32768 assert parser.parse_frame(struct.pack("!BB", 0b00000001, 126)) == [] @@ -176,44 +197,53 @@ def test_parse_frame_length2_multi_byte_multi_packet(parser: WebSocketReader) -> res = parser.parse_frame(b"1" * 8192) fin, opcode, payload, compress = res[0] assert len(payload) == 32768 - assert (0, 1, expected_payload, False) == (fin, opcode, payload, not not compress) + assert (0, 1, expected_payload, 0) == (fin, opcode, payload, not not compress) -def test_parse_frame_length4(parser: WebSocketReader) -> None: +def test_parse_frame_length4(parser: PatchableWebSocketReader) -> None: parser.parse_frame(struct.pack("!BB", 0b00000001, 127)) parser.parse_frame(struct.pack("!Q", 4)) fin, opcode, payload, compress = parser.parse_frame(b"1234")[0] - assert (0, 1, b"1234", False) == (fin, opcode, payload, not not compress) + assert (0, 1, b"1234", 0) == (fin, opcode, payload, compress) -def test_parse_frame_mask(parser) -> None: +def test_parse_frame_mask(parser: PatchableWebSocketReader) -> None: parser.parse_frame(struct.pack("!BB", 0b00000001, 0b10000001)) parser.parse_frame(b"0001") fin, opcode, payload, compress = parser.parse_frame(b"1")[0] - assert (0, 1, b"\x01", False) == (fin, opcode, payload, not not compress) + assert (0, 1, b"\x01", 0) == (fin, opcode, payload, compress) -def test_parse_frame_header_reversed_bits(out, parser) -> None: +def test_parse_frame_header_reversed_bits( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: with pytest.raises(WebSocketError): 
parser.parse_frame(struct.pack("!BB", 0b01100000, 0b00000000)) raise out.exception() -def test_parse_frame_header_control_frame(out, parser) -> None: +def test_parse_frame_header_control_frame( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: with pytest.raises(WebSocketError): parser.parse_frame(struct.pack("!BB", 0b00001000, 0b00000000)) raise out.exception() -def _test_parse_frame_header_new_data_err(out, parser): +@pytest.mark.xfail() +def test_parse_frame_header_new_data_err( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: with pytest.raises(WebSocketError): parser.parse_frame(struct.pack("!BB", 0b000000000, 0b00000000)) raise out.exception() -def test_parse_frame_header_payload_size(out, parser) -> None: +def test_parse_frame_header_payload_size( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: with pytest.raises(WebSocketError): parser.parse_frame(struct.pack("!BB", 0b10001000, 0b01111110)) raise out.exception() @@ -228,54 +258,45 @@ def test_parse_frame_header_payload_size(out, parser) -> None: ) def test_ping_frame( out: WebSocketDataQueue, - parser: WebSocketReader, + parser: PatchableWebSocketReader, data: Union[bytes, bytearray, memoryview], ) -> None: - with mock.patch.object(parser, "parse_frame", autospec=True) as m: - m.return_value = [(1, WSMsgType.PING, b"data", False)] - - parser.feed_data(data) - res = out._buffer[0] - assert res == ((WSMsgType.PING, b"data", ""), 4) - + parser._handle_frame(True, WSMsgType.PING, b"data", 0) + res = out._buffer[0] + assert res == ((WSMsgType.PING, b"data", ""), 4) -def test_pong_frame(out, parser) -> None: - parser.parse_frame = mock.Mock() - parser.parse_frame.return_value = [(1, WSMsgType.PONG, b"data", False)] - parser.feed_data(b"") +def test_pong_frame(out: WebSocketDataQueue, parser: PatchableWebSocketReader) -> None: + parser._handle_frame(True, WSMsgType.PONG, b"data", 0) res = out._buffer[0] assert res == ((WSMsgType.PONG, b"data", 
""), 4) -def test_close_frame(out, parser) -> None: - parser.parse_frame = mock.Mock() - parser.parse_frame.return_value = [(1, WSMsgType.CLOSE, b"", False)] - - parser.feed_data(b"") +def test_close_frame(out: WebSocketDataQueue, parser: PatchableWebSocketReader) -> None: + parser._handle_frame(True, WSMsgType.CLOSE, b"", 0) res = out._buffer[0] assert res == ((WSMsgType.CLOSE, 0, ""), 0) -def test_close_frame_info(out, parser) -> None: - parser.parse_frame = mock.Mock() - parser.parse_frame.return_value = [(1, WSMsgType.CLOSE, b"0112345", False)] - - parser.feed_data(b"") +def test_close_frame_info( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: + parser._handle_frame(True, WSMsgType.CLOSE, b"0112345", 0) res = out._buffer[0] assert res == (WSMessage(WSMsgType.CLOSE, 12337, "12345"), 0) -def test_close_frame_invalid(out, parser) -> None: - parser.parse_frame = mock.Mock() - parser.parse_frame.return_value = [(1, WSMsgType.CLOSE, b"1", False)] - parser.feed_data(b"") - - assert isinstance(out.exception(), WebSocketError) - assert out.exception().code == WSCloseCode.PROTOCOL_ERROR +def test_close_frame_invalid( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: + with pytest.raises(WebSocketError) as ctx: + parser._handle_frame(True, WSMsgType.CLOSE, b"1", 0) + assert ctx.value.code == WSCloseCode.PROTOCOL_ERROR -def test_close_frame_invalid_2(out, parser) -> None: +def test_close_frame_invalid_2( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: data = build_close_frame(code=1) with pytest.raises(WebSocketError) as ctx: @@ -284,7 +305,7 @@ def test_close_frame_invalid_2(out, parser) -> None: assert ctx.value.code == WSCloseCode.PROTOCOL_ERROR -def test_close_frame_unicode_err(parser) -> None: +def test_close_frame_unicode_err(parser: PatchableWebSocketReader) -> None: data = build_close_frame(code=1000, message=b"\xf4\x90\x80\x80") with pytest.raises(WebSocketError) as ctx: @@ -293,23 +314,21 @@ 
def test_close_frame_unicode_err(parser) -> None: assert ctx.value.code == WSCloseCode.INVALID_TEXT -def test_unknown_frame(out, parser) -> None: - parser.parse_frame = mock.Mock() - parser.parse_frame.return_value = [(1, WSMsgType.CONTINUATION, b"", False)] - +def test_unknown_frame( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: with pytest.raises(WebSocketError): - parser.feed_data(b"") - raise out.exception() + parser._handle_frame(True, WSMsgType.CONTINUATION, b"", 0) -def test_simple_text(out, parser) -> None: +def test_simple_text(out: WebSocketDataQueue, parser: PatchableWebSocketReader) -> None: data = build_frame(b"text", WSMsgType.TEXT) parser._feed_data(data) res = out._buffer[0] assert res == ((WSMsgType.TEXT, "text", ""), 4) -def test_simple_text_unicode_err(parser) -> None: +def test_simple_text_unicode_err(parser: PatchableWebSocketReader) -> None: data = build_frame(b"\xf4\x90\x80\x80", WSMsgType.TEXT) with pytest.raises(WebSocketError) as ctx: @@ -318,16 +337,18 @@ def test_simple_text_unicode_err(parser) -> None: assert ctx.value.code == WSCloseCode.INVALID_TEXT -def test_simple_binary(out, parser) -> None: - parser.parse_frame = mock.Mock() - parser.parse_frame.return_value = [(1, WSMsgType.BINARY, b"binary", False)] - - parser.feed_data(b"") +def test_simple_binary( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: + data = build_frame(b"binary", WSMsgType.BINARY) + parser._feed_data(data) res = out._buffer[0] assert res == ((WSMsgType.BINARY, b"binary", ""), 6) -def test_fragmentation_header(out, parser) -> None: +def test_fragmentation_header( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: data = build_frame(b"a", WSMsgType.TEXT) parser._feed_data(data[:1]) parser._feed_data(data[1:]) @@ -336,7 +357,9 @@ def test_fragmentation_header(out, parser) -> None: assert res == (WSMessage(WSMsgType.TEXT, "a", ""), 1) -def test_continuation(out, parser) -> None: +def 
test_continuation( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: data1 = build_frame(b"line1", WSMsgType.TEXT, is_fin=False) parser._feed_data(data1) @@ -347,14 +370,9 @@ def test_continuation(out, parser) -> None: assert res == (WSMessage(WSMsgType.TEXT, "line1line2", ""), 10) -def test_continuation_with_ping(out, parser) -> None: - parser.parse_frame = mock.Mock() - parser.parse_frame.return_value = [ - (0, WSMsgType.TEXT, b"line1", False), - (0, WSMsgType.PING, b"", False), - (1, WSMsgType.CONTINUATION, b"line2", False), - ] - +def test_continuation_with_ping( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: data1 = build_frame(b"line1", WSMsgType.TEXT, is_fin=False) parser._feed_data(data1) @@ -370,90 +388,78 @@ def test_continuation_with_ping(out, parser) -> None: assert res == (WSMessage(WSMsgType.TEXT, "line1line2", ""), 10) -def test_continuation_err(out, parser) -> None: - parser.parse_frame = mock.Mock() - parser.parse_frame.return_value = [ - (0, WSMsgType.TEXT, b"line1", False), - (1, WSMsgType.TEXT, b"line2", False), - ] - +def test_continuation_err( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: + parser._handle_frame(False, WSMsgType.TEXT, b"line1", 0) with pytest.raises(WebSocketError): - parser._feed_data(b"") + parser._handle_frame(True, WSMsgType.TEXT, b"line2", 0) -def test_continuation_with_close(out, parser) -> None: - parser.parse_frame = mock.Mock() - parser.parse_frame.return_value = [ - (0, WSMsgType.TEXT, b"line1", False), - (0, WSMsgType.CLOSE, build_close_frame(1002, b"test", noheader=True), False), - (1, WSMsgType.CONTINUATION, b"line2", False), - ] - - parser.feed_data(b"") +def test_continuation_with_close( + out: WebSocketDataQueue, parser: WebSocketReader +) -> None: + parser._handle_frame(False, WSMsgType.TEXT, b"line1", 0) + parser._handle_frame( + False, + WSMsgType.CLOSE, + build_close_frame(1002, b"test", noheader=True), + False, + ) + 
parser._handle_frame(True, WSMsgType.CONTINUATION, b"line2", 0) res = out._buffer[0] - assert res, (WSMessage(WSMsgType.CLOSE, 1002, "test"), 0) + assert res == (WSMessage(WSMsgType.CLOSE, 1002, "test"), 0) res = out._buffer[1] assert res == (WSMessage(WSMsgType.TEXT, "line1line2", ""), 10) -def test_continuation_with_close_unicode_err(out, parser) -> None: - parser.parse_frame = mock.Mock() - parser.parse_frame.return_value = [ - (0, WSMsgType.TEXT, b"line1", False), - ( - 0, +def test_continuation_with_close_unicode_err( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: + parser._handle_frame(False, WSMsgType.TEXT, b"line1", 0) + with pytest.raises(WebSocketError) as ctx: + parser._handle_frame( + False, WSMsgType.CLOSE, build_close_frame(1000, b"\xf4\x90\x80\x80", noheader=True), - False, - ), - (1, WSMsgType.CONTINUATION, b"line2", False), - ] - - with pytest.raises(WebSocketError) as ctx: - parser._feed_data(b"") - + 0, + ) + parser._handle_frame(True, WSMsgType.CONTINUATION, b"line2", 0) assert ctx.value.code == WSCloseCode.INVALID_TEXT -def test_continuation_with_close_bad_code(out, parser) -> None: - parser.parse_frame = mock.Mock() - parser.parse_frame.return_value = [ - (0, WSMsgType.TEXT, b"line1", False), - (0, WSMsgType.CLOSE, build_close_frame(1, b"test", noheader=True), False), - (1, WSMsgType.CONTINUATION, b"line2", False), - ] - +def test_continuation_with_close_bad_code( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: + parser._handle_frame(False, WSMsgType.TEXT, b"line1", 0) with pytest.raises(WebSocketError) as ctx: - parser._feed_data(b"") + parser._handle_frame( + False, WSMsgType.CLOSE, build_close_frame(1, b"test", noheader=True), 0 + ) assert ctx.value.code == WSCloseCode.PROTOCOL_ERROR + parser._handle_frame(True, WSMsgType.CONTINUATION, b"line2", 0) -def test_continuation_with_close_bad_payload(out, parser) -> None: - parser.parse_frame = mock.Mock() - parser.parse_frame.return_value = [ - (0, 
WSMsgType.TEXT, b"line1", False), - (0, WSMsgType.CLOSE, b"1", False), - (1, WSMsgType.CONTINUATION, b"line2", False), - ] - +def test_continuation_with_close_bad_payload( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: + parser._handle_frame(False, WSMsgType.TEXT, b"line1", 0) with pytest.raises(WebSocketError) as ctx: - parser._feed_data(b"") - - assert ctx.value.code, WSCloseCode.PROTOCOL_ERROR + parser._handle_frame(False, WSMsgType.CLOSE, b"1", 0) + assert ctx.value.code == WSCloseCode.PROTOCOL_ERROR + parser._handle_frame(True, WSMsgType.CONTINUATION, b"line2", 0) -def test_continuation_with_close_empty(out, parser) -> None: - parser.parse_frame = mock.Mock() - parser.parse_frame.return_value = [ - (0, WSMsgType.TEXT, b"line1", False), - (0, WSMsgType.CLOSE, b"", False), - (1, WSMsgType.CONTINUATION, b"line2", False), - ] +def test_continuation_with_close_empty( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: + parser._handle_frame(False, WSMsgType.TEXT, b"line1", 0) + parser._handle_frame(False, WSMsgType.CLOSE, b"", 0) + parser._handle_frame(True, WSMsgType.CONTINUATION, b"line2", 0) - parser.feed_data(b"") res = out._buffer[0] - assert res, (WSMessage(WSMsgType.CLOSE, 0, ""), 0) + assert res == (WSMessage(WSMsgType.CLOSE, 0, ""), 0) res = out._buffer[1] assert res == (WSMessage(WSMsgType.TEXT, "line1line2", ""), 10) @@ -508,7 +514,7 @@ def test_msgtype_aliases() -> None: assert aiohttp.WSMsgType.ERROR == aiohttp.WSMsgType.error -def test_parse_compress_frame_single(parser) -> None: +def test_parse_compress_frame_single(parser: PatchableWebSocketReader) -> None: parser.parse_frame(struct.pack("!BB", 0b11000001, 0b00000001)) res = parser.parse_frame(b"1") fin, opcode, payload, compress = res[0] @@ -516,7 +522,7 @@ def test_parse_compress_frame_single(parser) -> None: assert (1, 1, b"1", True) == (fin, opcode, payload, not not compress) -def test_parse_compress_frame_multi(parser) -> None: +def 
test_parse_compress_frame_multi(parser: PatchableWebSocketReader) -> None: parser.parse_frame(struct.pack("!BB", 0b01000001, 126)) parser.parse_frame(struct.pack("!H", 4)) res = parser.parse_frame(b"1234") @@ -536,7 +542,7 @@ def test_parse_compress_frame_multi(parser) -> None: assert (1, 1, b"1234", False) == (fin, opcode, payload, not not compress) -def test_parse_compress_error_frame(parser) -> None: +def test_parse_compress_error_frame(parser: PatchableWebSocketReader) -> None: parser.parse_frame(struct.pack("!BB", 0b01000001, 0b00000001)) parser.parse_frame(b"1") @@ -551,7 +557,7 @@ def test_parse_compress_error_frame(parser) -> None: async def test_parse_no_compress_frame_single( loop: asyncio.AbstractEventLoop, out: WebSocketDataQueue ) -> None: - parser_no_compress = WebSocketReader(out, 0, compress=False) + parser_no_compress = PatchableWebSocketReader(out, 0, compress=False) with pytest.raises(WebSocketError) as ctx: parser_no_compress.parse_frame(struct.pack("!BB", 0b11000001, 0b00000001)) parser_no_compress.parse_frame(b"1") @@ -603,34 +609,28 @@ def test_pickle(self) -> None: def test_flow_control_binary( protocol: BaseProtocol, out_low_limit: WebSocketDataQueue, - parser_low_limit: WebSocketReader, + parser_low_limit: PatchableWebSocketReader, ) -> None: large_payload = b"b" * (1 + 16 * 2) - large_payload_len = len(large_payload) - with mock.patch.object(parser_low_limit, "parse_frame", autospec=True) as m: - m.return_value = [(1, WSMsgType.BINARY, large_payload, False)] - - parser_low_limit.feed_data(b"") - + large_payload_size = len(large_payload) + parser_low_limit._handle_frame(True, WSMsgType.BINARY, large_payload, 0) res = out_low_limit._buffer[0] - assert res == (WSMessage(WSMsgType.BINARY, large_payload, ""), large_payload_len) + assert res == (WSMessage(WSMsgType.BINARY, large_payload, ""), large_payload_size) assert protocol._reading_paused is True def test_flow_control_multi_byte_text( protocol: BaseProtocol, out_low_limit: 
WebSocketDataQueue, - parser_low_limit: WebSocketReader, + parser_low_limit: PatchableWebSocketReader, ) -> None: large_payload_text = "𒀁" * (1 + 16 * 2) large_payload = large_payload_text.encode("utf-8") - large_payload_len = len(large_payload) - - with mock.patch.object(parser_low_limit, "parse_frame", autospec=True) as m: - m.return_value = [(1, WSMsgType.TEXT, large_payload, False)] - - parser_low_limit.feed_data(b"") - + large_payload_size = len(large_payload) + parser_low_limit._handle_frame(True, WSMsgType.TEXT, large_payload, 0) res = out_low_limit._buffer[0] - assert res == (WSMessage(WSMsgType.TEXT, large_payload_text, ""), large_payload_len) + assert res == ( + WSMessage(WSMsgType.TEXT, large_payload_text, ""), + large_payload_size, + ) assert protocol._reading_paused is True From 82648123d7c2f065cf6ee578212bb12e2a1cc5bb Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 19 Apr 2025 08:08:37 +0000 Subject: [PATCH 1345/1511] [PR #10744/23d3ee06 backport][3.12] Refactor WebSocket reader to avoid frequent realloc when frames are fragmented (#10748) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/10744.misc.rst | 1 + aiohttp/_websocket/reader_c.pxd | 25 ++++---- aiohttp/_websocket/reader_py.py | 103 ++++++++++++++++++-------------- 3 files changed, 72 insertions(+), 57 deletions(-) create mode 100644 CHANGES/10744.misc.rst diff --git a/CHANGES/10744.misc.rst b/CHANGES/10744.misc.rst new file mode 100644 index 00000000000..da0d379475d --- /dev/null +++ b/CHANGES/10744.misc.rst @@ -0,0 +1 @@ +Improved performance of the WebSocket reader with large messages -- by :user:`bdraco`. 
diff --git a/aiohttp/_websocket/reader_c.pxd b/aiohttp/_websocket/reader_c.pxd index 3efebeb81dc..a7620d8e87f 100644 --- a/aiohttp/_websocket/reader_c.pxd +++ b/aiohttp/_websocket/reader_c.pxd @@ -68,14 +68,14 @@ cdef class WebSocketReader: cdef int _opcode cdef bint _frame_fin cdef int _frame_opcode - cdef object _frame_payload - cdef unsigned long long _frame_payload_len + cdef list _payload_fragments + cdef Py_ssize_t _frame_payload_len cdef bytes _tail cdef bint _has_mask cdef bytes _frame_mask - cdef unsigned long long _payload_length - cdef unsigned int _payload_length_flag + cdef Py_ssize_t _payload_bytes_to_read + cdef unsigned int _payload_len_flag cdef int _compressed cdef object _decompressobj cdef bint _compress @@ -91,17 +91,20 @@ cdef class WebSocketReader: cpdef void _handle_frame(self, bint fin, int opcode, object payload, int compressed) except * @cython.locals( - start_pos="unsigned int", - data_len="unsigned int", - length="unsigned int", - chunk_size="unsigned int", - chunk_len="unsigned int", - data_length="unsigned int", + start_pos=Py_ssize_t, + data_len=Py_ssize_t, + length=Py_ssize_t, + chunk_size=Py_ssize_t, + chunk_len=Py_ssize_t, + data_len=Py_ssize_t, data_cstr="const unsigned char *", first_byte="unsigned char", second_byte="unsigned char", - end_pos="unsigned int", + f_start_pos=Py_ssize_t, + f_end_pos=Py_ssize_t, has_mask=bint, fin=bint, + had_fragments=Py_ssize_t, + payload_bytearray=bytearray, ) cpdef void _feed_data(self, bytes data) except * diff --git a/aiohttp/_websocket/reader_py.py b/aiohttp/_websocket/reader_py.py index a8a8eb7eb01..8a775742df1 100644 --- a/aiohttp/_websocket/reader_py.py +++ b/aiohttp/_websocket/reader_py.py @@ -144,14 +144,14 @@ def __init__( self._opcode: int = OP_CODE_NOT_SET self._frame_fin = False self._frame_opcode: int = OP_CODE_NOT_SET - self._frame_payload: Union[bytes, bytearray] = b"" + self._payload_fragments: list[bytes] = [] self._frame_payload_len = 0 self._tail: bytes = b"" self._has_mask = 
False self._frame_mask: Optional[bytes] = None - self._payload_length = 0 - self._payload_length_flag = 0 + self._payload_bytes_to_read = 0 + self._payload_len_flag = 0 self._compressed: int = COMPRESSED_NOT_SET self._decompressobj: Optional[ZLibDecompressor] = None self._compress = compress @@ -325,13 +325,13 @@ def _feed_data(self, data: bytes) -> None: data, self._tail = self._tail + data, b"" start_pos: int = 0 - data_length = len(data) + data_len = len(data) data_cstr = data while True: # read header if self._state == READ_HEADER: - if data_length - start_pos < 2: + if data_len - start_pos < 2: break first_byte = data_cstr[start_pos] second_byte = data_cstr[start_pos + 1] @@ -390,77 +390,88 @@ def _feed_data(self, data: bytes) -> None: self._frame_fin = bool(fin) self._frame_opcode = opcode self._has_mask = bool(has_mask) - self._payload_length_flag = length + self._payload_len_flag = length self._state = READ_PAYLOAD_LENGTH # read payload length if self._state == READ_PAYLOAD_LENGTH: - length_flag = self._payload_length_flag - if length_flag == 126: - if data_length - start_pos < 2: + len_flag = self._payload_len_flag + if len_flag == 126: + if data_len - start_pos < 2: break first_byte = data_cstr[start_pos] second_byte = data_cstr[start_pos + 1] start_pos += 2 - self._payload_length = first_byte << 8 | second_byte - elif length_flag > 126: - if data_length - start_pos < 8: + self._payload_bytes_to_read = first_byte << 8 | second_byte + elif len_flag > 126: + if data_len - start_pos < 8: break - self._payload_length = UNPACK_LEN3(data, start_pos)[0] + self._payload_bytes_to_read = UNPACK_LEN3(data, start_pos)[0] start_pos += 8 else: - self._payload_length = length_flag + self._payload_bytes_to_read = len_flag self._state = READ_PAYLOAD_MASK if self._has_mask else READ_PAYLOAD # read payload mask if self._state == READ_PAYLOAD_MASK: - if data_length - start_pos < 4: + if data_len - start_pos < 4: break self._frame_mask = data_cstr[start_pos : start_pos + 4] 
start_pos += 4 self._state = READ_PAYLOAD if self._state == READ_PAYLOAD: - chunk_len = data_length - start_pos - if self._payload_length >= chunk_len: - end_pos = data_length - self._payload_length -= chunk_len + chunk_len = data_len - start_pos + if self._payload_bytes_to_read >= chunk_len: + f_end_pos = data_len + self._payload_bytes_to_read -= chunk_len else: - end_pos = start_pos + self._payload_length - self._payload_length = 0 - - if self._frame_payload_len: - if type(self._frame_payload) is not bytearray: - self._frame_payload = bytearray(self._frame_payload) - self._frame_payload += data_cstr[start_pos:end_pos] - else: - # Fast path for the first frame - self._frame_payload = data_cstr[start_pos:end_pos] - - self._frame_payload_len += end_pos - start_pos - start_pos = end_pos - - if self._payload_length != 0: + f_end_pos = start_pos + self._payload_bytes_to_read + self._payload_bytes_to_read = 0 + + had_fragments = self._frame_payload_len + self._frame_payload_len += f_end_pos - start_pos + f_start_pos = start_pos + start_pos = f_end_pos + + if self._payload_bytes_to_read != 0: + # If we don't have a complete frame, we need to save the + # data for the next call to feed_data. 
+ self._payload_fragments.append(data_cstr[f_start_pos:f_end_pos]) break - if self._has_mask: + payload: Union[bytes, bytearray] + if had_fragments: + # We have to join the payload fragments to get the payload + self._payload_fragments.append(data_cstr[f_start_pos:f_end_pos]) + if self._has_mask: + assert self._frame_mask is not None + payload_bytearray = bytearray() + payload_bytearray.join(self._payload_fragments) + websocket_mask(self._frame_mask, payload_bytearray) + payload = payload_bytearray + else: + payload = b"".join(self._payload_fragments) + self._payload_fragments.clear() + elif self._has_mask: assert self._frame_mask is not None - if type(self._frame_payload) is not bytearray: - self._frame_payload = bytearray(self._frame_payload) - websocket_mask(self._frame_mask, self._frame_payload) + payload_bytearray = data_cstr[f_start_pos:f_end_pos] # type: ignore[assignment] + if type(payload_bytearray) is not bytearray: # pragma: no branch + # Cython will do the conversion for us + # but we need to do it for Python and we + # will always get here in Python + payload_bytearray = bytearray(payload_bytearray) + websocket_mask(self._frame_mask, payload_bytearray) + payload = payload_bytearray + else: + payload = data_cstr[f_start_pos:f_end_pos] self._handle_frame( - self._frame_fin, - self._frame_opcode, - self._frame_payload, - self._compressed, + self._frame_fin, self._frame_opcode, payload, self._compressed ) - self._frame_payload = b"" self._frame_payload_len = 0 self._state = READ_HEADER # XXX: Cython needs slices to be bounded, so we can't omit the slice end here. 
- self._tail = ( - data_cstr[start_pos:data_length] if start_pos < data_length else b"" - ) + self._tail = data_cstr[start_pos:data_len] if start_pos < data_len else b"" From 1d00bd2c1cdb43298ac6eba0c8bbc316123a154e Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 19 Apr 2025 08:13:54 +0000 Subject: [PATCH 1346/1511] [PR #10744/23d3ee06 backport][3.11] Refactor WebSocket reader to avoid frequent realloc when frames are fragmented (#10747) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/10744.misc.rst | 1 + aiohttp/_websocket/reader_c.pxd | 25 ++++---- aiohttp/_websocket/reader_py.py | 103 ++++++++++++++++++-------------- 3 files changed, 72 insertions(+), 57 deletions(-) create mode 100644 CHANGES/10744.misc.rst diff --git a/CHANGES/10744.misc.rst b/CHANGES/10744.misc.rst new file mode 100644 index 00000000000..da0d379475d --- /dev/null +++ b/CHANGES/10744.misc.rst @@ -0,0 +1 @@ +Improved performance of the WebSocket reader with large messages -- by :user:`bdraco`. 
diff --git a/aiohttp/_websocket/reader_c.pxd b/aiohttp/_websocket/reader_c.pxd index 3efebeb81dc..a7620d8e87f 100644 --- a/aiohttp/_websocket/reader_c.pxd +++ b/aiohttp/_websocket/reader_c.pxd @@ -68,14 +68,14 @@ cdef class WebSocketReader: cdef int _opcode cdef bint _frame_fin cdef int _frame_opcode - cdef object _frame_payload - cdef unsigned long long _frame_payload_len + cdef list _payload_fragments + cdef Py_ssize_t _frame_payload_len cdef bytes _tail cdef bint _has_mask cdef bytes _frame_mask - cdef unsigned long long _payload_length - cdef unsigned int _payload_length_flag + cdef Py_ssize_t _payload_bytes_to_read + cdef unsigned int _payload_len_flag cdef int _compressed cdef object _decompressobj cdef bint _compress @@ -91,17 +91,20 @@ cdef class WebSocketReader: cpdef void _handle_frame(self, bint fin, int opcode, object payload, int compressed) except * @cython.locals( - start_pos="unsigned int", - data_len="unsigned int", - length="unsigned int", - chunk_size="unsigned int", - chunk_len="unsigned int", - data_length="unsigned int", + start_pos=Py_ssize_t, + data_len=Py_ssize_t, + length=Py_ssize_t, + chunk_size=Py_ssize_t, + chunk_len=Py_ssize_t, + data_len=Py_ssize_t, data_cstr="const unsigned char *", first_byte="unsigned char", second_byte="unsigned char", - end_pos="unsigned int", + f_start_pos=Py_ssize_t, + f_end_pos=Py_ssize_t, has_mask=bint, fin=bint, + had_fragments=Py_ssize_t, + payload_bytearray=bytearray, ) cpdef void _feed_data(self, bytes data) except * diff --git a/aiohttp/_websocket/reader_py.py b/aiohttp/_websocket/reader_py.py index 5c5dbc3b0c4..2c7ae5779e2 100644 --- a/aiohttp/_websocket/reader_py.py +++ b/aiohttp/_websocket/reader_py.py @@ -144,14 +144,14 @@ def __init__( self._opcode: int = OP_CODE_NOT_SET self._frame_fin = False self._frame_opcode: int = OP_CODE_NOT_SET - self._frame_payload: Union[bytes, bytearray] = b"" + self._payload_fragments: list[bytes] = [] self._frame_payload_len = 0 self._tail: bytes = b"" self._has_mask = 
False self._frame_mask: Optional[bytes] = None - self._payload_length = 0 - self._payload_length_flag = 0 + self._payload_bytes_to_read = 0 + self._payload_len_flag = 0 self._compressed: int = COMPRESSED_NOT_SET self._decompressobj: Optional[ZLibDecompressor] = None self._compress = compress @@ -317,13 +317,13 @@ def _feed_data(self, data: bytes) -> None: data, self._tail = self._tail + data, b"" start_pos: int = 0 - data_length = len(data) + data_len = len(data) data_cstr = data while True: # read header if self._state == READ_HEADER: - if data_length - start_pos < 2: + if data_len - start_pos < 2: break first_byte = data_cstr[start_pos] second_byte = data_cstr[start_pos + 1] @@ -382,77 +382,88 @@ def _feed_data(self, data: bytes) -> None: self._frame_fin = bool(fin) self._frame_opcode = opcode self._has_mask = bool(has_mask) - self._payload_length_flag = length + self._payload_len_flag = length self._state = READ_PAYLOAD_LENGTH # read payload length if self._state == READ_PAYLOAD_LENGTH: - length_flag = self._payload_length_flag - if length_flag == 126: - if data_length - start_pos < 2: + len_flag = self._payload_len_flag + if len_flag == 126: + if data_len - start_pos < 2: break first_byte = data_cstr[start_pos] second_byte = data_cstr[start_pos + 1] start_pos += 2 - self._payload_length = first_byte << 8 | second_byte - elif length_flag > 126: - if data_length - start_pos < 8: + self._payload_bytes_to_read = first_byte << 8 | second_byte + elif len_flag > 126: + if data_len - start_pos < 8: break - self._payload_length = UNPACK_LEN3(data, start_pos)[0] + self._payload_bytes_to_read = UNPACK_LEN3(data, start_pos)[0] start_pos += 8 else: - self._payload_length = length_flag + self._payload_bytes_to_read = len_flag self._state = READ_PAYLOAD_MASK if self._has_mask else READ_PAYLOAD # read payload mask if self._state == READ_PAYLOAD_MASK: - if data_length - start_pos < 4: + if data_len - start_pos < 4: break self._frame_mask = data_cstr[start_pos : start_pos + 4] 
start_pos += 4 self._state = READ_PAYLOAD if self._state == READ_PAYLOAD: - chunk_len = data_length - start_pos - if self._payload_length >= chunk_len: - end_pos = data_length - self._payload_length -= chunk_len + chunk_len = data_len - start_pos + if self._payload_bytes_to_read >= chunk_len: + f_end_pos = data_len + self._payload_bytes_to_read -= chunk_len else: - end_pos = start_pos + self._payload_length - self._payload_length = 0 - - if self._frame_payload_len: - if type(self._frame_payload) is not bytearray: - self._frame_payload = bytearray(self._frame_payload) - self._frame_payload += data_cstr[start_pos:end_pos] - else: - # Fast path for the first frame - self._frame_payload = data_cstr[start_pos:end_pos] - - self._frame_payload_len += end_pos - start_pos - start_pos = end_pos - - if self._payload_length != 0: + f_end_pos = start_pos + self._payload_bytes_to_read + self._payload_bytes_to_read = 0 + + had_fragments = self._frame_payload_len + self._frame_payload_len += f_end_pos - start_pos + f_start_pos = start_pos + start_pos = f_end_pos + + if self._payload_bytes_to_read != 0: + # If we don't have a complete frame, we need to save the + # data for the next call to feed_data. 
+ self._payload_fragments.append(data_cstr[f_start_pos:f_end_pos]) break - if self._has_mask: + payload: Union[bytes, bytearray] + if had_fragments: + # We have to join the payload fragments get the payload + self._payload_fragments.append(data_cstr[f_start_pos:f_end_pos]) + if self._has_mask: + assert self._frame_mask is not None + payload_bytearray = bytearray() + payload_bytearray.join(self._payload_fragments) + websocket_mask(self._frame_mask, payload_bytearray) + payload = payload_bytearray + else: + payload = b"".join(self._payload_fragments) + self._payload_fragments.clear() + elif self._has_mask: assert self._frame_mask is not None - if type(self._frame_payload) is not bytearray: - self._frame_payload = bytearray(self._frame_payload) - websocket_mask(self._frame_mask, self._frame_payload) + payload_bytearray = data_cstr[f_start_pos:f_end_pos] # type: ignore[assignment] + if type(payload_bytearray) is not bytearray: # pragma: no branch + # Cython will do the conversion for us + # but we need to do it for Python and we + # will always get here in Python + payload_bytearray = bytearray(payload_bytearray) + websocket_mask(self._frame_mask, payload_bytearray) + payload = payload_bytearray + else: + payload = data_cstr[f_start_pos:f_end_pos] self._handle_frame( - self._frame_fin, - self._frame_opcode, - self._frame_payload, - self._compressed, + self._frame_fin, self._frame_opcode, payload, self._compressed ) - self._frame_payload = b"" self._frame_payload_len = 0 self._state = READ_HEADER # XXX: Cython needs slices to be bounded, so we can't omit the slice end here. 
- self._tail = ( - data_cstr[start_pos:data_length] if start_pos < data_length else b"" - ) + self._tail = data_cstr[start_pos:data_len] if start_pos < data_len else b"" From 8b9888dea1a1306d23e9ad70cd87c851a37f1ed7 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 19 Apr 2025 09:33:09 +0000 Subject: [PATCH 1347/1511] [PR #10749/d702fb30 backport][3.11] Add compressed binary WebSocket roundtrip benchmark (#10750) Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_benchmarks_client_ws.py | 64 ++++++++++++++++++++++++++++++ 1 file changed, 64 insertions(+) diff --git a/tests/test_benchmarks_client_ws.py b/tests/test_benchmarks_client_ws.py index c244d33f6bd..044c1c1eb6d 100644 --- a/tests/test_benchmarks_client_ws.py +++ b/tests/test_benchmarks_client_ws.py @@ -105,3 +105,67 @@ async def run_websocket_benchmark() -> None: @benchmark def _run() -> None: loop.run_until_complete(run_websocket_benchmark()) + + +def test_client_send_large_websocket_compressed_messages( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark send of compressed WebSocket binary messages.""" + message_count = 10 + raw_message = b"x" * 2**19 # 512 KiB + + async def handler(request: web.Request) -> web.WebSocketResponse: + ws = web.WebSocketResponse() + await ws.prepare(request) + for _ in range(message_count): + await ws.receive() + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_websocket_benchmark() -> None: + client = await aiohttp_client(app) + resp = await client.ws_connect("/", compress=15) + for _ in range(message_count): + await resp.send_bytes(raw_message) + await resp.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_websocket_benchmark()) + + +def test_client_receive_large_websocket_compressed_messages( + loop: asyncio.AbstractEventLoop, + 
aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark receive of compressed WebSocket binary messages.""" + message_count = 10 + raw_message = b"x" * 2**19 # 512 KiB + + async def handler(request: web.Request) -> web.WebSocketResponse: + ws = web.WebSocketResponse() + await ws.prepare(request) + for _ in range(message_count): + await ws.send_bytes(raw_message) + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_websocket_benchmark() -> None: + client = await aiohttp_client(app) + resp = await client.ws_connect("/", compress=15) + for _ in range(message_count): + await resp.receive() + await resp.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_websocket_benchmark()) From 03d17e5ff8decabaddcf7839ea2873a076e47b88 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 19 Apr 2025 09:37:49 +0000 Subject: [PATCH 1348/1511] [PR #10749/d702fb30 backport][3.12] Add compressed binary WebSocket roundtrip benchmark (#10751) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- tests/test_benchmarks_client_ws.py | 66 ++++++++++++++++++++++++++++++ 1 file changed, 66 insertions(+) diff --git a/tests/test_benchmarks_client_ws.py b/tests/test_benchmarks_client_ws.py index c244d33f6bd..0338b52fb9d 100644 --- a/tests/test_benchmarks_client_ws.py +++ b/tests/test_benchmarks_client_ws.py @@ -105,3 +105,69 @@ async def run_websocket_benchmark() -> None: @benchmark def _run() -> None: loop.run_until_complete(run_websocket_benchmark()) + + +@pytest.mark.usefixtures("parametrize_zlib_backend") +def test_client_send_large_websocket_compressed_messages( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark send of compressed WebSocket binary messages.""" + message_count = 10 + raw_message = b"x" * 2**19 # 512 KiB + + async def handler(request: web.Request) -> web.WebSocketResponse: + ws = web.WebSocketResponse() + await ws.prepare(request) + for _ in range(message_count): + await ws.receive() + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_websocket_benchmark() -> None: + client = await aiohttp_client(app) + resp = await client.ws_connect("/", compress=15) + for _ in range(message_count): + await resp.send_bytes(raw_message) + await resp.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_websocket_benchmark()) + + +@pytest.mark.usefixtures("parametrize_zlib_backend") +def test_client_receive_large_websocket_compressed_messages( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark receive of compressed WebSocket binary messages.""" + message_count = 10 + raw_message = b"x" * 2**19 # 512 KiB + + async def handler(request: web.Request) -> web.WebSocketResponse: + ws = web.WebSocketResponse() + await ws.prepare(request) + for _ in range(message_count): + await 
ws.send_bytes(raw_message) + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_websocket_benchmark() -> None: + client = await aiohttp_client(app) + resp = await client.ws_connect("/", compress=15) + for _ in range(message_count): + await resp.receive() + await resp.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_websocket_benchmark()) From d8ad35fd2ebe8e1a7d4740bb830b4baa49a1f96b Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 19 Apr 2025 09:00:54 -1000 Subject: [PATCH 1349/1511] [PR #10752/07590cd2 backport][3.12] Add a test to the WebSocket parser for sending one byte at a time (#10755) Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_websocket_parser.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/tests/test_websocket_parser.py b/tests/test_websocket_parser.py index 04c83f19610..52c34454886 100644 --- a/tests/test_websocket_parser.py +++ b/tests/test_websocket_parser.py @@ -346,6 +346,17 @@ def test_simple_binary( assert res == ((WSMsgType.BINARY, b"binary", ""), 6) +def test_one_byte_at_a_time( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: + """Send one byte at a time to the parser.""" + data = build_frame(b"binary", WSMsgType.BINARY) + for i in range(len(data)): + parser._feed_data(data[i : i + 1]) + res = out._buffer[0] + assert res == ((WSMsgType.BINARY, b"binary", ""), 6) + + def test_fragmentation_header( out: WebSocketDataQueue, parser: PatchableWebSocketReader ) -> None: From 0615314569fc1da56a03ebed8b5586acb6e4c4df Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 19 Apr 2025 19:07:09 +0000 Subject: [PATCH 1350/1511] [PR #10752/07590cd2 backport][3.11] Add a test to the WebSocket parser for sending one byte at a time (#10754) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- tests/test_websocket_parser.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/tests/test_websocket_parser.py b/tests/test_websocket_parser.py index 8a65ac11d50..fc4888df5e5 100644 --- a/tests/test_websocket_parser.py +++ b/tests/test_websocket_parser.py @@ -344,6 +344,17 @@ def test_simple_binary( assert res == ((WSMsgType.BINARY, b"binary", ""), 6) +def test_one_byte_at_a_time( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: + """Send one byte at a time to the parser.""" + data = build_frame(b"binary", WSMsgType.BINARY) + for i in range(len(data)): + parser._feed_data(data[i : i + 1]) + res = out._buffer[0] + assert res == ((WSMsgType.BINARY, b"binary", ""), 6) + + def test_fragmentation_header( out: WebSocketDataQueue, parser: PatchableWebSocketReader ) -> None: From 11be7e2ff8775e0ee96c027475474cdef8ec3e37 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sat, 19 Apr 2025 10:53:32 -1000 Subject: [PATCH 1351/1511] Release 3.11.17 (#10756) --- CHANGES.rst | 42 ++++++++++++++++++++++++++++++++++++++++++ CHANGES/10713.misc.rst | 1 - CHANGES/10714.misc.rst | 1 - CHANGES/10740.misc.rst | 1 - CHANGES/10744.misc.rst | 1 - aiohttp/__init__.py | 2 +- 6 files changed, 43 insertions(+), 5 deletions(-) delete mode 100644 CHANGES/10713.misc.rst delete mode 100644 CHANGES/10714.misc.rst delete mode 100644 CHANGES/10740.misc.rst delete mode 100644 CHANGES/10744.misc.rst diff --git a/CHANGES.rst b/CHANGES.rst index 00d728e775d..3b62b221e4a 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,48 @@ .. towncrier release notes start +3.11.17 (2025-04-19) +==================== + +Miscellaneous internal changes +------------------------------ + +- Optimized web server performance when access logging is disabled by reducing time syscalls -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10713`. 
+ + + +- Improved web server performance when connection can be reused -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10714`. + + + +- Improved performance of the WebSocket reader -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10740`. + + + +- Improved performance of the WebSocket reader with large messages -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10744`. + + + + +---- + + 3.11.16 (2025-04-01) ==================== diff --git a/CHANGES/10713.misc.rst b/CHANGES/10713.misc.rst deleted file mode 100644 index a556d11e1e0..00000000000 --- a/CHANGES/10713.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Optimized web server performance when access logging is disabled by reducing time syscalls -- by :user:`bdraco`. diff --git a/CHANGES/10714.misc.rst b/CHANGES/10714.misc.rst deleted file mode 100644 index a36a80872f5..00000000000 --- a/CHANGES/10714.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved web server performance when connection can be reused -- by :user:`bdraco`. diff --git a/CHANGES/10740.misc.rst b/CHANGES/10740.misc.rst deleted file mode 100644 index 34ed19aebba..00000000000 --- a/CHANGES/10740.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performance of the WebSocket reader -- by :user:`bdraco`. diff --git a/CHANGES/10744.misc.rst b/CHANGES/10744.misc.rst deleted file mode 100644 index da0d379475d..00000000000 --- a/CHANGES/10744.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performance of the WebSocket reader with large messages -- by :user:`bdraco`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 8a3d34a4f87..ab1d5bdedcc 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.17.dev0" +__version__ = "3.11.17" from typing import TYPE_CHECKING, Tuple From 83690e532065c33c569328d49b6e353f45587e8e Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sat, 19 Apr 2025 12:19:42 -1000 Subject: [PATCH 1352/1511] Increment version to 3.11.18.dev0 (#10758) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index ab1d5bdedcc..e967a21bed0 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.17" +__version__ = "3.11.18.dev0" from typing import TYPE_CHECKING, Tuple From f69333ded38348c0db41d0dd07a2501958dceaff Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 20 Apr 2025 22:07:35 -1000 Subject: [PATCH 1353/1511] [PR #10764/e0cc020 backport][3.11] Fix WebSocket reader with fragmented masked messages (#10765) --- CHANGES/10764.bugfix.rst | 3 +++ aiohttp/_websocket/reader_py.py | 3 +-- tests/test_websocket_parser.py | 45 +++++++++++++++++++++++++++++++++ 3 files changed, 49 insertions(+), 2 deletions(-) create mode 100644 CHANGES/10764.bugfix.rst diff --git a/CHANGES/10764.bugfix.rst b/CHANGES/10764.bugfix.rst new file mode 100644 index 00000000000..04cd71cd190 --- /dev/null +++ b/CHANGES/10764.bugfix.rst @@ -0,0 +1,3 @@ +Fixed reading fragmented WebSocket messages when the payload was masked -- by :user:`bdraco`. 
+ +The problem first appeared in 3.11.17 diff --git a/aiohttp/_websocket/reader_py.py b/aiohttp/_websocket/reader_py.py index 2c7ae5779e2..f0060fd723c 100644 --- a/aiohttp/_websocket/reader_py.py +++ b/aiohttp/_websocket/reader_py.py @@ -439,8 +439,7 @@ def _feed_data(self, data: bytes) -> None: self._payload_fragments.append(data_cstr[f_start_pos:f_end_pos]) if self._has_mask: assert self._frame_mask is not None - payload_bytearray = bytearray() - payload_bytearray.join(self._payload_fragments) + payload_bytearray = bytearray(b"".join(self._payload_fragments)) websocket_mask(self._frame_mask, payload_bytearray) payload = payload_bytearray else: diff --git a/tests/test_websocket_parser.py b/tests/test_websocket_parser.py index fc4888df5e5..d1d96f716fd 100644 --- a/tests/test_websocket_parser.py +++ b/tests/test_websocket_parser.py @@ -366,6 +366,51 @@ def test_fragmentation_header( assert res == (WSMessage(WSMsgType.TEXT, "a", ""), 1) +def test_large_message( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: + large_payload = b"b" * 131072 + data = build_frame(large_payload, WSMsgType.BINARY) + parser._feed_data(data) + + res = out._buffer[0] + assert res == ((WSMsgType.BINARY, large_payload, ""), 131072) + + +def test_large_masked_message( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: + large_payload = b"b" * 131072 + data = build_frame(large_payload, WSMsgType.BINARY, use_mask=True) + parser._feed_data(data) + + res = out._buffer[0] + assert res == ((WSMsgType.BINARY, large_payload, ""), 131072) + + +def test_fragmented_masked_message( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: + large_payload = b"b" * 100 + data = build_frame(large_payload, WSMsgType.BINARY, use_mask=True) + for i in range(len(data)): + parser._feed_data(data[i : i + 1]) + + res = out._buffer[0] + assert res == ((WSMsgType.BINARY, large_payload, ""), 100) + + +def test_large_fragmented_masked_message( + out: 
WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: + large_payload = b"b" * 131072 + data = build_frame(large_payload, WSMsgType.BINARY, use_mask=True) + for i in range(0, len(data), 16384): + parser._feed_data(data[i : i + 16384]) + res = out._buffer[0] + assert res == ((WSMsgType.BINARY, large_payload, ""), 131072) + + def test_continuation( out: WebSocketDataQueue, parser: PatchableWebSocketReader ) -> None: From 4182657c2cfd385c2d504f722e56c5cd144dd297 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 20 Apr 2025 22:15:38 -1000 Subject: [PATCH 1354/1511] [PR #10764/e0cc020 backport][3.12] Fix WebSocket reader with fragmented masked messages (#10766) --- CHANGES/10764.bugfix.rst | 3 +++ aiohttp/_websocket/reader_py.py | 3 +-- tests/test_websocket_parser.py | 45 +++++++++++++++++++++++++++++++++ 3 files changed, 49 insertions(+), 2 deletions(-) create mode 100644 CHANGES/10764.bugfix.rst diff --git a/CHANGES/10764.bugfix.rst b/CHANGES/10764.bugfix.rst new file mode 100644 index 00000000000..04cd71cd190 --- /dev/null +++ b/CHANGES/10764.bugfix.rst @@ -0,0 +1,3 @@ +Fixed reading fragmented WebSocket messages when the payload was masked -- by :user:`bdraco`. 
+ +The problem first appeared in 3.11.17 diff --git a/aiohttp/_websocket/reader_py.py b/aiohttp/_websocket/reader_py.py index 8a775742df1..855f9c6d600 100644 --- a/aiohttp/_websocket/reader_py.py +++ b/aiohttp/_websocket/reader_py.py @@ -447,8 +447,7 @@ def _feed_data(self, data: bytes) -> None: self._payload_fragments.append(data_cstr[f_start_pos:f_end_pos]) if self._has_mask: assert self._frame_mask is not None - payload_bytearray = bytearray() - payload_bytearray.join(self._payload_fragments) + payload_bytearray = bytearray(b"".join(self._payload_fragments)) websocket_mask(self._frame_mask, payload_bytearray) payload = payload_bytearray else: diff --git a/tests/test_websocket_parser.py b/tests/test_websocket_parser.py index 52c34454886..37e15b64c18 100644 --- a/tests/test_websocket_parser.py +++ b/tests/test_websocket_parser.py @@ -368,6 +368,51 @@ def test_fragmentation_header( assert res == (WSMessage(WSMsgType.TEXT, "a", ""), 1) +def test_large_message( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: + large_payload = b"b" * 131072 + data = build_frame(large_payload, WSMsgType.BINARY) + parser._feed_data(data) + + res = out._buffer[0] + assert res == ((WSMsgType.BINARY, large_payload, ""), 131072) + + +def test_large_masked_message( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: + large_payload = b"b" * 131072 + data = build_frame(large_payload, WSMsgType.BINARY, use_mask=True) + parser._feed_data(data) + + res = out._buffer[0] + assert res == ((WSMsgType.BINARY, large_payload, ""), 131072) + + +def test_fragmented_masked_message( + out: WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: + large_payload = b"b" * 100 + data = build_frame(large_payload, WSMsgType.BINARY, use_mask=True) + for i in range(len(data)): + parser._feed_data(data[i : i + 1]) + + res = out._buffer[0] + assert res == ((WSMsgType.BINARY, large_payload, ""), 100) + + +def test_large_fragmented_masked_message( + out: 
WebSocketDataQueue, parser: PatchableWebSocketReader +) -> None: + large_payload = b"b" * 131072 + data = build_frame(large_payload, WSMsgType.BINARY, use_mask=True) + for i in range(0, len(data), 16384): + parser._feed_data(data[i : i + 16384]) + res = out._buffer[0] + assert res == ((WSMsgType.BINARY, large_payload, ""), 131072) + + def test_continuation( out: WebSocketDataQueue, parser: PatchableWebSocketReader ) -> None: From 51aa3589608bbbdb45fbad75f06b6380afe9df07 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 20 Apr 2025 22:36:17 -1000 Subject: [PATCH 1355/1511] [PR #10726/feff48d backport][3.12] Disable TLS in TLS warning for uvloop (#10768) Co-authored-by: Matthew Go <lezgomatt@gmail.com> --- CHANGES/7686.bugfix.rst | 1 + CONTRIBUTORS.txt | 1 + aiohttp/connector.py | 8 +++++++- tests/conftest.py | 18 +++++++++++++++++- tests/test_proxy_functional.py | 27 +++++++++++++++++++++++++++ 5 files changed, 53 insertions(+), 2 deletions(-) create mode 100644 CHANGES/7686.bugfix.rst diff --git a/CHANGES/7686.bugfix.rst b/CHANGES/7686.bugfix.rst new file mode 100644 index 00000000000..7b575ff3564 --- /dev/null +++ b/CHANGES/7686.bugfix.rst @@ -0,0 +1 @@ +Disabled TLS in TLS warning (when using HTTPS proxies) for uvloop and newer Python versions -- by :user:`lezgomatt`. 
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 4c44c5f4001..fb7bcf8e168 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -243,6 +243,7 @@ Martin Sucha Mathias Fröjdman Mathieu Dugré Matt VanEseltine +Matthew Go Matthias Marquardt Matthieu Hauglustaine Matthieu Rigal diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 2a41438ab6a..dd0d27a7054 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -1222,7 +1222,13 @@ def _warn_about_tls_in_tls( if req.request_info.url.scheme != "https": return - asyncio_supports_tls_in_tls = getattr( + # Check if uvloop is being used, which supports TLS in TLS, + # otherwise assume that asyncio's native transport is being used. + if type(underlying_transport).__module__.startswith("uvloop"): + return + + # Support in asyncio was added in Python 3.11 (bpo-44011) + asyncio_supports_tls_in_tls = sys.version_info >= (3, 11) or getattr( underlying_transport, "_start_tls_compatible", False, diff --git a/tests/conftest.py b/tests/conftest.py index be763400f45..437bccf9ba7 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -8,7 +8,7 @@ from hashlib import md5, sha1, sha256 from pathlib import Path from tempfile import TemporaryDirectory -from typing import Any, Generator +from typing import Any, Generator, Iterator from unittest import mock from uuid import uuid4 @@ -32,6 +32,12 @@ except ImportError: TRUSTME = False + +try: + import uvloop +except ImportError: + uvloop = None # type: ignore[assignment] + pytest_plugins = ["aiohttp.pytest_plugin", "pytester"] IS_HPUX = sys.platform.startswith("hp-ux") @@ -227,6 +233,16 @@ def selector_loop(): yield _loop +@pytest.fixture +def uvloop_loop() -> Iterator[asyncio.AbstractEventLoop]: + policy = uvloop.EventLoopPolicy() + asyncio.set_event_loop_policy(policy) + + with loop_context(policy.new_event_loop) as _loop: + asyncio.set_event_loop(_loop) + yield _loop + + @pytest.fixture def netrc_contents( tmp_path: Path, diff --git a/tests/test_proxy_functional.py 
b/tests/test_proxy_functional.py index 0921d5487bb..02d77700d96 100644 --- a/tests/test_proxy_functional.py +++ b/tests/test_proxy_functional.py @@ -1,6 +1,7 @@ import asyncio import os import pathlib +import platform import ssl import sys from re import match as match_regex @@ -202,6 +203,32 @@ async def test_https_proxy_unsupported_tls_in_tls( await asyncio.sleep(0.1) +@pytest.mark.usefixtures("uvloop_loop") +@pytest.mark.skipif( + platform.system() == "Windows" or sys.implementation.name != "cpython", + reason="uvloop is not supported on Windows and non-CPython implementations", +) +@pytest.mark.filterwarnings(r"ignore:.*ssl.OP_NO_SSL*") +# Filter out the warning from +# https://github.com/abhinavsingh/proxy.py/blob/30574fd0414005dfa8792a6e797023e862bdcf43/proxy/common/utils.py#L226 +# otherwise this test will fail because the proxy will die with an error. +async def test_uvloop_secure_https_proxy( + client_ssl_ctx: ssl.SSLContext, + secure_proxy_url: URL, +) -> None: + """Ensure HTTPS sites are accessible through a secure proxy without warning when using uvloop.""" + conn = aiohttp.TCPConnector() + sess = aiohttp.ClientSession(connector=conn) + url = URL("https://example.com") + + async with sess.get(url, proxy=secure_proxy_url, ssl=client_ssl_ctx) as response: + assert response.status == 200 + + await sess.close() + await conn.close() + await asyncio.sleep(0.1) + + @pytest.fixture def proxy_test_server(aiohttp_raw_server, loop, monkeypatch): # Handle all proxy requests and imitate remote server response. From a003df3c16f41d1ee29a844f6621fc573c4cefbc Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sun, 20 Apr 2025 22:36:24 -1000 Subject: [PATCH 1356/1511] [PR #10726/feff48d backport][3.11] Disable TLS in TLS warning for uvloop (#10767) Co-authored-by: Matthew Go <lezgomatt@gmail.com> --- CHANGES/7686.bugfix.rst | 1 + CONTRIBUTORS.txt | 1 + aiohttp/connector.py | 8 +++++++- tests/conftest.py | 18 +++++++++++++++++- tests/test_proxy_functional.py | 27 +++++++++++++++++++++++++++ 5 files changed, 53 insertions(+), 2 deletions(-) create mode 100644 CHANGES/7686.bugfix.rst diff --git a/CHANGES/7686.bugfix.rst b/CHANGES/7686.bugfix.rst new file mode 100644 index 00000000000..7b575ff3564 --- /dev/null +++ b/CHANGES/7686.bugfix.rst @@ -0,0 +1 @@ +Disabled TLS in TLS warning (when using HTTPS proxies) for uvloop and newer Python versions -- by :user:`lezgomatt`. diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 953af52498a..9fec4933dc0 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -241,6 +241,7 @@ Martin Sucha Mathias Fröjdman Mathieu Dugré Matt VanEseltine +Matthew Go Matthias Marquardt Matthieu Hauglustaine Matthieu Rigal diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 7420bd6070a..7d5bcf755ec 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -1203,7 +1203,13 @@ def _warn_about_tls_in_tls( if req.request_info.url.scheme != "https": return - asyncio_supports_tls_in_tls = getattr( + # Check if uvloop is being used, which supports TLS in TLS, + # otherwise assume that asyncio's native transport is being used. 
+ if type(underlying_transport).__module__.startswith("uvloop"): + return + + # Support in asyncio was added in Python 3.11 (bpo-44011) + asyncio_supports_tls_in_tls = sys.version_info >= (3, 11) or getattr( underlying_transport, "_start_tls_compatible", False, diff --git a/tests/conftest.py b/tests/conftest.py index 95a98cd4fc0..bceec5212a9 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -7,7 +7,7 @@ from hashlib import md5, sha1, sha256 from pathlib import Path from tempfile import TemporaryDirectory -from typing import Any, Generator +from typing import Any, Generator, Iterator from unittest import mock from uuid import uuid4 @@ -27,6 +27,12 @@ except ImportError: TRUSTME = False + +try: + import uvloop +except ImportError: + uvloop = None # type: ignore[assignment] + pytest_plugins = ["aiohttp.pytest_plugin", "pytester"] IS_HPUX = sys.platform.startswith("hp-ux") @@ -193,6 +199,16 @@ def selector_loop(): yield _loop +@pytest.fixture +def uvloop_loop() -> Iterator[asyncio.AbstractEventLoop]: + policy = uvloop.EventLoopPolicy() + asyncio.set_event_loop_policy(policy) + + with loop_context(policy.new_event_loop) as _loop: + asyncio.set_event_loop(_loop) + yield _loop + + @pytest.fixture def netrc_contents( tmp_path: Path, diff --git a/tests/test_proxy_functional.py b/tests/test_proxy_functional.py index 0921d5487bb..02d77700d96 100644 --- a/tests/test_proxy_functional.py +++ b/tests/test_proxy_functional.py @@ -1,6 +1,7 @@ import asyncio import os import pathlib +import platform import ssl import sys from re import match as match_regex @@ -202,6 +203,32 @@ async def test_https_proxy_unsupported_tls_in_tls( await asyncio.sleep(0.1) +@pytest.mark.usefixtures("uvloop_loop") +@pytest.mark.skipif( + platform.system() == "Windows" or sys.implementation.name != "cpython", + reason="uvloop is not supported on Windows and non-CPython implementations", +) +@pytest.mark.filterwarnings(r"ignore:.*ssl.OP_NO_SSL*") +# Filter out the warning from +# 
https://github.com/abhinavsingh/proxy.py/blob/30574fd0414005dfa8792a6e797023e862bdcf43/proxy/common/utils.py#L226 +# otherwise this test will fail because the proxy will die with an error. +async def test_uvloop_secure_https_proxy( + client_ssl_ctx: ssl.SSLContext, + secure_proxy_url: URL, +) -> None: + """Ensure HTTPS sites are accessible through a secure proxy without warning when using uvloop.""" + conn = aiohttp.TCPConnector() + sess = aiohttp.ClientSession(connector=conn) + url = URL("https://example.com") + + async with sess.get(url, proxy=secure_proxy_url, ssl=client_ssl_ctx) as response: + assert response.status == 200 + + await sess.close() + await conn.close() + await asyncio.sleep(0.1) + + @pytest.fixture def proxy_test_server(aiohttp_raw_server, loop, monkeypatch): # Handle all proxy requests and imitate remote server response. From 58b512cf07976651a5ef2beaff819d45bdced76a Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 20 Apr 2025 22:37:46 -1000 Subject: [PATCH 1357/1511] [PR #10770/a88a2436 backport][3.11] Increase benchmark timeout to 9 minutes in the CI (#10771) Co-authored-by: J. Nick Koston <nick@koston.org> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index a794dc65d77..23266b2b2d5 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -244,7 +244,7 @@ jobs: needs: gen_llhttp runs-on: ubuntu-latest - timeout-minutes: 7 + timeout-minutes: 9 steps: - name: Checkout project uses: actions/checkout@v4 From 8b9974615b629d1059baca142c4312ea507c16b9 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 20 Apr 2025 22:37:58 -1000 Subject: [PATCH 1358/1511] [PR #10770/a88a2436 backport][3.12] Increase benchmark timeout to 9 minutes in the CI (#10772) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index b00051b8668..ec85713319b 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -244,7 +244,7 @@ jobs: needs: gen_llhttp runs-on: ubuntu-latest - timeout-minutes: 7 + timeout-minutes: 9 steps: - name: Checkout project uses: actions/checkout@v4 From 0258e4de7d5c4e247e3ec7ae0cc672e438206c6d Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 20 Apr 2025 22:43:40 -1000 Subject: [PATCH 1359/1511] [PR #10761/d884799 backport][3.12] Speed up tests (#10769) Co-authored-by: Roman Postnov <59239573+dikos1337@users.noreply.github.com> --- CHANGES/9705.contrib.rst | 1 + CONTRIBUTORS.txt | 1 + setup.cfg | 1 + tests/conftest.py | 8 +++++++- tests/test_web_sendfile_functional.py | 11 +++++++++-- 5 files changed, 19 insertions(+), 3 deletions(-) create mode 100644 CHANGES/9705.contrib.rst diff --git a/CHANGES/9705.contrib.rst b/CHANGES/9705.contrib.rst new file mode 100644 index 00000000000..771fb442629 --- /dev/null +++ b/CHANGES/9705.contrib.rst @@ -0,0 +1 @@ +Speed up tests by disabling ``blockbuster`` fixture for ``test_static_file_huge`` and ``test_static_file_huge_cancel`` tests -- by :user:`dikos1337`. diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index fb7bcf8e168..3815ae6829d 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -297,6 +297,7 @@ Required Field Robert Lu Robert Nikolich Roman Podoliaka +Roman Postnov Rong Zhang Samir Akarioh Samuel Colvin diff --git a/setup.cfg b/setup.cfg index 9da34e0b5ce..83b33d01532 100644 --- a/setup.cfg +++ b/setup.cfg @@ -182,3 +182,4 @@ xfail_strict = true markers = dev_mode: mark test to run in dev mode. internal: tests which may cause issues for packagers, but should be run in aiohttp's CI. + skip_blockbuster: mark test to skip the blockbuster fixture. 
diff --git a/tests/conftest.py b/tests/conftest.py index 437bccf9ba7..de7f8316cb0 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -45,7 +45,13 @@ @pytest.fixture(autouse=True) -def blockbuster(request): +def blockbuster(request: pytest.FixtureRequest) -> Iterator[None]: + # Allow selectively disabling blockbuster for specific tests + # using the @pytest.mark.skip_blockbuster marker. + if "skip_blockbuster" in request.node.keywords: + yield + return + # No blockbuster for benchmark tests. node = request.node.parent while node: diff --git a/tests/test_web_sendfile_functional.py b/tests/test_web_sendfile_functional.py index fc4db06a307..0c3e9ba68b5 100644 --- a/tests/test_web_sendfile_functional.py +++ b/tests/test_web_sendfile_functional.py @@ -11,6 +11,7 @@ import aiohttp from aiohttp import web from aiohttp.compression_utils import ZLibBackend +from aiohttp.pytest_plugin import AiohttpClient try: import brotlicffi as brotli @@ -642,7 +643,10 @@ async def test_static_file_directory_traversal_attack(aiohttp_client) -> None: await client.close() -async def test_static_file_huge(aiohttp_client, tmp_path) -> None: +@pytest.mark.skip_blockbuster +async def test_static_file_huge( + aiohttp_client: AiohttpClient, tmp_path: pathlib.Path +) -> None: file_path = tmp_path / "huge_data.unknown_mime_type" # fill 20MB file @@ -1073,7 +1077,10 @@ async def handler(request): await client.close() -async def test_static_file_huge_cancel(aiohttp_client, tmp_path) -> None: +@pytest.mark.skip_blockbuster +async def test_static_file_huge_cancel( + aiohttp_client: AiohttpClient, tmp_path: pathlib.Path +) -> None: file_path = tmp_path / "huge_data.unknown_mime_type" # fill 100MB file From 2be611a64cfcee84e96cb3a4e2fb69cfaa20e06b Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sun, 20 Apr 2025 22:58:17 -1000 Subject: [PATCH 1360/1511] Release 3.11.18 (#10773) --- CHANGES.rst | 28 ++++++++++++++++++++++++++++ CHANGES/10764.bugfix.rst | 3 --- CHANGES/7686.bugfix.rst | 1 - aiohttp/__init__.py | 2 +- 4 files changed, 29 insertions(+), 5 deletions(-) delete mode 100644 CHANGES/10764.bugfix.rst delete mode 100644 CHANGES/7686.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index 3b62b221e4a..11fd19153e3 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,34 @@ .. towncrier release notes start +3.11.18 (2025-04-20) +==================== + +Bug fixes +--------- + +- Disabled TLS in TLS warning (when using HTTPS proxies) for uvloop and newer Python versions -- by :user:`lezgomatt`. + + + *Related issues and pull requests on GitHub:* + :issue:`7686`. + + + +- Fixed reading fragmented WebSocket messages when the payload was masked -- by :user:`bdraco`. + + The problem first appeared in 3.11.17 + + + *Related issues and pull requests on GitHub:* + :issue:`10764`. + + + + +---- + + 3.11.17 (2025-04-19) ==================== diff --git a/CHANGES/10764.bugfix.rst b/CHANGES/10764.bugfix.rst deleted file mode 100644 index 04cd71cd190..00000000000 --- a/CHANGES/10764.bugfix.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fixed reading fragmented WebSocket messages when the payload was masked -- by :user:`bdraco`. - -The problem first appeared in 3.11.17 diff --git a/CHANGES/7686.bugfix.rst b/CHANGES/7686.bugfix.rst deleted file mode 100644 index 7b575ff3564..00000000000 --- a/CHANGES/7686.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Disabled TLS in TLS warning (when using HTTPS proxies) for uvloop and newer Python versions -- by :user:`lezgomatt`. 
diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index e967a21bed0..e3e0f3cc51e 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.11.18.dev0" +__version__ = "3.11.18" from typing import TYPE_CHECKING, Tuple From d17fc42411e60cf724a99650eff2d9ee176990cc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 21 Apr 2025 11:59:32 +0000 Subject: [PATCH 1361/1511] Bump pycares from 4.6.0 to 4.6.1 (#10778) Bumps [pycares](https://github.com/saghul/pycares) from 4.6.0 to 4.6.1. <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/saghul/pycares/commit/90bd7fc12910d53c9982d0dd0a0f658683487212"><code>90bd7fc</code></a> Bump version to 4.6.1</li> <li><a href="https://github.com/saghul/pycares/commit/46e04db8857f6e7ac4f9608de775f91dad9a1e9c"><code>46e04db</code></a> Fix missing attribute type information for errno (<a href="https://redirect.github.com/saghul/pycares/issues/215">#215</a>)</li> <li>See full diff in <a href="https://github.com/saghul/pycares/compare/v4.6.0...v4.6.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pycares&package-manager=pip&previous-version=4.6.0&new-version=4.6.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index b4366c8fa26..60e17822367 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -36,7 +36,7 @@ 
propcache==0.3.1 # via # -r requirements/runtime-deps.in # yarl -pycares==4.6.0 +pycares==4.6.1 # via aiodns pycparser==2.22 # via cffi diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 9cf1615af28..d50203137a2 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -148,7 +148,7 @@ propcache==0.3.1 # yarl proxy-py==2.4.10 # via -r requirements/test.in -pycares==4.6.0 +pycares==4.6.1 # via aiodns pycparser==2.22 # via cffi diff --git a/requirements/dev.txt b/requirements/dev.txt index fb26879cabc..9b93f12fd3d 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -145,7 +145,7 @@ propcache==0.3.1 # yarl proxy-py==2.4.10 # via -r requirements/test.in -pycares==4.6.0 +pycares==4.6.1 # via aiodns pycparser==2.22 # via cffi diff --git a/requirements/lint.txt b/requirements/lint.txt index ab419411f50..b01ef0c2978 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -59,7 +59,7 @@ pluggy==1.5.0 # via pytest pre-commit==4.2.0 # via -r requirements/lint.in -pycares==4.6.0 +pycares==4.6.1 # via aiodns pycparser==2.22 # via cffi diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index a1d1a47cf00..e6bcad92614 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -32,7 +32,7 @@ propcache==0.3.1 # via # -r requirements/runtime-deps.in # yarl -pycares==4.6.0 +pycares==4.6.1 # via aiodns pycparser==2.22 # via cffi diff --git a/requirements/test.txt b/requirements/test.txt index ab2185d9ee7..10abc497509 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -79,7 +79,7 @@ propcache==0.3.1 # yarl proxy-py==2.4.10 # via -r requirements/test.in -pycares==4.6.0 +pycares==4.6.1 # via aiodns pycparser==2.22 # via cffi From b000a88ddfa9eda5dedaeaad7c127d33d22f24f9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 21 Apr 2025 12:11:24 +0000 Subject: [PATCH 1362/1511] Bump packaging from 24.2 
to 25.0 (#10780) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [packaging](https://github.com/pypa/packaging) from 24.2 to 25.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pypa/packaging/releases">packaging's releases</a>.</em></p> <blockquote> <h2>25.0</h2> <h2>What's Changed</h2> <ul> <li>Re-add a test for Unicode file name parsing by <a href="https://github.com/Siddhesh-Agarwal"><code>@​Siddhesh-Agarwal</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/863">pypa/packaging#863</a></li> <li>Upgrade to ruff 0.9.1 by <a href="https://github.com/DimitriPapadopoulos"><code>@​DimitriPapadopoulos</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/865">pypa/packaging#865</a></li> <li>Add support for PEP 738 Android tags by <a href="https://github.com/mhsmith"><code>@​mhsmith</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/880">pypa/packaging#880</a></li> <li>feat(markers): support 'extras' and 'dependency_groups' markers by <a href="https://github.com/frostming"><code>@​frostming</code></a> in <a href="https://redirect.github.com/pypa/packaging/pull/888">pypa/packaging#888</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/Siddhesh-Agarwal"><code>@​Siddhesh-Agarwal</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/packaging/pull/863">pypa/packaging#863</a></li> <li><a href="https://github.com/mhsmith"><code>@​mhsmith</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/packaging/pull/880">pypa/packaging#880</a></li> <li><a href="https://github.com/frostming"><code>@​frostming</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/packaging/pull/888">pypa/packaging#888</a></li> </ul> <p><strong>Full Changelog</strong>: <a 
href="https://github.com/pypa/packaging/compare/24.2...25.0">https://github.com/pypa/packaging/compare/24.2...25.0</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/packaging/blob/main/CHANGELOG.rst">packaging's changelog</a>.</em></p> <blockquote> <p>25.0 - 2025-04-19</p> <pre><code> * PEP 751: Add support for ``extras`` and ``dependency_groups`` markers. (:issue:`885`) * PEP 738: Add support for Android platform tags. (:issue:`880`) </code></pre> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/packaging/commit/f58537628042c7f29780b9d33f31597e7fc9d664"><code>f585376</code></a> Bump for release</li> <li><a href="https://github.com/pypa/packaging/commit/600ecea15b2388037b8dc94883504ca612947576"><code>600ecea</code></a> Add changelog entries</li> <li><a href="https://github.com/pypa/packaging/commit/3910129009b25dd1aa1fe32e644bc891188c56fe"><code>3910129</code></a> support 'extras' and 'dependency_groups' markers (<a href="https://redirect.github.com/pypa/packaging/issues/888">#888</a>)</li> <li><a href="https://github.com/pypa/packaging/commit/8e49b4373731bffb110c9583e64ad802cb67c7ea"><code>8e49b43</code></a> Add support for PEP 738 Android tags (<a href="https://redirect.github.com/pypa/packaging/issues/880">#880</a>)</li> <li><a href="https://github.com/pypa/packaging/commit/e624d8edfaa28865de7b5a7da8bd59fd410e5331"><code>e624d8e</code></a> Bump the github-actions group with 3 updates (<a href="https://redirect.github.com/pypa/packaging/issues/886">#886</a>)</li> <li><a href="https://github.com/pypa/packaging/commit/71f38d872a6e88b28da9d1b270f8512475bc90d4"><code>71f38d8</code></a> Bump the github-actions group with 2 updates (<a href="https://redirect.github.com/pypa/packaging/issues/878">#878</a>)</li> <li><a href="https://github.com/pypa/packaging/commit/9b4922dd3c26c8522d716bec79d7e0ed408631c1"><code>9b4922d</code></a> Bump the 
github-actions group with 3 updates (<a href="https://redirect.github.com/pypa/packaging/issues/870">#870</a>)</li> <li><a href="https://github.com/pypa/packaging/commit/8510bd9d3bab5571974202ec85f6ef7b0359bfaf"><code>8510bd9</code></a> Upgrade to ruff 0.9.1 (<a href="https://redirect.github.com/pypa/packaging/issues/865">#865</a>)</li> <li><a href="https://github.com/pypa/packaging/commit/9375ec2eff48257967c97d331b9a76019e95bdb0"><code>9375ec2</code></a> Re-add tests for Unicode file name parsing (<a href="https://redirect.github.com/pypa/packaging/issues/863">#863</a>)</li> <li><a href="https://github.com/pypa/packaging/commit/2256ed4ac261309a09daa04cc801abd7cff2e6f0"><code>2256ed4</code></a> Bump the github-actions group across 1 directory with 2 updates (<a href="https://redirect.github.com/pypa/packaging/issues/864">#864</a>)</li> <li>Additional commits viewable in <a href="https://github.com/pypa/packaging/compare/24.2...25.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=packaging&package-manager=pip&previous-version=24.2&new-version=25.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 60e17822367..8539638c3fa 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ 
-30,7 +30,7 @@ multidict==6.4.3 # via # -r requirements/runtime-deps.in # yarl -packaging==24.2 +packaging==25.0 # via gunicorn propcache==0.3.1 # via diff --git a/requirements/constraints.txt b/requirements/constraints.txt index d50203137a2..e3c5315c4b0 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -128,7 +128,7 @@ mypy-extensions==1.0.0 # via mypy nodeenv==1.9.1 # via pre-commit -packaging==24.2 +packaging==25.0 # via # build # gunicorn diff --git a/requirements/dev.txt b/requirements/dev.txt index 9b93f12fd3d..03377ac2cd2 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -125,7 +125,7 @@ mypy-extensions==1.0.0 # via mypy nodeenv==1.9.1 # via pre-commit -packaging==24.2 +packaging==25.0 # via # build # gunicorn diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index fe5d7e5708d..85f4e321835 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -30,7 +30,7 @@ jinja2==3.1.6 # towncrier markupsafe==3.0.2 # via jinja2 -packaging==24.2 +packaging==25.0 # via sphinx pyenchant==3.2.2 # via sphinxcontrib-spelling diff --git a/requirements/doc.txt b/requirements/doc.txt index 086c945725e..4a559724883 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -30,7 +30,7 @@ jinja2==3.1.6 # towncrier markupsafe==3.0.2 # via jinja2 -packaging==24.2 +packaging==25.0 # via sphinx pygments==2.19.1 # via sphinx diff --git a/requirements/lint.txt b/requirements/lint.txt index b01ef0c2978..bee2eb3b2d0 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -51,7 +51,7 @@ mypy-extensions==1.0.0 # via mypy nodeenv==1.9.1 # via pre-commit -packaging==24.2 +packaging==25.0 # via pytest platformdirs==4.3.7 # via virtualenv diff --git a/requirements/test.txt b/requirements/test.txt index 10abc497509..45afe22f063 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -67,7 +67,7 @@ mypy==1.15.0 ; implementation_name == "cpython" # via -r requirements/test.in 
mypy-extensions==1.0.0 # via mypy -packaging==24.2 +packaging==25.0 # via # gunicorn # pytest From fcfb0d8239ad953da25327fef2ba8411ca6d7afc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 21 Apr 2025 12:14:52 +0000 Subject: [PATCH 1363/1511] Bump identify from 2.6.9 to 2.6.10 (#10781) Bumps [identify](https://github.com/pre-commit/identify) from 2.6.9 to 2.6.10. <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pre-commit/identify/commit/e200468dd1b921e75a300545bd1e28abe1d5985c"><code>e200468</code></a> v2.6.10</li> <li><a href="https://github.com/pre-commit/identify/commit/41f40e2b7e0e9e0a25b0d74dbff14086c816147a"><code>41f40e2</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/identify/issues/517">#517</a> from sebastiaanspeck/patch-1</li> <li><a href="https://github.com/pre-commit/identify/commit/2ae839d33d6094bea9a26b1deea975a135d13bf3"><code>2ae839d</code></a> Add support for Magik</li> <li><a href="https://github.com/pre-commit/identify/commit/dc20df20bda102dc74ca8531465bfcd20a7f26bf"><code>dc20df2</code></a> Merge pull request <a href="https://redirect.github.com/pre-commit/identify/issues/516">#516</a> from pre-commit/pre-commit-ci-update-config</li> <li><a href="https://github.com/pre-commit/identify/commit/cba874fadf0f8c032853fff8762ac4caeb551056"><code>cba874f</code></a> [pre-commit.ci] auto fixes from pre-commit.com hooks</li> <li><a href="https://github.com/pre-commit/identify/commit/e839dfb32782ec1248840f7eeb47dc3495a99e72"><code>e839dfb</code></a> [pre-commit.ci] pre-commit autoupdate</li> <li>See full diff in <a href="https://github.com/pre-commit/identify/compare/v2.6.9...v2.6.10">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=identify&package-manager=pip&previous-version=2.6.9&new-version=2.6.10)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index e3c5315c4b0..bdb5e135127 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -88,7 +88,7 @@ gidgethub==5.3.0 # via cherry-picker gunicorn==23.0.0 # via -r requirements/base.in -identify==2.6.9 +identify==2.6.10 # via pre-commit idna==3.3 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 03377ac2cd2..d8608ed720d 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -86,7 +86,7 @@ gidgethub==5.3.0 # via cherry-picker gunicorn==23.0.0 # via -r requirements/base.in -identify==2.6.9 +identify==2.6.10 # via pre-commit idna==3.4 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index bee2eb3b2d0..e68392bef1e 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -33,7 +33,7 @@ forbiddenfruit==0.1.4 # via blockbuster freezegun==1.5.1 # via -r requirements/lint.in -identify==2.6.9 +identify==2.6.10 # via pre-commit idna==3.7 # via trustme From 
397bb0ac26efcf7644fad1eefef3bc848e18b9e7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 21 Apr 2025 18:56:30 +0000 Subject: [PATCH 1364/1511] Bump setuptools from 78.1.0 to 79.0.0 (#10782) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [setuptools](https://github.com/pypa/setuptools) from 78.1.0 to 79.0.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/setuptools/blob/main/NEWS.rst">setuptools's changelog</a>.</em></p> <blockquote> <h1>v79.0.0</h1> <h2>Deprecations and Removals</h2> <ul> <li>Removed support for 'legacy-editable' installs. (<a href="https://redirect.github.com/pypa/setuptools/issues/917">#917</a>)</li> </ul> <h1>v78.1.1</h1> <h2>Bugfixes</h2> <ul> <li>More fully sanitized the filename in PackageIndex._download. (<a href="https://redirect.github.com/pypa/setuptools/issues/4946">#4946</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/setuptools/commit/56962ec38bb53e1681de00dc5dc5b2e96b1b02b8"><code>56962ec</code></a> Bump version: 78.1.1 → 79.0.0</li> <li><a href="https://github.com/pypa/setuptools/commit/b137521018b68c9900b25af5c6ab44491bbf843c"><code>b137521</code></a> Merge pull request <a href="https://redirect.github.com/pypa/setuptools/issues/4953">#4953</a> from pypa/debt/917/remove-legacy-editable</li> <li><a href="https://github.com/pypa/setuptools/commit/f89e652a79ecd4afbb71eabaf04a6709e11a4d5a"><code>f89e652</code></a> Removed support for the 'legacy-editable' feature.</li> <li><a href="https://github.com/pypa/setuptools/commit/8e4868a036b7fae3208d16cb4e5fe6d63c3752df"><code>8e4868a</code></a> Bump version: 78.1.0 → 78.1.1</li> <li><a href="https://github.com/pypa/setuptools/commit/100e9a61ad24d5a147ada57357425a8d40626d09"><code>100e9a6</code></a> Merge pull request <a 
href="https://redirect.github.com/pypa/setuptools/issues/4951">#4951</a></li> <li><a href="https://github.com/pypa/setuptools/commit/8faf1d7e0ca309983252e4f21837b73ee12e960f"><code>8faf1d7</code></a> Add news fragment.</li> <li><a href="https://github.com/pypa/setuptools/commit/2ca4a9fe4758fcd39d771d3d3a5b4840aacebdf7"><code>2ca4a9f</code></a> Rely on re.sub to perform the decision in one expression.</li> <li><a href="https://github.com/pypa/setuptools/commit/e409e8002932f2b86aae7b1abc8f8c2ebf96df2c"><code>e409e80</code></a> Extract _sanitize method for sanitizing the filename.</li> <li><a href="https://github.com/pypa/setuptools/commit/250a6d17978f9f6ac3ac887091f2d32886fbbb0b"><code>250a6d1</code></a> Add a check to ensure the name resolves relative to the tmpdir.</li> <li><a href="https://github.com/pypa/setuptools/commit/d8390feaa99091d1ba9626bec0e4ba7072fc507a"><code>d8390fe</code></a> Extract _resolve_download_filename with test.</li> <li>Additional commits viewable in <a href="https://github.com/pypa/setuptools/compare/v78.1.0...v79.0.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=setuptools&package-manager=pip&previous-version=78.1.0&new-version=79.0.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index bdb5e135127..a964de9972d 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -299,7 +299,7 @@ zlib-ng==0.5.1 # The following packages 
are considered to be unsafe in a requirements file: pip==25.0.1 # via pip-tools -setuptools==78.1.0 +setuptools==79.0.0 # via # incremental # pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index d8608ed720d..fbc496f6279 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -290,7 +290,7 @@ zlib-ng==0.5.1 # The following packages are considered to be unsafe in a requirements file: pip==25.0.1 # via pip-tools -setuptools==78.1.0 +setuptools==79.0.0 # via # incremental # pip-tools diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 85f4e321835..a005c791a44 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -76,5 +76,5 @@ urllib3==2.4.0 # via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==78.1.0 +setuptools==79.0.0 # via incremental diff --git a/requirements/doc.txt b/requirements/doc.txt index 4a559724883..28c10d44a0c 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -69,5 +69,5 @@ urllib3==2.4.0 # via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==78.1.0 +setuptools==79.0.0 # via incremental From 60f15a489f84a666990a2cdf984271f30a72bbec Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 21 Apr 2025 19:05:04 +0000 Subject: [PATCH 1365/1511] [PR #10774/b0404741 backport][3.12] Rewrite changelog message for #9705 to be in the past tense (#10786) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/10761.contrib.rst | 1 + CHANGES/9705.contrib.rst | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 120000 CHANGES/10761.contrib.rst diff --git a/CHANGES/10761.contrib.rst b/CHANGES/10761.contrib.rst new file mode 120000 index 00000000000..3d35184e09d --- /dev/null +++ b/CHANGES/10761.contrib.rst @@ -0,0 +1 @@ +9705.contrib.rst \ No newline at end of file diff --git a/CHANGES/9705.contrib.rst b/CHANGES/9705.contrib.rst index 771fb442629..5d23e964fa1 100644 --- a/CHANGES/9705.contrib.rst +++ b/CHANGES/9705.contrib.rst @@ -1 +1 @@ -Speed up tests by disabling ``blockbuster`` fixture for ``test_static_file_huge`` and ``test_static_file_huge_cancel`` tests -- by :user:`dikos1337`. +Sped up tests by disabling ``blockbuster`` fixture for ``test_static_file_huge`` and ``test_static_file_huge_cancel`` tests -- by :user:`dikos1337`. From f35bc390b064a96e5a5383dc851b7b67140621e0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 24 Apr 2025 11:10:04 +0000 Subject: [PATCH 1366/1511] Bump setuptools from 79.0.0 to 79.0.1 (#10793) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [setuptools](https://github.com/pypa/setuptools) from 79.0.0 to 79.0.1. 
<details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/setuptools/blob/main/NEWS.rst">setuptools's changelog</a>.</em></p> <blockquote> <h1>v79.0.1</h1> <h2>Bugfixes</h2> <ul> <li>Merge with pypa/distutils@24bd3179b including fix for <a href="https://redirect.github.com/pypa/distutils/issues/355">pypa/distutils#355</a>.</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/setuptools/commit/607f6be776db97d83c9cc54e0eaa578567dcc44c"><code>607f6be</code></a> Bump version: 79.0.0 → 79.0.1</li> <li><a href="https://github.com/pypa/setuptools/commit/07d05da506395d834712c23333769af0c7f48b05"><code>07d05da</code></a> Merge pull request <a href="https://redirect.github.com/pypa/setuptools/issues/4959">#4959</a> from pypa/bugfix/distutils-356</li> <li><a href="https://github.com/pypa/setuptools/commit/3bbd90c2b3a12edceb73423f21dc1f1dc3c6877b"><code>3bbd90c</code></a> Add news fragment.</li> <li><a href="https://github.com/pypa/setuptools/commit/77f92141403a843fbf9df0ed0070ee83622810ed"><code>77f9214</code></a> Merge <a href="https://github.com/pypa/distutils">https://github.com/pypa/distutils</a></li> <li><a href="https://github.com/pypa/setuptools/commit/98a5169cefd78df7f60135c173259b78f8cbf49a"><code>98a5169</code></a> Merge pull request <a href="https://redirect.github.com/pypa/distutils/issues/356">pypa/distutils#356</a> from vfazio/vfazio-cxx-linking-arg-parse</li> <li><a href="https://github.com/pypa/setuptools/commit/55f9116bc92aa6f01de97dd1939728f51df6aa2a"><code>55f9116</code></a> Fix new test case</li> <li><a href="https://github.com/pypa/setuptools/commit/c0d6d7158fcf68b4f748d751b3175e6b79c6ae5c"><code>c0d6d71</code></a> Add test for argument parsing for CXX targets on UNIX</li> <li><a href="https://github.com/pypa/setuptools/commit/b8c06fffe4b118f3c549fc23d196c2f3d41aa17c"><code>b8c06ff</code></a> Respect CXX when parsing linker parameters for UNIX c++ targets</li> 
<li><a href="https://github.com/pypa/setuptools/commit/24bd3179ba784afc00aae52101df064b826be0c0"><code>24bd317</code></a> Merge commit '2f093b'</li> <li><a href="https://github.com/pypa/setuptools/commit/2f093b54305e508eb4239e8c9fd94d4b02da9620"><code>2f093b5</code></a> Remove latent comment.</li> <li>See full diff in <a href="https://github.com/pypa/setuptools/compare/v79.0.0...v79.0.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=setuptools&package-manager=pip&previous-version=79.0.0&new-version=79.0.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index a964de9972d..2affe3dd267 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -299,7 +299,7 @@ zlib-ng==0.5.1 # The following packages are considered to be unsafe in a requirements file: pip==25.0.1 # via pip-tools -setuptools==79.0.0 +setuptools==79.0.1 # via # incremental # pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index fbc496f6279..ee56630ba79 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -290,7 +290,7 @@ zlib-ng==0.5.1 # The following packages are considered to be unsafe in a requirements file: pip==25.0.1 # via pip-tools -setuptools==79.0.0 +setuptools==79.0.1 # via # incremental # pip-tools diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index a005c791a44..2832f919ec6 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -76,5 +76,5 @@ urllib3==2.4.0 
# via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==79.0.0 +setuptools==79.0.1 # via incremental diff --git a/requirements/doc.txt b/requirements/doc.txt index 28c10d44a0c..e71d185f8dd 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -69,5 +69,5 @@ urllib3==2.4.0 # via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==79.0.0 +setuptools==79.0.1 # via incremental From ad5d1393e3680a1a6a38e1af88c1fbc57726d152 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 28 Apr 2025 12:16:21 +0000 Subject: [PATCH 1367/1511] Bump pip from 25.0.1 to 25.1 (#10804) Bumps [pip](https://github.com/pypa/pip) from 25.0.1 to 25.1. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/pip/blob/main/NEWS.rst">pip's changelog</a>.</em></p> <blockquote> <h1>25.1 (2025-04-26)</h1> <h2>Deprecations and Removals</h2> <ul> <li>Drop support for Python 3.8. (<code>[#12989](https://github.com/pypa/pip/issues/12989) <https://github.com/pypa/pip/issues/12989></code>_)</li> <li>On python 3.14+, the <code>pkg_resources</code> metadata backend cannot be used anymore. (<code>[#13010](https://github.com/pypa/pip/issues/13010) <https://github.com/pypa/pip/issues/13010></code>_)</li> <li>Hide <code>--no-python-version-warning</code> from CLI help and documentation as it's useless since Python 2 support was removed. Despite being formerly slated for removal, the flag will remain as a no-op to avoid breakage. (<code>[#13303](https://github.com/pypa/pip/issues/13303) <https://github.com/pypa/pip/issues/13303></code>_)</li> <li>A warning is emitted when the deprecated <code>pkg_resources</code> library is used to inspect and discover installed packages. 
This warning should only be visible to users who set an undocumented environment variable to disable the default <code>importlib.metadata</code> backend. (<code>[#13318](https://github.com/pypa/pip/issues/13318) <https://github.com/pypa/pip/issues/13318></code>_)</li> <li>Deprecate the legacy <code>setup.py bdist_wheel</code> mechanism. To silence the warning, and future-proof their setup, users should enable <code>--use-pep517</code> or add a <code>pyproject.toml</code> file to the projects they control. (<code>[#13319](https://github.com/pypa/pip/issues/13319) <https://github.com/pypa/pip/issues/13319></code>_)</li> </ul> <h2>Features</h2> <ul> <li> <p>Suggest checking "pip config debug" in case of an InvalidProxyURL error. (<code>[#12649](https://github.com/pypa/pip/issues/12649) <https://github.com/pypa/pip/issues/12649></code>_)</p> </li> <li> <p>Using <code>--debug</code> also enables verbose logging. (<code>[#12710](https://github.com/pypa/pip/issues/12710) <https://github.com/pypa/pip/issues/12710></code>_)</p> </li> <li> <p>Display a transient progress bar during package installation. (<code>[#12712](https://github.com/pypa/pip/issues/12712) <https://github.com/pypa/pip/issues/12712></code>_)</p> </li> <li> <p>Minor performance improvement when installing packages with a large number of dependencies by increasing the requirement string cache size. (<code>[#12873](https://github.com/pypa/pip/issues/12873) <https://github.com/pypa/pip/issues/12873></code>_)</p> </li> <li> <p>Add a <code>--group</code> option which allows installation from :pep:<code>735</code> Dependency Groups. <code>--group</code> accepts arguments of the form <code>group</code> or <code>path:group</code>, where the default path is <code>pyproject.toml</code>, and installs the named Dependency Group from the provided <code>pyproject.toml</code> file. 
(<code>[#12963](https://github.com/pypa/pip/issues/12963) <https://github.com/pypa/pip/issues/12963></code>_)</p> </li> <li> <p>Add support to enable resuming incomplete downloads.</p> <p>Control the number of retry attempts using the <code>--resume-retries</code> flag. (<code>[#12991](https://github.com/pypa/pip/issues/12991) <https://github.com/pypa/pip/issues/12991></code>_)</p> </li> <li> <p>Use :pep:<code>753</code> "Well-known Project URLs in Metadata" normalization rules when identifying an equivalent project URL to replace a missing <code>Home-Page</code> field in <code>pip show</code>. (<code>[#13135](https://github.com/pypa/pip/issues/13135) <https://github.com/pypa/pip/issues/13135></code>_)</p> </li> <li> <p>Remove <code>experimental</code> warning from <code>pip index versions</code> command. (<code>[#13188](https://github.com/pypa/pip/issues/13188) <https://github.com/pypa/pip/issues/13188></code>_)</p> </li> <li> <p>Add a structured <code>--json</code> output to <code>pip index versions</code> (<code>[#13194](https://github.com/pypa/pip/issues/13194) <https://github.com/pypa/pip/issues/13194></code>_)</p> </li> <li> <p>Add a new, <em>experimental</em>, <code>pip lock</code> command, implementing :pep:<code>751</code>. (<code>[#13213](https://github.com/pypa/pip/issues/13213) <https://github.com/pypa/pip/issues/13213></code>_)</p> </li> <li> <p>Speed up resolution by first only considering the preference of candidates that must be required to complete the resolution. (<code>[#13253](https://github.com/pypa/pip/issues/13253) <https://github.com/pypa/pip/issues/13253></code>_)</p> </li> <li> <p>Improved heuristics for determining the order of dependency resolution. (<code>[#13273](https://github.com/pypa/pip/issues/13273) <https://github.com/pypa/pip/issues/13273></code>_)</p> </li> <li> <p>Provide hint, documentation, and link to the documentation when resolution too deep error occurs. 
(<code>[#13282](https://github.com/pypa/pip/issues/13282) <https://github.com/pypa/pip/issues/13282></code>_)</p> </li> <li> <p>Include traceback on failure to import <code>setuptools</code> when <code>setup.py</code> is being invoked directly. (<code>[#13290](https://github.com/pypa/pip/issues/13290) <https://github.com/pypa/pip/issues/13290></code>_)</p> </li> <li> <p>Support for :pep:<code>738</code> Android wheels. (<code>[#13299](https://github.com/pypa/pip/issues/13299) <https://github.com/pypa/pip/issues/13299></code>_)</p> </li> <li> <p>Display wheel build tag in <code>pip list</code> columns output if set. (<code>[#5210](https://github.com/pypa/pip/issues/5210) <https://github.com/pypa/pip/issues/5210></code>_)</p> </li> <li> <p>Build environment dependencies are no longer compiled to bytecode during</p> </li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/pip/commit/daa7e5448312392c621bbaec4204d961c363e5f7"><code>daa7e54</code></a> Bump for release</li> <li><a href="https://github.com/pypa/pip/commit/06c3182a6ebb79ae95aa6ca42e43a9f750c9df57"><code>06c3182</code></a> Update AUTHORS.txt</li> <li><a href="https://github.com/pypa/pip/commit/b88324fe98b510fbc6ddd8951f006b1c8f0e7a3c"><code>b88324f</code></a> Add a news file for the pip lock command</li> <li><a href="https://github.com/pypa/pip/commit/38253a6002c23706153a0cb741b8caca290c9165"><code>38253a6</code></a> Merge pull request <a href="https://redirect.github.com/pypa/pip/issues/13319">#13319</a> from sbidoul</li> <li><a href="https://github.com/pypa/pip/commit/2791a8b35a4e9e4ebacf18cc08be81f53998701d"><code>2791a8b</code></a> Merge pull request <a href="https://redirect.github.com/pypa/pip/issues/13344">#13344</a> from pypa/dependabot/pip/build-project/setuptools-7...</li> <li><a href="https://github.com/pypa/pip/commit/24f4600851bbb3d7f22aed0ba6b1e2dcc4973412"><code>24f4600</code></a> Remove 
LRU cache from methods [ruff rule cached-instance-method] (<a href="https://redirect.github.com/pypa/pip/issues/13306">#13306</a>)</li> <li><a href="https://github.com/pypa/pip/commit/d852ebd2868abc526189fc7172babca9b1d2b395"><code>d852ebd</code></a> Merge pull request <a href="https://redirect.github.com/pypa/pip/issues/12308">#12308</a></li> <li><a href="https://github.com/pypa/pip/commit/d35c08df09cebe2f4887b0a31bb1127e730d8ead"><code>d35c08d</code></a> Clarify what the removal of the pkg_ressources backend implies</li> <li><a href="https://github.com/pypa/pip/commit/e8794224f513a2b964d5f969026f283dc9a23003"><code>e879422</code></a> Rename find_linked to find_legacy_editables</li> <li><a href="https://github.com/pypa/pip/commit/4a765606f9c1d39059e429cd5394c246045fb34a"><code>4a76560</code></a> Fix uninstallation of zipped eggs</li> <li>Additional commits viewable in <a href="https://github.com/pypa/pip/compare/25.0.1...25.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pip&package-manager=pip&previous-version=25.0.1&new-version=25.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 2affe3dd267..239323965e2 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -297,7 +297,7 @@ zlib-ng==0.5.1 # -r requirements/test.in # The following packages are considered to be unsafe in a 
requirements file: -pip==25.0.1 +pip==25.1 # via pip-tools setuptools==79.0.1 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index ee56630ba79..5c3f99c07e2 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -288,7 +288,7 @@ zlib-ng==0.5.1 # -r requirements/test.in # The following packages are considered to be unsafe in a requirements file: -pip==25.0.1 +pip==25.1 # via pip-tools setuptools==79.0.1 # via From 8207b94a96d21abe30c4a1dabed03d7fab49dbea Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 28 Apr 2025 12:29:35 +0000 Subject: [PATCH 1368/1511] Bump setuptools from 79.0.1 to 80.0.0 (#10802) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [setuptools](https://github.com/pypa/setuptools) from 79.0.1 to 80.0.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/setuptools/blob/main/NEWS.rst">setuptools's changelog</a>.</em></p> <blockquote> <h1>v80.0.0</h1> <h2>Bugfixes</h2> <ul> <li>Update test to honor new behavior in importlib_metadata 8.7. (<a href="https://redirect.github.com/pypa/setuptools/issues/4961">#4961</a>)</li> </ul> <h2>Deprecations and Removals</h2> <ul> <li>Removed support for the easy_install command including the sandbox module. (<a href="https://redirect.github.com/pypa/setuptools/issues/2908">#2908</a>)</li> <li>Develop command no longer uses easy_install, but instead defers execution to pip (which then will re-invoke Setuptools via PEP 517 to build the editable wheel). Most of the options to develop are dropped. This is the final warning before the command is dropped completely in a few months. Use-cases relying on 'setup.py develop' should pin to older Setuptools version or migrate to modern build tooling. 
(<a href="https://redirect.github.com/pypa/setuptools/issues/4955">#4955</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/setuptools/commit/aeea79266d82f99dbe556126b90b64215a663a2c"><code>aeea792</code></a> Bump version: 79.0.1 → 80.0.0</li> <li><a href="https://github.com/pypa/setuptools/commit/2c874e78f4240963f74debcaadcccb97cb302ded"><code>2c874e7</code></a> Merge pull request <a href="https://redirect.github.com/pypa/setuptools/issues/4962">#4962</a> from pypa/bugfix/4961-validated-eps</li> <li><a href="https://github.com/pypa/setuptools/commit/82c588aedd8142e7615031358e2d2640213a351d"><code>82c588a</code></a> Update test to honor new behavior in importlib_metadata 8.7</li> <li><a href="https://github.com/pypa/setuptools/commit/ef4cd2960d75f2d49f40f5495347523be62d20e5"><code>ef4cd29</code></a> Merge pull request <a href="https://redirect.github.com/pypa/setuptools/issues/2908">#2908</a> from pypa/debt/remove-easy-install</li> <li><a href="https://github.com/pypa/setuptools/commit/85bbad4945d874a2444e4531c74c5074cdeca010"><code>85bbad4</code></a> Merge branch 'main' into debt/remove-easy-install</li> <li><a href="https://github.com/pypa/setuptools/commit/9653305c35a143b8d1bad2c190f918887dd1e6d5"><code>9653305</code></a> Merge pull request <a href="https://redirect.github.com/pypa/setuptools/issues/4955">#4955</a> from pypa/debt/develop-uses-pip</li> <li><a href="https://github.com/pypa/setuptools/commit/da119e7e996b00b6e26f79995bec55684a3fabbe"><code>da119e7</code></a> Set a due date 6 months in advance.</li> <li><a href="https://github.com/pypa/setuptools/commit/a7603da5d3c709f6f01c8df8031ba7a7ae7959a0"><code>a7603da</code></a> Rename news fragment to reference the pull request for better precise locality.</li> <li><a href="https://github.com/pypa/setuptools/commit/018a20cb130e9357f39c176b59c83738a09d7daa"><code>018a20c</code></a> Restore a few of the options to develop.</li> <li><a 
href="https://github.com/pypa/setuptools/commit/a5f02fe88d46e963bc470a60a9f8613d7f889d49"><code>a5f02fe</code></a> Remove another test relying on setup.py develop.</li> <li>Additional commits viewable in <a href="https://github.com/pypa/setuptools/compare/v79.0.1...v80.0.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=setuptools&package-manager=pip&previous-version=79.0.1&new-version=80.0.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 239323965e2..c64b82e2d96 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -299,7 +299,7 @@ zlib-ng==0.5.1 # The following packages are considered to be unsafe in a requirements file: pip==25.1 # via pip-tools -setuptools==79.0.1 +setuptools==80.0.0 # via # incremental # pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index 5c3f99c07e2..f4bc6e75475 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -290,7 +290,7 @@ zlib-ng==0.5.1 # The following packages are considered to be unsafe in a requirements file: pip==25.1 # via pip-tools -setuptools==79.0.1 +setuptools==80.0.0 # via # incremental # pip-tools diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 2832f919ec6..af5cdaadead 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -76,5 +76,5 @@ urllib3==2.4.0 # 
via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==79.0.1 +setuptools==80.0.0 # via incremental diff --git a/requirements/doc.txt b/requirements/doc.txt index e71d185f8dd..0dea484b6be 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -69,5 +69,5 @@ urllib3==2.4.0 # via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==79.0.1 +setuptools==80.0.0 # via incremental From 007a251ee148f801881a9b943fc13fffc3bfb3c5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 28 Apr 2025 12:39:35 +0000 Subject: [PATCH 1369/1511] Bump pypa/cibuildwheel from 2.23.2 to 2.23.3 (#10806) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.23.2 to 2.23.3. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pypa/cibuildwheel/releases">pypa/cibuildwheel's releases</a>.</em></p> <blockquote> <h2>v2.23.3</h2> <ul> <li>🛠 Dependency updates, including Python 3.13.3 (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2371">#2371</a>)</li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md">pypa/cibuildwheel's changelog</a>.</em></p> <blockquote> <h3>v2.23.3</h3> <p><em>26 April 2025</em></p> <ul> <li>🛠 Dependency updates, including Python 3.13.3 (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2371">#2371</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/cibuildwheel/commit/faf86a6ed7efa889faf6996aa23820831055001a"><code>faf86a6</code></a> Bump version: v2.23.3</li> <li><a 
href="https://github.com/pypa/cibuildwheel/commit/4241f37b2c5be7f7ed96214b83f8cfbe1496cc28"><code>4241f37</code></a> [2.x] Update dependencies (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/2371">#2371</a>)</li> <li>See full diff in <a href="https://github.com/pypa/cibuildwheel/compare/v2.23.2...v2.23.3">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pypa/cibuildwheel&package-manager=github_actions&previous-version=2.23.2&new-version=2.23.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index ec85713319b..564aa1fea14 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -414,7 +414,7 @@ jobs: run: | make cythonize - name: Build wheels - uses: pypa/cibuildwheel@v2.23.2 + uses: pypa/cibuildwheel@v2.23.3 env: CIBW_SKIP: pp* ${{ matrix.musl == 'musllinux' && '*manylinux*' || '*musllinux*' }} CIBW_ARCHS_MACOS: x86_64 arm64 universal2 From bc64b83670796bbe4df27eb482916587a23ba787 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 30 Apr 2025 11:22:02 +0000 Subject: [PATCH 1370/1511] Bump pydantic from 2.11.3 to 2.11.4 (#10809) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pydantic](https://github.com/pydantic/pydantic) from 2.11.3 to 2.11.4. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pydantic/pydantic/releases">pydantic's releases</a>.</em></p> <blockquote> <h2>v2.11.4 2025-04-29</h2> <h3>What's Changed</h3> <h4>Packaging</h4> <ul> <li>Bump <code>mkdocs-llmstxt</code> to v0.2.0 by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11725">#11725</a></li> </ul> <h4>Changes</h4> <ul> <li>Allow config and bases to be specified together in <code>create_model()</code> by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11714">#11714</a>. This change was backported as it was previously possible (although not meant to be supported) to provide <code>model_config</code> as a field, which would make it possible to provide both configuration and bases.</li> </ul> <h4>Fixes</h4> <ul> <li>Remove generics cache workaround by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11755">#11755</a></li> <li>Remove coercion of decimal constraints by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11772">#11772</a></li> <li>Fix crash when expanding root type in the mypy plugin by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11735">#11735</a></li> <li>Fix issue with recursive generic models by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11775">#11775</a></li> <li>Traverse <code>function-before</code> schemas during schema gathering by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11801">#11801</a></li> </ul> </blockquote> </details> <details> 
<summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pydantic/pydantic/blob/main/HISTORY.md">pydantic's changelog</a>.</em></p> <blockquote> <h2>v2.11.4 (2025-04-29)</h2> <p><a href="https://github.com/pydantic/pydantic/releases/tag/v2.11.4">GitHub release</a></p> <h3>What's Changed</h3> <h4>Packaging</h4> <ul> <li>Bump <code>mkdocs-llmstxt</code> to v0.2.0 by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11725">#11725</a></li> </ul> <h4>Changes</h4> <ul> <li>Allow config and bases to be specified together in <code>create_model()</code> by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11714">#11714</a>. This change was backported as it was previously possible (although not meant to be supported) to provide <code>model_config</code> as a field, which would make it possible to provide both configuration and bases.</li> </ul> <h4>Fixes</h4> <ul> <li>Remove generics cache workaround by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11755">#11755</a></li> <li>Remove coercion of decimal constraints by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11772">#11772</a></li> <li>Fix crash when expanding root type in the mypy plugin by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11735">#11735</a></li> <li>Fix issue with recursive generic models by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11775">#11775</a></li> <li>Traverse <code>function-before</code> schemas during schema gathering by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a 
href="https://redirect.github.com/pydantic/pydantic/pull/11801">#11801</a></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pydantic/pydantic/commit/d444cd1cf6c5af54b23a335aff2ea45eaac2c2f6"><code>d444cd1</code></a> Prepare release v2.11.4</li> <li><a href="https://github.com/pydantic/pydantic/commit/828fc48d55a73c43a500a1d572dbc04ded67438f"><code>828fc48</code></a> Add documentation note about common pitfall with the annotated pattern</li> <li><a href="https://github.com/pydantic/pydantic/commit/42bf1fd784a3c8666ff7ed68f8d4fa2d395c6492"><code>42bf1fd</code></a> Bump <code>pydantic-core</code> to v2.33.2 (<a href="https://redirect.github.com/pydantic/pydantic/issues/11804">#11804</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/7b3f5132159af75e0a13cf66a75880e007c81cbc"><code>7b3f513</code></a> Allow config and bases to be specified together in <code>create_model()</code></li> <li><a href="https://github.com/pydantic/pydantic/commit/fc521388f212d3f7cf20f36c3714a3b2abc4d723"><code>fc52138</code></a> Traverse <code>function-before</code> schemas during schema gathering</li> <li><a href="https://github.com/pydantic/pydantic/commit/25af78934ab5c58380c9b52370c15825a97b57e7"><code>25af789</code></a> Fix issue with recursive generic models</li> <li><a href="https://github.com/pydantic/pydantic/commit/91ef6bb39e596a275d46d73485dd65bb00b7ca09"><code>91ef6bb</code></a> Update monthly download count in documentation</li> <li><a href="https://github.com/pydantic/pydantic/commit/a830775328d11f5adc9d6c5c943d1c1c75f1adaf"><code>a830775</code></a> Bump <code>mkdocs-llmstxt</code> to v0.2.0</li> <li><a href="https://github.com/pydantic/pydantic/commit/f5d1c871286da0fdffa2fd488ff1a67d8b584d3c"><code>f5d1c87</code></a> Fix crash when expanding root type in the mypy plugin</li> <li><a href="https://github.com/pydantic/pydantic/commit/c80bb355d73e563fd4bc53e3cfe261ec3ac01d72"><code>c80bb35</code></a> Remove 
coercion of decimal constraints</li> <li>Additional commits viewable in <a href="https://github.com/pydantic/pydantic/compare/v2.11.3...v2.11.4">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pydantic&package-manager=pip&previous-version=2.11.3&new-version=2.11.4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 4 ++-- requirements/dev.txt | 4 ++-- requirements/lint.txt | 4 ++-- requirements/test.txt | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index c64b82e2d96..1842a494464 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -152,9 +152,9 @@ pycares==4.6.1 # via aiodns pycparser==2.22 # via cffi -pydantic==2.11.3 +pydantic==2.11.4 # via python-on-whales -pydantic-core==2.33.1 +pydantic-core==2.33.2 # via pydantic pyenchant==3.2.2 # via sphinxcontrib-spelling diff --git a/requirements/dev.txt b/requirements/dev.txt index f4bc6e75475..fed94b86d7f 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -149,9 +149,9 @@ pycares==4.6.1 # via aiodns pycparser==2.22 # via cffi -pydantic==2.11.3 +pydantic==2.11.4 # via python-on-whales -pydantic-core==2.33.1 +pydantic-core==2.33.2 # via pydantic pygments==2.19.1 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index e68392bef1e..1642cd701dc 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -63,9 +63,9 @@ 
pycares==4.6.1 # via aiodns pycparser==2.22 # via cffi -pydantic==2.11.3 +pydantic==2.11.4 # via python-on-whales -pydantic-core==2.33.1 +pydantic-core==2.33.2 # via pydantic pygments==2.19.1 # via rich diff --git a/requirements/test.txt b/requirements/test.txt index 45afe22f063..f8519c407e0 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -83,9 +83,9 @@ pycares==4.6.1 # via aiodns pycparser==2.22 # via cffi -pydantic==2.11.3 +pydantic==2.11.4 # via python-on-whales -pydantic-core==2.33.1 +pydantic-core==2.33.2 # via pydantic pygments==2.19.1 # via rich From 299fe00a2b7b1f5def7daa98260263a61bc9772d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 30 Apr 2025 11:26:50 +0000 Subject: [PATCH 1371/1511] Bump setuptools from 80.0.0 to 80.0.1 (#10810) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [setuptools](https://github.com/pypa/setuptools) from 80.0.0 to 80.0.1. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/setuptools/blob/main/NEWS.rst">setuptools's changelog</a>.</em></p> <blockquote> <h1>v80.0.1</h1> <h2>Bugfixes</h2> <ul> <li>Fixed index_url logic in develop compatibility shim. 
(<a href="https://redirect.github.com/pypa/setuptools/issues/4966">#4966</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/setuptools/commit/76b041dadc1d8fcbe78e7037ed2c6cba24325600"><code>76b041d</code></a> Fixup</li> <li><a href="https://github.com/pypa/setuptools/commit/6c748caaaf128b7a64a5e4629787f06780b6d68d"><code>6c748ca</code></a> Bump version: 80.0.0 → 80.0.1</li> <li><a href="https://github.com/pypa/setuptools/commit/92ff129754ece790b1129e756bbf2baacff65e1b"><code>92ff129</code></a> Fixed index_url logic in develop compatibility shim.</li> <li><a href="https://github.com/pypa/setuptools/commit/9f091ff603ccd192d0f717df27dfdcb891d6f6a3"><code>9f091ff</code></a> Fix <code>master</code>-><code>main</code> in PR template (<a href="https://redirect.github.com/pypa/setuptools/issues/4967">#4967</a>)</li> <li><a href="https://github.com/pypa/setuptools/commit/7245a99c2b42eff78c3eab406d27b7d259075d48"><code>7245a99</code></a> Tiny nit fix: <code>master</code>-><code>main</code> in PR template</li> <li>See full diff in <a href="https://github.com/pypa/setuptools/compare/v80.0.0...v80.0.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=setuptools&package-manager=pip&previous-version=80.0.0&new-version=80.0.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 1842a494464..8f18ed2443d 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -299,7 +299,7 @@ zlib-ng==0.5.1 # The following packages 
are considered to be unsafe in a requirements file: pip==25.1 # via pip-tools -setuptools==80.0.0 +setuptools==80.0.1 # via # incremental # pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index fed94b86d7f..a918a3bf403 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -290,7 +290,7 @@ zlib-ng==0.5.1 # The following packages are considered to be unsafe in a requirements file: pip==25.1 # via pip-tools -setuptools==80.0.0 +setuptools==80.0.1 # via # incremental # pip-tools diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index af5cdaadead..a302570ab48 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -76,5 +76,5 @@ urllib3==2.4.0 # via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==80.0.0 +setuptools==80.0.1 # via incremental diff --git a/requirements/doc.txt b/requirements/doc.txt index 0dea484b6be..2eea796696c 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -69,5 +69,5 @@ urllib3==2.4.0 # via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==80.0.0 +setuptools==80.0.1 # via incremental From fc614bd2cd764a1e3491a2925362d7a0e6b5f7cc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 1 May 2025 10:35:41 +0000 Subject: [PATCH 1372/1511] Bump setuptools from 80.0.1 to 80.1.0 (#10813) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [setuptools](https://github.com/pypa/setuptools) from 80.0.1 to 80.1.0. 
<details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/setuptools/blob/main/NEWS.rst">setuptools's changelog</a>.</em></p> <blockquote> <h1>v80.1.0</h1> <h2>Features</h2> <ul> <li>Added a deadline of Oct 31 to the setup.py install deprecation.</li> </ul> <h2>Bugfixes</h2> <ul> <li>With <code>setup.py install --prefix=...</code>, fall back to distutils install rather than failing. Note that running <code>setup.py install</code> is deprecated. (<a href="https://redirect.github.com/pypa/setuptools/issues/3143">#3143</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/setuptools/commit/6f7b6ddf095c35db92b5a0724ebbc179d897adb4"><code>6f7b6dd</code></a> Bump version: 80.0.1 → 80.1.0</li> <li><a href="https://github.com/pypa/setuptools/commit/25ac162f7406f760f9b5647a9de862e1c643658c"><code>25ac162</code></a> Fix error message string formatting (<a href="https://redirect.github.com/pypa/setuptools/issues/4949">#4949</a>)</li> <li><a href="https://github.com/pypa/setuptools/commit/4566569dc9f48adf970962277286615ab50df881"><code>4566569</code></a> Merge pull request <a href="https://redirect.github.com/pypa/setuptools/issues/4970">#4970</a> from pypa/bugfix/3143-simple-install</li> <li><a href="https://github.com/pypa/setuptools/commit/7fc5e05df9e239cf2938396dcb1ca93d00621e2a"><code>7fc5e05</code></a> Add a due date on the deprecation.</li> <li><a href="https://github.com/pypa/setuptools/commit/d8071d6625e41281726fb5aa866b8a40fa2d9da0"><code>d8071d6</code></a> Remove do_egg_install (unused).</li> <li><a href="https://github.com/pypa/setuptools/commit/a1ecac4f96d56938a7bb45f840923719efac2369"><code>a1ecac4</code></a> Remove run override as it now unconditionally calls super().</li> <li><a href="https://github.com/pypa/setuptools/commit/2b0d1739783955c5b018ac63850a70f06bee49b3"><code>2b0d173</code></a> Unify the behavior around the return type when calling 
super(install).</li> <li><a href="https://github.com/pypa/setuptools/commit/0dc924ad325edcc4478532eb5ec58ad7518f0b5c"><code>0dc924a</code></a> Fall back to distutils install rather than failing.</li> <li>See full diff in <a href="https://github.com/pypa/setuptools/compare/v80.0.1...v80.1.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=setuptools&package-manager=pip&previous-version=80.0.1&new-version=80.1.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 8f18ed2443d..fe5c208f216 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -299,7 +299,7 @@ zlib-ng==0.5.1 # The following packages are considered to be unsafe in a requirements file: pip==25.1 # via pip-tools -setuptools==80.0.1 +setuptools==80.1.0 # via # incremental # pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index a918a3bf403..4f0ab50e631 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -290,7 +290,7 @@ zlib-ng==0.5.1 # The following packages are considered to be unsafe in a requirements file: pip==25.1 # via pip-tools -setuptools==80.0.1 +setuptools==80.1.0 # via # incremental # pip-tools diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index a302570ab48..41de69397c0 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -76,5 +76,5 @@ urllib3==2.4.0 # 
via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==80.0.1 +setuptools==80.1.0 # via incremental diff --git a/requirements/doc.txt b/requirements/doc.txt index 2eea796696c..328d0cc4cec 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -69,5 +69,5 @@ urllib3==2.4.0 # via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==80.0.1 +setuptools==80.1.0 # via incremental From 7be295c384e45bdb21188eb405d33681c984ddd3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 2 May 2025 11:11:42 +0000 Subject: [PATCH 1373/1511] Bump charset-normalizer from 3.4.1 to 3.4.2 (#10817) Bumps [charset-normalizer](https://github.com/jawah/charset_normalizer) from 3.4.1 to 3.4.2. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/jawah/charset_normalizer/releases">charset-normalizer's releases</a>.</em></p> <blockquote> <h2>Version 3.4.2</h2> <h2><a href="https://github.com/Ousret/charset_normalizer/compare/3.4.1...3.4.2">3.4.2</a> (2025-05-02)</h2> <h3>Fixed</h3> <ul> <li>Addressed the DeprecationWarning in our CLI regarding <code>argparse.FileType</code> by backporting the target class into the package. (<a href="https://redirect.github.com/jawah/charset_normalizer/issues/591">#591</a>)</li> <li>Improved the overall reliability of the detector with CJK Ideographs. 
(<a href="https://redirect.github.com/jawah/charset_normalizer/issues/605">#605</a>) (<a href="https://redirect.github.com/jawah/charset_normalizer/issues/587">#587</a>)</li> </ul> <h3>Changed</h3> <ul> <li>Optional mypyc compilation upgraded to version 1.15 for Python >= 3.9</li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/jawah/charset_normalizer/blob/master/CHANGELOG.md">charset-normalizer's changelog</a>.</em></p> <blockquote> <h2><a href="https://github.com/Ousret/charset_normalizer/compare/3.4.1...3.4.2">3.4.2</a> (2025-05-02)</h2> <h3>Fixed</h3> <ul> <li>Addressed the DeprecationWarning in our CLI regarding <code>argparse.FileType</code> by backporting the target class into the package. (<a href="https://redirect.github.com/jawah/charset_normalizer/issues/591">#591</a>)</li> <li>Improved the overall reliability of the detector with CJK Ideographs. (<a href="https://redirect.github.com/jawah/charset_normalizer/issues/605">#605</a>) (<a href="https://redirect.github.com/jawah/charset_normalizer/issues/587">#587</a>)</li> </ul> <h3>Changed</h3> <ul> <li>Optional mypyc compilation upgraded to version 1.15 for Python >= 3.8</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/jawah/charset_normalizer/commit/6422af1d50f1f46624deab585424e1ac4d08ba94"><code>6422af1</code></a> :pencil: update release date</li> <li><a href="https://github.com/jawah/charset_normalizer/commit/0e60ec18eb00bfe312dde86739929165839b633c"><code>0e60ec1</code></a> :bookmark: Release 3.4.2 (<a href="https://redirect.github.com/jawah/charset_normalizer/issues/614">#614</a>)</li> <li><a href="https://github.com/jawah/charset_normalizer/commit/f6630ce3b7fd34f234759a0c7a5ca413640e8ffb"><code>f6630ce</code></a> :arrow_up: Bump pypa/cibuildwheel from 2.23.2 to 2.23.3 (<a href="https://redirect.github.com/jawah/charset_normalizer/issues/617">#617</a>)</li> <li><a 
href="https://github.com/jawah/charset_normalizer/commit/677c99953f1a728de9f88d8659abff1246a80ade"><code>677c999</code></a> :arrow_up: Bump actions/download-artifact from 4.2.1 to 4.3.0 (<a href="https://redirect.github.com/jawah/charset_normalizer/issues/618">#618</a>)</li> <li><a href="https://github.com/jawah/charset_normalizer/commit/960ab1e676b7ef73c8c4f36c387b5a0da21e01c0"><code>960ab1e</code></a> :arrow_up: Bump actions/setup-python from 5.5.0 to 5.6.0 (<a href="https://redirect.github.com/jawah/charset_normalizer/issues/619">#619</a>)</li> <li><a href="https://github.com/jawah/charset_normalizer/commit/6eb632521c9b3f92df2cd944c7c524c205abfffc"><code>6eb6325</code></a> :arrow_up: Bump github/codeql-action from 3.28.10 to 3.28.16 (<a href="https://redirect.github.com/jawah/charset_normalizer/issues/620">#620</a>)</li> <li><a href="https://github.com/jawah/charset_normalizer/commit/c99c0f2c33ce54e41fefcae1c4fb96fa901d5cd4"><code>c99c0f2</code></a> :arrow_up: Update coverage requirement from <7.7,>=7.2.7 to >=7.2.7,<7.9 (<a href="https://redirect.github.com/jawah/charset_normalizer/issues/606">#606</a>)</li> <li><a href="https://github.com/jawah/charset_normalizer/commit/270f28e474db75f2a5085d6da732a90c2ac338f4"><code>270f28e</code></a> :arrow_up: Bump actions/setup-python from 5.4.0 to 5.5.0 (<a href="https://redirect.github.com/jawah/charset_normalizer/issues/607">#607</a>)</li> <li><a href="https://github.com/jawah/charset_normalizer/commit/d4d89a092d262a37ce427ee39ba5091ffd188f0b"><code>d4d89a0</code></a> :arrow_up: Bump pypa/cibuildwheel from 2.22.0 to 2.23.2 (<a href="https://redirect.github.com/jawah/charset_normalizer/issues/608">#608</a>)</li> <li><a href="https://github.com/jawah/charset_normalizer/commit/905fcf5cbe93e8b59c12103217434bdbc703d105"><code>905fcf5</code></a> :arrow_up: Bump slsa-framework/slsa-github-generator from 2.0.0 to 2.1.0 (<a href="https://redirect.github.com/jawah/charset_normalizer/issues/609">#609</a>)</li> <li>Additional 
commits viewable in <a href="https://github.com/jawah/charset_normalizer/compare/3.4.1...3.4.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=charset-normalizer&package-manager=pip&previous-version=3.4.1&new-version=3.4.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index fe5c208f216..53471ff1651 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -43,7 +43,7 @@ cffi==1.17.1 # pytest-codspeed cfgv==3.4.0 # via pre-commit -charset-normalizer==3.4.1 +charset-normalizer==3.4.2 # via requests cherry-picker==2.5.0 # via -r requirements/dev.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 4f0ab50e631..d04934b688a 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -43,7 +43,7 @@ cffi==1.17.1 # pytest-codspeed cfgv==3.4.0 # via pre-commit -charset-normalizer==3.4.1 +charset-normalizer==3.4.2 # via requests cherry-picker==2.5.0 # via -r requirements/dev.in diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 41de69397c0..3b83cd7fa12 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -12,7 +12,7 @@ babel==2.17.0 # via sphinx certifi==2025.1.31 
# via requests -charset-normalizer==3.4.1 +charset-normalizer==3.4.2 # via requests click==8.1.8 # via towncrier diff --git a/requirements/doc.txt b/requirements/doc.txt index 328d0cc4cec..2d6c130c3a5 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -12,7 +12,7 @@ babel==2.17.0 # via sphinx certifi==2025.1.31 # via requests -charset-normalizer==3.4.1 +charset-normalizer==3.4.2 # via requests click==8.1.8 # via towncrier From b932fb46024a79ebdd2a7abb3e841f2357bf8193 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 2 May 2025 11:18:50 +0000 Subject: [PATCH 1374/1511] Bump pycares from 4.6.1 to 4.7.0 (#10818) Bumps [pycares](https://github.com/saghul/pycares) from 4.6.1 to 4.7.0. <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/saghul/pycares/commit/a48b73f6e46596f632b44b47bc6ceb1c83739132"><code>a48b73f</code></a> Set version to 4.7.0</li> <li><a href="https://github.com/saghul/pycares/commit/a610e6ba86f845b3f8a1a3cd5eda0416ce34bbe6"><code>a610e6b</code></a> Add event thread support</li> <li><a href="https://github.com/saghul/pycares/commit/f99e32c0e426261b12493a1f21562c6e1da8fb3d"><code>f99e32c</code></a> Update c-ares to 1.29.0 to add reinit support to Channel (<a href="https://redirect.github.com/saghul/pycares/issues/219">#219</a>)</li> <li>See full diff in <a href="https://github.com/saghul/pycares/compare/v4.6.1...v4.7.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pycares&package-manager=pip&previous-version=4.6.1&new-version=4.7.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 8539638c3fa..f4e64d57256 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -36,7 +36,7 @@ 
propcache==0.3.1 # via # -r requirements/runtime-deps.in # yarl -pycares==4.6.1 +pycares==4.7.0 # via aiodns pycparser==2.22 # via cffi diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 53471ff1651..520229fad25 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -148,7 +148,7 @@ propcache==0.3.1 # yarl proxy-py==2.4.10 # via -r requirements/test.in -pycares==4.6.1 +pycares==4.7.0 # via aiodns pycparser==2.22 # via cffi diff --git a/requirements/dev.txt b/requirements/dev.txt index d04934b688a..b9c7c5ce797 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -145,7 +145,7 @@ propcache==0.3.1 # yarl proxy-py==2.4.10 # via -r requirements/test.in -pycares==4.6.1 +pycares==4.7.0 # via aiodns pycparser==2.22 # via cffi diff --git a/requirements/lint.txt b/requirements/lint.txt index 1642cd701dc..41e3a3f993f 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -59,7 +59,7 @@ pluggy==1.5.0 # via pytest pre-commit==4.2.0 # via -r requirements/lint.in -pycares==4.6.1 +pycares==4.7.0 # via aiodns pycparser==2.22 # via cffi diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index e6bcad92614..b7cebc81576 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -32,7 +32,7 @@ propcache==0.3.1 # via # -r requirements/runtime-deps.in # yarl -pycares==4.6.1 +pycares==4.7.0 # via aiodns pycparser==2.22 # via cffi diff --git a/requirements/test.txt b/requirements/test.txt index f8519c407e0..aec39654171 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -79,7 +79,7 @@ propcache==0.3.1 # yarl proxy-py==2.4.10 # via -r requirements/test.in -pycares==4.6.1 +pycares==4.7.0 # via aiodns pycparser==2.22 # via cffi From 10fb7cd9c247807edc0dd598cca6c9cc47140bca Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 2 May 2025 11:27:52 +0000 Subject: [PATCH 1375/1511] Bump certifi from 
2025.1.31 to 2025.4.26 (#10803) Bumps [certifi](https://github.com/certifi/python-certifi) from 2025.1.31 to 2025.4.26. <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/certifi/python-certifi/commit/275c9eb55733a464589c15fb4566fddd4598e5b2"><code>275c9eb</code></a> 2025.04.26 (<a href="https://redirect.github.com/certifi/python-certifi/issues/347">#347</a>)</li> <li><a href="https://github.com/certifi/python-certifi/commit/37883310b51e56570919cdc2d44becc1c6940559"><code>3788331</code></a> Bump actions/setup-python from 5.4.0 to 5.5.0 (<a href="https://redirect.github.com/certifi/python-certifi/issues/346">#346</a>)</li> <li><a href="https://github.com/certifi/python-certifi/commit/9d1f1b782000baedf57026de5b79e193bcb7ef7b"><code>9d1f1b7</code></a> Bump actions/download-artifact from 4.1.9 to 4.2.1 (<a href="https://redirect.github.com/certifi/python-certifi/issues/344">#344</a>)</li> <li><a href="https://github.com/certifi/python-certifi/commit/96b97a5afe26bc1adef98cb0bfe68e34948a73b6"><code>96b97a5</code></a> Bump actions/upload-artifact from 4.6.1 to 4.6.2 (<a href="https://redirect.github.com/certifi/python-certifi/issues/343">#343</a>)</li> <li><a href="https://github.com/certifi/python-certifi/commit/c054ed3ac3d3505efc929b71cfd87a257bbdb6b3"><code>c054ed3</code></a> Bump peter-evans/create-pull-request from 7.0.7 to 7.0.8 (<a href="https://redirect.github.com/certifi/python-certifi/issues/342">#342</a>)</li> <li><a href="https://github.com/certifi/python-certifi/commit/44547fc77121b12bb276b44b3b2b49cfcbeac06f"><code>44547fc</code></a> Bump actions/download-artifact from 4.1.8 to 4.1.9 (<a href="https://redirect.github.com/certifi/python-certifi/issues/341">#341</a>)</li> <li><a href="https://github.com/certifi/python-certifi/commit/5ea51247afecf1bf4ebfa8f2db3082e89a8bfaed"><code>5ea5124</code></a> Bump actions/upload-artifact from 4.6.0 to 4.6.1 (<a href="https://redirect.github.com/certifi/python-certifi/issues/340">#340</a>)</li> 
<li><a href="https://github.com/certifi/python-certifi/commit/2f142b7ae0b2d13fee4ba4b9fbd73a9cd5069060"><code>2f142b7</code></a> Bump peter-evans/create-pull-request from 7.0.6 to 7.0.7 (<a href="https://redirect.github.com/certifi/python-certifi/issues/339">#339</a>)</li> <li><a href="https://github.com/certifi/python-certifi/commit/80d2ebdc77d2d005f408f789fe2fb1fe5f4e0265"><code>80d2ebd</code></a> Bump actions/setup-python from 5.3.0 to 5.4.0 (<a href="https://redirect.github.com/certifi/python-certifi/issues/337">#337</a>)</li> <li>See full diff in <a href="https://github.com/certifi/python-certifi/compare/2025.01.31...2025.04.26">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=certifi&package-manager=pip&previous-version=2025.1.31&new-version=2025.4.26)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 520229fad25..d847f0b105d 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -34,7 +34,7 @@ brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in build==1.2.2.post1 # via pip-tools -certifi==2025.1.31 +certifi==2025.4.26 # via requests cffi==1.17.1 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index b9c7c5ce797..1e65f484415 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -34,7 +34,7 @@ brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in build==1.2.2.post1 # via pip-tools -certifi==2025.1.31 +certifi==2025.4.26 # via requests cffi==1.17.1 # via diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 3b83cd7fa12..4a910c84110 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ 
-10,7 +10,7 @@ alabaster==1.0.0 # via sphinx babel==2.17.0 # via sphinx -certifi==2025.1.31 +certifi==2025.4.26 # via requests charset-normalizer==3.4.2 # via requests diff --git a/requirements/doc.txt b/requirements/doc.txt index 2d6c130c3a5..cd4eb34e8e1 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -10,7 +10,7 @@ alabaster==1.0.0 # via sphinx babel==2.17.0 # via sphinx -certifi==2025.1.31 +certifi==2025.4.26 # via requests charset-normalizer==3.4.2 # via requests From e76655a18cbb3b453d2503399b11f06e43f6a4ad Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 2 May 2025 19:04:24 +0000 Subject: [PATCH 1376/1511] Bump aiodns from 3.2.0 to 3.3.0 (#10820) Bumps [aiodns](https://github.com/saghul/aiodns) from 3.2.0 to 3.3.0. <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/aiodns/commit/dd81a9337d01a789fda478c7031a771d13519ea3"><code>dd81a93</code></a> Fix release workflow for breaking changes in upload/download artifact (<a href="https://redirect.github.com/saghul/aiodns/issues/148">#148</a>)</li> <li><a href="https://github.com/aio-libs/aiodns/commit/7feb3d0048a59532cbceaa06dc78cd3b4a20f123"><code>7feb3d0</code></a> Release 3.3.0 (<a href="https://redirect.github.com/saghul/aiodns/issues/147">#147</a>)</li> <li><a href="https://github.com/aio-libs/aiodns/commit/b6cce69644c3dc70457dc3cc5d248574d43bcb23"><code>b6cce69</code></a> Use c-ares event thread when available (<a href="https://redirect.github.com/saghul/aiodns/issues/145">#145</a>)</li> <li><a href="https://github.com/aio-libs/aiodns/commit/bca3ae9a63c0a00e8f79beb57e716c7811c108d8"><code>bca3ae9</code></a> Bump pycares from 4.6.1 to 4.7.0 (<a href="https://redirect.github.com/saghul/aiodns/issues/146">#146</a>)</li> <li><a href="https://github.com/aio-libs/aiodns/commit/ed3c50701a7ee703c6b52c789bdafbcae236b5d0"><code>ed3c507</code></a> Bump pycares from 4.5.0 to 4.6.1 (<a 
href="https://redirect.github.com/saghul/aiodns/issues/143">#143</a>)</li> <li><a href="https://github.com/aio-libs/aiodns/commit/a5cc25c96ec325094f87fb67edd44151b1cf5a1c"><code>a5cc25c</code></a> Bump actions/download-artifact from 4.2.1 to 4.3.0 (<a href="https://redirect.github.com/saghul/aiodns/issues/144">#144</a>)</li> <li><a href="https://github.com/aio-libs/aiodns/commit/7759363145a2de5efddeb30594eea42a41872bb4"><code>7759363</code></a> Bump pytest-cov from 6.1.0 to 6.1.1 (<a href="https://redirect.github.com/saghul/aiodns/issues/142">#142</a>)</li> <li><a href="https://github.com/aio-libs/aiodns/commit/804a2a419ed0ecc9744d3dd7a2a74c993fde19c2"><code>804a2a4</code></a> Bump actions/upload-artifact from 2 to 4 (<a href="https://redirect.github.com/saghul/aiodns/issues/133">#133</a>)</li> <li><a href="https://github.com/aio-libs/aiodns/commit/82b066af1a32dcd857fe489f5c76f8b169602f63"><code>82b066a</code></a> Bump actions/download-artifact from 4.1.7 to 4.2.1 (<a href="https://redirect.github.com/saghul/aiodns/issues/131">#131</a>)</li> <li><a href="https://github.com/aio-libs/aiodns/commit/10207a2763ea5289fce174ac90ba3ae667ae180a"><code>10207a2</code></a> Fix test coverage (<a href="https://redirect.github.com/saghul/aiodns/issues/140">#140</a>)</li> <li>Additional commits viewable in <a href="https://github.com/saghul/aiodns/compare/v3.2.0...v3.3.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=aiodns&package-manager=pip&previous-version=3.2.0&new-version=3.3.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index f4e64d57256..e456f72ab22 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -4,7 +4,7 @@ # # pip-compile 
--allow-unsafe --output-file=requirements/base.txt --strip-extras requirements/base.in # -aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" +aiodns==3.3.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in aiohappyeyeballs==2.6.1 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index d847f0b105d..c13b965cc39 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/constraints.txt --resolver=backtracking --strip-extras requirements/constraints.in # -aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" +aiodns==3.3.0 ; sys_platform == "linux" or sys_platform == "darwin" # via # -r requirements/lint.in # -r requirements/runtime-deps.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 1e65f484415..bc15a7f0713 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/dev.txt --resolver=backtracking --strip-extras requirements/dev.in # -aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" +aiodns==3.3.0 ; sys_platform == "linux" or sys_platform == "darwin" # via # -r requirements/lint.in # -r requirements/runtime-deps.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 41e3a3f993f..9b348fa9d47 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/lint.txt --resolver=backtracking --strip-extras requirements/lint.in # -aiodns==3.2.0 +aiodns==3.3.0 # via -r requirements/lint.in annotated-types==0.7.0 # via pydantic diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index b7cebc81576..cb69af4ee1f 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe 
--output-file=requirements/runtime-deps.txt --strip-extras requirements/runtime-deps.in # -aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" +aiodns==3.3.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in aiohappyeyeballs==2.6.1 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index aec39654171..02891dd04e7 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/test.txt --resolver=backtracking --strip-extras requirements/test.in # -aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" +aiodns==3.3.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in aiohappyeyeballs==2.6.1 # via -r requirements/runtime-deps.in From f3d2fbb5119fd610cc0100e3cb9281c782713af4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 2 May 2025 19:17:39 +0000 Subject: [PATCH 1377/1511] Bump pip from 25.1 to 25.1.1 (#10822) Bumps [pip](https://github.com/pypa/pip) from 25.1 to 25.1.1. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/pip/blob/main/NEWS.rst">pip's changelog</a>.</em></p> <blockquote> <h1>25.1.1 (2025-05-02)</h1> <h2>Bug Fixes</h2> <ul> <li>Fix <code>req.source_dir</code> AssertionError when using the legacy resolver. (<code>[#13353](https://github.com/pypa/pip/issues/13353) <https://github.com/pypa/pip/issues/13353></code>_)</li> <li>Fix crash on Python 3.9.6 and lower when pip failed to compile a Python module during installation. (<code>[#13364](https://github.com/pypa/pip/issues/13364) <https://github.com/pypa/pip/issues/13364></code>_)</li> <li>Names in dependency group includes are now normalized before lookup, which fixes incorrect <code>Dependency group '...' not found</code> errors. 
(<code>[#13372](https://github.com/pypa/pip/issues/13372) <https://github.com/pypa/pip/issues/13372></code>_)</li> </ul> <h2>Vendored Libraries</h2> <ul> <li>Fix issues with using tomllib from the stdlib if available, rather than tomli</li> <li>Upgrade dependency-groups to 1.3.1</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/pip/commit/01857ef79f59a98db592bacb6e7b48f354528c80"><code>01857ef</code></a> Bump for release</li> <li><a href="https://github.com/pypa/pip/commit/08d8bb91e2c7734f98f828e28215aba15784012a"><code>08d8bb9</code></a> Merge pull request <a href="https://redirect.github.com/pypa/pip/issues/13374">#13374</a> from pfmoore/fixups</li> <li><a href="https://github.com/pypa/pip/commit/2bff84e495a3d31008088c168c5ab9bfa633a172"><code>2bff84e</code></a> Merge pull request <a href="https://redirect.github.com/pypa/pip/issues/13363">#13363</a> from sbidoul/fix-source_dir-assert</li> <li><a href="https://github.com/pypa/pip/commit/644e71d6e339035836dce0adbf59f881b334e186"><code>644e71d</code></a> News file fixups</li> <li><a href="https://github.com/pypa/pip/commit/426856f496a8f84f1e36fded83b3d5e74968a786"><code>426856f</code></a> Merge pull request <a href="https://redirect.github.com/pypa/pip/issues/13364">#13364</a> from ichard26/bugfix/python39</li> <li><a href="https://github.com/pypa/pip/commit/b7e3aead483baf42ca00e29b9758338ad19c130b"><code>b7e3aea</code></a> Merge pull request <a href="https://redirect.github.com/pypa/pip/issues/13356">#13356</a> from eli-schwartz/tomllib</li> <li><a href="https://github.com/pypa/pip/commit/8c678fe85daaf11d8dd6a43b7835088513944655"><code>8c678fe</code></a> Merge pull request <a href="https://redirect.github.com/pypa/pip/issues/13373">#13373</a> from sirosen/update-vendored-dependency-groups</li> <li><a href="https://github.com/pypa/pip/commit/7d006399c0d0d38e55d56a6b0732e959bf75f796"><code>7d00639</code></a> Update newsfiles for dependency-groups 
patch</li> <li><a href="https://github.com/pypa/pip/commit/6d28bbf065a292f67d3d66d8f47fba15a1a2d512"><code>6d28bbf</code></a> Update version of <code>dependency-groups</code> to v1.3.1</li> <li><a href="https://github.com/pypa/pip/commit/94bd66d615d5f9036c53196f4f2acb7c71d5010c"><code>94bd66d</code></a> Revert StreamWrapper removal to restore Python 3.9.{0,6} compat</li> <li>Additional commits viewable in <a href="https://github.com/pypa/pip/compare/25.1...25.1.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pip&package-manager=pip&previous-version=25.1&new-version=25.1.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index c13b965cc39..7cb5ccf0c20 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -297,7 +297,7 @@ zlib-ng==0.5.1 # -r requirements/test.in # The following packages are considered to be unsafe in a requirements file: -pip==25.1 +pip==25.1.1 # via pip-tools setuptools==80.1.0 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index bc15a7f0713..ca22e9c09c0 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -288,7 +288,7 @@ zlib-ng==0.5.1 # -r requirements/test.in # The following packages are considered to be unsafe in a requirements file: -pip==25.1 +pip==25.1.1 # via pip-tools setuptools==80.1.0 # via From 1d6961ab648834df29bde4f4d431e9e0cd00dc9d Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 3 May 2025 18:25:41 -0500 Subject: [PATCH 1378/1511] [PR #10823/d17f2a4c backport][3.12] Remove constraint that prevented aiodns from being 
installed on Windows (#10825) Co-authored-by: J. Nick Koston <nick@koston.org> closes #8121 --- CHANGES/10823.packaging.rst | 3 +++ requirements/runtime-deps.in | 2 +- setup.cfg | 3 +-- 3 files changed, 5 insertions(+), 3 deletions(-) create mode 100644 CHANGES/10823.packaging.rst diff --git a/CHANGES/10823.packaging.rst b/CHANGES/10823.packaging.rst new file mode 100644 index 00000000000..c65f8bea795 --- /dev/null +++ b/CHANGES/10823.packaging.rst @@ -0,0 +1,3 @@ +``aiodns`` is now installed on Windows with speedups extra -- by :user:`bdraco`. + +As of ``aiodns`` 3.3.0, ``SelectorEventLoop`` is no longer required when using ``pycares`` 4.7.0 or later. diff --git a/requirements/runtime-deps.in b/requirements/runtime-deps.in index 425abdc85f6..7b0382a7a2b 100644 --- a/requirements/runtime-deps.in +++ b/requirements/runtime-deps.in @@ -1,6 +1,6 @@ # Extracted from `setup.cfg` via `make sync-direct-runtime-deps` -aiodns >= 3.2.0; sys_platform=="linux" or sys_platform=="darwin" +aiodns >= 3.3.0 aiohappyeyeballs >= 2.5.0 aiosignal >= 1.1.2 async-timeout >= 4.0, < 6.0 ; python_version < "3.11" diff --git a/setup.cfg b/setup.cfg index 83b33d01532..649a5aaa4eb 100644 --- a/setup.cfg +++ b/setup.cfg @@ -67,8 +67,7 @@ install_requires = [options.extras_require] speedups = - # required c-ares (aiodns' backend) will not build on windows - aiodns >= 3.2.0; sys_platform=="linux" or sys_platform=="darwin" + aiodns >= 3.3.0 Brotli; platform_python_implementation == 'CPython' brotlicffi; platform_python_implementation != 'CPython' From 2e00ed5db25466ed44b104677450c3d1265fcba9 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 4 May 2025 01:58:29 +0000 Subject: [PATCH 1379/1511] [PR #10797/ceed5028 backport][3.12] Build armv7l manylinux wheels (#10827) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- .github/workflows/ci-cd.yml | 3 +++ CHANGES/10797.feature.rst | 1 + 2 files changed, 4 insertions(+) create mode 100644 CHANGES/10797.feature.rst diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 564aa1fea14..daa701c2aa9 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -364,6 +364,9 @@ jobs: - os: ubuntu-latest qemu: s390x musl: musllinux + - os: ubuntu-latest + qemu: armv7l + musl: "" - os: ubuntu-latest qemu: armv7l musl: musllinux diff --git a/CHANGES/10797.feature.rst b/CHANGES/10797.feature.rst new file mode 100644 index 00000000000..fc68d09f34e --- /dev/null +++ b/CHANGES/10797.feature.rst @@ -0,0 +1 @@ +Started building armv7l manylinux wheels -- by :user:`bdraco`. From 9e9b8cd2e097ed0a357adc4b53b8a0cc702debd2 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 4 May 2025 14:11:17 +0100 Subject: [PATCH 1380/1511] [PR #10798/73a8de00 backport][3.12] Fix error messages grammar (#10828) **This is a backport of PR #10798 as merged into master (73a8de00014e53ebcd2dded06b0932cca96c0e92).** Co-authored-by: David Xia <david@davidxia.com> --- .github/PULL_REQUEST_TEMPLATE.md | 4 ++-- aiohttp/_http_parser.pyx | 4 ++-- aiohttp/http_exceptions.py | 2 +- aiohttp/http_parser.py | 4 ++-- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index d4b1dba4340..7a34e15c9bd 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -24,8 +24,8 @@ entertain early in the review process. Thank you in advance! ## Related issue number -<!-- Are there any issues opened that will be resolved by merging this change? --> -<!-- Remember to prefix with 'Fixes' if it should close the issue (e.g. 'Fixes #123'). --> +<!-- Will this resolve any open issues? --> +<!-- Remember to prefix with 'Fixes' if it closes an issue (e.g. 'Fixes #123'). 
--> ## Checklist diff --git a/aiohttp/_http_parser.pyx b/aiohttp/_http_parser.pyx index 19dc3e63b74..16893f00e74 100644 --- a/aiohttp/_http_parser.pyx +++ b/aiohttp/_http_parser.pyx @@ -506,10 +506,10 @@ cdef class HttpParser: if self._payload is not None: if self._cparser.flags & cparser.F_CHUNKED: raise TransferEncodingError( - "Not enough data for satisfy transfer length header.") + "Not enough data to satisfy transfer length header.") elif self._cparser.flags & cparser.F_CONTENT_LENGTH: raise ContentLengthError( - "Not enough data for satisfy content length header.") + "Not enough data to satisfy content length header.") elif cparser.llhttp_get_errno(self._cparser) != cparser.HPE_OK: desc = cparser.llhttp_get_error_reason(self._cparser) raise PayloadEncodingError(desc.decode('latin-1')) diff --git a/aiohttp/http_exceptions.py b/aiohttp/http_exceptions.py index b8dda999acf..773830211e6 100644 --- a/aiohttp/http_exceptions.py +++ b/aiohttp/http_exceptions.py @@ -71,7 +71,7 @@ class TransferEncodingError(PayloadEncodingError): class ContentLengthError(PayloadEncodingError): - """Not enough data for satisfy content length header.""" + """Not enough data to satisfy content length header.""" class LineTooLong(BadHttpMessage): diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index 1b8b5b4d49e..db61ab5264c 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -804,11 +804,11 @@ def feed_eof(self) -> None: self.payload.feed_eof() elif self._type == ParseState.PARSE_LENGTH: raise ContentLengthError( - "Not enough data for satisfy content length header." + "Not enough data to satisfy content length header." ) elif self._type == ParseState.PARSE_CHUNKED: raise TransferEncodingError( - "Not enough data for satisfy transfer length header." + "Not enough data to satisfy transfer length header." 
) def feed_data( From b227daeab404036d4e256945d49c9872c6dbdcb0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 May 2025 11:38:32 +0000 Subject: [PATCH 1381/1511] Bump cryptography from 44.0.2 to 44.0.3 (#10833) Bumps [cryptography](https://github.com/pyca/cryptography) from 44.0.2 to 44.0.3. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst">cryptography's changelog</a>.</em></p> <blockquote> <p>44.0.3 - 2025-05-02</p> <pre><code> * Fixed compilation when using LibreSSL 4.1.0. <p>.. _v44-0-2: </code></pre></p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pyca/cryptography/commit/b92c8416bf71bf5aa9c343490984dbcf7feb2cec"><code>b92c841</code></a> [44.0.x] backports for libressl 4.1.0 support release (<a href="https://redirect.github.com/pyca/cryptography/issues/12848">#12848</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/387ca00cec0ea63b2804a411ae0e953b300397e7"><code>387ca00</code></a> Hopefully add Window / MacOS PyPy 3.10 / 3.11 support (<a href="https://redirect.github.com/pyca/cryptography/issues/12559">#12559</a>)</li> <li>See full diff in <a href="https://github.com/pyca/cryptography/compare/44.0.2...44.0.3">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=cryptography&package-manager=pip&previous-version=44.0.2&new-version=44.0.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 4 ++-- requirements/dev.txt | 4 ++-- requirements/lint.txt | 2 +- requirements/test.txt | 4 ++-- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 7cb5ccf0c20..38fad946d5c 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe 
--output-file=requirements/constraints.txt --resolver=backtracking --strip-extras requirements/constraints.in # -aiodns==3.3.0 ; sys_platform == "linux" or sys_platform == "darwin" +aiodns==3.3.0 # via # -r requirements/lint.in # -r requirements/runtime-deps.in @@ -58,7 +58,7 @@ coverage==7.8.0 # via # -r requirements/test.in # pytest-cov -cryptography==44.0.2 +cryptography==44.0.3 # via # pyjwt # trustme diff --git a/requirements/dev.txt b/requirements/dev.txt index ca22e9c09c0..e9fa57ebecd 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/dev.txt --resolver=backtracking --strip-extras requirements/dev.in # -aiodns==3.3.0 ; sys_platform == "linux" or sys_platform == "darwin" +aiodns==3.3.0 # via # -r requirements/lint.in # -r requirements/runtime-deps.in @@ -58,7 +58,7 @@ coverage==7.8.0 # via # -r requirements/test.in # pytest-cov -cryptography==44.0.2 +cryptography==44.0.3 # via # pyjwt # trustme diff --git a/requirements/lint.txt b/requirements/lint.txt index 9b348fa9d47..8f161696935 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -21,7 +21,7 @@ cfgv==3.4.0 # via pre-commit click==8.1.8 # via slotscheck -cryptography==44.0.2 +cryptography==44.0.3 # via trustme distlib==0.3.9 # via virtualenv diff --git a/requirements/test.txt b/requirements/test.txt index 02891dd04e7..83f10badeac 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/test.txt --resolver=backtracking --strip-extras requirements/test.in # -aiodns==3.3.0 ; sys_platform == "linux" or sys_platform == "darwin" +aiodns==3.3.0 # via -r requirements/runtime-deps.in aiohappyeyeballs==2.6.1 # via -r requirements/runtime-deps.in @@ -31,7 +31,7 @@ coverage==7.8.0 # via # -r requirements/test.in # pytest-cov -cryptography==44.0.2 +cryptography==44.0.3 # via trustme exceptiongroup==1.2.2 # via pytest From 
8ac93fc6545555df95b8533f65979cd63692124a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 May 2025 11:41:59 +0000 Subject: [PATCH 1382/1511] Bump setuptools from 80.1.0 to 80.3.1 (#10834) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [setuptools](https://github.com/pypa/setuptools) from 80.1.0 to 80.3.1. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/setuptools/blob/main/NEWS.rst">setuptools's changelog</a>.</em></p> <blockquote> <h1>v80.3.1</h1> <h2>Bugfixes</h2> <ul> <li>Restored select attributes in easy_install for temporary pbr compatibility. (<a href="https://redirect.github.com/pypa/setuptools/issues/4976">#4976</a>)</li> </ul> <h1>v80.3.0</h1> <h2>Features</h2> <ul> <li>Removed easy_install and package_index modules. (<a href="https://redirect.github.com/pypa/setuptools/issues/917">#917</a>)</li> <li>Restored license declaration in package metadata. See <a href="https://redirect.github.com/jaraco/skeleton/issues/171">jaraco/skeleton#171</a>. (<a href="https://redirect.github.com/pypa/setuptools/issues/4956">#4956</a>)</li> </ul> <h1>v80.2.0</h1> <h2>Features</h2> <ul> <li>Restored support for install_scripts --executable (and classic behavior for the executable for those invocations). Instead, build_editable provides the portable form of the executables for downstream installers to rewrite. 
(<a href="https://redirect.github.com/pypa/setuptools/issues/4934">#4934</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/setuptools/commit/f37845bce6bb06ec25c24cf30210a485e945d21e"><code>f37845b</code></a> Bump version: 80.3.0 → 80.3.1</li> <li><a href="https://github.com/pypa/setuptools/commit/a6f8db0c3932879f5e1876d97d32b3a7b567b9d5"><code>a6f8db0</code></a> Merge pull request <a href="https://redirect.github.com/pypa/setuptools/issues/4980">#4980</a> from pypa/debt/4976-pbr-compat</li> <li><a href="https://github.com/pypa/setuptools/commit/05cf544d23b8bbe5f914d198c2620abced8b7477"><code>05cf544</code></a> Add news fragment.</li> <li><a href="https://github.com/pypa/setuptools/commit/5b39e4e50510e62902260fd4a437143cbf42c7f8"><code>5b39e4e</code></a> Add the deprecation warning to attribute access.</li> <li><a href="https://github.com/pypa/setuptools/commit/30c00380093b1a7ff5693f98d06ab4fa4f8923cf"><code>30c0038</code></a> Render the attributes dynamically.</li> <li><a href="https://github.com/pypa/setuptools/commit/d6229353cd459aea9ccb70a4b76dfae1261a1270"><code>d622935</code></a> Restore ScriptWriter and sys_executable properties.</li> <li><a href="https://github.com/pypa/setuptools/commit/88bd892e78c8b5a6855eac06383726a84a91a45e"><code>88bd892</code></a> Add a failing integration test. 
Ref <a href="https://redirect.github.com/pypa/setuptools/issues/4976">#4976</a></li> <li><a href="https://github.com/pypa/setuptools/commit/9dccfa41c351672697df031ce9a30bb4af44c573"><code>9dccfa4</code></a> Moved pbr setup into a fixture.</li> <li><a href="https://github.com/pypa/setuptools/commit/af8b3228487554d93ed15ec69cfe45f7c086e9b4"><code>af8b322</code></a> Bump version: 80.2.0 → 80.3.0</li> <li><a href="https://github.com/pypa/setuptools/commit/e7b80848f1d72a06ed042e5f41c3e72203c54c6a"><code>e7b8084</code></a> Merge pull request <a href="https://redirect.github.com/pypa/setuptools/issues/4963">#4963</a> from pypa/debt/remove-easy-install</li> <li>Additional commits viewable in <a href="https://github.com/pypa/setuptools/compare/v80.1.0...v80.3.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=setuptools&package-manager=pip&previous-version=80.1.0&new-version=80.3.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 38fad946d5c..6d961fe3bd0 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -299,7 +299,7 @@ zlib-ng==0.5.1 # The following packages 
are considered to be unsafe in a requirements file: pip==25.1.1 # via pip-tools -setuptools==80.1.0 +setuptools==80.3.1 # via # incremental # pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index e9fa57ebecd..438e4260559 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -290,7 +290,7 @@ zlib-ng==0.5.1 # The following packages are considered to be unsafe in a requirements file: pip==25.1.1 # via pip-tools -setuptools==80.1.0 +setuptools==80.3.1 # via # incremental # pip-tools diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 4a910c84110..5024465497b 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -76,5 +76,5 @@ urllib3==2.4.0 # via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==80.1.0 +setuptools==80.3.1 # via incremental diff --git a/requirements/doc.txt b/requirements/doc.txt index cd4eb34e8e1..6c3accba1ae 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -69,5 +69,5 @@ urllib3==2.4.0 # via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==80.1.0 +setuptools==80.3.1 # via incremental From beaa695184ca2a1fcb2ba6338a10202c1666cddc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 May 2025 19:32:40 +0000 Subject: [PATCH 1383/1511] Bump mypy-extensions from 1.0.0 to 1.1.0 (#10789) Bumps [mypy-extensions](https://github.com/python/mypy_extensions) from 1.0.0 to 1.1.0. 
<details> <summary>Commits</summary> <ul> <li><a href="https://github.com/python/mypy_extensions/commit/70d9435779df27a522b146f2c5dee06ccacae373"><code>70d9435</code></a> remove dev from version</li> <li><a href="https://github.com/python/mypy_extensions/commit/8d272bb1264a6114610a3df9b43f87247c856d92"><code>8d272bb</code></a> Switch build-backend to flit_core + use License-Expression for project metada...</li> <li><a href="https://github.com/python/mypy_extensions/commit/1b461029c25d2e677792e1483734f352bd589aa8"><code>1b46102</code></a> Bump dev version to 1.1.0-dev (<a href="https://redirect.github.com/python/mypy_extensions/issues/57">#57</a>)</li> <li><a href="https://github.com/python/mypy_extensions/commit/400534fb50c683ff1c00051ca101741ced69330b"><code>400534f</code></a> Deprecate mypy_extensions.NoReturn (<a href="https://redirect.github.com/python/mypy_extensions/issues/56">#56</a>)</li> <li><a href="https://github.com/python/mypy_extensions/commit/23fbfa5674aca52517564628e12c228812ac2bf0"><code>23fbfa5</code></a> Update flake8 to 7.1.1 (<a href="https://redirect.github.com/python/mypy_extensions/issues/54">#54</a>)</li> <li><a href="https://github.com/python/mypy_extensions/commit/9ddbb08b20cfba7efe18f5fbf926131e583c0b22"><code>9ddbb08</code></a> Cleanup tests (<a href="https://redirect.github.com/python/mypy_extensions/issues/55">#55</a>)</li> <li><a href="https://github.com/python/mypy_extensions/commit/6d9c7b756486a654e795095d393bf8206cd11dea"><code>6d9c7b7</code></a> Move metadata to pyproject + drop Python 3.7 (<a href="https://redirect.github.com/python/mypy_extensions/issues/53">#53</a>)</li> <li><a href="https://github.com/python/mypy_extensions/commit/812066c7a88e78572332e6e0f428bf3e0c248193"><code>812066c</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/python/mypy_extensions/issues/49">#49</a>)</li> <li><a 
href="https://github.com/python/mypy_extensions/commit/9dd6d98da338a3924b0ed7440b843988cd5805df"><code>9dd6d98</code></a> Add support for Python 3.12 (<a href="https://redirect.github.com/python/mypy_extensions/issues/48">#48</a>)</li> <li><a href="https://github.com/python/mypy_extensions/commit/e0c6670e05a87507d59b7d3a0aa2eec88e9813b0"><code>e0c6670</code></a> Deprecate <code>mypy_extensions.TypedDict</code> (<a href="https://redirect.github.com/python/mypy_extensions/issues/47">#47</a>)</li> <li>Additional commits viewable in <a href="https://github.com/python/mypy_extensions/compare/1.0.0...1.1.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=mypy-extensions&package-manager=pip&previous-version=1.0.0&new-version=1.1.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 6d961fe3bd0..a60e47c7bf1 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -124,7 +124,7 @@ mypy==1.15.0 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/test.in -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via mypy nodeenv==1.9.1 # via pre-commit diff --git a/requirements/dev.txt b/requirements/dev.txt index 438e4260559..3abd04c1316 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -121,7 +121,7 @@ mypy==1.15.0 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/test.in -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via mypy nodeenv==1.9.1 # via pre-commit diff --git a/requirements/lint.txt b/requirements/lint.txt index 8f161696935..c5e09327cac 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -47,7 +47,7 @@ mdurl==0.1.2 # via markdown-it-py 
mypy==1.15.0 ; implementation_name == "cpython" # via -r requirements/lint.in -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via mypy nodeenv==1.9.1 # via pre-commit diff --git a/requirements/test.txt b/requirements/test.txt index 83f10badeac..b48ade24f17 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -65,7 +65,7 @@ multidict==6.4.3 # yarl mypy==1.15.0 ; implementation_name == "cpython" # via -r requirements/test.in -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via mypy packaging==25.0 # via From 6be1a5a1354d7483d65a7550f213c943210f7cd7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 May 2025 19:32:51 +0000 Subject: [PATCH 1384/1511] Bump pycares from 4.7.0 to 4.8.0 (#10832) Bumps [pycares](https://github.com/saghul/pycares) from 4.7.0 to 4.8.0. <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/saghul/pycares/commit/6405d1f679213d10159424474dbbc5e152718321"><code>6405d1f</code></a> Set version to 4.8.0</li> <li><a href="https://github.com/saghul/pycares/commit/a5638960171116a3d07f1552424a25c09719f7aa"><code>a563896</code></a> Add ARES_FLAG_NO_DFLT_SVR and ARES_FLAG_EDNS to API</li> <li><a href="https://github.com/saghul/pycares/commit/da561b29779e611c9dafba74e42388b1fb962c70"><code>da561b2</code></a> Update bundled c-ares to v1.34.5 (<a href="https://redirect.github.com/saghul/pycares/issues/221">#221</a>)</li> <li><a href="https://github.com/saghul/pycares/commit/129c07ca89f7647e7610aecca6b12047b4cc4195"><code>129c07c</code></a> Cancel previous CI jobs on pull request update</li> <li>See full diff in <a href="https://github.com/saghul/pycares/compare/v4.7.0...v4.8.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pycares&package-manager=pip&previous-version=4.7.0&new-version=4.8.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 4 ++-- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/runtime-deps.txt | 4 ++-- requirements/test.txt | 2 +- 6 files changed, 8 insertions(+), 8 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index e456f72ab22..7542a9d4bc2 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/base.txt --strip-extras requirements/base.in # -aiodns==3.3.0 ; sys_platform == "linux" or sys_platform == "darwin" +aiodns==3.3.0 # via -r requirements/runtime-deps.in aiohappyeyeballs==2.6.1 # via -r requirements/runtime-deps.in @@ -36,7 +36,7 @@ propcache==0.3.1 # via # -r requirements/runtime-deps.in # yarl -pycares==4.7.0 +pycares==4.8.0 # via aiodns pycparser==2.22 # via cffi diff --git a/requirements/constraints.txt b/requirements/constraints.txt index a60e47c7bf1..542704645da 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -148,7 +148,7 @@ propcache==0.3.1 # yarl proxy-py==2.4.10 # via -r requirements/test.in -pycares==4.7.0 
+pycares==4.8.0 # via aiodns pycparser==2.22 # via cffi diff --git a/requirements/dev.txt b/requirements/dev.txt index 3abd04c1316..2211e7101e9 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -145,7 +145,7 @@ propcache==0.3.1 # yarl proxy-py==2.4.10 # via -r requirements/test.in -pycares==4.7.0 +pycares==4.8.0 # via aiodns pycparser==2.22 # via cffi diff --git a/requirements/lint.txt b/requirements/lint.txt index c5e09327cac..aef877972af 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -59,7 +59,7 @@ pluggy==1.5.0 # via pytest pre-commit==4.2.0 # via -r requirements/lint.in -pycares==4.7.0 +pycares==4.8.0 # via aiodns pycparser==2.22 # via cffi diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index cb69af4ee1f..ca591313650 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/runtime-deps.txt --strip-extras requirements/runtime-deps.in # -aiodns==3.3.0 ; sys_platform == "linux" or sys_platform == "darwin" +aiodns==3.3.0 # via -r requirements/runtime-deps.in aiohappyeyeballs==2.6.1 # via -r requirements/runtime-deps.in @@ -32,7 +32,7 @@ propcache==0.3.1 # via # -r requirements/runtime-deps.in # yarl -pycares==4.7.0 +pycares==4.8.0 # via aiodns pycparser==2.22 # via cffi diff --git a/requirements/test.txt b/requirements/test.txt index b48ade24f17..ba44a71270b 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -79,7 +79,7 @@ propcache==0.3.1 # yarl proxy-py==2.4.10 # via -r requirements/test.in -pycares==4.7.0 +pycares==4.8.0 # via aiodns pycparser==2.22 # via cffi From 8c74667d4a8e51458e3b0f2eabe10c5bfbcd4d90 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 6 May 2025 11:00:42 +0000 Subject: [PATCH 1385/1511] Bump virtualenv from 20.30.0 to 20.31.1 (#10836) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit Bumps [virtualenv](https://github.com/pypa/virtualenv) from 20.30.0 to 20.31.1. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pypa/virtualenv/releases">virtualenv's releases</a>.</em></p> <blockquote> <h2>20.31.1</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>release 20.31.0 by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2879">pypa/virtualenv#2879</a></li> <li>Bump setuptools and pip by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2880">pypa/virtualenv#2880</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pypa/virtualenv/compare/20.31.0...20.31.1">https://github.com/pypa/virtualenv/compare/20.31.0...20.31.1</a></p> <h2>20.31.0</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>release 20.30.0 by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2864">pypa/virtualenv#2864</a></li> <li>Stop including 'wheel', setuptools 70.1 has native bdist_wheel support by <a href="https://github.com/stefanor"><code>@​stefanor</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2868">pypa/virtualenv#2868</a></li> <li>Revert a large part of the wheel removal, to support Python 3.8 by <a href="https://github.com/stefanor"><code>@​stefanor</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2876">pypa/virtualenv#2876</a></li> <li>Fix HelpFormatter for Python 3.14 by <a href="https://github.com/cdce8p"><code>@​cdce8p</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2878">pypa/virtualenv#2878</a></li> <li>Fix get_embed_wheel for unknown wheels by <a href="https://github.com/tiran"><code>@​tiran</code></a> in <a 
href="https://redirect.github.com/pypa/virtualenv/pull/2877">pypa/virtualenv#2877</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/cdce8p"><code>@​cdce8p</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/virtualenv/pull/2878">pypa/virtualenv#2878</a></li> <li><a href="https://github.com/tiran"><code>@​tiran</code></a> made their first contribution in <a href="https://redirect.github.com/pypa/virtualenv/pull/2877">pypa/virtualenv#2877</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pypa/virtualenv/compare/20.30.0...20.31.0">https://github.com/pypa/virtualenv/compare/20.30.0...20.31.0</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/virtualenv/blob/main/docs/changelog.rst">virtualenv's changelog</a>.</em></p> <blockquote> <h2>v20.31.1 (2025-05-05)</h2> <p>Bugfixes - 20.31.1</p> <pre><code>- Upgrade embedded wheels: <ul> <li>pip to <code>25.1.1</code> from <code>25.1</code></li> <li>setuptools to <code>80.3.1</code> from <code>78.1.0</code> (:issue:<code>2880</code>)</li> </ul> <h2>v20.31.0 (2025-05-05)</h2> <p>Features - 20.31.0 </code></pre></p> <ul> <li>No longer bundle <code>wheel</code> wheels (except on Python 3.8), <code>setuptools</code> includes native <code>bdist_wheel</code> support. Update <code>pip</code> to <code>25.1</code>. (:issue:<code>2868</code>)</li> </ul> <p>Bugfixes - 20.31.0</p> <pre><code>- ``get_embed_wheel()`` no longer fails with a :exc:`TypeError` when it is called with an unknown *distribution*. (:issue:`2877`) - Fix ``HelpFormatter`` error with Python 3.14.0b1. 
(:issue:`2878`) </code></pre> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/virtualenv/commit/572aa2319783c58d2729dd94a0e3fb5435d62ff3"><code>572aa23</code></a> release 20.31.1</li> <li><a href="https://github.com/pypa/virtualenv/commit/715268e5fb0a323690b722efcd2e2cf5a14cd5ba"><code>715268e</code></a> Merge pull request <a href="https://redirect.github.com/pypa/virtualenv/issues/2880">#2880</a> from gaborbernat/main</li> <li><a href="https://github.com/pypa/virtualenv/commit/472157d7ad566dc999948f7d87d9ec5dc6508806"><code>472157d</code></a> Bump setuptools and pip</li> <li><a href="https://github.com/pypa/virtualenv/commit/f7d440dc29c0a6c6d29c1fdd4ddf65b7b10daa0b"><code>f7d440d</code></a> release 20.31.0 (<a href="https://redirect.github.com/pypa/virtualenv/issues/2879">#2879</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/2b7ea303bcd64959f88f2617ebba74a6e2ecb646"><code>2b7ea30</code></a> release 20.31.0</li> <li><a href="https://github.com/pypa/virtualenv/commit/a2e76cb116adfc20d05623b4fa075e4ca222cf6e"><code>a2e76cb</code></a> Fix get_embed_wheel for unknown wheels (<a href="https://redirect.github.com/pypa/virtualenv/issues/2877">#2877</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/6d262846c5b4c32a8038005e03aededadbec9013"><code>6d26284</code></a> Fix HelpFormatter for Python 3.14 (<a href="https://redirect.github.com/pypa/virtualenv/issues/2878">#2878</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/9ee93da5b16e0f96dce8d4a1f6b44d219d697afd"><code>9ee93da</code></a> Revert a large part of the wheel removal, to support Python 3.8 (<a href="https://redirect.github.com/pypa/virtualenv/issues/2876">#2876</a>)</li> <li><a href="https://github.com/pypa/virtualenv/commit/f900bb66c1d5f6725802915b6c7e1a54bb77d81c"><code>f900bb6</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/pypa/virtualenv/issues/2875">#2875</a>)</li> 
<li><a href="https://github.com/pypa/virtualenv/commit/3fa94b7147f92cb56b69de235de5d5adf156ee56"><code>3fa94b7</code></a> Merge pull request <a href="https://redirect.github.com/pypa/virtualenv/issues/2868">#2868</a> from stefanor/no-wheel</li> <li>Additional commits viewable in <a href="https://github.com/pypa/virtualenv/compare/20.30.0...20.31.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=virtualenv&package-manager=pip&previous-version=20.30.0&new-version=20.31.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 542704645da..3a68e5577e0 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -283,7 +283,7 @@ uvloop==0.21.0 ; platform_system != "Windows" # -r requirements/lint.in valkey==6.1.0 # via -r requirements/lint.in -virtualenv==20.30.0 +virtualenv==20.31.1 # via pre-commit wait-for-it==2.3.0 # via -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 2211e7101e9..37633e5db9e 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -274,7 +274,7 @@ uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpytho # -r requirements/lint.in valkey==6.1.0 # via -r requirements/lint.in -virtualenv==20.30.0 +virtualenv==20.31.1 # via pre-commit wait-for-it==2.3.0 # via -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index aef877972af..3d44d329d5c 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ 
-111,7 +111,7 @@ uvloop==0.21.0 ; platform_system != "Windows" # via -r requirements/lint.in valkey==6.1.0 # via -r requirements/lint.in -virtualenv==20.30.0 +virtualenv==20.31.1 # via pre-commit zlib-ng==0.5.1 # via -r requirements/lint.in From b65daf6b8a71056feb8b55ee8bbeda568d09ee13 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 8 May 2025 11:18:59 +0000 Subject: [PATCH 1386/1511] Bump platformdirs from 4.3.7 to 4.3.8 (#10840) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [platformdirs](https://github.com/tox-dev/platformdirs) from 4.3.7 to 4.3.8. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/tox-dev/platformdirs/releases">platformdirs's releases</a>.</em></p> <blockquote> <h2>4.3.8</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>Add missing examples and fix order of examples in README by <a href="https://github.com/gene1wood"><code>@​gene1wood</code></a> in <a href="https://redirect.github.com/tox-dev/platformdirs/pull/355">tox-dev/platformdirs#355</a></li> </ul> <h2>New Contributors</h2> <ul> <li><a href="https://github.com/gene1wood"><code>@​gene1wood</code></a> made their first contribution in <a href="https://redirect.github.com/tox-dev/platformdirs/pull/355">tox-dev/platformdirs#355</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/tox-dev/platformdirs/compare/4.3.7...4.3.8">https://github.com/tox-dev/platformdirs/compare/4.3.7...4.3.8</a></p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/tox-dev/platformdirs/commit/22089f1e1ff477037cd7b3e03ad38ae2bf251031"><code>22089f1</code></a> Add missing examples and fix order of examples in README (<a href="https://redirect.github.com/tox-dev/platformdirs/issues/355">#355</a>)</li> <li><a 
href="https://github.com/tox-dev/platformdirs/commit/29398b1ef0d602929102b5a3cc4aca25ff954f2a"><code>29398b1</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/platformdirs/issues/354">#354</a>)</li> <li><a href="https://github.com/tox-dev/platformdirs/commit/70114b05f1144a451247a465b65caa34d9a88375"><code>70114b0</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/platformdirs/issues/353">#353</a>)</li> <li><a href="https://github.com/tox-dev/platformdirs/commit/48916117e8d756a51468d6ead75aa645a6a378c6"><code>4891611</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/platformdirs/issues/351">#351</a>)</li> <li><a href="https://github.com/tox-dev/platformdirs/commit/707be6f8dd2784d092f6de6530f6c0d64faee2f3"><code>707be6f</code></a> Bump astral-sh/setup-uv from 5 to 6 in the all group (<a href="https://redirect.github.com/tox-dev/platformdirs/issues/352">#352</a>)</li> <li><a href="https://github.com/tox-dev/platformdirs/commit/5519a4fa93059949cf1078fac01cf5dce12c3b50"><code>5519a4f</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/platformdirs/issues/349">#349</a>)</li> <li><a href="https://github.com/tox-dev/platformdirs/commit/063275e097e2910d56291b168e5d827237be5cc5"><code>063275e</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/platformdirs/issues/346">#346</a>)</li> <li><a href="https://github.com/tox-dev/platformdirs/commit/39fc38448d9d8a5a5be489115122669a76403012"><code>39fc384</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/tox-dev/platformdirs/issues/345">#345</a>)</li> <li>See full diff in <a href="https://github.com/tox-dev/platformdirs/compare/4.3.7...4.3.8">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=platformdirs&package-manager=pip&previous-version=4.3.7&new-version=4.3.8)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 3a68e5577e0..ea02b7fac7c 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -136,7 +136,7 @@ packaging==25.0 # sphinx pip-tools==7.4.1 # via -r requirements/dev.in -platformdirs==4.3.7 +platformdirs==4.3.8 # via virtualenv pluggy==1.5.0 # via pytest diff --git a/requirements/dev.txt b/requirements/dev.txt index 37633e5db9e..fa9b70b829a 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -133,7 +133,7 @@ packaging==25.0 # sphinx pip-tools==7.4.1 # via -r requirements/dev.in -platformdirs==4.3.7 +platformdirs==4.3.8 # via virtualenv pluggy==1.5.0 # via pytest diff --git a/requirements/lint.txt b/requirements/lint.txt index 3d44d329d5c..856101b364c 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -53,7 +53,7 @@ nodeenv==1.9.1 # via pre-commit packaging==25.0 # via pytest -platformdirs==4.3.7 +platformdirs==4.3.8 # via virtualenv pluggy==1.5.0 # via pytest From 
3d34912d8c1bf9d4a7a74ba25a2b56b7f24f0431 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 9 May 2025 11:18:28 +0000 Subject: [PATCH 1387/1511] Bump virtualenv from 20.31.1 to 20.31.2 (#10844) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [virtualenv](https://github.com/pypa/virtualenv) from 20.31.1 to 20.31.2. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pypa/virtualenv/releases">virtualenv's releases</a>.</em></p> <blockquote> <h2>20.31.2</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <ul> <li>release 20.31.1 by <a href="https://github.com/gaborbernat"><code>@​gaborbernat</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2882">pypa/virtualenv#2882</a></li> <li>Reintroduce the --wheel CLI option, even though it has no effect on Python > 3.8 by <a href="https://github.com/hroncok"><code>@​hroncok</code></a> in <a href="https://redirect.github.com/pypa/virtualenv/pull/2884">pypa/virtualenv#2884</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pypa/virtualenv/compare/20.31.1...20.31.2">https://github.com/pypa/virtualenv/compare/20.31.1...20.31.2</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/virtualenv/blob/main/docs/changelog.rst">virtualenv's changelog</a>.</em></p> <blockquote> <h2>v20.31.2 (2025-05-08)</h2> <p>No significant changes.</p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/virtualenv/commit/91cf77181ce159d8de6250224781f7103e6d6661"><code>91cf771</code></a> release 20.31.2</li> <li><a href="https://github.com/pypa/virtualenv/commit/9c4cd8e1a4744e4dc630b2ee35a506ad91ac4137"><code>9c4cd8e</code></a> Merge pull request <a href="https://redirect.github.com/pypa/virtualenv/issues/2884">#2884</a> from 
hroncok/wheel-option-back</li> <li><a href="https://github.com/pypa/virtualenv/commit/e9b6ba728f44ffb84ca44a40b58508c3dd5f7b29"><code>e9b6ba7</code></a> Merge pull request <a href="https://redirect.github.com/pypa/virtualenv/issues/2882">#2882</a> from pypa/release-20.31.1</li> <li><a href="https://github.com/pypa/virtualenv/commit/71d6f0a03c36619ca6bc4f37711f8fb8b2894758"><code>71d6f0a</code></a> Update changelog.rst</li> <li>See full diff in <a href="https://github.com/pypa/virtualenv/compare/20.31.1...20.31.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=virtualenv&package-manager=pip&previous-version=20.31.1&new-version=20.31.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index ea02b7fac7c..1de93e54a91 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -283,7 +283,7 @@ uvloop==0.21.0 ; platform_system != "Windows" # -r requirements/lint.in valkey==6.1.0 # via -r requirements/lint.in -virtualenv==20.31.1 +virtualenv==20.31.2 # via pre-commit wait-for-it==2.3.0 # via -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index fa9b70b829a..45147987469 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -274,7 +274,7 @@ uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpytho # -r requirements/lint.in valkey==6.1.0 # via -r requirements/lint.in -virtualenv==20.31.1 +virtualenv==20.31.2 # via pre-commit wait-for-it==2.3.0 # via -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 856101b364c..8b36761a25f 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ 
-111,7 +111,7 @@ uvloop==0.21.0 ; platform_system != "Windows" # via -r requirements/lint.in valkey==6.1.0 # via -r requirements/lint.in -virtualenv==20.31.1 +virtualenv==20.31.2 # via pre-commit zlib-ng==0.5.1 # via -r requirements/lint.in From 9671d88df758bed627ba14c6b099f05a47010e01 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 9 May 2025 11:27:41 +0000 Subject: [PATCH 1388/1511] Bump snowballstemmer from 2.2.0 to 3.0.0.1 (#10846) Bumps [snowballstemmer](https://github.com/snowballstem/snowball) from 2.2.0 to 3.0.0.1. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/snowballstem/snowball/blob/master/NEWS">snowballstemmer's changelog</a>.</em></p> <blockquote> <h1>Snowball 3.0.1 (2025-05-09)</h1> <h2>Python</h2> <ul> <li> <p>The <strong>init</strong>.py in 3.0.0 was incorrectly generated due to a missing build dependency and the list of algorithms was empty. First reported by laymonage. Thanks to Dmitry Shachnev, Henry Schreiner and Adam Turner for diagnosing and fixing. (<a href="https://redirect.github.com/snowballstem/snowball/issues/229">#229</a>, <a href="https://redirect.github.com/snowballstem/snowball/issues/230">#230</a>, <a href="https://redirect.github.com/snowballstem/snowball/issues/231">#231</a>)</p> </li> <li> <p>Add trove classifiers for Armenian and Yiddish which have now been registered with PyPI. Thanks to Henry Schreiner and Dmitry Shachnev. (<a href="https://redirect.github.com/snowballstem/snowball/issues/228">#228</a>)</p> </li> <li> <p>Update documented details of Python 2 support in old versions.</p> </li> </ul> <h1>Snowball 3.0.0 (2025-05-08)</h1> <h2>Ada</h2> <ul> <li> <p>Bug fixes:</p> <ul> <li> <p>Fix invalid Ada code generated for Snowball <code>loop</code> (it was partly Pascal!) 
None of the stemmers shipped in previous releases triggered this bug, but the Turkish stemmer now does.</p> </li> <li> <p>The Ada runtime was not tracking the current length of the string but instead used the current limit value or some other substitute, which manifested as various incorrect behaviours for code inside of <code>setlimit</code>.</p> </li> <li> <p><code>size</code> was incorrectly returning the difference between the limit and the backwards limit.</p> </li> <li> <p><code>lenof</code> or <code>sizeof</code> on a string variable generated Ada code that didn't even compile.</p> </li> <li> <p>Fix incorrect preconditions on some methods in the runtime.</p> </li> <li> <p>Fix bug in runtime code used by <code>attach</code>, <code>insert</code>, <code><-</code> and string variable assignment when a (sub)string was replaced with a larger string. This bug was triggered by code in the Kraaij-Pohlmann Dutch stemmer implementation (which was previously not enabled by default but is now the standard Dutch stemmer).</p> </li> <li> <p>Fix invalid code generated for <code>insert</code>, <code><-</code> and string variable assignment. This bug was triggered by code in the Kraaij-Pohlmann Dutch stemmer implementation (which was previously not enabled by default but is now the standard Dutch stemmer).</p> </li> </ul> </li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li>See full diff in <a href="https://github.com/snowballstem/snowball/commits">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=snowballstemmer&package-manager=pip&previous-version=2.2.0&new-version=3.0.0.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 1de93e54a91..aa545f6ed8a 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -215,7 +215,7 @@ six==1.17.0 # via python-dateutil slotscheck==0.19.1 # via -r requirements/lint.in -snowballstemmer==2.2.0 +snowballstemmer==3.0.0.1 # via sphinx sphinx==8.1.3 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 45147987469..2ad0375dafe 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -209,7 +209,7 @@ six==1.17.0 # via python-dateutil slotscheck==0.19.1 # via -r requirements/lint.in -snowballstemmer==2.2.0 +snowballstemmer==3.0.0.1 # via sphinx sphinx==8.1.3 # via diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 5024465497b..f36508ff1d9 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -40,7 +40,7 @@ requests==2.32.3 # via # sphinx # sphinxcontrib-spelling 
-snowballstemmer==2.2.0 +snowballstemmer==3.0.0.1 # via sphinx sphinx==8.1.3 # via diff --git a/requirements/doc.txt b/requirements/doc.txt index 6c3accba1ae..5e23790fad8 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -36,7 +36,7 @@ pygments==2.19.1 # via sphinx requests==2.32.3 # via sphinx -snowballstemmer==2.2.0 +snowballstemmer==3.0.0.1 # via sphinx sphinx==8.1.3 # via From 3e1251f0cdb15bf66fdaded383ec7e2cb1ca0588 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 9 May 2025 11:45:46 +0000 Subject: [PATCH 1389/1511] Bump aiodns from 3.3.0 to 3.4.0 (#10845) Bumps [aiodns](https://github.com/saghul/aiodns) from 3.3.0 to 3.4.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/aiodns/blob/master/ChangeLog">aiodns's changelog</a>.</em></p> <blockquote> <h1>3.4.0</h1> <ul> <li>Added fallback to <code>sock_state_cb</code> if <code>event_thread</code> creation fails (<a href="https://redirect.github.com/saghul/aiodns/issues/151">#151</a>) <ul> <li>Improved reliability on systems with exhausted inotify watches</li> <li>Implemented transparent fallback mechanism to ensure DNS resolution continues to work</li> </ul> </li> <li>Implemented strict typing (<a href="https://redirect.github.com/saghul/aiodns/issues/138">#138</a>) <ul> <li>Added comprehensive type annotations</li> <li>Improved mypy configuration</li> <li>Added py.typed marker file</li> </ul> </li> <li>Updated dependencies <ul> <li>Bumped pycares from 4.7.0 to 4.8.0 (<a href="https://redirect.github.com/saghul/aiodns/issues/149">#149</a>)</li> </ul> </li> <li>Added support for Python 3.13 (<a href="https://redirect.github.com/saghul/aiodns/issues/153">#153</a>) <ul> <li>Updated CI configuration to test with Python 3.13</li> </ul> </li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a 
href="https://github.com/aio-libs/aiodns/commit/be1f149ee34f025704f6788cf895fb2d00d04760"><code>be1f149</code></a> Release 3.4.0 (<a href="https://redirect.github.com/saghul/aiodns/issues/154">#154</a>)</li> <li><a href="https://github.com/aio-libs/aiodns/commit/796d500447cf7d7eb522ccb4cdfc1beeb519e593"><code>796d500</code></a> Start testing on Python 3.13 with the CI (<a href="https://redirect.github.com/saghul/aiodns/issues/153">#153</a>)</li> <li><a href="https://github.com/aio-libs/aiodns/commit/c856d44bfdf9ec665cda2f27ca3e68b262f728cb"><code>c856d44</code></a> Fallback to <code>sock_state_cb</code> if <code>event_thread</code> creation fails (<a href="https://redirect.github.com/saghul/aiodns/issues/151">#151</a>)</li> <li><a href="https://github.com/aio-libs/aiodns/commit/5c4b29c4847e532cb48976913fe31d0d8a51063a"><code>5c4b29c</code></a> Strict typing (<a href="https://redirect.github.com/saghul/aiodns/issues/138">#138</a>)</li> <li><a href="https://github.com/aio-libs/aiodns/commit/928b8a6affc15d7134c3a0f45e417661f26c7bd7"><code>928b8a6</code></a> Bump pycares from 4.7.0 to 4.8.0 (<a href="https://redirect.github.com/saghul/aiodns/issues/149">#149</a>)</li> <li>See full diff in <a href="https://github.com/saghul/aiodns/compare/v3.3.0...v3.4.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=aiodns&package-manager=pip&previous-version=3.3.0&new-version=3.4.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Sam Bull <git@sambull.org> --- aiohttp/resolver.py | 7 ++----- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 7 files changed, 8 insertions(+), 11 deletions(-) diff --git a/aiohttp/resolver.py b/aiohttp/resolver.py index e14179cc8a2..a5af5fddda6 100644 --- 
a/aiohttp/resolver.py +++ b/aiohttp/resolver.py @@ -1,6 +1,6 @@ import asyncio import socket -from typing import Any, Dict, List, Optional, Tuple, Type, Union +from typing import Any, Dict, Final, List, Optional, Tuple, Type, Union from .abc import AbstractResolver, ResolveResult @@ -153,10 +153,7 @@ async def resolve( async def _resolve_with_query( self, host: str, port: int = 0, family: int = socket.AF_INET ) -> List[Dict[str, Any]]: - if family == socket.AF_INET6: - qtype = "AAAA" - else: - qtype = "A" + qtype: Final = "AAAA" if family == socket.AF_INET6 else "A" try: resp = await self._resolver.query(host, qtype) diff --git a/requirements/base.txt b/requirements/base.txt index 7542a9d4bc2..1a0c6fe1046 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/base.txt --strip-extras requirements/base.in # -aiodns==3.3.0 +aiodns==3.4.0 # via -r requirements/runtime-deps.in aiohappyeyeballs==2.6.1 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index aa545f6ed8a..ba568e73c18 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/constraints.txt --resolver=backtracking --strip-extras requirements/constraints.in # -aiodns==3.3.0 +aiodns==3.4.0 # via # -r requirements/lint.in # -r requirements/runtime-deps.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 2ad0375dafe..e6ade218def 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/dev.txt --resolver=backtracking --strip-extras requirements/dev.in # -aiodns==3.3.0 +aiodns==3.4.0 # via # -r requirements/lint.in # -r requirements/runtime-deps.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 8b36761a25f..97854dddbc5 100644 --- a/requirements/lint.txt +++ 
b/requirements/lint.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/lint.txt --resolver=backtracking --strip-extras requirements/lint.in # -aiodns==3.3.0 +aiodns==3.4.0 # via -r requirements/lint.in annotated-types==0.7.0 # via pydantic diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index ca591313650..863d4525cad 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/runtime-deps.txt --strip-extras requirements/runtime-deps.in # -aiodns==3.3.0 +aiodns==3.4.0 # via -r requirements/runtime-deps.in aiohappyeyeballs==2.6.1 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index ba44a71270b..4949defcef3 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/test.txt --resolver=backtracking --strip-extras requirements/test.in # -aiodns==3.3.0 +aiodns==3.4.0 # via -r requirements/runtime-deps.in aiohappyeyeballs==2.6.1 # via -r requirements/runtime-deps.in From 34b8d0da148ece386c861173a24b8636fab556ed Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 12 May 2025 11:21:17 +0000 Subject: [PATCH 1390/1511] Bump snowballstemmer from 3.0.0.1 to 3.0.1 (#10856) Bumps [snowballstemmer](https://github.com/snowballstem/snowball) from 3.0.0.1 to 3.0.1. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/snowballstem/snowball/blob/master/NEWS">snowballstemmer's changelog</a>.</em></p> <blockquote> <h1>Snowball 3.0.1 (2025-05-09)</h1> <h2>Python</h2> <ul> <li> <p>The <strong>init</strong>.py in 3.0.0 was incorrectly generated due to a missing build dependency and the list of algorithms was empty. First reported by laymonage. Thanks to Dmitry Shachnev, Henry Schreiner and Adam Turner for diagnosing and fixing. 
(<a href="https://redirect.github.com/snowballstem/snowball/issues/229">#229</a>, <a href="https://redirect.github.com/snowballstem/snowball/issues/230">#230</a>, <a href="https://redirect.github.com/snowballstem/snowball/issues/231">#231</a>)</p> </li> <li> <p>Add trove classifiers for Armenian and Yiddish which have now been registered with PyPI. Thanks to Henry Schreiner and Dmitry Shachnev. (<a href="https://redirect.github.com/snowballstem/snowball/issues/228">#228</a>)</p> </li> <li> <p>Update documented details of Python 2 support in old versions.</p> </li> </ul> <h1>Snowball 3.0.0 (2025-05-08)</h1> <h2>Ada</h2> <ul> <li> <p>Bug fixes:</p> <ul> <li> <p>Fix invalid Ada code generated for Snowball <code>loop</code> (it was partly Pascal!) None of the stemmers shipped in previous releases triggered this bug, but the Turkish stemmer now does.</p> </li> <li> <p>The Ada runtime was not tracking the current length of the string but instead used the current limit value or some other substitute, which manifested as various incorrect behaviours for code inside of <code>setlimit</code>.</p> </li> <li> <p><code>size</code> was incorrectly returning the difference between the limit and the backwards limit.</p> </li> <li> <p><code>lenof</code> or <code>sizeof</code> on a string variable generated Ada code that didn't even compile.</p> </li> <li> <p>Fix incorrect preconditions on some methods in the runtime.</p> </li> <li> <p>Fix bug in runtime code used by <code>attach</code>, <code>insert</code>, <code><-</code> and string variable assignment when a (sub)string was replaced with a larger string. This bug was triggered by code in the Kraaij-Pohlmann Dutch stemmer implementation (which was previously not enabled by default but is now the standard Dutch stemmer).</p> </li> <li> <p>Fix invalid code generated for <code>insert</code>, <code><-</code> and string variable assignment. 
This bug was triggered by code in the Kraaij-Pohlmann Dutch stemmer implementation (which was previously not enabled by default but is now the standard Dutch stemmer).</p> </li> </ul> </li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li>See full diff in <a href="https://github.com/snowballstem/snowball/commits/v3.0.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=snowballstemmer&package-manager=pip&previous-version=3.0.0.1&new-version=3.0.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index ba568e73c18..4edcb8b982a 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -215,7 +215,7 @@ six==1.17.0 # via python-dateutil slotscheck==0.19.1 # via -r requirements/lint.in -snowballstemmer==3.0.0.1 +snowballstemmer==3.0.1 # via sphinx sphinx==8.1.3 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index e6ade218def..19f26d4f6e4 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -209,7 +209,7 @@ six==1.17.0 # via python-dateutil slotscheck==0.19.1 # via -r requirements/lint.in -snowballstemmer==3.0.0.1 +snowballstemmer==3.0.1 # via sphinx sphinx==8.1.3 # via diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index f36508ff1d9..24f8b6852b3 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -40,7 +40,7 @@ requests==2.32.3 # via # sphinx # sphinxcontrib-spelling 
-snowballstemmer==3.0.0.1 +snowballstemmer==3.0.1 # via sphinx sphinx==8.1.3 # via diff --git a/requirements/doc.txt b/requirements/doc.txt index 5e23790fad8..7c7c8833321 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -36,7 +36,7 @@ pygments==2.19.1 # via sphinx requests==2.32.3 # via sphinx -snowballstemmer==3.0.0.1 +snowballstemmer==3.0.1 # via sphinx sphinx==8.1.3 # via From 772de2edf71103bdda96a7a55f1d13e587be6545 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 12 May 2025 11:27:28 +0000 Subject: [PATCH 1391/1511] Bump setuptools from 80.3.1 to 80.4.0 (#10857) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [setuptools](https://github.com/pypa/setuptools) from 80.3.1 to 80.4.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/setuptools/blob/main/NEWS.rst">setuptools's changelog</a>.</em></p> <blockquote> <h1>v80.4.0</h1> <h2>Features</h2> <ul> <li>Simplified the error reporting in editable installs. 
(<a href="https://redirect.github.com/pypa/setuptools/issues/4984">#4984</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/setuptools/commit/a82f96dc43cbfb9968b100256cb50702becd614e"><code>a82f96d</code></a> Bump version: 80.3.1 → 80.4.0</li> <li><a href="https://github.com/pypa/setuptools/commit/aa4bdf8281e7d4e716abcff7ad2a916e0f05e27b"><code>aa4bdf8</code></a> Merge pull request <a href="https://redirect.github.com/pypa/setuptools/issues/4985">#4985</a> from pypa/feature/user-focused-editable-installs</li> <li><a href="https://github.com/pypa/setuptools/commit/af2f2baf5b4ee81ed45a003070d68badf3f10b11"><code>af2f2ba</code></a> Add news fragment.</li> <li><a href="https://github.com/pypa/setuptools/commit/bcc23a221e4d811bd37f5fe73d08f4013890cfb0"><code>bcc23a2</code></a> Implement the editable debugging tips as a reference to the docs.</li> <li><a href="https://github.com/pypa/setuptools/commit/aa911c6db2c10c5ac022afaa8d6da1e6c5688524"><code>aa911c6</code></a> By default, provide a much more concise error message.</li> <li>See full diff in <a href="https://github.com/pypa/setuptools/compare/v80.3.1...v80.4.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=setuptools&package-manager=pip&previous-version=80.3.1&new-version=80.4.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 4edcb8b982a..3b028f12035 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -299,7 +299,7 @@ zlib-ng==0.5.1 # The following packages 
are considered to be unsafe in a requirements file: pip==25.1.1 # via pip-tools -setuptools==80.3.1 +setuptools==80.4.0 # via # incremental # pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index 19f26d4f6e4..4821baa3595 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -290,7 +290,7 @@ zlib-ng==0.5.1 # The following packages are considered to be unsafe in a requirements file: pip==25.1.1 # via pip-tools -setuptools==80.3.1 +setuptools==80.4.0 # via # incremental # pip-tools diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 24f8b6852b3..282c9ec50a3 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -76,5 +76,5 @@ urllib3==2.4.0 # via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==80.3.1 +setuptools==80.4.0 # via incremental diff --git a/requirements/doc.txt b/requirements/doc.txt index 7c7c8833321..265dcbb092e 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -69,5 +69,5 @@ urllib3==2.4.0 # via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==80.3.1 +setuptools==80.4.0 # via incremental From f08a55fdee4eaa098cbcd75a944f44cb06a5a7c3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 15 May 2025 11:25:58 +0000 Subject: [PATCH 1392/1511] Bump setuptools from 80.4.0 to 80.7.1 (#10863) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [setuptools](https://github.com/pypa/setuptools) from 80.4.0 to 80.7.1. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/setuptools/blob/main/NEWS.rst">setuptools's changelog</a>.</em></p> <blockquote> <h1>v80.7.1</h1> <h2>Bugfixes</h2> <ul> <li>Only attempt to fetch eggs for unsatisfied requirements. 
(<a href="https://redirect.github.com/pypa/setuptools/issues/4998">#4998</a>)</li> <li>In installer, when discovering egg dists, let metadata discovery search each egg. (<a href="https://redirect.github.com/pypa/setuptools/issues/4998">#4998</a>)</li> </ul> <h1>v80.7.0</h1> <h2>Features</h2> <ul> <li>Removed usage of pkg_resources from installer. Set an official deadline on the installer deprecation to 2025-10-31. (<a href="https://redirect.github.com/pypa/setuptools/issues/4997">#4997</a>)</li> </ul> <h2>Misc</h2> <ul> <li><a href="https://redirect.github.com/pypa/setuptools/issues/4996">#4996</a></li> </ul> <h1>v80.6.0</h1> <h2>Features</h2> <ul> <li>Added a build dependency on coherent.licensed to inject the declared license text at build time. (<a href="https://redirect.github.com/pypa/setuptools/issues/4981">#4981</a>)</li> </ul> <h2>Misc</h2> <ul> <li><a href="https://redirect.github.com/pypa/setuptools/issues/4995">#4995</a></li> </ul> <h1>v80.5.0</h1> <h2>Features</h2> <ul> <li>Replaced more references to pkg_resources with importlib equivalents. (<a href="https://redirect.github.com/pypa/setuptools/issues/3085">#3085</a>)</li> </ul> <p>Misc</p> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/setuptools/commit/12ca0186ba7d9bf387d65400bb05205d0bcf9e56"><code>12ca018</code></a> Bump version: 80.7.0 → 80.7.1</li> <li><a href="https://github.com/pypa/setuptools/commit/31f8cac5faa7837e15a6716744034dff52ebcb8c"><code>31f8cac</code></a> Merge pull request <a href="https://redirect.github.com/pypa/setuptools/issues/4999">#4999</a> from pypa/bugfix/4998-directory-not-empty</li> <li><a href="https://github.com/pypa/setuptools/commit/e6f9ee9411566b276e52e828016b9d27007228bc"><code>e6f9ee9</code></a> In installer, when discovering egg dists, let metadata discovery search each ...</li> <li><a href="https://github.com/pypa/setuptools/commit/76d2923db3e60261e7e9f26a37287b27ad3933a1"><code>76d2923</code></a> Only attempt to fetch eggs for unsatisfied requirements.</li> <li><a href="https://github.com/pypa/setuptools/commit/486081e9eff38c6ed19aa24eab8200d9bba4cdce"><code>486081e</code></a> Bump version: 80.6.0 → 80.7.0</li> <li><a href="https://github.com/pypa/setuptools/commit/f2748d1c1c5046814d929cb270724c2b35ee6020"><code>f2748d1</code></a> Merge pull request <a href="https://redirect.github.com/pypa/setuptools/issues/4997">#4997</a> from pypa/feature/remove-more-pkg_resources</li> <li><a href="https://github.com/pypa/setuptools/commit/1089223a382ca11ba70fcd9f1081ca22cd9997dc"><code>1089223</code></a> Merge pull request <a href="https://redirect.github.com/pypa/setuptools/issues/4996">#4996</a> from abravalheri/cleanup-distutils</li> <li><a href="https://github.com/pypa/setuptools/commit/00c16df484fa3ec67223037846eff4a280dccb80"><code>00c16df</code></a> Bump version: 80.5.0 → 80.6.0</li> <li><a href="https://github.com/pypa/setuptools/commit/89d3aef7fa7ff5e68cd5c2c29bc986bbd4afa782"><code>89d3aef</code></a> Add news fragment.</li> <li><a href="https://github.com/pypa/setuptools/commit/b74789e2aa3227e85d61b40708959b35d7f666cc"><code>b74789e</code></a> Merge <a 
href="https://github.com/jaraco/skeleton">https://github.com/jaraco/skeleton</a></li> <li>Additional commits viewable in <a href="https://github.com/pypa/setuptools/compare/v80.4.0...v80.7.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=setuptools&package-manager=pip&previous-version=80.4.0&new-version=80.7.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 3b028f12035..155f431a317 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -299,7 +299,7 @@ zlib-ng==0.5.1 # The following packages are considered to be unsafe in a requirements file: pip==25.1.1 # via pip-tools -setuptools==80.4.0 +setuptools==80.7.1 # via # incremental # pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index 4821baa3595..8c5b84e4cdc 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -290,7 +290,7 @@ zlib-ng==0.5.1 # The following packages are considered to be unsafe in a requirements file: pip==25.1.1 # via pip-tools -setuptools==80.4.0 +setuptools==80.7.1 # via # incremental # pip-tools diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 282c9ec50a3..e00e4b52226 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -76,5 +76,5 @@ urllib3==2.4.0 
# via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==80.4.0 +setuptools==80.7.1 # via incremental diff --git a/requirements/doc.txt b/requirements/doc.txt index 265dcbb092e..0ee0b84218e 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -69,5 +69,5 @@ urllib3==2.4.0 # via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==80.4.0 +setuptools==80.7.1 # via incremental From d615013e12eb096e1b464bf485026c3656e1274f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 16 May 2025 11:21:57 +0000 Subject: [PATCH 1393/1511] Bump pluggy from 1.5.0 to 1.6.0 (#10865) Bumps [pluggy](https://github.com/pytest-dev/pluggy) from 1.5.0 to 1.6.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pytest-dev/pluggy/blob/main/CHANGELOG.rst">pluggy's changelog</a>.</em></p> <blockquote> <h1>pluggy 1.6.0 (2025-05-15)</h1> <h2>Deprecations and Removals</h2> <ul> <li><code>[#556](https://github.com/pytest-dev/pluggy/issues/556) <https://github.com/pytest-dev/pluggy/issues/556></code>_: Python 3.8 is no longer supported.</li> </ul> <h2>Bug Fixes</h2> <ul> <li> <p><code>[#504](https://github.com/pytest-dev/pluggy/issues/504) <https://github.com/pytest-dev/pluggy/issues/504></code>_: Fix a regression in pluggy 1.1.0 where using :func:<code>result.get_result() <pluggy.Result.get_result></code> on the same failed :class:<code>~pluggy.Result</code> causes the exception's traceback to get longer and longer.</p> </li> <li> <p><code>[#544](https://github.com/pytest-dev/pluggy/issues/544) <https://github.com/pytest-dev/pluggy/issues/544></code>_: Correctly pass :class:<code>StopIteration</code> through hook wrappers.</p> <p>Raising a :class:<code>StopIteration</code> in a generator triggers a :class:<code>RuntimeError</code>.</p> <p>If the :class:<code>RuntimeError</code> of a generator has the 
passed in :class:<code>StopIteration</code> as cause resume with that :class:<code>StopIteration</code> as normal exception instead of failing with the :class:<code>RuntimeError</code>.</p> </li> <li> <p><code>[#573](https://github.com/pytest-dev/pluggy/issues/573) <https://github.com/pytest-dev/pluggy/issues/573></code>_: Fix python 3.14 SyntaxError by rearranging code.</p> </li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pytest-dev/pluggy/commit/fd08ab5f811a9b2fa9124ae8cbbd393221151e2c"><code>fd08ab5</code></a> Preparing release 1.6.0</li> <li><a href="https://github.com/pytest-dev/pluggy/commit/c240362152d6f354a4056bfd6d28560e226fb70b"><code>c240362</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/pytest-dev/pluggy/issues/578">#578</a>)</li> <li><a href="https://github.com/pytest-dev/pluggy/commit/0ceb558860b5e228b67adbe8c19a368802e7d324"><code>0ceb558</code></a> Merge pull request <a href="https://redirect.github.com/pytest-dev/pluggy/issues/546">#546</a> from RonnyPfannschmidt/ronny/hookwrapper-wrap-legacy</li> <li><a href="https://github.com/pytest-dev/pluggy/commit/1f4872e8cb2547dacea155a27f94f75e7e8f66ae"><code>1f4872e</code></a> [pre-commit.ci] auto fixes from pre-commit.com hooks</li> <li><a href="https://github.com/pytest-dev/pluggy/commit/4be0c554ea22ca01c7dc30ecccac413a412c1ea7"><code>4be0c55</code></a> add changelog</li> <li><a href="https://github.com/pytest-dev/pluggy/commit/615c6c57c2de62bd9652b59c7a89b3e1fcef08ae"><code>615c6c5</code></a> Merge branch 'main' into hookwrapper-wrap-legacy</li> <li><a href="https://github.com/pytest-dev/pluggy/commit/2acc644f2ead34ac3a154bff2566037480b5c310"><code>2acc644</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/pytest-dev/pluggy/issues/577">#577</a>)</li> <li><a 
href="https://github.com/pytest-dev/pluggy/commit/ea5ada08d36d71bbd1f4e0c8472f8f9a4b210a2d"><code>ea5ada0</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/pytest-dev/pluggy/issues/576">#576</a>)</li> <li><a href="https://github.com/pytest-dev/pluggy/commit/dfd250bcfc3d73f72693d4f14cf48bb737ded9f0"><code>dfd250b</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/pytest-dev/pluggy/issues/575">#575</a>)</li> <li><a href="https://github.com/pytest-dev/pluggy/commit/1e1862fe458b663728b7523bee2c407455836931"><code>1e1862f</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/pytest-dev/pluggy/issues/574">#574</a>)</li> <li>Additional commits viewable in <a href="https://github.com/pytest-dev/pluggy/compare/1.5.0...1.6.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pluggy&package-manager=pip&previous-version=1.5.0&new-version=1.6.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 155f431a317..88de39cb86f 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -138,7 +138,7 @@ pip-tools==7.4.1 # via -r requirements/dev.in 
platformdirs==4.3.8 # via virtualenv -pluggy==1.5.0 +pluggy==1.6.0 # via pytest pre-commit==4.2.0 # via -r requirements/lint.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 8c5b84e4cdc..7f005a26ef0 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -135,7 +135,7 @@ pip-tools==7.4.1 # via -r requirements/dev.in platformdirs==4.3.8 # via virtualenv -pluggy==1.5.0 +pluggy==1.6.0 # via pytest pre-commit==4.2.0 # via -r requirements/lint.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 97854dddbc5..f6ac13607c0 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -55,7 +55,7 @@ packaging==25.0 # via pytest platformdirs==4.3.8 # via virtualenv -pluggy==1.5.0 +pluggy==1.6.0 # via pytest pre-commit==4.2.0 # via -r requirements/lint.in diff --git a/requirements/test.txt b/requirements/test.txt index 4949defcef3..1454e96cd07 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -71,7 +71,7 @@ packaging==25.0 # via # gunicorn # pytest -pluggy==1.5.0 +pluggy==1.6.0 # via pytest propcache==0.3.1 # via From 34d259af3eb4c78ab907af14fe513a75889de633 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 16 May 2025 19:48:18 +0000 Subject: [PATCH 1394/1511] [PR #10848/97eae194 backport][3.12] Add benchmark requests without session and alternating clients (#10867) Co-authored-by: J. 
Nick Koston <nick@koston.org> resolver object churn in #10847 --- tests/test_benchmarks_client.py | 61 +++++++++++++++++++++++++++++++-- 1 file changed, 59 insertions(+), 2 deletions(-) diff --git a/tests/test_benchmarks_client.py b/tests/test_benchmarks_client.py index ef2a4d88c92..5e205549e9c 100644 --- a/tests/test_benchmarks_client.py +++ b/tests/test_benchmarks_client.py @@ -4,9 +4,10 @@ import pytest from pytest_codspeed import BenchmarkFixture +from yarl import URL -from aiohttp import hdrs, web -from aiohttp.pytest_plugin import AiohttpClient +from aiohttp import hdrs, request, web +from aiohttp.pytest_plugin import AiohttpClient, AiohttpServer def test_one_hundred_simple_get_requests( @@ -34,6 +35,62 @@ def _run() -> None: loop.run_until_complete(run_client_benchmark()) +def test_one_hundred_simple_get_requests_alternating_clients( + loop: asyncio.AbstractEventLoop, + aiohttp_client: AiohttpClient, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 100 simple GET requests with alternating clients.""" + message_count = 100 + + async def handler(request: web.Request) -> web.Response: + return web.Response() + + app = web.Application() + app.router.add_route("GET", "/", handler) + + async def run_client_benchmark() -> None: + client1 = await aiohttp_client(app) + client2 = await aiohttp_client(app) + for i in range(message_count): + if i % 2 == 0: + await client1.get("/") + else: + await client2.get("/") + await client1.close() + await client2.close() + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) + + +def test_one_hundred_simple_get_requests_no_session( + loop: asyncio.AbstractEventLoop, + aiohttp_server: AiohttpServer, + benchmark: BenchmarkFixture, +) -> None: + """Benchmark 100 simple GET requests without a session.""" + message_count = 100 + + async def handler(request: web.Request) -> web.Response: + return web.Response() + + app = web.Application() + app.router.add_route("GET", "/", handler) + server = 
loop.run_until_complete(aiohttp_server(app)) + url = URL(f"http://{server.host}:{server.port}/") + + async def run_client_benchmark() -> None: + for _ in range(message_count): + async with request("GET", url): + pass + + @benchmark + def _run() -> None: + loop.run_until_complete(run_client_benchmark()) + + def test_one_hundred_simple_get_requests_multiple_methods_route( loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient, From 99a2234a294617088882ff5ac7e1226771018dcf Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sat, 17 May 2025 23:21:51 -0400 Subject: [PATCH 1395/1511] [PR #10868/323bdcf backport][3.12] Fix unclosed resources in proxy xfail tests (#10870) --- tests/test_proxy_functional.py | 110 +++++++++++++++++++-------------- 1 file changed, 62 insertions(+), 48 deletions(-) diff --git a/tests/test_proxy_functional.py b/tests/test_proxy_functional.py index 02d77700d96..d0e20eec6b4 100644 --- a/tests/test_proxy_functional.py +++ b/tests/test_proxy_functional.py @@ -5,6 +5,7 @@ import ssl import sys from re import match as match_regex +from typing import Awaitable, Callable from unittest import mock from uuid import uuid4 @@ -13,7 +14,7 @@ from yarl import URL import aiohttp -from aiohttp import web +from aiohttp import ClientResponse, web from aiohttp.client_exceptions import ClientConnectionError from aiohttp.helpers import IS_MACOS, IS_WINDOWS @@ -498,17 +499,22 @@ async def xtest_proxy_https_connect_with_port(proxy_test_server, get_request): @pytest.mark.xfail -async def xtest_proxy_https_send_body(proxy_test_server, loop): - sess = aiohttp.ClientSession(loop=loop) - proxy = await proxy_test_server() - proxy.return_value = {"status": 200, "body": b"1" * (2**20)} - url = "https://www.google.com.ua/search?q=aiohttp proxy" +async def xtest_proxy_https_send_body( + proxy_test_server: Callable[[], Awaitable[mock.Mock]], + loop: asyncio.AbstractEventLoop, +) -> None: + sess = aiohttp.ClientSession() + try: + proxy = await 
proxy_test_server() + proxy.return_value = {"status": 200, "body": b"1" * (2**20)} + url = "https://www.google.com.ua/search?q=aiohttp proxy" - async with sess.get(url, proxy=proxy.url) as resp: - body = await resp.read() - await sess.close() + async with sess.get(url, proxy=proxy.url) as resp: + body = await resp.read() - assert body == b"1" * (2**20) + assert body == b"1" * (2**20) + finally: + await sess.close() @pytest.mark.xfail @@ -592,42 +598,46 @@ async def xtest_proxy_https_auth(proxy_test_server, get_request): async def xtest_proxy_https_acquired_cleanup(proxy_test_server, loop): url = "https://secure.aiohttp.io/path" - conn = aiohttp.TCPConnector(loop=loop) - sess = aiohttp.ClientSession(connector=conn, loop=loop) - proxy = await proxy_test_server() - - assert 0 == len(conn._acquired) + conn = aiohttp.TCPConnector() + sess = aiohttp.ClientSession(connector=conn) + try: + proxy = await proxy_test_server() - async def request(): - async with sess.get(url, proxy=proxy.url): - assert 1 == len(conn._acquired) + assert 0 == len(conn._acquired) - await request() + async def request() -> None: + async with sess.get(url, proxy=proxy.url): + assert 1 == len(conn._acquired) - assert 0 == len(conn._acquired) + await request() - await sess.close() + assert 0 == len(conn._acquired) + finally: + await sess.close() + await conn.close() @pytest.mark.xfail async def xtest_proxy_https_acquired_cleanup_force(proxy_test_server, loop): url = "https://secure.aiohttp.io/path" - conn = aiohttp.TCPConnector(force_close=True, loop=loop) - sess = aiohttp.ClientSession(connector=conn, loop=loop) - proxy = await proxy_test_server() - - assert 0 == len(conn._acquired) + conn = aiohttp.TCPConnector(force_close=True) + sess = aiohttp.ClientSession(connector=conn) + try: + proxy = await proxy_test_server() - async def request(): - async with sess.get(url, proxy=proxy.url): - assert 1 == len(conn._acquired) + assert 0 == len(conn._acquired) - await request() + async def request() -> None: 
+ async with sess.get(url, proxy=proxy.url): + assert 1 == len(conn._acquired) - assert 0 == len(conn._acquired) + await request() - await sess.close() + assert 0 == len(conn._acquired) + finally: + await sess.close() + await conn.close() @pytest.mark.xfail @@ -639,26 +649,30 @@ async def xtest_proxy_https_multi_conn_limit(proxy_test_server, loop): sess = aiohttp.ClientSession(connector=conn, loop=loop) proxy = await proxy_test_server() - current_pid = None + try: + current_pid = None - async def request(pid): - # process requests only one by one - nonlocal current_pid + async def request(pid: int) -> ClientResponse: + # process requests only one by one + nonlocal current_pid - async with sess.get(url, proxy=proxy.url) as resp: - current_pid = pid - await asyncio.sleep(0.2, loop=loop) - assert current_pid == pid + async with sess.get(url, proxy=proxy.url) as resp: + current_pid = pid + await asyncio.sleep(0.2) + assert current_pid == pid - return resp + return resp - requests = [request(pid) for pid in range(multi_conn_num)] - responses = await asyncio.gather(*requests, loop=loop) + requests = [request(pid) for pid in range(multi_conn_num)] + responses = await asyncio.gather(*requests, return_exceptions=True) - assert len(responses) == multi_conn_num - assert {resp.status for resp in responses} == {200} - - await sess.close() + # Filter out exceptions to count actual responses + actual_responses = [r for r in responses if isinstance(r, ClientResponse)] + assert len(actual_responses) == multi_conn_num + assert {resp.status for resp in actual_responses} == {200} + finally: + await sess.close() + await conn.close() def _patch_ssl_transport(monkeypatch): @@ -809,7 +823,7 @@ async def xtest_proxy_from_env_https(proxy_test_server, get_request, mocker): url = "https://aiohttp.io/path" proxy = await proxy_test_server() mocker.patch.dict(os.environ, {"https_proxy": str(proxy.url)}) - mock.patch("pathlib.Path.is_file", mock_is_file) + mocker.patch("pathlib.Path.is_file", 
mock_is_file) await get_request(url=url, trust_env=True) From 5f0902b330e73b238b1b2c680c43a4688728eb96 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sun, 18 May 2025 13:46:36 -0400 Subject: [PATCH 1396/1511] [PR #10851/e5d1240 backport][3.12] remove use of deprecated policy API from tests (#10871) Co-authored-by: Kumar Aditya <kumaraditya@python.org> --- CHANGES/10851.bugfix.rst | 1 + CHANGES/10851.contrib.rst | 2 ++ aiohttp/pytest_plugin.py | 34 +++++++++++++++++++--------------- tests/conftest.py | 14 +++++--------- tests/test_connector.py | 2 +- tests/test_loop.py | 6 +++--- tests/test_proxy_functional.py | 2 +- 7 files changed, 32 insertions(+), 29 deletions(-) create mode 100644 CHANGES/10851.bugfix.rst create mode 100644 CHANGES/10851.contrib.rst diff --git a/CHANGES/10851.bugfix.rst b/CHANGES/10851.bugfix.rst new file mode 100644 index 00000000000..9c47cc95905 --- /dev/null +++ b/CHANGES/10851.bugfix.rst @@ -0,0 +1 @@ +Fixed pytest plugin to not use deprecated :py:mod:`asyncio` policy APIs. diff --git a/CHANGES/10851.contrib.rst b/CHANGES/10851.contrib.rst new file mode 100644 index 00000000000..623f96bc227 --- /dev/null +++ b/CHANGES/10851.contrib.rst @@ -0,0 +1,2 @@ +Updated tests to avoid using deprecated :py:mod:`asyncio` policy APIs and +make it compatible with Python 3.14. 
diff --git a/aiohttp/pytest_plugin.py b/aiohttp/pytest_plugin.py index 128dc46081d..7d59fe820d6 100644 --- a/aiohttp/pytest_plugin.py +++ b/aiohttp/pytest_plugin.py @@ -10,7 +10,6 @@ Iterator, Optional, Protocol, - Type, Union, overload, ) @@ -208,9 +207,13 @@ def pytest_pyfunc_call(pyfuncitem): # type: ignore[no-untyped-def] """Run coroutines in an event loop instead of a normal function call.""" fast = pyfuncitem.config.getoption("--aiohttp-fast") if inspect.iscoroutinefunction(pyfuncitem.function): - existing_loop = pyfuncitem.funcargs.get( - "proactor_loop" - ) or pyfuncitem.funcargs.get("loop", None) + existing_loop = ( + pyfuncitem.funcargs.get("proactor_loop") + or pyfuncitem.funcargs.get("selector_loop") + or pyfuncitem.funcargs.get("uvloop_loop") + or pyfuncitem.funcargs.get("loop", None) + ) + with _runtime_warning_context(): with _passthrough_loop_context(existing_loop, fast=fast) as _loop: testargs = { @@ -227,11 +230,11 @@ def pytest_generate_tests(metafunc): # type: ignore[no-untyped-def] return loops = metafunc.config.option.aiohttp_loop - avail_factories: Dict[str, Type[asyncio.AbstractEventLoopPolicy]] - avail_factories = {"pyloop": asyncio.DefaultEventLoopPolicy} + avail_factories: dict[str, Callable[[], asyncio.AbstractEventLoop]] + avail_factories = {"pyloop": asyncio.new_event_loop} if uvloop is not None: # pragma: no cover - avail_factories["uvloop"] = uvloop.EventLoopPolicy + avail_factories["uvloop"] = uvloop.new_event_loop if loops == "all": loops = "pyloop,uvloop?" 
@@ -255,11 +258,13 @@ def pytest_generate_tests(metafunc): # type: ignore[no-untyped-def] @pytest.fixture -def loop(loop_factory, fast, loop_debug): # type: ignore[no-untyped-def] +def loop( + loop_factory: Callable[[], asyncio.AbstractEventLoop], + fast: bool, + loop_debug: bool, +) -> Iterator[asyncio.AbstractEventLoop]: """Return an instance of the event loop.""" - policy = loop_factory() - asyncio.set_event_loop_policy(policy) - with loop_context(fast=fast) as _loop: + with loop_context(loop_factory, fast=fast) as _loop: if loop_debug: _loop.set_debug(True) # pragma: no cover asyncio.set_event_loop(_loop) @@ -267,11 +272,10 @@ def loop(loop_factory, fast, loop_debug): # type: ignore[no-untyped-def] @pytest.fixture -def proactor_loop(): # type: ignore[no-untyped-def] - policy = asyncio.WindowsProactorEventLoopPolicy() # type: ignore[attr-defined] - asyncio.set_event_loop_policy(policy) +def proactor_loop() -> Iterator[asyncio.AbstractEventLoop]: + factory = asyncio.ProactorEventLoop # type: ignore[attr-defined] - with loop_context(policy.new_event_loop) as _loop: + with loop_context(factory) as _loop: asyncio.set_event_loop(_loop) yield _loop diff --git a/tests/conftest.py b/tests/conftest.py index de7f8316cb0..27cd5cbd6db 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -230,21 +230,17 @@ def _proto_factory(conn_closing_result=None, **kwargs): @pytest.fixture -def selector_loop(): - policy = asyncio.WindowsSelectorEventLoopPolicy() - asyncio.set_event_loop_policy(policy) - - with loop_context(policy.new_event_loop) as _loop: +def selector_loop() -> Iterator[asyncio.AbstractEventLoop]: + factory = asyncio.SelectorEventLoop + with loop_context(factory) as _loop: asyncio.set_event_loop(_loop) yield _loop @pytest.fixture def uvloop_loop() -> Iterator[asyncio.AbstractEventLoop]: - policy = uvloop.EventLoopPolicy() - asyncio.set_event_loop_policy(policy) - - with loop_context(policy.new_event_loop) as _loop: + factory = uvloop.new_event_loop + with 
loop_context(factory) as _loop: asyncio.set_event_loop(_loop) yield _loop diff --git a/tests/test_connector.py b/tests/test_connector.py index 28a2ae1d1d2..db0514e5f0d 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -107,7 +107,7 @@ def create_mocked_conn(conn_closing_result=None, **kwargs): try: loop = asyncio.get_running_loop() except RuntimeError: - loop = asyncio.get_event_loop_policy().get_event_loop() + loop = asyncio.get_event_loop() proto = mock.Mock(**kwargs) proto.closed = loop.create_future() diff --git a/tests/test_loop.py b/tests/test_loop.py index a973efe4c43..944f17e69f0 100644 --- a/tests/test_loop.py +++ b/tests/test_loop.py @@ -37,8 +37,8 @@ def test_default_loop(self) -> None: self.assertIs(self.loop, asyncio.get_event_loop_policy().get_event_loop()) -def test_default_loop(loop) -> None: - assert asyncio.get_event_loop_policy().get_event_loop() is loop +def test_default_loop(loop: asyncio.AbstractEventLoop) -> None: + assert asyncio.get_event_loop() is loop def test_setup_loop_non_main_thread() -> None: @@ -47,7 +47,7 @@ def test_setup_loop_non_main_thread() -> None: def target() -> None: try: with loop_context() as loop: - assert asyncio.get_event_loop_policy().get_event_loop() is loop + assert asyncio.get_event_loop() is loop loop.run_until_complete(test_subprocess_co(loop)) except Exception as exc: nonlocal child_exc diff --git a/tests/test_proxy_functional.py b/tests/test_proxy_functional.py index d0e20eec6b4..c6c6ac67c1b 100644 --- a/tests/test_proxy_functional.py +++ b/tests/test_proxy_functional.py @@ -204,7 +204,6 @@ async def test_https_proxy_unsupported_tls_in_tls( await asyncio.sleep(0.1) -@pytest.mark.usefixtures("uvloop_loop") @pytest.mark.skipif( platform.system() == "Windows" or sys.implementation.name != "cpython", reason="uvloop is not supported on Windows and non-CPython implementations", @@ -216,6 +215,7 @@ async def test_https_proxy_unsupported_tls_in_tls( async def test_uvloop_secure_https_proxy( 
client_ssl_ctx: ssl.SSLContext, secure_proxy_url: URL, + uvloop_loop: asyncio.AbstractEventLoop, ) -> None: """Ensure HTTPS sites are accessible through a secure proxy without warning when using uvloop.""" conn = aiohttp.TCPConnector() From 94901a221f374b60dfe2b56e1e5ae9cfe6fe303f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 May 2025 10:29:12 +0000 Subject: [PATCH 1397/1511] Bump cryptography from 44.0.3 to 45.0.2 (#10873) Bumps [cryptography](https://github.com/pyca/cryptography) from 44.0.3 to 45.0.2. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst">cryptography's changelog</a>.</em></p> <blockquote> <p>45.0.2 - 2025-05-17</p> <pre><code> * Fixed using ``mypy`` with ``cryptography`` on older versions of Python. <p>.. _v45-0-1:</p> <p>45.0.1 - 2025-05-17<br /> </code></pre></p> <ul> <li>Updated Windows, macOS, and Linux wheels to be compiled with OpenSSL 3.5.0.</li> </ul> <p>.. _v45-0-0:</p> <p>45.0.0 - 2025-05-17 (YANKED)</p> <pre><code> * Support for Python 3.7 is deprecated and will be removed in the next ``cryptography`` release. * Updated the minimum supported Rust version (MSRV) to 1.74.0, from 1.65.0. * Added support for serialization of PKCS#12 Java truststores in :func:`~cryptography.hazmat.primitives.serialization.pkcs12.serialize_java_truststore` * Added :meth:`~cryptography.hazmat.primitives.kdf.argon2.Argon2id.derive_phc_encoded` and :meth:`~cryptography.hazmat.primitives.kdf.argon2.Argon2id.verify_phc_encoded` methods to support password hashing in the PHC string format * Added support for PKCS7 decryption and encryption using AES-256 as the content algorithm, in addition to AES-128. 
* **BACKWARDS INCOMPATIBLE:** Made SSH private key loading more consistent with other private key loading: :func:`~cryptography.hazmat.primitives.serialization.load_ssh_private_key` now raises a ``TypeError`` if the key is unencrypted but a password is provided (previously no exception was raised), and raises a ``TypeError`` if the key is encrypted but no password is provided (previously a ``ValueError`` was raised). * We significantly refactored how private key loading ( :func:`~cryptography.hazmat.primitives.serialization.load_pem_private_key` and :func:`~cryptography.hazmat.primitives.serialization.load_der_private_key`) works. This is intended to be backwards compatible for all well-formed keys, therefore if you discover a key that now raises an exception, please file a bug with instructions for reproducing. * Added ``unsafe_skip_rsa_key_validation`` keyword-argument to :func:`~cryptography.hazmat.primitives.serialization.load_ssh_private_key`. * Added :class:`~cryptography.hazmat.primitives.hashes.XOFHash` to support repeated :meth:`~cryptography.hazmat.primitives.hashes.XOFHash.squeeze` operations on extendable output functions. * Added :meth:`~cryptography.x509.ocsp.OCSPResponseBuilder.add_response_by_hash` method to allow creating OCSP responses using certificate hash values rather than full certificates. </tr></table> </code></pre> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pyca/cryptography/commit/f81c07535ddf2d26cb1a27e70a9967ab708b8056"><code>f81c075</code></a> Backport mypy fixes for release (<a href="https://redirect.github.com/pyca/cryptography/issues/12930">#12930</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/8ea28e0bc724e57433f4f062795d91c0a367e9ad"><code>8ea28e0</code></a> bump for 45.0.1 (<a href="https://redirect.github.com/pyca/cryptography/issues/12922">#12922</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/67840977c95a23d0dcfba154e303014026df0d3e"><code>6784097</code></a> bump for 45 release (<a href="https://redirect.github.com/pyca/cryptography/issues/12886">#12886</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/2d9c1c9cbe650f2888c14476a4b30ef85d3fd2bc"><code>2d9c1c9</code></a> bump MSRV to 1.74 (<a href="https://redirect.github.com/pyca/cryptography/issues/12919">#12919</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/6c18874cc2e76f173b77c67609cfb4d3495964c3"><code>6c18874</code></a> Bump BoringSSL, OpenSSL, AWS-LC in CI (<a href="https://redirect.github.com/pyca/cryptography/issues/12918">#12918</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/43fd312aea73e4ad79a54c78848e73bf5a640336"><code>43fd312</code></a> add test vectors for upcoming explicit curve loading (<a href="https://redirect.github.com/pyca/cryptography/issues/12913">#12913</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/6bfa0a31256a631a0543e3b0cee5a101f1ac3d3f"><code>6bfa0a3</code></a> chore(deps): bump asn1 from 0.21.2 to 0.21.3 (<a href="https://redirect.github.com/pyca/cryptography/issues/12914">#12914</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/a88dd6635c19ef282e0a6469a9604fef096cc843"><code>a88dd66</code></a> chore(deps): bump cc from 1.2.22 to 1.2.23 (<a 
href="https://redirect.github.com/pyca/cryptography/issues/12912">#12912</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/e4e98407e866064b55dfcad599117995279cecf6"><code>e4e9840</code></a> chore(deps): bump uv from 0.7.3 to 0.7.4 in /.github/requirements (<a href="https://redirect.github.com/pyca/cryptography/issues/12911">#12911</a>)</li> <li><a href="https://github.com/pyca/cryptography/commit/e140233902db423f031d79237533aacfe96ba119"><code>e140233</code></a> chore(deps): bump uv from 0.7.3 to 0.7.4 (<a href="https://redirect.github.com/pyca/cryptography/issues/12910">#12910</a>)</li> <li>Additional commits viewable in <a href="https://github.com/pyca/cryptography/compare/44.0.3...45.0.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=cryptography&package-manager=pip&previous-version=44.0.3&new-version=45.0.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 88de39cb86f..b8d832fa429 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -58,7 +58,7 @@ coverage==7.8.0 # via # -r requirements/test.in # pytest-cov -cryptography==44.0.3 +cryptography==45.0.2 # via # pyjwt # trustme diff --git a/requirements/dev.txt b/requirements/dev.txt index 7f005a26ef0..aa5b83d6cec 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -58,7 +58,7 @@ coverage==7.8.0 # via # -r requirements/test.in # pytest-cov -cryptography==44.0.3 +cryptography==45.0.2 # via # pyjwt # trustme diff --git a/requirements/lint.txt b/requirements/lint.txt index f6ac13607c0..fcf5f2d0235 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -21,7 +21,7 @@ cfgv==3.4.0 # via pre-commit click==8.1.8 # via slotscheck -cryptography==44.0.3 +cryptography==45.0.2 # via trustme distlib==0.3.9 # via virtualenv diff --git a/requirements/test.txt 
b/requirements/test.txt index 1454e96cd07..bd64852cd18 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -31,7 +31,7 @@ coverage==7.8.0 # via # -r requirements/test.in # pytest-cov -cryptography==44.0.3 +cryptography==45.0.2 # via trustme exceptiongroup==1.2.2 # via pytest From cfe426951bfc734fe6c923072b52e6ecbe5c9e9f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 May 2025 10:43:00 +0000 Subject: [PATCH 1398/1511] Bump exceptiongroup from 1.2.2 to 1.3.0 (#10859) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [exceptiongroup](https://github.com/agronholm/exceptiongroup) from 1.2.2 to 1.3.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/agronholm/exceptiongroup/releases">exceptiongroup's releases</a>.</em></p> <blockquote> <h2>1.3.0</h2> <ul> <li>Added <code>**kwargs</code> to function and method signatures as appropriate to match the signatures in the standard library</li> <li>In line with the stdlib typings in typeshed, updated <code>(Base)ExceptionGroup</code> generic types to define defaults for their generic arguments (defaulting to <code>BaseExceptionGroup[BaseException]</code> and <code>ExceptionGroup[Exception]</code>) (PR by <a href="https://github.com/mikenerone"><code>@​mikenerone</code></a>)</li> <li>Changed <code>BaseExceptionGroup.__init__()</code> to directly call <code>BaseException.__init__()</code> instead of the superclass <code>__init__()</code> in order to emulate the CPython behavior (broken or not) (PR by <a href="https://github.com/cfbolz"><code>@​cfbolz</code></a>)</li> <li>Changed the <code>exceptions</code> attribute to always return the same tuple of exceptions, created from the original exceptions sequence passed to <code>BaseExceptionGroup</code> to match CPython behavior (<a href="https://redirect.github.com/agronholm/exceptiongroup/issues/143">#143</a>)</li> 
</ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/agronholm/exceptiongroup/blob/main/CHANGES.rst">exceptiongroup's changelog</a>.</em></p> <blockquote> <h1>Version history</h1> <p>This library adheres to <code>Semantic Versioning 2.0 <http://semver.org/></code>_.</p> <p><strong>1.3.0</strong></p> <ul> <li>Added <code>**kwargs</code> to function and method signatures as appropriate to match the signatures in the standard library</li> <li>In line with the stdlib typings in typeshed, updated <code>(Base)ExceptionGroup</code> generic types to define defaults for their generic arguments (defaulting to <code>BaseExceptionGroup[BaseException]</code> and <code>ExceptionGroup[Exception]</code>) (PR by <a href="https://github.com/mikenerone"><code>@​mikenerone</code></a>)</li> <li>Changed <code>BaseExceptionGroup.__init__()</code> to directly call <code>BaseException.__init__()</code> instead of the superclass <code>__init__()</code> in order to emulate the CPython behavior (broken or not) (PR by <a href="https://github.com/cfbolz"><code>@​cfbolz</code></a>)</li> <li>Changed the <code>exceptions</code> attribute to always return the same tuple of exceptions, created from the original exceptions sequence passed to <code>BaseExceptionGroup</code> to match CPython behavior (<code>[#143](https://github.com/agronholm/exceptiongroup/issues/143) <https://github.com/agronholm/exceptiongroup/issues/143></code>_)</li> </ul> <p><strong>1.2.2</strong></p> <ul> <li>Removed an <code>assert</code> in <code>exceptiongroup._formatting</code> that caused compatibility issues with Sentry (<code>[#123](https://github.com/agronholm/exceptiongroup/issues/123) <https://github.com/agronholm/exceptiongroup/issues/123></code>_)</li> </ul> <p><strong>1.2.1</strong></p> <ul> <li>Updated the copying of <code>__notes__</code> to match CPython behavior (PR by CF Bolz-Tereick)</li> <li>Corrected the type annotation of the exception handler 
callback to accept a <code>BaseExceptionGroup</code> instead of <code>BaseException</code></li> <li>Fixed type errors on Python < 3.10 and the type annotation of <code>suppress()</code> (PR by John Litborn)</li> </ul> <p><strong>1.2.0</strong></p> <ul> <li>Added special monkeypatching if <code>Apport <https://github.com/canonical/apport></code>_ has overridden <code>sys.excepthook</code> so it will format exception groups correctly (PR by John Litborn)</li> <li>Added a backport of <code>contextlib.suppress()</code> from Python 3.12.1 which also handles suppressing exceptions inside exception groups</li> <li>Fixed bare <code>raise</code> in a handler reraising the original naked exception rather than an exception group which is what is raised when you do a <code>raise</code> in an <code>except*</code> handler</li> </ul> <p><strong>1.1.3</strong></p> <ul> <li><code>catch()</code> now raises a <code>TypeError</code> if passed an async exception handler instead of just giving a <code>RuntimeWarning</code> about the coroutine never being awaited. (<a href="https://redirect.github.com/agronholm/exceptiongroup/issues/66">#66</a>, PR by John Litborn)</li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/agronholm/exceptiongroup/commit/77fba8a871408ff2c48f536e5e73b1918239ba5f"><code>77fba8a</code></a> Added the release version</li> <li><a href="https://github.com/agronholm/exceptiongroup/commit/5e153aa379ac53af79cc7f5e287f77929cb4d0dc"><code>5e153aa</code></a> Revert "Migrated test dependencies to dependency groups"</li> <li><a href="https://github.com/agronholm/exceptiongroup/commit/5000bfea208ad59e3a20e2fb91a513ad559711b1"><code>5000bfe</code></a> Migrated tox configuration to native TOML</li> <li><a href="https://github.com/agronholm/exceptiongroup/commit/427220d67a52585e98575103b090b5fdaf87a899"><code>427220d</code></a> Updated pytest options</li> <li><a href="https://github.com/agronholm/exceptiongroup/commit/4ca264fa3605d52067c20b351a0d3b947fa1f363"><code>4ca264f</code></a> Migrated test dependencies to dependency groups</li> <li><a href="https://github.com/agronholm/exceptiongroup/commit/163c3a8cb27f8a5325258b5a83e7cf8fc002c3b7"><code>163c3a8</code></a> Marked test_exceptions_mutate_original_sequence as xfail on pypy3.11</li> <li><a href="https://github.com/agronholm/exceptiongroup/commit/a1765740db2d55d1eb91d67a8fbbb355caf7881b"><code>a176574</code></a> Always create the exceptions tuple at init and return it from the exceptions ...</li> <li><a href="https://github.com/agronholm/exceptiongroup/commit/550b79621cc35892413fa91903a1d6c7951d0449"><code>550b796</code></a> Added BaseExceptionGroup.<strong>init</strong>, following CPython (<a href="https://redirect.github.com/agronholm/exceptiongroup/issues/142">#142</a>)</li> <li><a href="https://github.com/agronholm/exceptiongroup/commit/2a84dfd5599bca0c653143f0f4252d38afac9867"><code>2a84dfd</code></a> Added typevar defaults to (Base)ExceptionGroup (<a href="https://redirect.github.com/agronholm/exceptiongroup/issues/147">#147</a>)</li> <li><a 
href="https://github.com/agronholm/exceptiongroup/commit/fb9133b495fc82bc2907e8cfbdff6c6dc3087e2f"><code>fb9133b</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/agronholm/exceptiongroup/issues/145">#145</a>)</li> <li>Additional commits viewable in <a href="https://github.com/agronholm/exceptiongroup/compare/1.2.2...1.3.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=exceptiongroup&package-manager=pip&previous-version=1.2.2&new-version=1.3.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 3 ++- requirements/dev.txt | 3 ++- requirements/lint.txt | 3 ++- requirements/test.txt | 3 ++- 4 files changed, 8 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index b8d832fa429..b70023e65d8 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -68,7 +68,7 @@ distlib==0.3.9 # via virtualenv docutils==0.21.2 # via sphinx -exceptiongroup==1.2.2 +exceptiongroup==1.3.0 # via pytest execnet==2.1.1 # via pytest-xdist @@ -264,6 +264,7 @@ trustme==1.2.1 ; platform_machine != "i686" # -r requirements/test.in typing-extensions==4.13.2 # via + # exceptiongroup # multidict # mypy # pydantic diff --git a/requirements/dev.txt b/requirements/dev.txt index aa5b83d6cec..ce52430fbee 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -66,7 +66,7 @@ distlib==0.3.9 # via virtualenv docutils==0.21.2 # via sphinx -exceptiongroup==1.2.2 +exceptiongroup==1.3.0 # via pytest execnet==2.1.1 # via pytest-xdist @@ -255,6 +255,7 @@ trustme==1.2.1 ; platform_machine != "i686" # -r requirements/test.in typing-extensions==4.13.2 # via + # 
exceptiongroup # multidict # mypy # pydantic diff --git a/requirements/lint.txt b/requirements/lint.txt index fcf5f2d0235..28aa349a511 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -25,7 +25,7 @@ cryptography==45.0.2 # via trustme distlib==0.3.9 # via virtualenv -exceptiongroup==1.2.2 +exceptiongroup==1.3.0 # via pytest filelock==3.18.0 # via virtualenv @@ -99,6 +99,7 @@ trustme==1.2.1 # via -r requirements/lint.in typing-extensions==4.13.2 # via + # exceptiongroup # mypy # pydantic # pydantic-core diff --git a/requirements/test.txt b/requirements/test.txt index bd64852cd18..5b3444b3cc4 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -33,7 +33,7 @@ coverage==7.8.0 # pytest-cov cryptography==45.0.2 # via trustme -exceptiongroup==1.2.2 +exceptiongroup==1.3.0 # via pytest execnet==2.1.1 # via pytest-xdist @@ -127,6 +127,7 @@ trustme==1.2.1 ; platform_machine != "i686" # via -r requirements/test.in typing-extensions==4.13.2 # via + # exceptiongroup # multidict # mypy # pydantic From 6ea542ef4b78174b2b9c39146fc6f72abeaba2ce Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Mon, 19 May 2025 09:02:22 -0400 Subject: [PATCH 1399/1511] [3.12] Updates for Cython 3.1.1 (#10877) closes #10849 --- CHANGES/10877.packaging.rst | 1 + aiohttp/_websocket/reader_py.py | 2 +- requirements/constraints.txt | 2 +- requirements/cython.in | 2 +- requirements/cython.txt | 2 +- 5 files changed, 5 insertions(+), 4 deletions(-) create mode 100644 CHANGES/10877.packaging.rst diff --git a/CHANGES/10877.packaging.rst b/CHANGES/10877.packaging.rst new file mode 100644 index 00000000000..0bc2ee03984 --- /dev/null +++ b/CHANGES/10877.packaging.rst @@ -0,0 +1 @@ +Fixed compatibility issue with Cython 3.1.1 -- by :user:`bdraco` diff --git a/aiohttp/_websocket/reader_py.py b/aiohttp/_websocket/reader_py.py index 855f9c6d600..f966a1593c5 100644 --- a/aiohttp/_websocket/reader_py.py +++ b/aiohttp/_websocket/reader_py.py @@ -79,7 +79,7 @@ def exception(self) -> Optional[BaseException]: def set_exception( self, - exc: "BaseException", + exc: BaseException, exc_cause: builtins.BaseException = _EXC_SENTINEL, ) -> None: self._eof = True diff --git a/requirements/constraints.txt b/requirements/constraints.txt index b70023e65d8..9a53aaaea12 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -62,7 +62,7 @@ cryptography==45.0.2 # via # pyjwt # trustme -cython==3.0.12 +cython==3.1.1 # via -r requirements/cython.in distlib==0.3.9 # via virtualenv diff --git a/requirements/cython.in b/requirements/cython.in index 6f0238f170d..6b848f6df9e 100644 --- a/requirements/cython.in +++ b/requirements/cython.in @@ -1,3 +1,3 @@ -r multidict.in -Cython +Cython >= 3.1.1 diff --git a/requirements/cython.txt b/requirements/cython.txt index 8686651881b..1dd3cc00fc4 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/cython.txt --resolver=backtracking --strip-extras requirements/cython.in # -cython==3.0.12 +cython==3.1.1 # via -r 
requirements/cython.in multidict==6.4.3 # via -r requirements/multidict.in From 5044d537abe1ffff2f731631e4869ba025258b79 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 19 May 2025 11:41:30 -0400 Subject: [PATCH 1400/1511] [PR #9732/1e911ea backport][3.12] Add Client Middleware Support (#10879) Co-authored-by: Sam Bull <git@sambull.org> Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- CHANGES/9732.feature.rst | 6 + aiohttp/__init__.py | 4 + aiohttp/client.py | 81 ++- aiohttp/client_middlewares.py | 58 ++ aiohttp/client_reqrep.py | 18 +- docs/client_advanced.rst | 212 ++++++ docs/client_reference.rst | 16 + tests/test_client_functional.py | 29 +- tests/test_client_middleware.py | 1116 +++++++++++++++++++++++++++++++ 9 files changed, 1512 insertions(+), 28 deletions(-) create mode 100644 CHANGES/9732.feature.rst create mode 100644 aiohttp/client_middlewares.py create mode 100644 tests/test_client_middleware.py diff --git a/CHANGES/9732.feature.rst b/CHANGES/9732.feature.rst new file mode 100644 index 00000000000..bf6dd8ebde3 --- /dev/null +++ b/CHANGES/9732.feature.rst @@ -0,0 +1,6 @@ +Added client middleware support -- by :user:`bdraco` and :user:`Dreamsorcerer`. + +This change allows users to add middleware to the client session and requests, enabling features like +authentication, logging, and request/response modification without modifying the core +request logic. Additionally, the ``session`` attribute was added to ``ClientRequest``, +allowing middleware to access the session for making additional requests. 
diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 6321e713ed4..d18bab60d2e 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -47,6 +47,7 @@ WSServerHandshakeError, request, ) +from .client_middlewares import ClientHandlerType, ClientMiddlewareType from .compression_utils import set_zlib_backend from .connector import ( AddrInfoType as AddrInfoType, @@ -175,6 +176,9 @@ "NamedPipeConnector", "WSServerHandshakeError", "request", + # client_middleware + "ClientMiddlewareType", + "ClientHandlerType", # cookiejar "CookieJar", "DummyCookieJar", diff --git a/aiohttp/client.py b/aiohttp/client.py index 8ba5e282e2c..2b7afe1344c 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -70,6 +70,7 @@ WSMessageTypeError, WSServerHandshakeError, ) +from .client_middlewares import ClientMiddlewareType, build_client_middlewares from .client_reqrep import ( ClientRequest as ClientRequest, ClientResponse as ClientResponse, @@ -191,6 +192,7 @@ class _RequestOptions(TypedDict, total=False): auto_decompress: Union[bool, None] max_line_size: Union[int, None] max_field_size: Union[int, None] + middlewares: Optional[Tuple[ClientMiddlewareType, ...]] @attr.s(auto_attribs=True, frozen=True, slots=True) @@ -258,6 +260,7 @@ class ClientSession: "_default_proxy", "_default_proxy_auth", "_retry_connection", + "_middlewares", "requote_redirect_url", ] ) @@ -298,6 +301,7 @@ def __init__( max_line_size: int = 8190, max_field_size: int = 8190, fallback_charset_resolver: _CharsetResolver = lambda r, b: "utf-8", + middlewares: Optional[Tuple[ClientMiddlewareType, ...]] = None, ) -> None: # We initialise _connector to None immediately, as it's referenced in __del__() # and could cause issues if an exception occurs during initialisation. 
@@ -410,6 +414,7 @@ def __init__( self._default_proxy = proxy self._default_proxy_auth = proxy_auth self._retry_connection: bool = True + self._middlewares = middlewares def __init_subclass__(cls: Type["ClientSession"]) -> None: warnings.warn( @@ -500,6 +505,7 @@ async def _request( auto_decompress: Optional[bool] = None, max_line_size: Optional[int] = None, max_field_size: Optional[int] = None, + middlewares: Optional[Tuple[ClientMiddlewareType, ...]] = None, ) -> ClientResponse: # NOTE: timeout clamps existing connect and read timeouts. We cannot @@ -699,32 +705,33 @@ async def _request( trust_env=self.trust_env, ) - # connection timeout - try: - conn = await self._connector.connect( - req, traces=traces, timeout=real_timeout + # Core request handler - now includes connection logic + async def _connect_and_send_request( + req: ClientRequest, + ) -> ClientResponse: + # connection timeout + assert self._connector is not None + try: + conn = await self._connector.connect( + req, traces=traces, timeout=real_timeout + ) + except asyncio.TimeoutError as exc: + raise ConnectionTimeoutError( + f"Connection timeout to host {req.url}" + ) from exc + + assert conn.protocol is not None + conn.protocol.set_response_params( + timer=timer, + skip_payload=req.method in EMPTY_BODY_METHODS, + read_until_eof=read_until_eof, + auto_decompress=auto_decompress, + read_timeout=real_timeout.sock_read, + read_bufsize=read_bufsize, + timeout_ceil_threshold=self._connector._timeout_ceil_threshold, + max_line_size=max_line_size, + max_field_size=max_field_size, ) - except asyncio.TimeoutError as exc: - raise ConnectionTimeoutError( - f"Connection timeout to host {url}" - ) from exc - - assert conn.transport is not None - - assert conn.protocol is not None - conn.protocol.set_response_params( - timer=timer, - skip_payload=method in EMPTY_BODY_METHODS, - read_until_eof=read_until_eof, - auto_decompress=auto_decompress, - read_timeout=real_timeout.sock_read, - read_bufsize=read_bufsize, - 
timeout_ceil_threshold=self._connector._timeout_ceil_threshold, - max_line_size=max_line_size, - max_field_size=max_field_size, - ) - - try: try: resp = await req.send(conn) try: @@ -735,6 +742,30 @@ async def _request( except BaseException: conn.close() raise + return resp + + # Apply middleware (if any) - per-request middleware overrides session middleware + effective_middlewares = ( + self._middlewares if middlewares is None else middlewares + ) + + if effective_middlewares: + handler = build_client_middlewares( + _connect_and_send_request, effective_middlewares + ) + else: + handler = _connect_and_send_request + + try: + resp = await handler(req) + # Client connector errors should not be retried + except ( + ConnectionTimeoutError, + ClientConnectorError, + ClientConnectorCertificateError, + ClientConnectorSSLError, + ): + raise except (ClientOSError, ServerDisconnectedError): if retry_persistent_connection: retry_persistent_connection = False diff --git a/aiohttp/client_middlewares.py b/aiohttp/client_middlewares.py new file mode 100644 index 00000000000..6be353c3a40 --- /dev/null +++ b/aiohttp/client_middlewares.py @@ -0,0 +1,58 @@ +"""Client middleware support.""" + +from collections.abc import Awaitable, Callable + +from .client_reqrep import ClientRequest, ClientResponse + +__all__ = ("ClientMiddlewareType", "ClientHandlerType", "build_client_middlewares") + +# Type alias for client request handlers - functions that process requests and return responses +ClientHandlerType = Callable[[ClientRequest], Awaitable[ClientResponse]] + +# Type for client middleware - similar to server but uses ClientRequest/ClientResponse +ClientMiddlewareType = Callable[ + [ClientRequest, ClientHandlerType], Awaitable[ClientResponse] +] + + +def build_client_middlewares( + handler: ClientHandlerType, + middlewares: tuple[ClientMiddlewareType, ...], +) -> ClientHandlerType: + """ + Apply middlewares to request handler. 
+ + The middlewares are applied in reverse order, so the first middleware + in the list wraps all subsequent middlewares and the handler. + + This implementation avoids using partial/update_wrapper to minimize overhead + and doesn't cache to avoid holding references to stateful middleware. + """ + if not middlewares: + return handler + + # Optimize for single middleware case + if len(middlewares) == 1: + middleware = middlewares[0] + + async def single_middleware_handler(req: ClientRequest) -> ClientResponse: + return await middleware(req, handler) + + return single_middleware_handler + + # Build the chain for multiple middlewares + current_handler = handler + + for middleware in reversed(middlewares): + # Create a new closure that captures the current state + def make_wrapper( + mw: ClientMiddlewareType, next_h: ClientHandlerType + ) -> ClientHandlerType: + async def wrapped(req: ClientRequest) -> ClientResponse: + return await mw(req, next_h) + + return wrapped + + current_handler = make_wrapper(middleware, current_handler) + + return current_handler diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 43b48063c6e..ef0dd42b969 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -272,7 +272,13 @@ class ClientRequest: auth = None response = None - __writer = None # async task for streaming data + __writer: Optional["asyncio.Task[None]"] = None # async task for streaming data + + # These class defaults help create_autospec() work correctly. + # If autospec is improved in future, maybe these can be removed. + url = URL() + method = "GET" + _continue = None # waiter future for '100 Continue' response _skip_auto_headers: Optional["CIMultiDict[None]"] = None @@ -427,6 +433,16 @@ def request_info(self) -> RequestInfo: RequestInfo, (self.url, self.method, headers, self.original_url) ) + @property + def session(self) -> "ClientSession": + """Return the ClientSession instance. 
+ + This property provides access to the ClientSession that initiated + this request, allowing middleware to make additional requests + using the same session. + """ + return self._session + def update_host(self, url: URL) -> None: """Update destination host, port and connection type (ssl).""" # get host/port diff --git a/docs/client_advanced.rst b/docs/client_advanced.rst index 39cd259dc9e..8795b3d164a 100644 --- a/docs/client_advanced.rst +++ b/docs/client_advanced.rst @@ -98,6 +98,218 @@ background. ``Authorization`` header will be removed if you get redirected to a different host or protocol. +.. _aiohttp-client-middleware: + +Client Middleware +----------------- + +aiohttp client supports middleware to intercept requests and responses. This can be +useful for authentication, logging, request/response modification, and retries. + +To create a middleware, you need to define an async function that accepts the request +and a handler function, and returns the response. The middleware must match the +:type:`ClientMiddlewareType` type signature:: + + import logging + from aiohttp import ClientSession, ClientRequest, ClientResponse, ClientHandlerType + + _LOGGER = logging.getLogger(__name__) + + async def my_middleware( + request: ClientRequest, + handler: ClientHandlerType + ) -> ClientResponse: + # Process request before sending + _LOGGER.debug(f"Request: {request.method} {request.url}") + + # Call the next handler + response = await handler(request) + + # Process response after receiving + _LOGGER.debug(f"Response: {response.status}") + + return response + +You can apply middleware to a client session or to individual requests:: + + # Apply to all requests in a session + async with ClientSession(middlewares=(my_middleware,)) as session: + resp = await session.get('http://example.com') + + # Apply to a specific request + async with ClientSession() as session: + resp = await session.get('http://example.com', middlewares=(my_middleware,)) + +Middleware Examples 
+^^^^^^^^^^^^^^^^^^^ + +Here's a simple example showing request modification:: + + async def add_api_key_middleware( + request: ClientRequest, + handler: ClientHandlerType + ) -> ClientResponse: + # Add API key to all requests + request.headers['X-API-Key'] = 'my-secret-key' + return await handler(request) + +.. _client-middleware-retry: + +Middleware Retry Pattern +^^^^^^^^^^^^^^^^^^^^^^^^ + +Client middleware can implement retry logic internally using a ``while`` loop. This allows the middleware to: + +- Retry requests based on response status codes or other conditions +- Modify the request between retries (e.g., refreshing tokens) +- Maintain state across retry attempts +- Control when to stop retrying and return the response + +This pattern is particularly useful for: + +- Refreshing authentication tokens after a 401 response +- Switching to fallback servers or authentication methods +- Adding or modifying headers based on error responses +- Implementing back-off strategies with increasing delays + +The middleware can maintain state between retries to track which strategies have been tried and modify the request accordingly for the next attempt. 
+ +Example: Retrying requests with middleware +"""""""""""""""""""""""""""""""""""""""""" + +:: + + import logging + import aiohttp + + _LOGGER = logging.getLogger(__name__) + + class RetryMiddleware: + def __init__(self, max_retries: int = 3): + self.max_retries = max_retries + + async def __call__( + self, + request: ClientRequest, + handler: ClientHandlerType + ) -> ClientResponse: + retry_count = 0 + use_fallback_auth = False + + while True: + # Modify request based on retry state + if use_fallback_auth: + request.headers['Authorization'] = 'Bearer fallback-token' + + response = await handler(request) + + # Retry on 401 errors with different authentication + if response.status == 401 and retry_count < self.max_retries: + retry_count += 1 + use_fallback_auth = True + _LOGGER.debug(f"Retrying with fallback auth (attempt {retry_count})") + continue + + # Retry on 5xx errors + if response.status >= 500 and retry_count < self.max_retries: + retry_count += 1 + _LOGGER.debug(f"Retrying request (attempt {retry_count})") + continue + + return response + +Middleware Chaining +^^^^^^^^^^^^^^^^^^^ + +Multiple middlewares are applied in the order they are listed:: + + import logging + + _LOGGER = logging.getLogger(__name__) + + async def logging_middleware( + request: ClientRequest, + handler: ClientHandlerType + ) -> ClientResponse: + _LOGGER.debug(f"[LOG] {request.method} {request.url}") + return await handler(request) + + async def auth_middleware( + request: ClientRequest, + handler: ClientHandlerType + ) -> ClientResponse: + request.headers['Authorization'] = 'Bearer token123' + return await handler(request) + + # Middlewares are applied in order: logging -> auth -> request + async with ClientSession(middlewares=(logging_middleware, auth_middleware)) as session: + resp = await session.get('http://example.com') + +.. note:: + + Client middleware is a powerful feature but should be used judiciously. + Each middleware adds overhead to request processing. 
For simple use cases + like adding static headers, you can often use request parameters + (e.g., ``headers``) or session configuration instead. + +.. warning:: + + Using the same session from within middleware can cause infinite recursion if + the middleware makes HTTP requests using the same session that has the middleware + applied. + + To avoid recursion, use one of these approaches: + + **Recommended:** Pass ``middlewares=()`` to requests made inside the middleware to + disable middleware for those specific requests:: + + async def log_middleware( + request: ClientRequest, + handler: ClientHandlerType + ) -> ClientResponse: + async with request.session.post( + "https://logapi.example/log", + json={"url": str(request.url)}, + middlewares=() # This prevents infinite recursion + ) as resp: + pass + + return await handler(request) + + **Alternative:** Check the request contents (URL, path, host) to avoid applying + middleware to certain requests:: + + async def log_middleware( + request: ClientRequest, + handler: ClientHandlerType + ) -> ClientResponse: + if request.url.host != "logapi.example": # Avoid infinite recursion + async with request.session.post( + "https://logapi.example/log", + json={"url": str(request.url)} + ) as resp: + pass + + return await handler(request) + +Middleware Type +^^^^^^^^^^^^^^^ + +.. type:: ClientMiddlewareType + + Type alias for client middleware functions. Middleware functions must have this signature:: + + Callable[ + [ClientRequest, ClientHandlerType], + Awaitable[ClientResponse] + ] + +.. type:: ClientHandlerType + + Type alias for client request handler functions:: + + Callable[ClientRequest, Awaitable[ClientResponse]] + Custom Cookies -------------- diff --git a/docs/client_reference.rst b/docs/client_reference.rst index aa664b24ff4..039419ba965 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -53,6 +53,7 @@ The client session supports the context manager protocol for self closing. 
trust_env=False, \ requote_redirect_url=True, \ trace_configs=None, \ + middlewares=None, \ read_bufsize=2**16, \ max_line_size=8190, \ max_field_size=8190, \ @@ -229,6 +230,13 @@ The client session supports the context manager protocol for self closing. disabling. See :ref:`aiohttp-client-tracing-reference` for more information. + :param middlewares: A tuple of middleware instances to apply to all session requests. + Each middleware must match the :type:`ClientMiddlewareType` signature. + ``None`` (default) is used when no middleware is needed. + See :ref:`aiohttp-client-middleware` for more information. + + .. versionadded:: 3.12 + :param int read_bufsize: Size of the read buffer (:attr:`ClientResponse.content`). 64 KiB by default. @@ -387,6 +395,7 @@ The client session supports the context manager protocol for self closing. server_hostname=None, \ proxy_headers=None, \ trace_request_ctx=None, \ + middlewares=None, \ read_bufsize=None, \ auto_decompress=None, \ max_line_size=None, \ @@ -535,6 +544,13 @@ The client session supports the context manager protocol for self closing. .. versionadded:: 3.0 + :param middlewares: A tuple of middleware instances to apply to this request only. + Each middleware must match the :type:`ClientMiddlewareType` signature. + ``None`` by default which uses session middlewares. + See :ref:`aiohttp-client-middleware` for more information. + + .. versionadded:: 3.12 + :param int read_bufsize: Size of the read buffer (:attr:`ClientResponse.content`). ``None`` by default, it means that the session global value is used. 
diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 0ea3ce1619a..1154c7e5805 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -12,11 +12,12 @@ import tarfile import time import zipfile -from typing import Any, AsyncIterator, Awaitable, Callable, List, Type +from typing import Any, AsyncIterator, Awaitable, Callable, List, NoReturn, Type from unittest import mock import pytest from multidict import MultiDict +from pytest_mock import MockerFixture from yarl import URL import aiohttp @@ -1065,7 +1066,31 @@ async def handler(request): assert resp.status == 200 -async def test_readline_error_on_conn_close(aiohttp_client) -> None: +async def test_connection_timeout_error( + aiohttp_client: AiohttpClient, mocker: MockerFixture +) -> None: + """Test that ConnectionTimeoutError is raised when connection times out.""" + + async def handler(request: web.Request) -> NoReturn: + assert False, "Handler should not be called" + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + + # Mock the connector's connect method to raise asyncio.TimeoutError + mock_connect = mocker.patch.object( + client.session._connector, "connect", side_effect=asyncio.TimeoutError() + ) + + with pytest.raises(aiohttp.ConnectionTimeoutError) as exc_info: + await client.get("/", timeout=aiohttp.ClientTimeout(connect=0.01)) + + assert "Connection timeout to host" in str(exc_info.value) + mock_connect.assert_called_once() + + +async def test_readline_error_on_conn_close(aiohttp_client: AiohttpClient) -> None: loop = asyncio.get_event_loop() async def handler(request): diff --git a/tests/test_client_middleware.py b/tests/test_client_middleware.py new file mode 100644 index 00000000000..7effa31c9f0 --- /dev/null +++ b/tests/test_client_middleware.py @@ -0,0 +1,1116 @@ +"""Tests for client middleware.""" + +import json +import socket +from typing import Dict, List, NoReturn, Optional, 
Union + +import pytest + +from aiohttp import ( + ClientError, + ClientHandlerType, + ClientRequest, + ClientResponse, + ClientSession, + ClientTimeout, + TCPConnector, + web, +) +from aiohttp.abc import ResolveResult +from aiohttp.client_middlewares import build_client_middlewares +from aiohttp.client_proto import ResponseHandler +from aiohttp.pytest_plugin import AiohttpServer +from aiohttp.resolver import ThreadedResolver +from aiohttp.tracing import Trace + + +class BlockedByMiddleware(ClientError): + """Custom exception for when middleware blocks a request.""" + + +async def test_client_middleware_called(aiohttp_server: AiohttpServer) -> None: + """Test that client middleware is called.""" + middleware_called = False + request_count = 0 + + async def handler(request: web.Request) -> web.Response: + nonlocal request_count + request_count += 1 + return web.Response(text=f"OK {request_count}") + + async def test_middleware( + request: ClientRequest, handler: ClientHandlerType + ) -> ClientResponse: + nonlocal middleware_called + middleware_called = True + response = await handler(request) + return response + + app = web.Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app) + + async with ClientSession(middlewares=(test_middleware,)) as session: + async with session.get(server.make_url("/")) as resp: + assert resp.status == 200 + text = await resp.text() + assert text == "OK 1" + + assert middleware_called is True + assert request_count == 1 + + +async def test_client_middleware_retry(aiohttp_server: AiohttpServer) -> None: + """Test that middleware can trigger retries.""" + request_count = 0 + + async def handler(request: web.Request) -> web.Response: + nonlocal request_count + request_count += 1 + if request_count == 1: + return web.Response(status=503) + return web.Response(text=f"OK {request_count}") + + async def retry_middleware( + request: ClientRequest, handler: ClientHandlerType + ) -> ClientResponse: + retry_count = 0 + 
while True: + response = await handler(request) + if response.status == 503 and retry_count < 1: + retry_count += 1 + continue + return response + + app = web.Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app) + + async with ClientSession(middlewares=(retry_middleware,)) as session: + async with session.get(server.make_url("/")) as resp: + assert resp.status == 200 + text = await resp.text() + assert text == "OK 2" + + assert request_count == 2 + + +async def test_client_middleware_per_request(aiohttp_server: AiohttpServer) -> None: + """Test that middleware can be specified per request.""" + session_middleware_called = False + request_middleware_called = False + + async def handler(request: web.Request) -> web.Response: + return web.Response(text="OK") + + async def session_middleware( + request: ClientRequest, handler: ClientHandlerType + ) -> ClientResponse: + nonlocal session_middleware_called + session_middleware_called = True + response = await handler(request) + return response + + async def request_middleware( + request: ClientRequest, handler: ClientHandlerType + ) -> ClientResponse: + nonlocal request_middleware_called + request_middleware_called = True + response = await handler(request) + return response + + app = web.Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app) + + # Request with session middleware + async with ClientSession(middlewares=(session_middleware,)) as session: + async with session.get(server.make_url("/")) as resp: + assert resp.status == 200 + + assert session_middleware_called is True + assert request_middleware_called is False + + # Reset flags + session_middleware_called = False + + # Request with override middleware + async with ClientSession(middlewares=(session_middleware,)) as session: + async with session.get( + server.make_url("/"), middlewares=(request_middleware,) + ) as resp: + assert resp.status == 200 + + assert session_middleware_called is False + 
assert request_middleware_called is True + + +async def test_multiple_client_middlewares(aiohttp_server: AiohttpServer) -> None: + """Test that multiple middlewares are executed in order.""" + calls: list[str] = [] + + async def handler(request: web.Request) -> web.Response: + return web.Response(text="OK") + + async def middleware1( + request: ClientRequest, handler: ClientHandlerType + ) -> ClientResponse: + calls.append("before1") + response = await handler(request) + calls.append("after1") + return response + + async def middleware2( + request: ClientRequest, handler: ClientHandlerType + ) -> ClientResponse: + calls.append("before2") + response = await handler(request) + calls.append("after2") + return response + + app = web.Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app) + + async with ClientSession(middlewares=(middleware1, middleware2)) as session: + async with session.get(server.make_url("/")) as resp: + assert resp.status == 200 + + # Middlewares are applied in reverse order (like server middlewares) + # So middleware1 wraps middleware2 + assert calls == ["before1", "before2", "after2", "after1"] + + +async def test_client_middleware_auth_example(aiohttp_server: AiohttpServer) -> None: + """Test an authentication middleware example.""" + + async def handler(request: web.Request) -> web.Response: + auth_header = request.headers.get("Authorization") + if auth_header == "Bearer valid-token": + return web.Response(text="Authenticated") + return web.Response(status=401, text="Unauthorized") + + async def auth_middleware( + request: ClientRequest, handler: ClientHandlerType + ) -> ClientResponse: + # Add authentication header before request + request.headers["Authorization"] = "Bearer valid-token" + response = await handler(request) + return response + + app = web.Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app) + + # Without middleware - should fail + async with ClientSession() as 
session: + async with session.get(server.make_url("/")) as resp: + assert resp.status == 401 + + # With middleware - should succeed + async with ClientSession(middlewares=(auth_middleware,)) as session: + async with session.get(server.make_url("/")) as resp: + assert resp.status == 200 + text = await resp.text() + assert text == "Authenticated" + + +async def test_client_middleware_challenge_auth(aiohttp_server: AiohttpServer) -> None: + """Test authentication middleware with challenge/response pattern like digest auth.""" + request_count = 0 + challenge_token = "challenge-123" + + async def handler(request: web.Request) -> web.Response: + nonlocal request_count + request_count += 1 + + auth_header = request.headers.get("Authorization") + + # First request - no auth header, return challenge + if request_count == 1 and not auth_header: + return web.Response( + status=401, + headers={ + "WWW-Authenticate": f'Custom realm="test", nonce="{challenge_token}"' + }, + ) + + # Subsequent requests - check for correct auth with challenge + if auth_header == f'Custom response="{challenge_token}-secret"': + return web.Response(text="Authenticated") + + assert False, "Should not reach here - invalid auth scenario" + + async def challenge_auth_middleware( + request: ClientRequest, handler: ClientHandlerType + ) -> ClientResponse: + challenge_data: Dict[str, Union[bool, str, None]] = { + "nonce": None, + "attempted": False, + } + + while True: + # If we have challenge data from previous attempt, add auth header + if challenge_data["nonce"] and challenge_data["attempted"]: + request.headers["Authorization"] = ( + f'Custom response="{challenge_data["nonce"]}-secret"' + ) + + response = await handler(request) + + # If we get a 401 with challenge, store it and retry + if response.status == 401 and not challenge_data["attempted"]: + www_auth = response.headers.get("WWW-Authenticate") + if www_auth and "nonce=" in www_auth: # pragma: no branch + # Extract nonce from authentication 
header + nonce_start = www_auth.find('nonce="') + 7 + nonce_end = www_auth.find('"', nonce_start) + challenge_data["nonce"] = www_auth[nonce_start:nonce_end] + challenge_data["attempted"] = True + continue + + return response + + app = web.Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app) + + async with ClientSession(middlewares=(challenge_auth_middleware,)) as session: + async with session.get(server.make_url("/")) as resp: + assert resp.status == 200 + text = await resp.text() + assert text == "Authenticated" + + # Should have made 2 requests: initial and retry with auth + assert request_count == 2 + + +async def test_client_middleware_multi_step_auth(aiohttp_server: AiohttpServer) -> None: + """Test middleware with multi-step authentication flow.""" + auth_state: dict[str, int] = {} + middleware_state: Dict[str, Optional[Union[int, str]]] = { + "step": 0, + "session": None, + "challenge": None, + } + + async def handler(request: web.Request) -> web.Response: + client_id = request.headers.get("X-Client-ID", "unknown") + auth_header = request.headers.get("Authorization") + step = auth_state.get(client_id, 0) + + # Step 0: No auth, request client ID + if step == 0 and not auth_header: + auth_state[client_id] = 1 + return web.Response( + status=401, headers={"X-Auth-Step": "1", "X-Session": "session-123"} + ) + + # Step 1: Has session, request credentials + if step == 1 and auth_header == "Bearer session-123": + auth_state[client_id] = 2 + return web.Response( + status=401, headers={"X-Auth-Step": "2", "X-Challenge": "challenge-456"} + ) + + # Step 2: Has challenge response, authenticate + if step == 2 and auth_header == "Bearer challenge-456-response": + return web.Response(text="Authenticated") + + assert False, "Should not reach here - invalid multi-step auth flow" + + async def multi_step_auth_middleware( + request: ClientRequest, handler: ClientHandlerType + ) -> ClientResponse: + request.headers["X-Client-ID"] = 
"test-client" + + while True: + # Apply auth based on current state + if middleware_state["step"] == 1 and middleware_state["session"]: + request.headers["Authorization"] = ( + f"Bearer {middleware_state['session']}" + ) + elif middleware_state["step"] == 2 and middleware_state["challenge"]: + request.headers["Authorization"] = ( + f"Bearer {middleware_state['challenge']}-response" + ) + + response = await handler(request) + + # Handle multi-step auth flow + if response.status == 401: + auth_step = response.headers.get("X-Auth-Step") + + if auth_step == "1": + # First step: store session token + middleware_state["session"] = response.headers.get("X-Session") + middleware_state["step"] = 1 + continue + + elif auth_step == "2": # pragma: no branch + # Second step: store challenge + middleware_state["challenge"] = response.headers.get("X-Challenge") + middleware_state["step"] = 2 + continue + + return response + + app = web.Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app) + + async with ClientSession(middlewares=(multi_step_auth_middleware,)) as session: + async with session.get(server.make_url("/")) as resp: + assert resp.status == 200 + text = await resp.text() + assert text == "Authenticated" + + +async def test_client_middleware_conditional_retry( + aiohttp_server: AiohttpServer, +) -> None: + """Test middleware with conditional retry based on response content.""" + request_count = 0 + token_state: Dict[str, Union[str, bool]] = { + "token": "old-token", + "refreshed": False, + } + + async def handler(request: web.Request) -> web.Response: + nonlocal request_count + request_count += 1 + + auth_token = request.headers.get("X-Auth-Token") + + if request_count == 1: + # First request returns expired token error + return web.json_response( + {"error": "token_expired", "refresh_required": True}, status=401 + ) + + if auth_token == "refreshed-token": + return web.json_response({"data": "success"}) + + assert False, "Should not reach 
here - invalid token refresh flow" + + async def token_refresh_middleware( + request: ClientRequest, handler: ClientHandlerType + ) -> ClientResponse: + while True: + # Add token to request + request.headers["X-Auth-Token"] = str(token_state["token"]) + + response = await handler(request) + + # Check if token needs refresh + if response.status == 401 and not token_state["refreshed"]: + data = await response.json() + if data.get("error") == "token_expired" and data.get( + "refresh_required" + ): # pragma: no branch + # Simulate token refresh + token_state["token"] = "refreshed-token" + token_state["refreshed"] = True + continue + + return response + + app = web.Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app) + + async with ClientSession(middlewares=(token_refresh_middleware,)) as session: + async with session.get(server.make_url("/")) as resp: + assert resp.status == 200 + data = await resp.json() + assert data == {"data": "success"} + + assert request_count == 2 # Initial request + retry after refresh + + +async def test_build_client_middlewares_empty() -> None: + """Test build_client_middlewares with empty middlewares.""" + + async def handler(request: ClientRequest) -> NoReturn: + """Dummy handler.""" + assert False + + # Test empty case + result = build_client_middlewares(handler, ()) + assert result is handler # Should return handler unchanged + + +async def test_client_middleware_class_based_auth( + aiohttp_server: AiohttpServer, +) -> None: + """Test middleware using class-based pattern with instance state.""" + + class TokenAuthMiddleware: + """Middleware that handles token-based authentication.""" + + def __init__(self, token: str) -> None: + self.token = token + self.request_count = 0 + + async def __call__( + self, request: ClientRequest, handler: ClientHandlerType + ) -> ClientResponse: + self.request_count += 1 + request.headers["Authorization"] = f"Bearer {self.token}" + return await handler(request) + + async def 
handler(request: web.Request) -> web.Response: + auth_header = request.headers.get("Authorization") + if auth_header == "Bearer test-token": + return web.Response(text="Authenticated") + assert False, "Should not reach here - class auth should always have token" + + app = web.Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app) + + # Create middleware instance + auth_middleware = TokenAuthMiddleware("test-token") + + async with ClientSession(middlewares=(auth_middleware,)) as session: + async with session.get(server.make_url("/")) as resp: + assert resp.status == 200 + text = await resp.text() + assert text == "Authenticated" + + # Verify the middleware was called + assert auth_middleware.request_count == 1 + + +async def test_client_middleware_stateful_retry(aiohttp_server: AiohttpServer) -> None: + """Test retry middleware using class with state management.""" + + class RetryMiddleware: + """Middleware that retries failed requests with backoff.""" + + def __init__(self, max_retries: int = 3) -> None: + self.max_retries = max_retries + self.retry_counts: Dict[int, int] = {} # Track retries per request + + async def __call__( + self, request: ClientRequest, handler: ClientHandlerType + ) -> ClientResponse: + retry_count = 0 + + while True: + response = await handler(request) + + if response.status >= 500 and retry_count < self.max_retries: + retry_count += 1 + continue + + return response + + request_count = 0 + + async def handler(request: web.Request) -> web.Response: + nonlocal request_count + request_count += 1 + + if request_count < 3: + return web.Response(status=503) + return web.Response(text="Success") + + app = web.Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app) + + retry_middleware = RetryMiddleware(max_retries=2) + + async with ClientSession(middlewares=(retry_middleware,)) as session: + async with session.get(server.make_url("/")) as resp: + assert resp.status == 200 + text = await 
resp.text() + assert text == "Success" + + assert request_count == 3 # Initial + 2 retries + + +async def test_client_middleware_multiple_instances( + aiohttp_server: AiohttpServer, +) -> None: + """Test using multiple instances of the same middleware class.""" + + class HeaderMiddleware: + """Middleware that adds a header with instance-specific value.""" + + def __init__(self, header_name: str, header_value: str) -> None: + self.header_name = header_name + self.header_value = header_value + self.applied = False + + async def __call__( + self, request: ClientRequest, handler: ClientHandlerType + ) -> ClientResponse: + self.applied = True + request.headers[self.header_name] = self.header_value + return await handler(request) + + headers_received = {} + + async def handler(request: web.Request) -> web.Response: + headers_received.update(dict(request.headers)) + return web.Response(text="OK") + + app = web.Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app) + + # Create two instances with different headers + middleware1 = HeaderMiddleware("X-Custom-1", "value1") + middleware2 = HeaderMiddleware("X-Custom-2", "value2") + + async with ClientSession(middlewares=(middleware1, middleware2)) as session: + async with session.get(server.make_url("/")) as resp: + assert resp.status == 200 + + # Both middlewares should have been applied + assert middleware1.applied is True + assert middleware2.applied is True + assert headers_received.get("X-Custom-1") == "value1" + assert headers_received.get("X-Custom-2") == "value2" + + +async def test_client_middleware_disable_with_empty_tuple( + aiohttp_server: AiohttpServer, +) -> None: + """Test that passing middlewares=() to a request disables session-level middlewares.""" + session_middleware_called = False + request_middleware_called = False + + async def handler(request: web.Request) -> web.Response: + auth_header = request.headers.get("Authorization") + if auth_header: + return 
web.Response(text=f"Auth: {auth_header}") + return web.Response(text="No auth") + + async def session_middleware( + request: ClientRequest, handler: ClientHandlerType + ) -> ClientResponse: + nonlocal session_middleware_called + session_middleware_called = True + request.headers["Authorization"] = "Bearer session-token" + response = await handler(request) + return response + + async def request_middleware( + request: ClientRequest, handler: ClientHandlerType + ) -> ClientResponse: + nonlocal request_middleware_called + request_middleware_called = True + request.headers["Authorization"] = "Bearer request-token" + response = await handler(request) + return response + + app = web.Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app) + + # Create session with middleware + async with ClientSession(middlewares=(session_middleware,)) as session: + # First request uses session middleware + async with session.get(server.make_url("/")) as resp: + assert resp.status == 200 + text = await resp.text() + assert text == "Auth: Bearer session-token" + assert session_middleware_called is True + assert request_middleware_called is False + + # Reset flags + session_middleware_called = False + request_middleware_called = False + + # Second request explicitly disables middlewares + async with session.get(server.make_url("/"), middlewares=()) as resp: + assert resp.status == 200 + text = await resp.text() + assert text == "No auth" + assert session_middleware_called is False + assert request_middleware_called is False + + # Reset flags + session_middleware_called = False + request_middleware_called = False + + # Third request uses request-specific middleware + async with session.get( + server.make_url("/"), middlewares=(request_middleware,) + ) as resp: + assert resp.status == 200 + text = await resp.text() + assert text == "Auth: Bearer request-token" + assert session_middleware_called is False + assert request_middleware_called is True + + 
+@pytest.mark.parametrize( + "exception_class,match_text", + [ + (ValueError, "Middleware error"), + (ClientError, "Client error from middleware"), + (OSError, "OS error from middleware"), + ], +) +async def test_client_middleware_exception_closes_connection( + aiohttp_server: AiohttpServer, + exception_class: type[Exception], + match_text: str, +) -> None: + """Test that connections are closed when middleware raises an exception.""" + + async def handler(request: web.Request) -> NoReturn: + assert False, "Handler should not be reached" + + async def failing_middleware( + request: ClientRequest, handler: ClientHandlerType + ) -> NoReturn: + # Raise exception before the handler is called + raise exception_class(match_text) + + app = web.Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app) + + # Create custom connector + connector = TCPConnector() + + async with ClientSession( + connector=connector, middlewares=(failing_middleware,) + ) as session: + # Make request that should fail in middleware + with pytest.raises(exception_class, match=match_text): + await session.get(server.make_url("/")) + + # Check that the connector has no active connections + # If connections were properly closed, _conns should be empty + assert len(connector._conns) == 0 + + await connector.close() + + +async def test_client_middleware_blocks_connection_before_established( + aiohttp_server: AiohttpServer, +) -> None: + """Test that middleware can block connections before they are established.""" + blocked_hosts = {"blocked.example.com", "evil.com"} + connection_attempts: List[str] = [] + + async def handler(request: web.Request) -> web.Response: + return web.Response(text="Reached") + + async def blocking_middleware( + request: ClientRequest, handler: ClientHandlerType + ) -> ClientResponse: + # Record the connection attempt + connection_attempts.append(str(request.url)) + + # Block requests to certain hosts + if request.url.host in blocked_hosts: + raise 
BlockedByMiddleware(f"Connection to {request.url.host} is blocked") + + # Allow the request to proceed + return await handler(request) + + app = web.Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app) + + connector = TCPConnector() + async with ClientSession( + connector=connector, middlewares=(blocking_middleware,) + ) as session: + # Test allowed request + allowed_url = server.make_url("/") + async with session.get(allowed_url) as resp: + assert resp.status == 200 + assert await resp.text() == "Reached" + + # Test blocked request + with pytest.raises(BlockedByMiddleware) as exc_info: + # Use a fake URL that would fail DNS if connection was attempted + await session.get("https://blocked.example.com/") + + assert "Connection to blocked.example.com is blocked" in str(exc_info.value) + + # Test another blocked host + with pytest.raises(BlockedByMiddleware) as exc_info: + await session.get("https://evil.com/path") + + assert "Connection to evil.com is blocked" in str(exc_info.value) + + # Verify that connections were attempted in the correct order + assert len(connection_attempts) == 3 + assert allowed_url.host and allowed_url.host in connection_attempts[0] + assert "blocked.example.com" in connection_attempts[1] + assert "evil.com" in connection_attempts[2] + + # Check that no connections were leaked + assert len(connector._conns) == 0 + + +async def test_client_middleware_blocks_connection_without_dns_lookup( + aiohttp_server: AiohttpServer, +) -> None: + """Test that middleware prevents DNS lookups for blocked hosts.""" + blocked_hosts = {"blocked.domain.tld"} + dns_lookups_made: List[str] = [] + + # Create a simple server for the allowed request + async def handler(request: web.Request) -> web.Response: + return web.Response(text="OK") + + app = web.Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app) + + class TrackingResolver(ThreadedResolver): + async def resolve( + self, + hostname: str, + 
port: int = 0, + family: socket.AddressFamily = socket.AF_INET, + ) -> List[ResolveResult]: + dns_lookups_made.append(hostname) + return await super().resolve(hostname, port, family) + + async def blocking_middleware( + request: ClientRequest, handler: ClientHandlerType + ) -> ClientResponse: + # Block requests to certain hosts before DNS lookup + if request.url.host in blocked_hosts: + raise BlockedByMiddleware(f"Blocked by policy: {request.url.host}") + + return await handler(request) + + resolver = TrackingResolver() + connector = TCPConnector(resolver=resolver) + async with ClientSession( + connector=connector, middlewares=(blocking_middleware,) + ) as session: + # Test blocked request to non-existent domain + with pytest.raises(BlockedByMiddleware) as exc_info: + await session.get("https://blocked.domain.tld/") + + assert "Blocked by policy: blocked.domain.tld" in str(exc_info.value) + + # Verify that no DNS lookup was made for the blocked domain + assert "blocked.domain.tld" not in dns_lookups_made + + # Test allowed request to existing server - this should trigger DNS lookup + async with session.get(f"http://localhost:{server.port}") as resp: + assert resp.status == 200 + + # Verify that DNS lookup was made for the allowed request + # The server might use a hostname that requires DNS resolution + assert len(dns_lookups_made) > 0 + + # Make sure blocked domain is still not in DNS lookups + assert "blocked.domain.tld" not in dns_lookups_made + + # Clean up + await connector.close() + + +async def test_client_middleware_retry_reuses_connection( + aiohttp_server: AiohttpServer, +) -> None: + """Test that connections are reused when middleware performs retries.""" + + async def handler(request: web.Request) -> web.Response: + return web.Response(text="OK") + + class TrackingConnector(TCPConnector): + """Connector that tracks connection attempts.""" + + connection_attempts = 0 + + async def _create_connection( + self, req: ClientRequest, traces: List["Trace"], 
timeout: "ClientTimeout" + ) -> ResponseHandler: + self.connection_attempts += 1 + return await super()._create_connection(req, traces, timeout) + + class RetryOnceMiddleware: + """Middleware that retries exactly once.""" + + def __init__(self) -> None: + self.attempt_count = 0 + + async def __call__( + self, request: ClientRequest, handler: ClientHandlerType + ) -> ClientResponse: + retry_count = 0 + while True: + self.attempt_count += 1 + if retry_count == 0: + retry_count += 1 + await handler(request) + continue + return await handler(request) + + app = web.Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app) + + connector = TrackingConnector() + middleware = RetryOnceMiddleware() + + async with ClientSession(connector=connector, middlewares=(middleware,)) as session: + # Make initial request + async with session.get(server.make_url("/")) as resp: + assert resp.status == 200 + text = await resp.text() + assert text == "OK" + + # Should have made 2 request attempts (initial + 1 retry) + assert middleware.attempt_count == 2 + # Should have created only 1 connection (reused on retry) + assert connector.connection_attempts == 1 + + await connector.close() + + +async def test_middleware_uses_session_avoids_recursion_with_path_check( + aiohttp_server: AiohttpServer, +) -> None: + """Test that middleware can avoid infinite recursion using a path check.""" + log_collector: List[Dict[str, str]] = [] + + async def log_api_handler(request: web.Request) -> web.Response: + """Handle log API requests.""" + data: Dict[str, str] = await request.json() + log_collector.append(data) + return web.Response(text="OK") + + async def main_handler(request: web.Request) -> web.Response: + """Handle main server requests.""" + return web.Response(text=f"Hello from {request.path}") + + # Create log API server + log_app = web.Application() + log_app.router.add_post("/log", log_api_handler) + log_server = await aiohttp_server(log_app) + + # Create main server 
+ main_app = web.Application() + main_app.router.add_get("/{path:.*}", main_handler) + main_server = await aiohttp_server(main_app) + + async def log_middleware( + request: ClientRequest, handler: ClientHandlerType + ) -> ClientResponse: + """Log requests to external API, avoiding recursion with path check.""" + # Avoid infinite recursion by not logging requests to the /log endpoint + if request.url.path != "/log": + # Use the session from the request to make the logging call + async with request.session.post( + f"http://localhost:{log_server.port}/log", + json={"method": str(request.method), "url": str(request.url)}, + ) as resp: + assert resp.status == 200 + + return await handler(request) + + # Create session with the middleware + async with ClientSession(middlewares=(log_middleware,)) as session: + # Make request to main server - should be logged + async with session.get(main_server.make_url("/test")) as resp: + assert resp.status == 200 + text = await resp.text() + assert text == "Hello from /test" + + # Make direct request to log API - should NOT be logged (avoid recursion) + async with session.post( + log_server.make_url("/log"), + json={"method": "DIRECT_POST", "url": "manual_test_entry"}, + ) as resp: + assert resp.status == 200 + + # Check logs + # The first request should be logged + # The second request (to /log) should also be logged but not the middleware's own log request + assert len(log_collector) == 2 + assert log_collector[0]["method"] == "GET" + assert log_collector[0]["url"] == str(main_server.make_url("/test")) + assert log_collector[1]["method"] == "DIRECT_POST" + assert log_collector[1]["url"] == "manual_test_entry" + + +async def test_middleware_uses_session_avoids_recursion_with_disabled_middleware( + aiohttp_server: AiohttpServer, +) -> None: + """Test that middleware can avoid infinite recursion by disabling middleware.""" + log_collector: List[Dict[str, str]] = [] + request_count = 0 + + async def log_api_handler(request: web.Request) 
-> web.Response: + """Handle log API requests.""" + nonlocal request_count + request_count += 1 + data: Dict[str, str] = await request.json() + log_collector.append(data) + return web.Response(text="OK") + + async def main_handler(request: web.Request) -> web.Response: + """Handle main server requests.""" + return web.Response(text=f"Hello from {request.path}") + + # Create log API server + log_app = web.Application() + log_app.router.add_post("/log", log_api_handler) + log_server = await aiohttp_server(log_app) + + # Create main server + main_app = web.Application() + main_app.router.add_get("/{path:.*}", main_handler) + main_server = await aiohttp_server(main_app) + + async def log_middleware( + request: ClientRequest, handler: ClientHandlerType + ) -> ClientResponse: + """Log all requests using session with disabled middleware.""" + # Use the session from the request to make the logging call + # Disable middleware to avoid infinite recursion + async with request.session.post( + f"http://localhost:{log_server.port}/log", + json={"method": str(request.method), "url": str(request.url)}, + middlewares=(), # This prevents infinite recursion + ) as resp: + assert resp.status == 200 + + return await handler(request) + + # Create session with the middleware + async with ClientSession(middlewares=(log_middleware,)) as session: + # Make request to main server - should be logged + async with session.get(main_server.make_url("/test")) as resp: + assert resp.status == 200 + text = await resp.text() + assert text == "Hello from /test" + + # Make another request - should also be logged + async with session.get(main_server.make_url("/another")) as resp: + assert resp.status == 200 + + # Check logs - both requests should be logged + assert len(log_collector) == 2 + assert log_collector[0]["method"] == "GET" + assert log_collector[0]["url"] == str(main_server.make_url("/test")) + assert log_collector[1]["method"] == "GET" + assert log_collector[1]["url"] == 
str(main_server.make_url("/another")) + + # Ensure that log requests were made without the middleware + # (request_count equals number of logged requests, not infinite) + assert request_count == 2 + + +async def test_middleware_can_check_request_body( + aiohttp_server: AiohttpServer, +) -> None: + """Test that middleware can check request body.""" + received_bodies: List[str] = [] + received_headers: List[Dict[str, str]] = [] + + async def handler(request: web.Request) -> web.Response: + """Server handler that receives requests.""" + body = await request.text() + received_bodies.append(body) + received_headers.append(dict(request.headers)) + return web.Response(text="OK") + + app = web.Application() + app.router.add_post("/api", handler) + app.router.add_get("/api", handler) # Add GET handler too + server = await aiohttp_server(app) + + class CustomAuth: + """Middleware that follows the GitHub discussion pattern for authentication.""" + + def __init__(self, secretkey: str) -> None: + self.secretkey = secretkey + + def get_hash(self, request: ClientRequest) -> str: + if request.body: + data = request.body.decode("utf-8") + else: + data = "{}" + + # Simulate authentication hash without using real crypto + signature = f"SIGNATURE-{self.secretkey}-{len(data)}-{data[:10]}" + return signature + + async def __call__( + self, request: ClientRequest, handler: ClientHandlerType + ) -> ClientResponse: + request.headers["CUSTOM-AUTH"] = self.get_hash(request) + return await handler(request) + + middleware = CustomAuth("test-secret-key") + + async with ClientSession(middlewares=(middleware,)) as session: + # Test 1: Send JSON data with user/action + data1 = {"user": "alice", "action": "login"} + json_str1 = json.dumps(data1) + async with session.post( + server.make_url("/api"), + data=json_str1, + headers={"Content-Type": "application/json"}, + ) as resp: + assert resp.status == 200 + + # Test 2: Send JSON data with different fields + data2 = {"user": "bob", "value": 42} + 
json_str2 = json.dumps(data2) + async with session.post( + server.make_url("/api"), + data=json_str2, + headers={"Content-Type": "application/json"}, + ) as resp: + assert resp.status == 200 + + # Test 3: Send GET request with no body + async with session.get(server.make_url("/api")) as resp: + assert resp.status == 200 # GET with empty body still should validate + + # Test 4: Send plain text (non-JSON) + text_data = "plain text body" + async with session.post( + server.make_url("/api"), + data=text_data, + headers={"Content-Type": "text/plain"}, + ) as resp: + assert resp.status == 200 + + # Verify server received the correct headers with authentication + headers1 = received_headers[0] + assert ( + headers1["CUSTOM-AUTH"] + == f"SIGNATURE-test-secret-key-{len(json_str1)}-{json_str1[:10]}" + ) + + headers2 = received_headers[1] + assert ( + headers2["CUSTOM-AUTH"] + == f"SIGNATURE-test-secret-key-{len(json_str2)}-{json_str2[:10]}" + ) + + headers3 = received_headers[2] + # GET request with no body should have empty JSON body + assert headers3["CUSTOM-AUTH"] == "SIGNATURE-test-secret-key-2-{}" + + headers4 = received_headers[3] + assert ( + headers4["CUSTOM-AUTH"] + == f"SIGNATURE-test-secret-key-{len(text_data)}-{text_data[:10]}" + ) + + # Verify all responses were successful + assert received_bodies[0] == json_str1 + assert received_bodies[1] == json_str2 + assert received_bodies[2] == "" # GET request has no body + assert received_bodies[3] == text_data From 6473180d2485efbf1ee1d6b763b4fc64359aa3d3 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 19 May 2025 17:18:14 +0000 Subject: [PATCH 1401/1511] [PR #10884/d758b7ae backport][3.12] Fix flakey middleware connection reuse test (#10887) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- tests/test_client_middleware.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/test_client_middleware.py b/tests/test_client_middleware.py index 7effa31c9f0..2f79e4fd774 100644 --- a/tests/test_client_middleware.py +++ b/tests/test_client_middleware.py @@ -847,11 +847,12 @@ async def __call__( retry_count = 0 while True: self.attempt_count += 1 + response = await handler(request) if retry_count == 0: retry_count += 1 - await handler(request) + response.release() # Release the response to enable connection reuse continue - return await handler(request) + return response app = web.Application() app.router.add_get("/", handler) From 50dd4c6444a6061e810c10a15fa5e24c2f8b4b08 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 19 May 2025 14:16:34 -0400 Subject: [PATCH 1402/1511] [PR #10880/3c9d7abf backport][3.12] Add invalid content type test docs (#10886) --- docs/client_reference.rst | 7 ++++++- tests/test_web_response.py | 7 +++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 039419ba965..afe6c720d78 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -1506,7 +1506,12 @@ Response object Returns value is ``'application/octet-stream'`` if no Content-Type header present in HTTP headers according to - :rfc:`2616`. To make sure Content-Type header is not present in + :rfc:`9110`. If the *Content-Type* header is invalid (e.g., ``jpg`` + instead of ``image/jpeg``), the value is ``text/plain`` by default + according to :rfc:`2045`. To see the original header check + ``resp.headers['CONTENT-TYPE']``. + + To make sure Content-Type header is not present in the server reply, use :attr:`headers` or :attr:`raw_headers`, e.g. ``'CONTENT-TYPE' not in resp.headers``. 
diff --git a/tests/test_web_response.py b/tests/test_web_response.py index b7758f46baa..68ffe211f20 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -1164,6 +1164,13 @@ def test_ctor_content_type_with_extra() -> None: assert resp.headers["content-type"] == "text/plain; version=0.0.4; charset=utf-8" +def test_invalid_content_type_parses_to_text_plain() -> None: + resp = Response(text="test test", content_type="jpeg") + + assert resp.content_type == "text/plain" + assert resp.headers["content-type"] == "jpeg; charset=utf-8" + + def test_ctor_both_content_type_param_and_header_with_text() -> None: with pytest.raises(ValueError): Response( From 1cfe02881b49b3354bc7bbee59b0db920de5ef0d Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 19 May 2025 14:16:57 -0400 Subject: [PATCH 1403/1511] [PR #10881/4facc402 backport][3.12] Remove License from setup.cfg (#10883) --- CHANGES/10662.packaging.rst | 1 + CONTRIBUTORS.txt | 1 + setup.cfg | 2 -- 3 files changed, 2 insertions(+), 2 deletions(-) create mode 100644 CHANGES/10662.packaging.rst diff --git a/CHANGES/10662.packaging.rst b/CHANGES/10662.packaging.rst new file mode 100644 index 00000000000..2ed3a69cb56 --- /dev/null +++ b/CHANGES/10662.packaging.rst @@ -0,0 +1 @@ +Removed non SPDX-license description from ``setup.cfg`` -- by :user:`devanshu-ziphq`. 
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 3815ae6829d..c70c86cf671 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -100,6 +100,7 @@ Denilson Amorim Denis Matiychuk Denis Moshensky Dennis Kliban +Devanshu Koyalkar Dima Veselov Dimitar Dimitrov Diogo Dutra da Mata diff --git a/setup.cfg b/setup.cfg index 649a5aaa4eb..23e56d61d00 100644 --- a/setup.cfg +++ b/setup.cfg @@ -25,8 +25,6 @@ classifiers = Intended Audience :: Developers - License :: OSI Approved :: Apache Software License - Operating System :: POSIX Operating System :: MacOS :: MacOS X Operating System :: Microsoft :: Windows From ad7ee7cb4bcd9222af99b38c7e92452ee55566b4 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 20 May 2025 15:05:25 +0000 Subject: [PATCH 1404/1511] [PR #10725/ab76c5a6 backport][3.12] Digest Authentication Middleware for aiohttp (#10894) Co-authored-by: Tim Menninger <tmenninger22@gmail.com> Co-authored-by: jf <feus4177@gmail.com> Co-authored-by: J. Nick Koston <nick@koston.org> Co-authored-by: J. 
Nick Koston <nick+github@koston.org> Co-authored-by: Sam Bull <git@sambull.org> closes #2213 closes #4939 --- CHANGES/10725.feature.rst | 6 + CHANGES/2213.feature.rst | 1 + CONTRIBUTORS.txt | 1 + aiohttp/__init__.py | 2 + aiohttp/client_middleware_digest_auth.py | 416 ++++++++++ docs/client_advanced.rst | 20 + docs/client_reference.rst | 29 + docs/spelling_wordlist.txt | 1 + examples/digest_auth_qop_auth.py | 68 ++ tests/test_client_middleware_digest_auth.py | 801 ++++++++++++++++++++ 10 files changed, 1345 insertions(+) create mode 100644 CHANGES/10725.feature.rst create mode 120000 CHANGES/2213.feature.rst create mode 100644 aiohttp/client_middleware_digest_auth.py create mode 100644 examples/digest_auth_qop_auth.py create mode 100644 tests/test_client_middleware_digest_auth.py diff --git a/CHANGES/10725.feature.rst b/CHANGES/10725.feature.rst new file mode 100644 index 00000000000..2cb096a58e7 --- /dev/null +++ b/CHANGES/10725.feature.rst @@ -0,0 +1,6 @@ +Added a comprehensive HTTP Digest Authentication client middleware (DigestAuthMiddleware) +that implements RFC 7616. The middleware supports all standard hash algorithms +(MD5, SHA, SHA-256, SHA-512) with session variants, handles both 'auth' and +'auth-int' quality of protection options, and automatically manages the +authentication flow by intercepting 401 responses and retrying with proper +credentials -- by :user:`feus4177`, :user:`TimMenninger`, and :user:`bdraco`. 
diff --git a/CHANGES/2213.feature.rst b/CHANGES/2213.feature.rst new file mode 120000 index 00000000000..d118975e478 --- /dev/null +++ b/CHANGES/2213.feature.rst @@ -0,0 +1 @@ +10725.feature.rst \ No newline at end of file diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index c70c86cf671..32e6e119aa7 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -187,6 +187,7 @@ Jesus Cea Jian Zeng Jinkyu Yi Joel Watts +John Feusi John Parton Jon Nabozny Jonas Krüger Svensson diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index d18bab60d2e..4bc6a3a2b22 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -47,6 +47,7 @@ WSServerHandshakeError, request, ) +from .client_middleware_digest_auth import DigestAuthMiddleware from .client_middlewares import ClientHandlerType, ClientMiddlewareType from .compression_utils import set_zlib_backend from .connector import ( @@ -187,6 +188,7 @@ # helpers "BasicAuth", "ChainMapProxy", + "DigestAuthMiddleware", "ETag", "set_zlib_backend", # http diff --git a/aiohttp/client_middleware_digest_auth.py b/aiohttp/client_middleware_digest_auth.py new file mode 100644 index 00000000000..e9eb3ba82e2 --- /dev/null +++ b/aiohttp/client_middleware_digest_auth.py @@ -0,0 +1,416 @@ +""" +Digest authentication middleware for aiohttp client. + +This middleware implements HTTP Digest Authentication according to RFC 7616, +providing a more secure alternative to Basic Authentication. It supports all +standard hash algorithms including MD5, SHA, SHA-256, SHA-512 and their session +variants, as well as both 'auth' and 'auth-int' quality of protection (qop) options. +""" + +import hashlib +import os +import re +import time +from typing import ( + Callable, + Dict, + Final, + FrozenSet, + List, + Literal, + Tuple, + TypedDict, + Union, +) + +from yarl import URL + +from . 
import hdrs +from .client_exceptions import ClientError +from .client_middlewares import ClientHandlerType +from .client_reqrep import ClientRequest, ClientResponse + + +class DigestAuthChallenge(TypedDict, total=False): + realm: str + nonce: str + qop: str + algorithm: str + opaque: str + + +DigestFunctions: Dict[str, Callable[[bytes], "hashlib._Hash"]] = { + "MD5": hashlib.md5, + "MD5-SESS": hashlib.md5, + "SHA": hashlib.sha1, + "SHA-SESS": hashlib.sha1, + "SHA256": hashlib.sha256, + "SHA256-SESS": hashlib.sha256, + "SHA-256": hashlib.sha256, + "SHA-256-SESS": hashlib.sha256, + "SHA512": hashlib.sha512, + "SHA512-SESS": hashlib.sha512, + "SHA-512": hashlib.sha512, + "SHA-512-SESS": hashlib.sha512, +} + + +# Compile the regex pattern once at module level for performance +_HEADER_PAIRS_PATTERN = re.compile( + r'(\w+)\s*=\s*(?:"((?:[^"\\]|\\.)*)"|([^\s,]+))' + # | | | | | | | | | || | + # +----|--|-|-|--|----|------|----|--||-----|--> alphanumeric key + # +--|-|-|--|----|------|----|--||-----|--> maybe whitespace + # | | | | | | | || | + # +-|-|--|----|------|----|--||-----|--> = (delimiter) + # +-|--|----|------|----|--||-----|--> maybe whitespace + # | | | | | || | + # +--|----|------|----|--||-----|--> group quoted or unquoted + # | | | | || | + # +----|------|----|--||-----|--> if quoted... + # +------|----|--||-----|--> anything but " or \ + # +----|--||-----|--> escaped characters allowed + # +--||-----|--> or can be empty string + # || | + # +|-----|--> if unquoted... + # +-----|--> anything but , or <space> + # +--> at least one char req'd +) + + +# RFC 7616: Challenge parameters to extract +CHALLENGE_FIELDS: Final[ + Tuple[Literal["realm", "nonce", "qop", "algorithm", "opaque"], ...] 
+] = ( + "realm", + "nonce", + "qop", + "algorithm", + "opaque", +) + +# Supported digest authentication algorithms +# Use a tuple of sorted keys for predictable documentation and error messages +SUPPORTED_ALGORITHMS: Final[Tuple[str, ...]] = tuple(sorted(DigestFunctions.keys())) + +# RFC 7616: Fields that require quoting in the Digest auth header +# These fields must be enclosed in double quotes in the Authorization header. +# Algorithm, qop, and nc are never quoted per RFC specifications. +# This frozen set is used by the template-based header construction to +# automatically determine which fields need quotes. +QUOTED_AUTH_FIELDS: Final[FrozenSet[str]] = frozenset( + {"username", "realm", "nonce", "uri", "response", "opaque", "cnonce"} +) + + +def escape_quotes(value: str) -> str: + """Escape double quotes for HTTP header values.""" + return value.replace('"', '\\"') + + +def unescape_quotes(value: str) -> str: + """Unescape double quotes in HTTP header values.""" + return value.replace('\\"', '"') + + +def parse_header_pairs(header: str) -> Dict[str, str]: + """ + Parse key-value pairs from WWW-Authenticate or similar HTTP headers. + + This function handles the complex format of WWW-Authenticate header values, + supporting both quoted and unquoted values, proper handling of commas in + quoted values, and whitespace variations per RFC 7616. + + Examples of supported formats: + - key1="value1", key2=value2 + - key1 = "value1" , key2="value, with, commas" + - key1=value1,key2="value2" + - realm="example.com", nonce="12345", qop="auth" + + Args: + header: The header value string to parse + + Returns: + Dictionary mapping parameter names to their values + """ + return { + stripped_key: unescape_quotes(quoted_val) if quoted_val else unquoted_val + for key, quoted_val, unquoted_val in _HEADER_PAIRS_PATTERN.findall(header) + if (stripped_key := key.strip()) + } + + +class DigestAuthMiddleware: + """ + HTTP digest authentication middleware for aiohttp client. 
+ + This middleware intercepts 401 Unauthorized responses containing a Digest + authentication challenge, calculates the appropriate digest credentials, + and automatically retries the request with the proper Authorization header. + + Features: + - Handles all aspects of Digest authentication handshake automatically + - Supports all standard hash algorithms: + - MD5, MD5-SESS + - SHA, SHA-SESS + - SHA256, SHA256-SESS, SHA-256, SHA-256-SESS + - SHA512, SHA512-SESS, SHA-512, SHA-512-SESS + - Supports 'auth' and 'auth-int' quality of protection modes + - Properly handles quoted strings and parameter parsing + - Includes replay attack protection with client nonce count tracking + + Standards compliance: + - RFC 7616: HTTP Digest Access Authentication (primary reference) + - RFC 2617: HTTP Authentication (deprecated by RFC 7616) + - RFC 1945: Section 11.1 (username restrictions) + + Implementation notes: + The core digest calculation is inspired by the implementation in + https://github.com/requests/requests/blob/v2.18.4/requests/auth.py + with added support for modern digest auth features and error handling. + """ + + def __init__( + self, + login: str, + password: str, + ) -> None: + if login is None: + raise ValueError("None is not allowed as login value") + + if password is None: + raise ValueError("None is not allowed as password value") + + if ":" in login: + raise ValueError('A ":" is not allowed in username (RFC 1945#section-11.1)') + + self._login_str: Final[str] = login + self._login_bytes: Final[bytes] = login.encode("utf-8") + self._password_bytes: Final[bytes] = password.encode("utf-8") + + self._last_nonce_bytes = b"" + self._nonce_count = 0 + self._challenge: DigestAuthChallenge = {} + + def _encode(self, method: str, url: URL, body: Union[bytes, str]) -> str: + """ + Build digest authorization header for the current challenge. + + Args: + method: The HTTP method (GET, POST, etc.) 
+ url: The request URL + body: The request body (used for qop=auth-int) + + Returns: + A fully formatted Digest authorization header string + + Raises: + ClientError: If the challenge is missing required parameters or + contains unsupported values + """ + challenge = self._challenge + if "realm" not in challenge: + raise ClientError( + "Malformed Digest auth challenge: Missing 'realm' parameter" + ) + + if "nonce" not in challenge: + raise ClientError( + "Malformed Digest auth challenge: Missing 'nonce' parameter" + ) + + # Empty realm values are allowed per RFC 7616 (SHOULD, not MUST, contain host name) + realm = challenge["realm"] + nonce = challenge["nonce"] + + # Empty nonce values are not allowed as they are security-critical for replay protection + if not nonce: + raise ClientError( + "Security issue: Digest auth challenge contains empty 'nonce' value" + ) + + qop_raw = challenge.get("qop", "") + algorithm = challenge.get("algorithm", "MD5").upper() + opaque = challenge.get("opaque", "") + + # Convert string values to bytes once + nonce_bytes = nonce.encode("utf-8") + realm_bytes = realm.encode("utf-8") + path = URL(url).path_qs + + # Process QoP + qop = "" + qop_bytes = b"" + if qop_raw: + valid_qops = {"auth", "auth-int"}.intersection( + {q.strip() for q in qop_raw.split(",") if q.strip()} + ) + if not valid_qops: + raise ClientError( + f"Digest auth error: Unsupported Quality of Protection (qop) value(s): {qop_raw}" + ) + + qop = "auth-int" if "auth-int" in valid_qops else "auth" + qop_bytes = qop.encode("utf-8") + + if algorithm not in DigestFunctions: + raise ClientError( + f"Digest auth error: Unsupported hash algorithm: {algorithm}. 
" + f"Supported algorithms: {', '.join(SUPPORTED_ALGORITHMS)}" + ) + hash_fn: Final = DigestFunctions[algorithm] + + def H(x: bytes) -> bytes: + """RFC 7616 Section 3: Hash function H(data) = hex(hash(data)).""" + return hash_fn(x).hexdigest().encode() + + def KD(s: bytes, d: bytes) -> bytes: + """RFC 7616 Section 3: KD(secret, data) = H(concat(secret, ":", data)).""" + return H(b":".join((s, d))) + + # Calculate A1 and A2 + A1 = b":".join((self._login_bytes, realm_bytes, self._password_bytes)) + A2 = f"{method.upper()}:{path}".encode() + if qop == "auth-int": + if isinstance(body, str): + entity_str = body.encode("utf-8", errors="replace") + else: + entity_str = body + entity_hash = H(entity_str) + A2 = b":".join((A2, entity_hash)) + + HA1 = H(A1) + HA2 = H(A2) + + # Nonce count handling + if nonce_bytes == self._last_nonce_bytes: + self._nonce_count += 1 + else: + self._nonce_count = 1 + + self._last_nonce_bytes = nonce_bytes + ncvalue = f"{self._nonce_count:08x}" + ncvalue_bytes = ncvalue.encode("utf-8") + + # Generate client nonce + cnonce = hashlib.sha1( + b"".join( + [ + str(self._nonce_count).encode("utf-8"), + nonce_bytes, + time.ctime().encode("utf-8"), + os.urandom(8), + ] + ) + ).hexdigest()[:16] + cnonce_bytes = cnonce.encode("utf-8") + + # Special handling for session-based algorithms + if algorithm.upper().endswith("-SESS"): + HA1 = H(b":".join((HA1, nonce_bytes, cnonce_bytes))) + + # Calculate the response digest + if qop: + noncebit = b":".join( + (nonce_bytes, ncvalue_bytes, cnonce_bytes, qop_bytes, HA2) + ) + response_digest = KD(HA1, noncebit) + else: + response_digest = KD(HA1, b":".join((nonce_bytes, HA2))) + + # Define a dict mapping of header fields to their values + # Group fields into always-present, optional, and qop-dependent + header_fields = { + # Always present fields + "username": escape_quotes(self._login_str), + "realm": escape_quotes(realm), + "nonce": escape_quotes(nonce), + "uri": path, + "response": response_digest.decode(), + 
"algorithm": algorithm, + } + + # Optional fields + if opaque: + header_fields["opaque"] = escape_quotes(opaque) + + # QoP-dependent fields + if qop: + header_fields["qop"] = qop + header_fields["nc"] = ncvalue + header_fields["cnonce"] = cnonce + + # Build header using templates for each field type + pairs: List[str] = [] + for field, value in header_fields.items(): + if field in QUOTED_AUTH_FIELDS: + pairs.append(f'{field}="{value}"') + else: + pairs.append(f"{field}={value}") + + return f"Digest {', '.join(pairs)}" + + def _authenticate(self, response: ClientResponse) -> bool: + """ + Takes the given response and tries digest-auth, if needed. + + Returns true if the original request must be resent. + """ + if response.status != 401: + return False + + auth_header = response.headers.get("www-authenticate", "") + if not auth_header: + return False # No authentication header present + + method, sep, headers = auth_header.partition(" ") + if not sep: + # No space found in www-authenticate header + return False # Malformed auth header, missing scheme separator + + if method.lower() != "digest": + # Not a digest auth challenge (could be Basic, Bearer, etc.) 
+ return False + + if not headers: + # We have a digest scheme but no parameters + return False # Malformed digest header, missing parameters + + # We have a digest auth header with content + if not (header_pairs := parse_header_pairs(headers)): + # Failed to parse any key-value pairs + return False # Malformed digest header, no valid parameters + + # Extract challenge parameters + self._challenge = {} + for field in CHALLENGE_FIELDS: + if value := header_pairs.get(field): + self._challenge[field] = value + + # Return True only if we found at least one challenge parameter + return bool(self._challenge) + + async def __call__( + self, request: ClientRequest, handler: ClientHandlerType + ) -> ClientResponse: + """Run the digest auth middleware.""" + response = None + for retry_count in range(2): + # Apply authorization header if we have a challenge (on second attempt) + if retry_count > 0: + request.headers[hdrs.AUTHORIZATION] = self._encode( + request.method, request.url, request.body + ) + + # Send the request + response = await handler(request) + + # Check if we need to authenticate + if not self._authenticate(response): + break + elif retry_count < 1: + response.release() # Release the response to enable connection reuse on retry + + # At this point, response is guaranteed to be defined + assert response is not None + return response diff --git a/docs/client_advanced.rst b/docs/client_advanced.rst index 8795b3d164a..9affef7efe2 100644 --- a/docs/client_advanced.rst +++ b/docs/client_advanced.rst @@ -67,6 +67,26 @@ argument. An instance of :class:`BasicAuth` can be passed in like this:: async with ClientSession(auth=auth) as session: ... 
+For HTTP digest authentication, use the :class:`DigestAuthMiddleware` client middleware:: + + from aiohttp import ClientSession, DigestAuthMiddleware + + # Create the middleware with your credentials + digest_auth = DigestAuthMiddleware(login="user", password="password") + + # Pass it to the ClientSession as a tuple + async with ClientSession(middlewares=(digest_auth,)) as session: + # The middleware will automatically handle auth challenges + async with session.get("https://example.com/protected") as resp: + print(await resp.text()) + +The :class:`DigestAuthMiddleware` implements HTTP Digest Authentication according to RFC 7616, +providing a more secure alternative to Basic Authentication. It supports all +standard hash algorithms including MD5, SHA, SHA-256, SHA-512 and their session +variants, as well as both 'auth' and 'auth-int' quality of protection (qop) options. +The middleware automatically handles the authentication flow by intercepting 401 responses +and retrying with proper credentials. + Note that if the request is redirected and the redirect URL contains credentials, those credentials will supersede any previously set credentials. In other words, if ``http://user@example.com`` redirects to diff --git a/docs/client_reference.rst b/docs/client_reference.rst index afe6c720d78..8e6153bf40c 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -2010,6 +2010,7 @@ Utilities .. versionadded:: 3.2 + .. class:: BasicAuth(login, password='', encoding='latin1') HTTP basic authentication helper. @@ -2050,6 +2051,34 @@ Utilities :return: encoded authentication data, :class:`str`. + +.. class:: DigestAuthMiddleware(login, password) + + HTTP digest authentication client middleware. + + :param str login: login + :param str password: password + + This middleware supports HTTP digest authentication with both `auth` and + `auth-int` quality of protection (qop) modes, and a variety of hashing algorithms. 
+ + It automatically handles the digest authentication handshake by: + + - Parsing 401 Unauthorized responses with `WWW-Authenticate: Digest` headers + - Generating appropriate `Authorization: Digest` headers on retry + - Maintaining nonce counts and challenge data per request + + Usage:: + + digest_auth_middleware = DigestAuthMiddleware(login="user", password="pass") + async with ClientSession(middlewares=(digest_auth_middleware,)) as session: + async with session.get("http://protected.example.com") as resp: + # The middleware automatically handles the digest auth handshake + assert resp.status == 200 + + .. versionadded:: 3.12 + + .. class:: CookieJar(*, unsafe=False, quote_cookie=True, treat_as_secure_origin = []) The cookie jar instance is available as :attr:`ClientSession.cookie_jar`. diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index f2321adb708..421ef842678 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -252,6 +252,7 @@ pyflakes pyright pytest Pytest +qop Quickstart quote’s rc diff --git a/examples/digest_auth_qop_auth.py b/examples/digest_auth_qop_auth.py new file mode 100644 index 00000000000..508f444e9f9 --- /dev/null +++ b/examples/digest_auth_qop_auth.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python3 +""" +Example of using digest authentication middleware with aiohttp client. + +This example shows how to use the DigestAuthMiddleware from +aiohttp.client_middleware_digest_auth to authenticate with a server +that requires digest authentication with different qop options. + +In this case, it connects to httpbin.org's digest auth endpoint. 
+""" + +import asyncio +from itertools import product + +from yarl import URL + +from aiohttp import ClientSession +from aiohttp.client_middleware_digest_auth import DigestAuthMiddleware + +# Define QOP options available +QOP_OPTIONS = ["auth", "auth-int"] + +# Algorithms supported by httpbin.org +ALGORITHMS = ["MD5", "SHA-256", "SHA-512"] + +# Username and password for testing +USERNAME = "my" +PASSWORD = "dog" + +# All combinations of QOP options and algorithms +TEST_COMBINATIONS = list(product(QOP_OPTIONS, ALGORITHMS)) + + +async def main() -> None: + # Create a DigestAuthMiddleware instance with appropriate credentials + digest_auth = DigestAuthMiddleware(login=USERNAME, password=PASSWORD) + + # Create a client session with the digest auth middleware + async with ClientSession(middlewares=(digest_auth,)) as session: + # Test each combination of QOP and algorithm + for qop, algorithm in TEST_COMBINATIONS: + print(f"\n\n=== Testing with qop={qop}, algorithm={algorithm} ===\n") + + url = URL( + f"https://httpbin.org/digest-auth/{qop}/{USERNAME}/{PASSWORD}/{algorithm}" + ) + + async with session.get(url) as resp: + print(f"Status: {resp.status}") + print(f"Headers: {resp.headers}") + + # Parse the JSON response + json_response = await resp.json() + print(f"Response: {json_response}") + + # Verify authentication was successful + if resp.status == 200: + print("\nAuthentication successful!") + print(f"Authenticated user: {json_response.get('user')}") + print( + f"Authentication method: {json_response.get('authenticated')}" + ) + else: + print("\nAuthentication failed.") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/tests/test_client_middleware_digest_auth.py b/tests/test_client_middleware_digest_auth.py new file mode 100644 index 00000000000..26118288913 --- /dev/null +++ b/tests/test_client_middleware_digest_auth.py @@ -0,0 +1,801 @@ +"""Test digest authentication middleware for aiohttp client.""" + +from hashlib import md5, sha1 +from typing 
import Generator, Union +from unittest import mock + +import pytest +from yarl import URL + +from aiohttp import ClientSession, hdrs +from aiohttp.client_exceptions import ClientError +from aiohttp.client_middleware_digest_auth import ( + DigestAuthChallenge, + DigestAuthMiddleware, + DigestFunctions, + escape_quotes, + parse_header_pairs, + unescape_quotes, +) +from aiohttp.client_reqrep import ClientResponse +from aiohttp.pytest_plugin import AiohttpServer +from aiohttp.web import Application, Request, Response + + +@pytest.fixture +def digest_auth_mw() -> DigestAuthMiddleware: + return DigestAuthMiddleware("user", "pass") + + +@pytest.fixture +def basic_challenge() -> DigestAuthChallenge: + """Return a basic digest auth challenge with required fields only.""" + return DigestAuthChallenge(realm="test", nonce="abc") + + +@pytest.fixture +def complete_challenge() -> DigestAuthChallenge: + """Return a complete digest auth challenge with all fields.""" + return DigestAuthChallenge( + realm="test", nonce="abc", qop="auth", algorithm="MD5", opaque="xyz" + ) + + +@pytest.fixture +def qop_challenge() -> DigestAuthChallenge: + """Return a digest auth challenge with qop field.""" + return DigestAuthChallenge(realm="test", nonce="abc", qop="auth") + + +@pytest.fixture +def no_qop_challenge() -> DigestAuthChallenge: + """Return a digest auth challenge without qop.""" + return DigestAuthChallenge(realm="test-realm", nonce="testnonce", algorithm="MD5") + + +@pytest.fixture +def auth_mw_with_challenge( + digest_auth_mw: DigestAuthMiddleware, complete_challenge: DigestAuthChallenge +) -> DigestAuthMiddleware: + """Return a digest auth middleware with pre-set challenge.""" + digest_auth_mw._challenge = complete_challenge + digest_auth_mw._last_nonce_bytes = complete_challenge["nonce"].encode("utf-8") + digest_auth_mw._nonce_count = 0 + return digest_auth_mw + + +@pytest.fixture +def mock_sha1_digest() -> Generator[mock.MagicMock, None, None]: + """Mock SHA1 to return a 
predictable value for testing.""" + mock_digest = mock.MagicMock(spec=sha1()) + mock_digest.hexdigest.return_value = "deadbeefcafebabe" + with mock.patch("hashlib.sha1", return_value=mock_digest) as patched: + yield patched + + +@pytest.fixture +def mock_md5_digest() -> Generator[mock.MagicMock, None, None]: + """Mock MD5 to return a predictable value for testing.""" + mock_digest = mock.MagicMock(spec=md5()) + mock_digest.hexdigest.return_value = "abcdef0123456789" + with mock.patch("hashlib.md5", return_value=mock_digest) as patched: + yield patched + + +@pytest.mark.parametrize( + ("response_status", "headers", "expected_result", "expected_challenge"), + [ + # Valid digest with all fields + ( + 401, + { + "www-authenticate": 'Digest realm="test", nonce="abc", ' + 'qop="auth", opaque="xyz", algorithm=MD5' + }, + True, + { + "realm": "test", + "nonce": "abc", + "qop": "auth", + "algorithm": "MD5", + "opaque": "xyz", + }, + ), + # Valid digest without opaque + ( + 401, + {"www-authenticate": 'Digest realm="test", nonce="abc", qop="auth"'}, + True, + {"realm": "test", "nonce": "abc", "qop": "auth"}, + ), + # Non-401 status + (200, {}, False, {}), # No challenge should be set + ], +) +async def test_authenticate_scenarios( + digest_auth_mw: DigestAuthMiddleware, + response_status: int, + headers: dict[str, str], + expected_result: bool, + expected_challenge: dict[str, str], +) -> None: + """Test different authentication scenarios.""" + response = mock.MagicMock(spec=ClientResponse) + response.status = response_status + response.headers = headers + + result = digest_auth_mw._authenticate(response) + assert result == expected_result + + if expected_result: + challenge_dict = dict(digest_auth_mw._challenge) + for key, value in expected_challenge.items(): + assert challenge_dict[key] == value + + +@pytest.mark.parametrize( + ("challenge", "expected_error"), + [ + ( + DigestAuthChallenge(), + "Malformed Digest auth challenge: Missing 'realm' parameter", + ), + ( + 
DigestAuthChallenge(nonce="abc"), + "Malformed Digest auth challenge: Missing 'realm' parameter", + ), + ( + DigestAuthChallenge(realm="test"), + "Malformed Digest auth challenge: Missing 'nonce' parameter", + ), + ( + DigestAuthChallenge(realm="test", nonce=""), + "Security issue: Digest auth challenge contains empty 'nonce' value", + ), + ], +) +def test_encode_validation_errors( + digest_auth_mw: DigestAuthMiddleware, + challenge: DigestAuthChallenge, + expected_error: str, +) -> None: + """Test validation errors when encoding digest auth headers.""" + digest_auth_mw._challenge = challenge + with pytest.raises(ClientError, match=expected_error): + digest_auth_mw._encode("GET", URL("http://example.com/resource"), "") + + +def test_encode_digest_with_md5(auth_mw_with_challenge: DigestAuthMiddleware) -> None: + header = auth_mw_with_challenge._encode( + "GET", URL("http://example.com/resource"), "" + ) + assert header.startswith("Digest ") + assert 'username="user"' in header + assert "algorithm=MD5" in header + + +@pytest.mark.parametrize( + "algorithm", ["MD5-SESS", "SHA-SESS", "SHA-256-SESS", "SHA-512-SESS"] +) +def test_encode_digest_with_sess_algorithms( + digest_auth_mw: DigestAuthMiddleware, + qop_challenge: DigestAuthChallenge, + algorithm: str, +) -> None: + """Test that all session-based digest algorithms work correctly.""" + # Create a modified challenge with the test algorithm + challenge = qop_challenge.copy() + challenge["algorithm"] = algorithm + digest_auth_mw._challenge = challenge + + header = digest_auth_mw._encode("GET", URL("http://example.com/resource"), "") + assert f"algorithm={algorithm}" in header + + +def test_encode_unsupported_algorithm( + digest_auth_mw: DigestAuthMiddleware, basic_challenge: DigestAuthChallenge +) -> None: + """Test that unsupported algorithm raises ClientError.""" + # Create a modified challenge with an unsupported algorithm + challenge = basic_challenge.copy() + challenge["algorithm"] = "UNSUPPORTED" + 
digest_auth_mw._challenge = challenge + + with pytest.raises(ClientError, match="Unsupported hash algorithm"): + digest_auth_mw._encode("GET", URL("http://example.com/resource"), "") + + +def test_invalid_qop_rejected( + digest_auth_mw: DigestAuthMiddleware, basic_challenge: DigestAuthChallenge +) -> None: + """Test that invalid Quality of Protection values are rejected.""" + # Use bad QoP value to trigger error + challenge = basic_challenge.copy() + challenge["qop"] = "badvalue" + challenge["algorithm"] = "MD5" + digest_auth_mw._challenge = challenge + + # This should raise an error about unsupported QoP + with pytest.raises(ClientError, match="Unsupported Quality of Protection"): + digest_auth_mw._encode("GET", URL("http://example.com"), "") + + +def compute_expected_digest( + algorithm: str, + username: str, + password: str, + realm: str, + nonce: str, + uri: str, + method: str, + qop: str, + nc: str, + cnonce: str, + body: str = "", +) -> str: + hash_fn = DigestFunctions[algorithm] + + def H(x: str) -> str: + return hash_fn(x.encode()).hexdigest() + + def KD(secret: str, data: str) -> str: + return H(f"{secret}:{data}") + + A1 = f"{username}:{realm}:{password}" + HA1 = H(A1) + + if algorithm.upper().endswith("-SESS"): + HA1 = H(f"{HA1}:{nonce}:{cnonce}") + + A2 = f"{method}:{uri}" + if "auth-int" in qop: + entity_hash = H(body) + A2 = f"{A2}:{entity_hash}" + HA2 = H(A2) + + if qop: + return KD(HA1, f"{nonce}:{nc}:{cnonce}:{qop}:{HA2}") + else: + return KD(HA1, f"{nonce}:{HA2}") + + +@pytest.mark.parametrize("qop", ["auth", "auth-int", "auth,auth-int", ""]) +@pytest.mark.parametrize("algorithm", sorted(DigestFunctions.keys())) +@pytest.mark.parametrize( + ("body", "body_str"), + [ + ("this is a body", "this is a body"), # String case + (b"this is a body", "this is a body"), # Bytes case + ], +) +def test_digest_response_exact_match( + qop: str, + algorithm: str, + body: Union[str, bytes], + body_str: str, + mock_sha1_digest: mock.MagicMock, +) -> None: + # Fixed 
input values + login = "user" + password = "pass" + realm = "example.com" + nonce = "abc123nonce" + cnonce = "deadbeefcafebabe" + nc = 1 + ncvalue = f"{nc+1:08x}" + method = "GET" + uri = "/secret" + qop = "auth-int" if "auth-int" in qop else "auth" + + # Create the auth object + auth = DigestAuthMiddleware(login, password) + auth._challenge = DigestAuthChallenge( + realm=realm, nonce=nonce, qop=qop, algorithm=algorithm + ) + auth._last_nonce_bytes = nonce.encode("utf-8") + auth._nonce_count = nc + + header = auth._encode(method, URL(f"http://host{uri}"), body) + + # Get expected digest + expected = compute_expected_digest( + algorithm=algorithm, + username=login, + password=password, + realm=realm, + nonce=nonce, + uri=uri, + method=method, + qop=qop, + nc=ncvalue, + cnonce=cnonce, + body=body_str, + ) + + # Check that the response digest is exactly correct + assert f'response="{expected}"' in header + + +@pytest.mark.parametrize( + ("header", "expected_result"), + [ + # Normal quoted values + ( + 'realm="example.com", nonce="12345", qop="auth"', + {"realm": "example.com", "nonce": "12345", "qop": "auth"}, + ), + # Unquoted values + ( + "realm=example.com, nonce=12345, qop=auth", + {"realm": "example.com", "nonce": "12345", "qop": "auth"}, + ), + # Mixed quoted/unquoted with commas in quoted values + ( + 'realm="ex,ample", nonce=12345, qop="auth", domain="/test"', + { + "realm": "ex,ample", + "nonce": "12345", + "qop": "auth", + "domain": "/test", + }, + ), + # Header with scheme + ( + 'Digest realm="example.com", nonce="12345", qop="auth"', + {"realm": "example.com", "nonce": "12345", "qop": "auth"}, + ), + # No spaces after commas + ( + 'realm="test",nonce="123",qop="auth"', + {"realm": "test", "nonce": "123", "qop": "auth"}, + ), + # Extra whitespace + ( + 'realm = "test" , nonce = "123"', + {"realm": "test", "nonce": "123"}, + ), + # Escaped quotes + ( + 'realm="test\\"realm", nonce="123"', + {"realm": 'test"realm', "nonce": "123"}, + ), + # Single quotes 
(treated as regular chars) + ( + "realm='test', nonce=123", + {"realm": "'test'", "nonce": "123"}, + ), + # Empty header + ("", {}), + ], + ids=[ + "fully_quoted_header", + "unquoted_header", + "mixed_quoted_unquoted_with_commas", + "header_with_scheme", + "no_spaces_after_commas", + "extra_whitespace", + "escaped_quotes", + "single_quotes_as_regular_chars", + "empty_header", + ], +) +def test_parse_header_pairs(header: str, expected_result: dict[str, str]) -> None: + """Test parsing HTTP header pairs with various formats.""" + result = parse_header_pairs(header) + assert result == expected_result + + +def test_digest_auth_middleware_callable(digest_auth_mw: DigestAuthMiddleware) -> None: + """Test that DigestAuthMiddleware is callable.""" + assert callable(digest_auth_mw) + + +def test_middleware_invalid_login() -> None: + """Test that invalid login values raise errors.""" + with pytest.raises(ValueError, match="None is not allowed as login value"): + DigestAuthMiddleware(None, "pass") # type: ignore[arg-type] + + with pytest.raises(ValueError, match="None is not allowed as password value"): + DigestAuthMiddleware("user", None) # type: ignore[arg-type] + + with pytest.raises(ValueError, match=r"A \":\" is not allowed in username"): + DigestAuthMiddleware("user:name", "pass") + + +def test_escaping_quotes_in_auth_header() -> None: + """Test that double quotes are properly escaped in auth header.""" + auth = DigestAuthMiddleware('user"with"quotes', "pass") + auth._challenge = DigestAuthChallenge( + realm='realm"with"quotes', + nonce='nonce"with"quotes', + qop="auth", + algorithm="MD5", + opaque='opaque"with"quotes', + ) + + header = auth._encode("GET", URL("http://example.com/path"), "") + + # Check that quotes are escaped in the header + assert 'username="user\\"with\\"quotes"' in header + assert 'realm="realm\\"with\\"quotes"' in header + assert 'nonce="nonce\\"with\\"quotes"' in header + assert 'opaque="opaque\\"with\\"quotes"' in header + + +def 
test_template_based_header_construction( + auth_mw_with_challenge: DigestAuthMiddleware, + mock_sha1_digest: mock.MagicMock, + mock_md5_digest: mock.MagicMock, +) -> None: + """Test that the template-based header construction works correctly.""" + header = auth_mw_with_challenge._encode("GET", URL("http://example.com/test"), "") + + # Split the header into scheme and parameters + scheme, params_str = header.split(" ", 1) + assert scheme == "Digest" + + # Parse the parameters into a dictionary + params = { + key: value[1:-1] if value.startswith('"') and value.endswith('"') else value + for key, value in (param.split("=", 1) for param in params_str.split(", ")) + } + + # Check all required fields are present + assert "username" in params + assert "realm" in params + assert "nonce" in params + assert "uri" in params + assert "response" in params + assert "algorithm" in params + assert "qop" in params + assert "nc" in params + assert "cnonce" in params + assert "opaque" in params + + # Check that fields are quoted correctly + quoted_fields = [ + "username", + "realm", + "nonce", + "uri", + "response", + "opaque", + "cnonce", + ] + unquoted_fields = ["algorithm", "qop", "nc"] + + # Re-check the original header for proper quoting + for field in quoted_fields: + assert f'{field}="{params[field]}"' in header + + for field in unquoted_fields: + assert f"{field}={params[field]}" in header + + # Check specific values + assert params["username"] == "user" + assert params["realm"] == "test" + assert params["algorithm"] == "MD5" + assert params["nc"] == "00000001" # nonce_count = 1 (incremented from 0) + assert params["uri"] == "/test" # path component of URL + + +@pytest.mark.parametrize( + ("test_string", "expected_escaped", "description"), + [ + ('value"with"quotes', 'value\\"with\\"quotes', "Basic string with quotes"), + ("", "", "Empty string"), + ("no quotes", "no quotes", "String without quotes"), + ('with"one"quote', 'with\\"one\\"quote', "String with one quoted 
segment"), + ( + 'many"quotes"in"string', + 'many\\"quotes\\"in\\"string', + "String with multiple quoted segments", + ), + ('""', '\\"\\"', "Just double quotes"), + ('"', '\\"', "Single double quote"), + ('already\\"escaped', 'already\\\\"escaped', "Already escaped quotes"), + ], +) +def test_quote_escaping_functions( + test_string: str, expected_escaped: str, description: str +) -> None: + """Test that escape_quotes and unescape_quotes work correctly.""" + # Test escaping + escaped = escape_quotes(test_string) + assert escaped == expected_escaped + + # Test unescaping (should return to original) + unescaped = unescape_quotes(escaped) + assert unescaped == test_string + + # Test that they're inverse operations + assert unescape_quotes(escape_quotes(test_string)) == test_string + + +async def test_middleware_retry_on_401( + aiohttp_server: AiohttpServer, digest_auth_mw: DigestAuthMiddleware +) -> None: + """Test that the middleware retries on 401 errors.""" + request_count = 0 + + async def handler(request: Request) -> Response: + nonlocal request_count + request_count += 1 + + if request_count == 1: + # First request returns 401 with digest challenge + challenge = 'Digest realm="test", nonce="abc123", qop="auth", algorithm=MD5' + return Response( + status=401, + headers={"WWW-Authenticate": challenge}, + text="Unauthorized", + ) + + # Second request should have Authorization header + auth_header = request.headers.get(hdrs.AUTHORIZATION) + if auth_header and auth_header.startswith("Digest "): + # Return success response + return Response(text="OK") + + # This branch should not be reached in the tests + assert False, "This branch should not be reached" + + app = Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app) + + async with ClientSession(middlewares=(digest_auth_mw,)) as session: + async with session.get(server.make_url("/")) as resp: + assert resp.status == 200 + text_content = await resp.text() + assert text_content == "OK" + 
+ assert request_count == 2 # Initial request + retry with auth + + +async def test_digest_auth_no_qop( + aiohttp_server: AiohttpServer, + digest_auth_mw: DigestAuthMiddleware, + no_qop_challenge: DigestAuthChallenge, + mock_sha1_digest: mock.MagicMock, +) -> None: + """Test digest auth with a server that doesn't provide a QoP parameter.""" + request_count = 0 + realm = no_qop_challenge["realm"] + nonce = no_qop_challenge["nonce"] + algorithm = no_qop_challenge["algorithm"] + username = "user" + password = "pass" + uri = "/" + + async def handler(request: Request) -> Response: + nonlocal request_count + request_count += 1 + + if request_count == 1: + # First request returns 401 with digest challenge without qop + challenge = ( + f'Digest realm="{realm}", nonce="{nonce}", algorithm={algorithm}' + ) + return Response( + status=401, + headers={"WWW-Authenticate": challenge}, + text="Unauthorized", + ) + + # Second request should have Authorization header + auth_header = request.headers.get(hdrs.AUTHORIZATION) + assert auth_header and auth_header.startswith("Digest ") + + # Successful auth should have no qop param + assert "qop=" not in auth_header + assert "nc=" not in auth_header + assert "cnonce=" not in auth_header + + expected_digest = compute_expected_digest( + algorithm=algorithm, + username=username, + password=password, + realm=realm, + nonce=nonce, + uri=uri, + method="GET", + qop="", # This is the key part - explicitly setting qop="" + nc="", # Not needed for non-qop digest + cnonce="", # Not needed for non-qop digest + ) + # We mock the cnonce, so we can check the expected digest + assert expected_digest in auth_header + + return Response(text="OK") + + app = Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app) + + async with ClientSession(middlewares=(digest_auth_mw,)) as session: + async with session.get(server.make_url("/")) as resp: + assert resp.status == 200 + text_content = await resp.text() + assert text_content == 
"OK" + + assert request_count == 2 # Initial request + retry with auth + + +async def test_digest_auth_without_opaque( + aiohttp_server: AiohttpServer, digest_auth_mw: DigestAuthMiddleware +) -> None: + """Test digest auth with a server that doesn't provide an opaque parameter.""" + request_count = 0 + + async def handler(request: Request) -> Response: + nonlocal request_count + request_count += 1 + + if request_count == 1: + # First request returns 401 with digest challenge without opaque + challenge = ( + 'Digest realm="test-realm", nonce="testnonce", ' + 'qop="auth", algorithm=MD5' + ) + return Response( + status=401, + headers={"WWW-Authenticate": challenge}, + text="Unauthorized", + ) + + # Second request should have Authorization header + auth_header = request.headers.get(hdrs.AUTHORIZATION) + assert auth_header and auth_header.startswith("Digest ") + # Successful auth should have no opaque param + assert "opaque=" not in auth_header + + return Response(text="OK") + + app = Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app) + + async with ClientSession(middlewares=(digest_auth_mw,)) as session: + async with session.get(server.make_url("/")) as resp: + assert resp.status == 200 + text_content = await resp.text() + assert text_content == "OK" + + assert request_count == 2 # Initial request + retry with auth + + +@pytest.mark.parametrize( + "www_authenticate", + [ + None, + "DigestWithoutSpace", + 'Basic realm="test"', + "Digest ", + "Digest =invalid, format", + ], +) +async def test_auth_header_no_retry( + aiohttp_server: AiohttpServer, + www_authenticate: str, + digest_auth_mw: DigestAuthMiddleware, +) -> None: + """Test that middleware doesn't retry with invalid WWW-Authenticate headers.""" + request_count = 0 + + async def handler(request: Request) -> Response: + nonlocal request_count + request_count += 1 + + # First (and only) request returns 401 + headers = {} + if www_authenticate is not None: + 
headers["WWW-Authenticate"] = www_authenticate + + # Use a custom HTTPUnauthorized instead of the helper since + # we're specifically testing malformed headers + return Response(status=401, headers=headers, text="Unauthorized") + + app = Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app) + + async with ClientSession(middlewares=(digest_auth_mw,)) as session: + async with session.get(server.make_url("/")) as resp: + assert resp.status == 401 + + # No retry should happen + assert request_count == 1 + + +async def test_direct_success_no_auth_needed( + aiohttp_server: AiohttpServer, digest_auth_mw: DigestAuthMiddleware +) -> None: + """Test middleware with a direct 200 response with no auth challenge.""" + request_count = 0 + + async def handler(request: Request) -> Response: + nonlocal request_count + request_count += 1 + + # Return success without auth challenge + return Response(text="OK") + + app = Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app) + + async with ClientSession(middlewares=(digest_auth_mw,)) as session: + async with session.get(server.make_url("/")) as resp: + text = await resp.text() + assert resp.status == 200 + assert text == "OK" + + # Verify only one request was made + assert request_count == 1 + + +async def test_no_retry_on_second_401( + aiohttp_server: AiohttpServer, digest_auth_mw: DigestAuthMiddleware +) -> None: + """Test digest auth does not retry on second 401.""" + request_count = 0 + + async def handler(request: Request) -> Response: + nonlocal request_count + request_count += 1 + + # Always return 401 challenge + challenge = 'Digest realm="test", nonce="abc123", qop="auth", algorithm=MD5' + return Response( + status=401, + headers={"WWW-Authenticate": challenge}, + text="Unauthorized", + ) + + app = Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app) + + # Create a session that uses the digest auth middleware + async with 
ClientSession(middlewares=(digest_auth_mw,)) as session: + async with session.get(server.make_url("/")) as resp: + await resp.text() + assert resp.status == 401 + + # Verify we made exactly 2 requests (initial + 1 retry) + assert request_count == 2 + + +@pytest.mark.parametrize( + ("status", "headers", "expected"), + [ + (200, {}, False), + (401, {"www-authenticate": ""}, False), + (401, {"www-authenticate": "DigestWithoutSpace"}, False), + (401, {"www-authenticate": "Basic realm=test"}, False), + (401, {"www-authenticate": "Digest "}, False), + (401, {"www-authenticate": "Digest =invalid, format"}, False), + ], + ids=[ + "different_status_code", + "empty_www_authenticate_header", + "no_space_after_scheme", + "different_scheme", + "empty_parameters", + "malformed_parameters", + ], +) +def test_authenticate_with_malformed_headers( + digest_auth_mw: DigestAuthMiddleware, + status: int, + headers: dict[str, str], + expected: bool, +) -> None: + """Test _authenticate method with various edge cases.""" + response = mock.MagicMock(spec=ClientResponse) + response.status = status + response.headers = headers + + result = digest_auth_mw._authenticate(response) + assert result == expected From d4eaf550c67515033db3d8fc726dc355662690e0 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Tue, 20 May 2025 11:54:11 -0400 Subject: [PATCH 1405/1511] [PR #10891/802152a backport][3.12] Fix flakey signal handling tests (#10896) --- tests/test_web_runner.py | 38 ++++++++++++++++++++++++++++---------- 1 file changed, 28 insertions(+), 10 deletions(-) diff --git a/tests/test_web_runner.py b/tests/test_web_runner.py index b71c34fe912..22ce3d00650 100644 --- a/tests/test_web_runner.py +++ b/tests/test_web_runner.py @@ -41,22 +41,40 @@ async def test_site_for_nonfrozen_app(make_runner: Any) -> None: platform.system() == "Windows", reason="the test is not valid for Windows" ) async def test_runner_setup_handle_signals(make_runner: Any) -> None: - runner = make_runner(handle_signals=True) - await runner.setup() - assert signal.getsignal(signal.SIGTERM) is not signal.SIG_DFL - await runner.cleanup() - assert signal.getsignal(signal.SIGTERM) is signal.SIG_DFL + # Save the original signal handler + original_handler = signal.getsignal(signal.SIGTERM) + try: + # Set a known state for the signal handler to avoid flaky tests + signal.signal(signal.SIGTERM, signal.SIG_DFL) + + runner = make_runner(handle_signals=True) + await runner.setup() + assert signal.getsignal(signal.SIGTERM) is not signal.SIG_DFL + await runner.cleanup() + assert signal.getsignal(signal.SIGTERM) is signal.SIG_DFL + finally: + # Restore original signal handler + signal.signal(signal.SIGTERM, original_handler) @pytest.mark.skipif( platform.system() == "Windows", reason="the test is not valid for Windows" ) async def test_runner_setup_without_signal_handling(make_runner: Any) -> None: - runner = make_runner(handle_signals=False) - await runner.setup() - assert signal.getsignal(signal.SIGTERM) is signal.SIG_DFL - await runner.cleanup() - assert signal.getsignal(signal.SIGTERM) is signal.SIG_DFL + # Save the original signal handler + original_handler = signal.getsignal(signal.SIGTERM) + try: + # Set a known state for the signal handler to avoid flaky tests + 
signal.signal(signal.SIGTERM, signal.SIG_DFL) + + runner = make_runner(handle_signals=False) + await runner.setup() + assert signal.getsignal(signal.SIGTERM) is signal.SIG_DFL + await runner.cleanup() + assert signal.getsignal(signal.SIGTERM) is signal.SIG_DFL + finally: + # Restore original signal handler + signal.signal(signal.SIGTERM, original_handler) async def test_site_double_added(make_runner: Any) -> None: From c8c3d5f2fd7d91b4af05b2ff848d0b3bd73e0cb2 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 20 May 2025 15:06:25 -0400 Subject: [PATCH 1406/1511] [PR #10898/a4be2cb backport][3.12] Cleanup tests to ensure connector cleanup and resource management (#10900) --- tests/test_connector.py | 106 ++++++++++++++++++--------------- tests/test_proxy_functional.py | 18 +++--- 2 files changed, 69 insertions(+), 55 deletions(-) diff --git a/tests/test_connector.py b/tests/test_connector.py index db0514e5f0d..fd2cdac7a94 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -307,80 +307,90 @@ async def test_close(loop) -> None: async def test_get(loop: asyncio.AbstractEventLoop, key: ConnectionKey) -> None: conn = aiohttp.BaseConnector() - assert await conn._get(key, []) is None + try: + assert await conn._get(key, []) is None - proto = create_mocked_conn(loop) - conn._conns[key] = deque([(proto, loop.time())]) - connection = await conn._get(key, []) - assert connection is not None - assert connection.protocol == proto - connection.close() - await conn.close() + proto = create_mocked_conn(loop) + conn._conns[key] = deque([(proto, loop.time())]) + connection = await conn._get(key, []) + assert connection is not None + assert connection.protocol == proto + connection.close() + finally: + await conn.close() async def test_get_unconnected_proto(loop) -> None: conn = aiohttp.BaseConnector() key = ConnectionKey("localhost", 80, False, False, None, None, None) - assert await conn._get(key, []) is None - - proto = 
create_mocked_conn(loop) - conn._conns[key] = deque([(proto, loop.time())]) - connection = await conn._get(key, []) - assert connection is not None - assert connection.protocol == proto - connection.close() + try: + assert await conn._get(key, []) is None - assert await conn._get(key, []) is None - conn._conns[key] = deque([(proto, loop.time())]) - proto.is_connected = lambda *args: False - assert await conn._get(key, []) is None - await conn.close() + proto = create_mocked_conn(loop) + conn._conns[key] = deque([(proto, loop.time())]) + connection = await conn._get(key, []) + assert connection is not None + assert connection.protocol == proto + connection.close() + + assert await conn._get(key, []) is None + conn._conns[key] = deque([(proto, loop.time())]) + proto.is_connected = lambda *args: False + assert await conn._get(key, []) is None + finally: + await conn.close() async def test_get_unconnected_proto_ssl(loop) -> None: conn = aiohttp.BaseConnector() key = ConnectionKey("localhost", 80, True, False, None, None, None) - assert await conn._get(key, []) is None - - proto = create_mocked_conn(loop) - conn._conns[key] = deque([(proto, loop.time())]) - connection = await conn._get(key, []) - assert connection is not None - assert connection.protocol == proto - connection.close() + try: + assert await conn._get(key, []) is None - assert await conn._get(key, []) is None - conn._conns[key] = deque([(proto, loop.time())]) - proto.is_connected = lambda *args: False - assert await conn._get(key, []) is None - await conn.close() + proto = create_mocked_conn(loop) + conn._conns[key] = deque([(proto, loop.time())]) + connection = await conn._get(key, []) + assert connection is not None + assert connection.protocol == proto + connection.close() + + assert await conn._get(key, []) is None + conn._conns[key] = deque([(proto, loop.time())]) + proto.is_connected = lambda *args: False + assert await conn._get(key, []) is None + finally: + await conn.close() async def 
test_get_expired(loop: asyncio.AbstractEventLoop) -> None: conn = aiohttp.BaseConnector() key = ConnectionKey("localhost", 80, False, False, None, None, None) - assert await conn._get(key, []) is None + try: + assert await conn._get(key, []) is None - proto = mock.Mock() - conn._conns[key] = deque([(proto, loop.time() - 1000)]) - assert await conn._get(key, []) is None - assert not conn._conns - await conn.close() + proto = create_mocked_conn(loop) + conn._conns[key] = deque([(proto, loop.time() - 1000)]) + assert await conn._get(key, []) is None + assert not conn._conns + finally: + await conn.close() @pytest.mark.usefixtures("enable_cleanup_closed") async def test_get_expired_ssl(loop: asyncio.AbstractEventLoop) -> None: conn = aiohttp.BaseConnector(enable_cleanup_closed=True) key = ConnectionKey("localhost", 80, True, False, None, None, None) - assert await conn._get(key, []) is None + try: + assert await conn._get(key, []) is None - proto = mock.Mock() - transport = proto.transport - conn._conns[key] = deque([(proto, loop.time() - 1000)]) - assert await conn._get(key, []) is None - assert not conn._conns - assert conn._cleanup_closed_transports == [transport] - await conn.close() + proto = create_mocked_conn(loop) + transport = proto.transport + conn._conns[key] = deque([(proto, loop.time() - 1000)]) + assert await conn._get(key, []) is None + assert not conn._conns + assert conn._cleanup_closed_transports == [transport] + finally: + await conn.close() async def test_release_acquired(loop, key) -> None: diff --git a/tests/test_proxy_functional.py b/tests/test_proxy_functional.py index c6c6ac67c1b..f86975b7423 100644 --- a/tests/test_proxy_functional.py +++ b/tests/test_proxy_functional.py @@ -220,14 +220,18 @@ async def test_uvloop_secure_https_proxy( """Ensure HTTPS sites are accessible through a secure proxy without warning when using uvloop.""" conn = aiohttp.TCPConnector() sess = aiohttp.ClientSession(connector=conn) - url = URL("https://example.com") - - 
async with sess.get(url, proxy=secure_proxy_url, ssl=client_ssl_ctx) as response: - assert response.status == 200 + try: + url = URL("https://example.com") - await sess.close() - await conn.close() - await asyncio.sleep(0.1) + async with sess.get( + url, proxy=secure_proxy_url, ssl=client_ssl_ctx + ) as response: + assert response.status == 200 + finally: + await sess.close() + await conn.close() + await asyncio.sleep(0) + await asyncio.sleep(0.1) @pytest.fixture From 64fc60030872b8b64fd676de0ed786512a277497 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Tue, 20 May 2025 16:30:11 -0400 Subject: [PATCH 1407/1511] [PR #10897/4624fed backport][3.12] Fix DNS resolver object churn for multiple sessions (#10906) --- CHANGES/10847.feature.rst | 5 + aiohttp/resolver.py | 84 +++++++++++- tests/test_resolver.py | 268 ++++++++++++++++++++++++++++++++++---- 3 files changed, 334 insertions(+), 23 deletions(-) create mode 100644 CHANGES/10847.feature.rst diff --git a/CHANGES/10847.feature.rst b/CHANGES/10847.feature.rst new file mode 100644 index 00000000000..bfa7f6d498a --- /dev/null +++ b/CHANGES/10847.feature.rst @@ -0,0 +1,5 @@ +Implemented shared DNS resolver management to fix excessive resolver object creation +when using multiple client sessions. The new ``_DNSResolverManager`` singleton ensures +only one ``DNSResolver`` object is created for default configurations, significantly +reducing resource usage and improving performance for applications using multiple +client sessions simultaneously -- by :user:`bdraco`. 
diff --git a/aiohttp/resolver.py b/aiohttp/resolver.py index a5af5fddda6..8e73beb6e1e 100644 --- a/aiohttp/resolver.py +++ b/aiohttp/resolver.py @@ -1,5 +1,6 @@ import asyncio import socket +import weakref from typing import Any, Dict, Final, List, Optional, Tuple, Type, Union from .abc import AbstractResolver, ResolveResult @@ -93,7 +94,17 @@ def __init__( if aiodns is None: raise RuntimeError("Resolver requires aiodns library") - self._resolver = aiodns.DNSResolver(*args, **kwargs) + self._loop = asyncio.get_running_loop() + self._manager: Optional[_DNSResolverManager] = None + # If custom args are provided, create a dedicated resolver instance + # This means each AsyncResolver with custom args gets its own + # aiodns.DNSResolver instance + if args or kwargs: + self._resolver = aiodns.DNSResolver(*args, **kwargs) + return + # Use the shared resolver from the manager for default arguments + self._manager = _DNSResolverManager() + self._resolver = self._manager.get_resolver(self, self._loop) if not hasattr(self._resolver, "gethostbyname"): # aiodns 1.1 is not available, fallback to DNSResolver.query @@ -180,7 +191,78 @@ async def _resolve_with_query( return hosts async def close(self) -> None: + if self._manager: + # Release the resolver from the manager if using the shared resolver + self._manager.release_resolver(self, self._loop) + self._manager = None # Clear reference to manager + self._resolver = None # type: ignore[assignment] # Clear reference to resolver + return + # Otherwise cancel our dedicated resolver self._resolver.cancel() + self._resolver = None # type: ignore[assignment] # Clear reference + + +class _DNSResolverManager: + """Manager for aiodns.DNSResolver objects. + + This class manages shared aiodns.DNSResolver instances + with no custom arguments across different event loops. 
+ """ + + _instance: Optional["_DNSResolverManager"] = None + + def __new__(cls) -> "_DNSResolverManager": + if cls._instance is None: + cls._instance = super().__new__(cls) + cls._instance._init() + return cls._instance + + def _init(self) -> None: + # Use WeakKeyDictionary to allow event loops to be garbage collected + self._loop_data: weakref.WeakKeyDictionary[ + asyncio.AbstractEventLoop, + tuple["aiodns.DNSResolver", weakref.WeakSet["AsyncResolver"]], + ] = weakref.WeakKeyDictionary() + + def get_resolver( + self, client: "AsyncResolver", loop: asyncio.AbstractEventLoop + ) -> "aiodns.DNSResolver": + """Get or create the shared aiodns.DNSResolver instance for a specific event loop. + + Args: + client: The AsyncResolver instance requesting the resolver. + This is required to track resolver usage. + loop: The event loop to use for the resolver. + """ + # Create a new resolver and client set for this loop if it doesn't exist + if loop not in self._loop_data: + resolver = aiodns.DNSResolver(loop=loop) + client_set: weakref.WeakSet["AsyncResolver"] = weakref.WeakSet() + self._loop_data[loop] = (resolver, client_set) + else: + # Get the existing resolver and client set + resolver, client_set = self._loop_data[loop] + + # Register this client with the loop + client_set.add(client) + return resolver + + def release_resolver( + self, client: "AsyncResolver", loop: asyncio.AbstractEventLoop + ) -> None: + """Release the resolver for an AsyncResolver client when it's closed. + + Args: + client: The AsyncResolver instance to release. + loop: The event loop the resolver was using. 
+ """ + # Remove client from its loop's tracking + resolver, client_set = self._loop_data[loop] + client_set.discard(client) + # If no more clients for this loop, cancel and remove its resolver + if not client_set: + resolver.cancel() + del self._loop_data[loop] _DefaultType = Type[Union[AsyncResolver, ThreadedResolver]] diff --git a/tests/test_resolver.py b/tests/test_resolver.py index b4606067079..9a6a782c06a 100644 --- a/tests/test_resolver.py +++ b/tests/test_resolver.py @@ -1,6 +1,8 @@ import asyncio +import gc import ipaddress import socket +from collections.abc import Generator from ipaddress import ip_address from typing import Any, Awaitable, Callable, Collection, List, NamedTuple, Tuple, Union from unittest.mock import Mock, create_autospec, patch @@ -12,6 +14,7 @@ AsyncResolver, DefaultResolver, ThreadedResolver, + _DNSResolverManager, ) try: @@ -23,6 +26,48 @@ getaddrinfo = False +@pytest.fixture() +def check_no_lingering_resolvers() -> Generator[None, None, None]: + """Verify no resolvers remain after the test. + + This fixture should be used in any test that creates instances of + AsyncResolver or directly uses _DNSResolverManager. + """ + manager = _DNSResolverManager() + before = len(manager._loop_data) + yield + after = len(manager._loop_data) + if after > before: # pragma: no branch + # Force garbage collection to ensure weak references are updated + gc.collect() # pragma: no cover + after = len(manager._loop_data) # pragma: no cover + if after > before: # pragma: no cover + pytest.fail( # pragma: no cover + f"Lingering resolvers found: {(after - before)} " + "new AsyncResolver instances were not properly closed." + ) + + +@pytest.fixture() +def dns_resolver_manager() -> Generator[_DNSResolverManager, None, None]: + """Create a fresh _DNSResolverManager instance for testing. + + Saves and restores the singleton state to avoid affecting other tests. 
+ """ + # Save the original instance + original_instance = _DNSResolverManager._instance + + # Reset the singleton + _DNSResolverManager._instance = None + + # Create and yield a fresh instance + try: + yield _DNSResolverManager() + finally: + # Clean up and restore the original instance + _DNSResolverManager._instance = original_instance + + class FakeAIODNSAddrInfoNode(NamedTuple): family: int @@ -117,7 +162,10 @@ async def fake(*args: Any, **kwargs: Any) -> Tuple[str, int]: @pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") -async def test_async_resolver_positive_ipv4_lookup(loop: Any) -> None: +@pytest.mark.usefixtures("check_no_lingering_resolvers") +async def test_async_resolver_positive_ipv4_lookup( + loop: asyncio.AbstractEventLoop, +) -> None: with patch("aiodns.DNSResolver") as mock: mock().getaddrinfo.return_value = fake_aiodns_getaddrinfo_ipv4_result( ["127.0.0.1"] @@ -132,10 +180,14 @@ async def test_async_resolver_positive_ipv4_lookup(loop: Any) -> None: port=0, type=socket.SOCK_STREAM, ) + await resolver.close() @pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") -async def test_async_resolver_positive_link_local_ipv6_lookup(loop: Any) -> None: +@pytest.mark.usefixtures("check_no_lingering_resolvers") +async def test_async_resolver_positive_link_local_ipv6_lookup( + loop: asyncio.AbstractEventLoop, +) -> None: with patch("aiodns.DNSResolver") as mock: mock().getaddrinfo.return_value = fake_aiodns_getaddrinfo_ipv6_result( ["fe80::1"] @@ -154,46 +206,44 @@ async def test_async_resolver_positive_link_local_ipv6_lookup(loop: Any) -> None type=socket.SOCK_STREAM, ) mock().getnameinfo.assert_called_with(("fe80::1", 0, 0, 3), _NAME_SOCKET_FLAGS) + await resolver.close() @pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") -async def test_async_resolver_multiple_replies(loop: Any) -> None: +@pytest.mark.usefixtures("check_no_lingering_resolvers") +async def test_async_resolver_multiple_replies(loop: 
asyncio.AbstractEventLoop) -> None: with patch("aiodns.DNSResolver") as mock: ips = ["127.0.0.1", "127.0.0.2", "127.0.0.3", "127.0.0.4"] mock().getaddrinfo.return_value = fake_aiodns_getaddrinfo_ipv4_result(ips) resolver = AsyncResolver() real = await resolver.resolve("www.google.com") - ips = [ipaddress.ip_address(x["host"]) for x in real] - assert len(ips) > 3, "Expecting multiple addresses" - - -@pytest.mark.skipif(aiodns is None, reason="aiodns required") -async def test_async_resolver_query_multiple_replies(loop) -> None: - with patch("aiodns.DNSResolver") as mock: - del mock().gethostbyname - ips = ["127.0.0.1", "127.0.0.2", "127.0.0.3", "127.0.0.4"] - mock().query.return_value = fake_query_result(ips) - resolver = AsyncResolver(loop=loop) - real = await resolver.resolve("www.google.com") - ips = [ipaddress.ip_address(x["host"]) for x in real] + ipaddrs = [ipaddress.ip_address(x["host"]) for x in real] + assert len(ipaddrs) > 3, "Expecting multiple addresses" + await resolver.close() @pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") -async def test_async_resolver_negative_lookup(loop: Any) -> None: +@pytest.mark.usefixtures("check_no_lingering_resolvers") +async def test_async_resolver_negative_lookup(loop: asyncio.AbstractEventLoop) -> None: with patch("aiodns.DNSResolver") as mock: mock().getaddrinfo.side_effect = aiodns.error.DNSError() resolver = AsyncResolver() with pytest.raises(OSError): await resolver.resolve("doesnotexist.bla") + await resolver.close() @pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") -async def test_async_resolver_no_hosts_in_getaddrinfo(loop: Any) -> None: +@pytest.mark.usefixtures("check_no_lingering_resolvers") +async def test_async_resolver_no_hosts_in_getaddrinfo( + loop: asyncio.AbstractEventLoop, +) -> None: with patch("aiodns.DNSResolver") as mock: mock().getaddrinfo.return_value = fake_aiodns_getaddrinfo_ipv4_result([]) resolver = AsyncResolver() with pytest.raises(OSError): await 
resolver.resolve("doesnotexist.bla") + await resolver.close() async def test_threaded_resolver_positive_lookup() -> None: @@ -294,8 +344,9 @@ async def test_close_for_threaded_resolver(loop) -> None: @pytest.mark.skipif(aiodns is None, reason="aiodns required") -async def test_close_for_async_resolver(loop) -> None: - resolver = AsyncResolver(loop=loop) +@pytest.mark.usefixtures("check_no_lingering_resolvers") +async def test_close_for_async_resolver(loop: asyncio.AbstractEventLoop) -> None: + resolver = AsyncResolver() await resolver.close() @@ -306,7 +357,10 @@ async def test_default_loop_for_threaded_resolver(loop) -> None: @pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") -async def test_async_resolver_ipv6_positive_lookup(loop: Any) -> None: +@pytest.mark.usefixtures("check_no_lingering_resolvers") +async def test_async_resolver_ipv6_positive_lookup( + loop: asyncio.AbstractEventLoop, +) -> None: with patch("aiodns.DNSResolver") as mock: mock().getaddrinfo.return_value = fake_aiodns_getaddrinfo_ipv6_result(["::1"]) resolver = AsyncResolver() @@ -319,6 +373,7 @@ async def test_async_resolver_ipv6_positive_lookup(loop: Any) -> None: port=0, type=socket.SOCK_STREAM, ) + await resolver.close() @pytest.mark.skipif(aiodns is None, reason="aiodns required") @@ -363,6 +418,7 @@ async def test_async_resolver_query_fallback_error_messages_passed_no_hosts( @pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +@pytest.mark.usefixtures("check_no_lingering_resolvers") async def test_async_resolver_error_messages_passed( loop: asyncio.AbstractEventLoop, ) -> None: @@ -374,9 +430,11 @@ async def test_async_resolver_error_messages_passed( await resolver.resolve("x.org") assert excinfo.value.strerror == "Test error message" + await resolver.close() @pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +@pytest.mark.usefixtures("check_no_lingering_resolvers") async def test_async_resolver_error_messages_passed_no_hosts( 
loop: asyncio.AbstractEventLoop, ) -> None: @@ -388,15 +446,20 @@ async def test_async_resolver_error_messages_passed_no_hosts( await resolver.resolve("x.org") assert excinfo.value.strerror == "DNS lookup failed" + await resolver.close() -async def test_async_resolver_aiodns_not_present(loop: Any, monkeypatch: Any) -> None: +@pytest.mark.usefixtures("check_no_lingering_resolvers") +async def test_async_resolver_aiodns_not_present( + loop: asyncio.AbstractEventLoop, monkeypatch: pytest.MonkeyPatch +) -> None: monkeypatch.setattr("aiohttp.resolver.aiodns", None) with pytest.raises(RuntimeError): AsyncResolver(loop=loop) @pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +@pytest.mark.usefixtures("check_no_lingering_resolvers") def test_aio_dns_is_default() -> None: assert DefaultResolver is AsyncResolver @@ -404,3 +467,164 @@ def test_aio_dns_is_default() -> None: @pytest.mark.skipif(getaddrinfo, reason="aiodns <3.2.0 required") def test_threaded_resolver_is_default() -> None: assert DefaultResolver is ThreadedResolver + + +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +async def test_dns_resolver_manager_sharing( + dns_resolver_manager: _DNSResolverManager, +) -> None: + """Test that the DNSResolverManager shares a resolver among AsyncResolver instances.""" + # Create two default AsyncResolver instances + resolver1 = AsyncResolver() + resolver2 = AsyncResolver() + + # Check that they share the same underlying resolver + assert resolver1._resolver is resolver2._resolver + + # Create an AsyncResolver with custom args + resolver3 = AsyncResolver(nameservers=["8.8.8.8"]) + + # Check that it has its own resolver + assert resolver1._resolver is not resolver3._resolver + + # Cleanup + await resolver1.close() + await resolver2.close() + await resolver3.close() + + +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +async def test_dns_resolver_manager_singleton( + dns_resolver_manager: _DNSResolverManager, +) 
-> None: + """Test that DNSResolverManager is a singleton.""" + # Create a second manager and check it's the same instance + manager1 = dns_resolver_manager + manager2 = _DNSResolverManager() + + assert manager1 is manager2 + + +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +async def test_dns_resolver_manager_resolver_lifecycle( + dns_resolver_manager: _DNSResolverManager, +) -> None: + """Test that DNSResolverManager creates and destroys resolver correctly.""" + manager = dns_resolver_manager + + # Initially there should be no resolvers + assert not manager._loop_data + + # Create a mock AsyncResolver for testing + mock_client = Mock(spec=AsyncResolver) + mock_client._loop = asyncio.get_running_loop() + + # Getting resolver should create one + mock_loop = mock_client._loop + resolver = manager.get_resolver(mock_client, mock_loop) + assert resolver is not None + assert manager._loop_data[mock_loop][0] is resolver + + # Getting it again should return the same instance + assert manager.get_resolver(mock_client, mock_loop) is resolver + + # Clean up + manager.release_resolver(mock_client, mock_loop) + assert not manager._loop_data + + +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +async def test_dns_resolver_manager_client_registration( + dns_resolver_manager: _DNSResolverManager, +) -> None: + """Test client registration and resolver release logic.""" + with patch("aiodns.DNSResolver") as mock: + # Create resolver instances + resolver1 = AsyncResolver() + resolver2 = AsyncResolver() + + # Both should use the same resolver from the manager + assert resolver1._resolver is resolver2._resolver + + # The manager should be tracking both clients + assert resolver1._manager is resolver2._manager + manager = resolver1._manager + assert manager is not None + loop = asyncio.get_running_loop() + _, client_set = manager._loop_data[loop] + assert len(client_set) == 2 + + # Close one resolver + await resolver1.close() + _, 
client_set = manager._loop_data[loop] + assert len(client_set) == 1 + + # Resolver should still exist + assert manager._loop_data # Not empty + + # Close the second resolver + await resolver2.close() + assert not manager._loop_data # Should be empty after closing all clients + + # Now all resolvers should be canceled and removed + assert not manager._loop_data # Should be empty + mock().cancel.assert_called_once() + + +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +async def test_dns_resolver_manager_multiple_event_loops( + dns_resolver_manager: _DNSResolverManager, +) -> None: + """Test that DNSResolverManager correctly manages resolvers across different event loops.""" + # Create separate resolvers for each loop + resolver1 = Mock(name="resolver1") + resolver2 = Mock(name="resolver2") + + # Create a patch that returns different resolvers based on the loop argument + mock_resolver = Mock() + mock_resolver.side_effect = lambda loop=None, **kwargs: ( + resolver1 if loop is asyncio.get_running_loop() else resolver2 + ) + + with patch("aiodns.DNSResolver", mock_resolver): + manager = dns_resolver_manager + + # Create two mock clients on different loops + mock_client1 = Mock(spec=AsyncResolver) + mock_client1._loop = asyncio.get_running_loop() + + # Create a second event loop + loop2 = Mock(spec=asyncio.AbstractEventLoop) + mock_client2 = Mock(spec=AsyncResolver) + mock_client2._loop = loop2 + + # Get resolvers for both clients + loop1 = mock_client1._loop + loop2 = mock_client2._loop + + # Get the resolvers through the manager + manager_resolver1 = manager.get_resolver(mock_client1, loop1) + manager_resolver2 = manager.get_resolver(mock_client2, loop2) + + # Should be different resolvers for different loops + assert manager_resolver1 is resolver1 + assert manager_resolver2 is resolver2 + assert manager._loop_data[loop1][0] is resolver1 + assert manager._loop_data[loop2][0] is resolver2 + + # Release the first resolver + 
manager.release_resolver(mock_client1, loop1) + + # First loop's resolver should be gone, but second should remain + assert loop1 not in manager._loop_data + assert loop2 in manager._loop_data + + # Release the second resolver + manager.release_resolver(mock_client2, loop2) + + # Both resolvers should be gone + assert not manager._loop_data + + # Verify resolver cleanup + resolver1.cancel.assert_called_once() + resolver2.cancel.assert_called_once() From f543fea73a8f3ea47d9e9de808eda63bed4d9e63 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 20 May 2025 20:38:08 +0000 Subject: [PATCH 1408/1511] [PR #10902/94de3f9d backport][3.12] Middleware cleanups (#10904) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/10902.feature.rst | 1 + aiohttp/client.py | 8 +- aiohttp/client_middlewares.py | 7 +- docs/client_advanced.rst | 288 ++++++++++++++++---------------- docs/client_reference.rst | 23 ++- tests/test_client_middleware.py | 126 +++++++++----- 6 files changed, 255 insertions(+), 198 deletions(-) create mode 120000 CHANGES/10902.feature.rst diff --git a/CHANGES/10902.feature.rst b/CHANGES/10902.feature.rst new file mode 120000 index 00000000000..b565aa68ee0 --- /dev/null +++ b/CHANGES/10902.feature.rst @@ -0,0 +1 @@ +9732.feature.rst \ No newline at end of file diff --git a/aiohttp/client.py b/aiohttp/client.py index 2b7afe1344c..bea1c6f61e7 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -24,6 +24,7 @@ List, Mapping, Optional, + Sequence, Set, Tuple, Type, @@ -192,7 +193,7 @@ class _RequestOptions(TypedDict, total=False): auto_decompress: Union[bool, None] max_line_size: Union[int, None] max_field_size: Union[int, None] - middlewares: Optional[Tuple[ClientMiddlewareType, ...]] + middlewares: Optional[Sequence[ClientMiddlewareType]] @attr.s(auto_attribs=True, frozen=True, slots=True) @@ -301,7 +302,7 @@ def __init__( max_line_size: int = 8190, max_field_size: int = 8190, 
fallback_charset_resolver: _CharsetResolver = lambda r, b: "utf-8", - middlewares: Optional[Tuple[ClientMiddlewareType, ...]] = None, + middlewares: Optional[Sequence[ClientMiddlewareType]] = None, ) -> None: # We initialise _connector to None immediately, as it's referenced in __del__() # and could cause issues if an exception occurs during initialisation. @@ -505,7 +506,7 @@ async def _request( auto_decompress: Optional[bool] = None, max_line_size: Optional[int] = None, max_field_size: Optional[int] = None, - middlewares: Optional[Tuple[ClientMiddlewareType, ...]] = None, + middlewares: Optional[Sequence[ClientMiddlewareType]] = None, ) -> ClientResponse: # NOTE: timeout clamps existing connect and read timeouts. We cannot @@ -705,7 +706,6 @@ async def _request( trust_env=self.trust_env, ) - # Core request handler - now includes connection logic async def _connect_and_send_request( req: ClientRequest, ) -> ClientResponse: diff --git a/aiohttp/client_middlewares.py b/aiohttp/client_middlewares.py index 6be353c3a40..3ca2cb202ad 100644 --- a/aiohttp/client_middlewares.py +++ b/aiohttp/client_middlewares.py @@ -1,6 +1,6 @@ """Client middleware support.""" -from collections.abc import Awaitable, Callable +from collections.abc import Awaitable, Callable, Sequence from .client_reqrep import ClientRequest, ClientResponse @@ -17,7 +17,7 @@ def build_client_middlewares( handler: ClientHandlerType, - middlewares: tuple[ClientMiddlewareType, ...], + middlewares: Sequence[ClientMiddlewareType], ) -> ClientHandlerType: """ Apply middlewares to request handler. @@ -28,9 +28,6 @@ def build_client_middlewares( This implementation avoids using partial/update_wrapper to minimize overhead and doesn't cache to avoid holding references to stateful middleware. 
""" - if not middlewares: - return handler - # Optimize for single middleware case if len(middlewares) == 1: middleware = middlewares[0] diff --git a/docs/client_advanced.rst b/docs/client_advanced.rst index 9affef7efe2..d598a40c6ab 100644 --- a/docs/client_advanced.rst +++ b/docs/client_advanced.rst @@ -123,32 +123,18 @@ background. Client Middleware ----------------- -aiohttp client supports middleware to intercept requests and responses. This can be +The client supports middleware to intercept requests and responses. This can be useful for authentication, logging, request/response modification, and retries. -To create a middleware, you need to define an async function that accepts the request -and a handler function, and returns the response. The middleware must match the -:type:`ClientMiddlewareType` type signature:: - - import logging - from aiohttp import ClientSession, ClientRequest, ClientResponse, ClientHandlerType - - _LOGGER = logging.getLogger(__name__) - - async def my_middleware( - request: ClientRequest, - handler: ClientHandlerType - ) -> ClientResponse: - # Process request before sending - _LOGGER.debug(f"Request: {request.method} {request.url}") - - # Call the next handler - response = await handler(request) +Creating Middleware +^^^^^^^^^^^^^^^^^^^ - # Process response after receiving - _LOGGER.debug(f"Response: {response.status}") +To create a middleware, define an async function (or callable class) that accepts a request +and a handler function, and returns a response. Middleware must follow the +:type:`ClientMiddlewareType` signature (see :ref:`aiohttp-client-reference` for details). 
- return response +Using Middleware +^^^^^^^^^^^^^^^^ You can apply middleware to a client session or to individual requests:: @@ -160,175 +146,189 @@ You can apply middleware to a client session or to individual requests:: async with ClientSession() as session: resp = await session.get('http://example.com', middlewares=(my_middleware,)) -Middleware Examples +Middleware Chaining ^^^^^^^^^^^^^^^^^^^ -Here's a simple example showing request modification:: +Multiple middlewares are applied in the order they are listed:: - async def add_api_key_middleware( - request: ClientRequest, - handler: ClientHandlerType - ) -> ClientResponse: - # Add API key to all requests - request.headers['X-API-Key'] = 'my-secret-key' - return await handler(request) + # Middlewares are applied in order: logging -> auth -> request + async with ClientSession(middlewares=(logging_middleware, auth_middleware)) as session: + resp = await session.get('http://example.com') + +A key aspect to understand about the flat middleware structure is that the execution flow follows this pattern: + +1. The first middleware in the list is called first and executes its code before calling the handler +2. The handler is the next middleware in the chain (or the actual request handler if there are no more middleware) +3. When the handler returns a response, execution continues in the first middleware after the handler call +4. This creates a nested "onion-like" pattern for execution + +For example, with ``middlewares=(middleware1, middleware2)``, the execution order would be: + +1. Enter ``middleware1`` (pre-request code) +2. Enter ``middleware2`` (pre-request code) +3. Execute the actual request handler +4. Exit ``middleware2`` (post-response code) +5. Exit ``middleware1`` (post-response code) + +This flat structure means that middleware is applied on each retry attempt inside the client's retry loop, not just once before all retries. This allows middleware to modify requests freshly on each retry attempt. + +.. 
note:: + + Client middleware is a powerful feature but should be used judiciously. + Each middleware adds overhead to request processing. For simple use cases + like adding static headers, you can often use request parameters + (e.g., ``headers``) or session configuration instead. + +Common Middleware Patterns +^^^^^^^^^^^^^^^^^^^^^^^^^^ .. _client-middleware-retry: -Middleware Retry Pattern -^^^^^^^^^^^^^^^^^^^^^^^^ +Authentication and Retry +"""""""""""""""""""""""" -Client middleware can implement retry logic internally using a ``while`` loop. This allows the middleware to: +There are two recommended approaches for implementing retry logic: -- Retry requests based on response status codes or other conditions -- Modify the request between retries (e.g., refreshing tokens) -- Maintain state across retry attempts -- Control when to stop retrying and return the response +1. **For Loop Pattern (Simple Cases)** -This pattern is particularly useful for: + Use a bounded ``for`` loop when the number of retry attempts is known and fixed:: -- Refreshing authentication tokens after a 401 response -- Switching to fallback servers or authentication methods -- Adding or modifying headers based on error responses -- Implementing back-off strategies with increasing delays + import hashlib + from aiohttp import ClientSession, ClientRequest, ClientResponse, ClientHandlerType -The middleware can maintain state between retries to track which strategies have been tried and modify the request accordingly for the next attempt. 
+ async def auth_retry_middleware( + request: ClientRequest, + handler: ClientHandlerType + ) -> ClientResponse: + # Try up to 3 authentication methods + for attempt in range(3): + if attempt == 0: + # First attempt: use API key + request.headers["X-API-Key"] = "my-api-key" + elif attempt == 1: + # Second attempt: use Bearer token + request.headers["Authorization"] = "Bearer fallback-token" + else: + # Third attempt: use hash-based signature + secret_key = "my-secret-key" + url_path = str(request.url.path) + signature = hashlib.sha256(f"{url_path}{secret_key}".encode()).hexdigest() + request.headers["X-Signature"] = signature -Example: Retrying requests with middleware -"""""""""""""""""""""""""""""""""""""""""" + # Send the request + response = await handler(request) -:: + # If successful or not an auth error, return immediately + if response.status != 401: + return response - import logging - import aiohttp + # Return the last response if all retries are exhausted + return response - _LOGGER = logging.getLogger(__name__) +2. 
**While Loop Pattern (Complex Cases)** - class RetryMiddleware: - def __init__(self, max_retries: int = 3): - self.max_retries = max_retries + For more complex scenarios, use a ``while`` loop with strict exit conditions:: - async def __call__( - self, - request: ClientRequest, - handler: ClientHandlerType - ) -> ClientResponse: - retry_count = 0 - use_fallback_auth = False + import logging - while True: - # Modify request based on retry state - if use_fallback_auth: - request.headers['Authorization'] = 'Bearer fallback-token' + _LOGGER = logging.getLogger(__name__) - response = await handler(request) + class RetryMiddleware: + def __init__(self, max_retries: int = 3): + self.max_retries = max_retries - # Retry on 401 errors with different authentication - if response.status == 401 and retry_count < self.max_retries: - retry_count += 1 - use_fallback_auth = True - _LOGGER.debug(f"Retrying with fallback auth (attempt {retry_count})") - continue + async def __call__( + self, + request: ClientRequest, + handler: ClientHandlerType + ) -> ClientResponse: + retry_count = 0 - # Retry on 5xx errors - if response.status >= 500 and retry_count < self.max_retries: - retry_count += 1 - _LOGGER.debug(f"Retrying request (attempt {retry_count})") - continue + # Always have clear exit conditions + while retry_count <= self.max_retries: + # Send the request + response = await handler(request) - return response + # Exit conditions + if 200 <= response.status < 400 or retry_count >= self.max_retries: + return response -Middleware Chaining -^^^^^^^^^^^^^^^^^^^ + # Retry logic for different status codes + if response.status in (401, 429, 500, 502, 503, 504): + retry_count += 1 + _LOGGER.debug(f"Retrying request (attempt {retry_count}/{self.max_retries})") + continue -Multiple middlewares are applied in the order they are listed:: + # For any other status code, don't retry + return response - import logging + # Safety return (should never reach here) + return response - _LOGGER = 
logging.getLogger(__name__) +Request Modification +"""""""""""""""""""" - async def logging_middleware( - request: ClientRequest, - handler: ClientHandlerType - ) -> ClientResponse: - _LOGGER.debug(f"[LOG] {request.method} {request.url}") - return await handler(request) +Modify request properties based on request content:: - async def auth_middleware( + async def content_type_middleware( request: ClientRequest, handler: ClientHandlerType ) -> ClientResponse: - request.headers['Authorization'] = 'Bearer token123' - return await handler(request) + # Examine URL path to determine content-type + if request.url.path.endswith('.json'): + request.headers['Content-Type'] = 'application/json' + elif request.url.path.endswith('.xml'): + request.headers['Content-Type'] = 'application/xml' - # Middlewares are applied in order: logging -> auth -> request - async with ClientSession(middlewares=(logging_middleware, auth_middleware)) as session: - resp = await session.get('http://example.com') + # Add custom headers based on HTTP method + if request.method == 'POST': + request.headers['X-Request-ID'] = f"post-{id(request)}" -.. note:: + return await handler(request) - Client middleware is a powerful feature but should be used judiciously. - Each middleware adds overhead to request processing. For simple use cases - like adding static headers, you can often use request parameters - (e.g., ``headers``) or session configuration instead. +Avoiding Infinite Recursion +^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. warning:: Using the same session from within middleware can cause infinite recursion if the middleware makes HTTP requests using the same session that has the middleware - applied. 
- - To avoid recursion, use one of these approaches: - - **Recommended:** Pass ``middlewares=()`` to requests made inside the middleware to - disable middleware for those specific requests:: - - async def log_middleware( - request: ClientRequest, - handler: ClientHandlerType - ) -> ClientResponse: - async with request.session.post( - "https://logapi.example/log", - json={"url": str(request.url)}, - middlewares=() # This prevents infinite recursion - ) as resp: - pass + applied. This is especially risky in token refresh middleware or retry logic. - return await handler(request) + When implementing retry or refresh logic, always use bounded loops + (e.g., ``for _ in range(2):`` instead of ``while True:``) to prevent infinite recursion. - **Alternative:** Check the request contents (URL, path, host) to avoid applying - middleware to certain requests:: +To avoid recursion when making requests inside middleware, use one of these approaches: - async def log_middleware( - request: ClientRequest, - handler: ClientHandlerType - ) -> ClientResponse: - if request.url.host != "logapi.example": # Avoid infinite recursion - async with request.session.post( - "https://logapi.example/log", - json={"url": str(request.url)} - ) as resp: - pass +**Option 1:** Disable middleware for internal requests:: - return await handler(request) - -Middleware Type -^^^^^^^^^^^^^^^ - -.. type:: ClientMiddlewareType - - Type alias for client middleware functions. Middleware functions must have this signature:: + async def log_middleware( + request: ClientRequest, + handler: ClientHandlerType + ) -> ClientResponse: + async with request.session.post( + "https://logapi.example/log", + json={"url": str(request.url)}, + middlewares=() # This prevents infinite recursion + ) as resp: + pass - Callable[ - [ClientRequest, ClientHandlerType], - Awaitable[ClientResponse] - ] + return await handler(request) -.. 
type:: ClientHandlerType +**Option 2:** Check request details to avoid recursive application:: - Type alias for client request handler functions:: + async def log_middleware( + request: ClientRequest, + handler: ClientHandlerType + ) -> ClientResponse: + if request.url.host != "logapi.example": # Avoid infinite recursion + async with request.session.post( + "https://logapi.example/log", + json={"url": str(request.url)} + ) as resp: + pass - Callable[ClientRequest, Awaitable[ClientResponse]] + return await handler(request) Custom Cookies -------------- diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 8e6153bf40c..97933ada1ed 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -230,7 +230,7 @@ The client session supports the context manager protocol for self closing. disabling. See :ref:`aiohttp-client-tracing-reference` for more information. - :param middlewares: A tuple of middleware instances to apply to all session requests. + :param middlewares: A sequence of middleware instances to apply to all session requests. Each middleware must match the :type:`ClientMiddlewareType` signature. ``None`` (default) is used when no middleware is needed. See :ref:`aiohttp-client-middleware` for more information. @@ -544,7 +544,7 @@ The client session supports the context manager protocol for self closing. .. versionadded:: 3.0 - :param middlewares: A tuple of middleware instances to apply to this request only. + :param middlewares: A sequence of middleware instances to apply to this request only. Each middleware must match the :type:`ClientMiddlewareType` signature. ``None`` by default which uses session middlewares. See :ref:`aiohttp-client-middleware` for more information. @@ -2624,3 +2624,22 @@ Hierarchy of exceptions * :exc:`InvalidUrlRedirectClientError` * :exc:`NonHttpUrlRedirectClientError` + + +Client Types +------------ + +.. type:: ClientMiddlewareType + + Type alias for client middleware functions. 
Middleware functions must have this signature:: + + Callable[ + [ClientRequest, ClientHandlerType], + Awaitable[ClientResponse] + ] + +.. type:: ClientHandlerType + + Type alias for client request handler functions:: + + Callable[[ClientRequest], Awaitable[ClientResponse]] diff --git a/tests/test_client_middleware.py b/tests/test_client_middleware.py index 2f79e4fd774..5894795dc21 100644 --- a/tests/test_client_middleware.py +++ b/tests/test_client_middleware.py @@ -74,13 +74,12 @@ async def handler(request: web.Request) -> web.Response: async def retry_middleware( request: ClientRequest, handler: ClientHandlerType ) -> ClientResponse: - retry_count = 0 - while True: + response = None + for _ in range(2): # pragma: no branch response = await handler(request) - if response.status == 503 and retry_count < 1: - retry_count += 1 - continue - return response + if response.ok: + return response + assert False, "not reachable in test" app = web.Application() app.router.add_get("/", handler) @@ -244,30 +243,28 @@ async def handler(request: web.Request) -> web.Response: async def challenge_auth_middleware( request: ClientRequest, handler: ClientHandlerType ) -> ClientResponse: - challenge_data: Dict[str, Union[bool, str, None]] = { - "nonce": None, - "attempted": False, - } + nonce: Optional[str] = None + attempted: bool = False while True: # If we have challenge data from previous attempt, add auth header - if challenge_data["nonce"] and challenge_data["attempted"]: - request.headers["Authorization"] = ( - f'Custom response="{challenge_data["nonce"]}-secret"' - ) + if nonce and attempted: + request.headers["Authorization"] = f'Custom response="{nonce}-secret"' response = await handler(request) # If we get a 401 with challenge, store it and retry - if response.status == 401 and not challenge_data["attempted"]: + if response.status == 401 and not attempted: www_auth = response.headers.get("WWW-Authenticate") - if www_auth and "nonce=" in www_auth: # pragma: no branch + if 
www_auth and "nonce=" in www_auth: # Extract nonce from authentication header nonce_start = www_auth.find('nonce="') + 7 nonce_end = www_auth.find('"', nonce_start) - challenge_data["nonce"] = www_auth[nonce_start:nonce_end] - challenge_data["attempted"] = True + nonce = www_auth[nonce_start:nonce_end] + attempted = True continue + else: + assert False, "Should not reach here" return response @@ -324,7 +321,7 @@ async def multi_step_auth_middleware( ) -> ClientResponse: request.headers["X-Client-ID"] = "test-client" - while True: + for _ in range(3): # Apply auth based on current state if middleware_state["step"] == 1 and middleware_state["session"]: request.headers["Authorization"] = ( @@ -347,13 +344,17 @@ async def multi_step_auth_middleware( middleware_state["step"] = 1 continue - elif auth_step == "2": # pragma: no branch + elif auth_step == "2": # Second step: store challenge middleware_state["challenge"] = response.headers.get("X-Challenge") middleware_state["step"] = 2 continue + else: + assert False, "Should not reach here" return response + # This should not be reached but keeps mypy happy + assert False, "Should not reach here" app = web.Application() app.router.add_get("/", handler) @@ -396,7 +397,7 @@ async def handler(request: web.Request) -> web.Response: async def token_refresh_middleware( request: ClientRequest, handler: ClientHandlerType ) -> ClientResponse: - while True: + for _ in range(2): # Add token to request request.headers["X-Auth-Token"] = str(token_state["token"]) @@ -407,13 +408,17 @@ async def token_refresh_middleware( data = await response.json() if data.get("error") == "token_expired" and data.get( "refresh_required" - ): # pragma: no branch + ): # Simulate token refresh token_state["token"] = "refreshed-token" token_state["refreshed"] = True continue + else: + assert False, "Should not reach here" return response + # This should not be reached but keeps mypy happy + assert False, "Should not reach here" app = web.Application() 
app.router.add_get("/", handler) @@ -490,7 +495,6 @@ class RetryMiddleware: def __init__(self, max_retries: int = 3) -> None: self.max_retries = max_retries - self.retry_counts: Dict[int, int] = {} # Track retries per request async def __call__( self, request: ClientRequest, handler: ClientHandlerType @@ -576,10 +580,55 @@ async def handler(request: web.Request) -> web.Response: assert headers_received.get("X-Custom-2") == "value2" -async def test_client_middleware_disable_with_empty_tuple( +async def test_request_middleware_overrides_session_middleware_with_empty( aiohttp_server: AiohttpServer, ) -> None: - """Test that passing middlewares=() to a request disables session-level middlewares.""" + """Test that passing empty middlewares tuple to a request disables session-level middlewares.""" + session_middleware_called = False + + async def handler(request: web.Request) -> web.Response: + auth_header = request.headers.get("Authorization") + if auth_header: + return web.Response(text=f"Auth: {auth_header}") + return web.Response(text="No auth") + + async def session_middleware( + request: ClientRequest, handler: ClientHandlerType + ) -> ClientResponse: + nonlocal session_middleware_called + session_middleware_called = True + request.headers["Authorization"] = "Bearer session-token" + response = await handler(request) + return response + + app = web.Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app) + + # Create session with middleware + async with ClientSession(middlewares=(session_middleware,)) as session: + # First request uses session middleware + async with session.get(server.make_url("/")) as resp: + assert resp.status == 200 + text = await resp.text() + assert text == "Auth: Bearer session-token" + assert session_middleware_called is True + + # Reset flags + session_middleware_called = False + + # Second request explicitly disables middlewares with empty tuple + async with session.get(server.make_url("/"), middlewares=()) as 
resp: + assert resp.status == 200 + text = await resp.text() + assert text == "No auth" + assert session_middleware_called is False + + +async def test_request_middleware_overrides_session_middleware_with_specific( + aiohttp_server: AiohttpServer, +) -> None: + """Test that passing specific middlewares to a request overrides session-level middlewares.""" session_middleware_called = False request_middleware_called = False @@ -625,19 +674,7 @@ async def request_middleware( session_middleware_called = False request_middleware_called = False - # Second request explicitly disables middlewares - async with session.get(server.make_url("/"), middlewares=()) as resp: - assert resp.status == 200 - text = await resp.text() - assert text == "No auth" - assert session_middleware_called is False - assert request_middleware_called is False - - # Reset flags - session_middleware_called = False - request_middleware_called = False - - # Third request uses request-specific middleware + # Second request uses request-specific middleware async with session.get( server.make_url("/"), middlewares=(request_middleware,) ) as resp: @@ -745,9 +782,13 @@ async def blocking_middleware( # Verify that connections were attempted in the correct order assert len(connection_attempts) == 3 - assert allowed_url.host and allowed_url.host in connection_attempts[0] - assert "blocked.example.com" in connection_attempts[1] - assert "evil.com" in connection_attempts[2] + assert allowed_url.host + + assert connection_attempts == [ + str(server.make_url("/")), + "https://blocked.example.com/", + "https://evil.com/path", + ] # Check that no connections were leaked assert len(connector._conns) == 0 @@ -1042,8 +1083,7 @@ def get_hash(self, request: ClientRequest) -> str: data = "{}" # Simulate authentication hash without using real crypto - signature = f"SIGNATURE-{self.secretkey}-{len(data)}-{data[:10]}" - return signature + return f"SIGNATURE-{self.secretkey}-{len(data)}-{data[:10]}" async def __call__( self, 
request: ClientRequest, handler: ClientHandlerType From 9a2835ae81c7dd085bf3fba22bbe9bfba421ae04 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 20 May 2025 20:56:32 +0000 Subject: [PATCH 1409/1511] [PR #10907/b25eca01 backport][3.12] Fix flakey test_uvloop_secure_https_proxy test (#10909) Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_proxy_functional.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/test_proxy_functional.py b/tests/test_proxy_functional.py index f86975b7423..78521ae6008 100644 --- a/tests/test_proxy_functional.py +++ b/tests/test_proxy_functional.py @@ -218,7 +218,7 @@ async def test_uvloop_secure_https_proxy( uvloop_loop: asyncio.AbstractEventLoop, ) -> None: """Ensure HTTPS sites are accessible through a secure proxy without warning when using uvloop.""" - conn = aiohttp.TCPConnector() + conn = aiohttp.TCPConnector(force_close=True) sess = aiohttp.ClientSession(connector=conn) try: url = URL("https://example.com") @@ -227,6 +227,8 @@ async def test_uvloop_secure_https_proxy( url, proxy=secure_proxy_url, ssl=client_ssl_ctx ) as response: assert response.status == 200 + # Ensure response body is read to completion + await response.read() finally: await sess.close() await conn.close() From 437dffaa1441b92ee373cadf6b64bc7fa11fe626 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 20 May 2025 21:05:20 +0000 Subject: [PATCH 1410/1511] Bump multidict from 6.4.3 to 6.4.4 (#10892) Bumps [multidict](https://github.com/aio-libs/multidict) from 6.4.3 to 6.4.4. 
<details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/multidict/releases">multidict's releases</a>.</em></p> <blockquote> <h2>6.4.4</h2> <h2>Bug fixes</h2> <ul> <li> <p>Fixed a segmentation fault when calling :py:meth:<code>multidict.MultiDict.setdefault</code> with a single argument -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/multidict/issues/1160">#1160</a>.</p> </li> <li> <p>Fixed a segmentation fault when attempting to directly instantiate view objects (<code>multidict._ItemsView</code>, <code>multidict._KeysView</code>, <code>multidict._ValuesView</code>) -- by :user:<code>bdraco</code>.</p> <p>View objects now raise a proper :exc:<code>TypeError</code> with the message "cannot create '...' instances directly" when direct instantiation is attempted.</p> <p>View objects should only be created through the proper methods: :py:meth:<code>multidict.MultiDict.items</code>, :py:meth:<code>multidict.MultiDict.keys</code>, and :py:meth:<code>multidict.MultiDict.values</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/multidict/issues/1164">#1164</a>.</p> </li> </ul> <h2>Miscellaneous internal changes</h2> <ul> <li> <p>:class:<code>multidict.MultiDictProxy</code> was refactored to rely only on :class:<code>multidict.MultiDict</code> public interface and don't touch any implementation details.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/multidict/issues/1150">#1150</a>.</p> </li> <li> <p>Multidict views were refactored to rely only on :class:<code>multidict.MultiDict</code> API and don't touch any implementation details.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/multidict/issues/1152">#1152</a>.</p> </li> <li> <p>Dropped internal 
<code>_Impl</code> class from pure Python implementation, both pure Python and C Extension follows the same design internally now.</p> <p><em>Related issues and pull requests on GitHub:</em> <a href="https://redirect.github.com/aio-libs/multidict/issues/1153">#1153</a>.</p> </li> </ul> <hr /> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/multidict/blob/master/CHANGES.rst">multidict's changelog</a>.</em></p> <blockquote> <h1>6.4.4</h1> <p><em>(2025-05-19)</em></p> <h2>Bug fixes</h2> <ul> <li> <p>Fixed a segmentation fault when calling :py:meth:<code>multidict.MultiDict.setdefault</code> with a single argument -- by :user:<code>bdraco</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1160</code>.</p> </li> <li> <p>Fixed a segmentation fault when attempting to directly instantiate view objects (<code>multidict._ItemsView</code>, <code>multidict._KeysView</code>, <code>multidict._ValuesView</code>) -- by :user:<code>bdraco</code>.</p> <p>View objects now raise a proper :exc:<code>TypeError</code> with the message "cannot create '...' 
instances directly" when direct instantiation is attempted.</p> <p>View objects should only be created through the proper methods: :py:meth:<code>multidict.MultiDict.items</code>, :py:meth:<code>multidict.MultiDict.keys</code>, and :py:meth:<code>multidict.MultiDict.values</code>.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1164</code>.</p> </li> </ul> <h2>Miscellaneous internal changes</h2> <ul> <li> <p>:class:<code>multidict.MultiDictProxy</code> was refactored to rely only on :class:<code>multidict.MultiDict</code> public interface and don't touch any implementation details.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1150</code>.</p> </li> <li> <p>Multidict views were refactored to rely only on :class:<code>multidict.MultiDict</code> API and don't touch any implementation details.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1152</code>.</p> </li> <li> <p>Dropped internal <code>_Impl</code> class from pure Python implementation, both pure Python and C Extension follows the same design internally now.</p> <p><em>Related issues and pull requests on GitHub:</em> :issue:<code>1153</code>.</p> </li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... 
(truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/multidict/commit/e77793a006e8c27ee96ae8b638a38c6413f87b9c"><code>e77793a</code></a> Release 6.4.4 (<a href="https://redirect.github.com/aio-libs/multidict/issues/1164">#1164</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/b5a24b7d0c7ae2518580edacb60c328754fadfce"><code>b5a24b7</code></a> Fix segfault creating view objects (<a href="https://redirect.github.com/aio-libs/multidict/issues/1163">#1163</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/fea968f9475bc5b1fb0603a838104baf992cf1d9"><code>fea968f</code></a> Fix segfault when a single arg is passed to setdefault (<a href="https://redirect.github.com/aio-libs/multidict/issues/1160">#1160</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/7770ff2779022edfb0f3f2dd23ca5497a34a7b07"><code>7770ff2</code></a> Bump dependabot/fetch-metadata from 2.3.0 to 2.4.0 (<a href="https://redirect.github.com/aio-libs/multidict/issues/1162">#1162</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/af420430358d6f0ebb660d6a733c16b7ff491d3f"><code>af42043</code></a> Bump pypa/cibuildwheel from 2.23.2 to 2.23.3 (<a href="https://redirect.github.com/aio-libs/multidict/issues/1158">#1158</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/df93794f4c43ba196aa87df5687a480369713e51"><code>df93794</code></a> Fix wrong types in tests (<a href="https://redirect.github.com/aio-libs/multidict/issues/1156">#1156</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/72f3fcefbb414627ca803915f3e3e819eaf93d46"><code>72f3fce</code></a> Tune multidict benchmark (<a href="https://redirect.github.com/aio-libs/multidict/issues/1155">#1155</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/99e3f3b31c05905d1647d339c1b2726dd0b60e14"><code>99e3f3b</code></a> Drop _Impl helper class, move all logic to MultiDict itself (<a 
href="https://redirect.github.com/aio-libs/multidict/issues/1153">#1153</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/d476a03c454a529c2d9fbb0007b9971bf71556a7"><code>d476a03</code></a> Views don't rely in _Impl anymore (<a href="https://redirect.github.com/aio-libs/multidict/issues/1152">#1152</a>)</li> <li><a href="https://github.com/aio-libs/multidict/commit/1d72435794cea9329376ec8e742f7665fa5a88d0"><code>1d72435</code></a> MultiDictProxy doesn't rely on '_impl' anymore (<a href="https://redirect.github.com/aio-libs/multidict/issues/1150">#1150</a>)</li> <li>Additional commits viewable in <a href="https://github.com/aio-libs/multidict/compare/v6.4.3...v6.4.4">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=multidict&package-manager=pip&previous-version=6.4.3&new-version=6.4.4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: J. Nick Koston <nick@koston.org> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/cython.txt | 2 +- requirements/dev.txt | 2 +- requirements/multidict.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 1a0c6fe1046..26c18e2f53e 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -26,7 +26,7 @@ gunicorn==23.0.0 # via -r requirements/base.in idna==3.4 # via yarl -multidict==6.4.3 +multidict==6.4.4 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 9a53aaaea12..3c3cf6cfacf 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -115,7 +115,7 @@ markupsafe==3.0.2 # via jinja2 mdurl==0.1.2 # via markdown-it-py -multidict==6.4.3 +multidict==6.4.4 # via # -r requirements/multidict.in # -r requirements/runtime-deps.in diff --git a/requirements/cython.txt b/requirements/cython.txt index 1dd3cc00fc4..8d7e2dc256c 100644 --- a/requirements/cython.txt 
+++ b/requirements/cython.txt @@ -6,7 +6,7 @@ # cython==3.1.1 # via -r requirements/cython.in -multidict==6.4.3 +multidict==6.4.4 # via -r requirements/multidict.in typing-extensions==4.13.2 # via multidict diff --git a/requirements/dev.txt b/requirements/dev.txt index ce52430fbee..82750d218f3 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -113,7 +113,7 @@ markupsafe==3.0.2 # via jinja2 mdurl==0.1.2 # via markdown-it-py -multidict==6.4.3 +multidict==6.4.4 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/multidict.txt b/requirements/multidict.txt index 41435a67142..abd2e2cc9eb 100644 --- a/requirements/multidict.txt +++ b/requirements/multidict.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/multidict.txt --resolver=backtracking --strip-extras requirements/multidict.in # -multidict==6.4.3 +multidict==6.4.4 # via -r requirements/multidict.in typing-extensions==4.13.2 # via multidict diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 863d4525cad..58263ab61ed 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -24,7 +24,7 @@ frozenlist==1.6.0 # aiosignal idna==3.4 # via yarl -multidict==6.4.3 +multidict==6.4.4 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/test.txt b/requirements/test.txt index 5b3444b3cc4..683001e8967 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -59,7 +59,7 @@ markdown-it-py==3.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -multidict==6.4.3 +multidict==6.4.4 # via # -r requirements/runtime-deps.in # yarl From a61fcc62f5f4eddcdfa5ca5bd489b7254e6c4290 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Tue, 20 May 2025 18:01:48 -0400 Subject: [PATCH 1411/1511] Release 3.12.0b0 (#10911) --- CHANGES.rst | 193 ++++++++++++++++++++++++++++++++++++++++++++ aiohttp/__init__.py | 2 +- 2 files changed, 194 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index 11fd19153e3..651437c90bd 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,199 @@ .. towncrier release notes start +3.12.0b0 (2025-05-20) +===================== + +Bug fixes +--------- + +- Response is now always True, instead of using MutableMapping behaviour (False when map is empty) + + + *Related issues and pull requests on GitHub:* + :issue:`10119`. + + + +- Fixed pytest plugin to not use deprecated :py:mod:`asyncio` policy APIs. + + + *Related issues and pull requests on GitHub:* + :issue:`10851`. + + + + +Features +-------- + +- Added a comprehensive HTTP Digest Authentication client middleware (DigestAuthMiddleware) + that implements RFC 7616. The middleware supports all standard hash algorithms + (MD5, SHA, SHA-256, SHA-512) with session variants, handles both 'auth' and + 'auth-int' quality of protection options, and automatically manages the + authentication flow by intercepting 401 responses and retrying with proper + credentials -- by :user:`feus4177`, :user:`TimMenninger`, and :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`2213`, :issue:`10725`. + + + +- Added client middleware support -- by :user:`bdraco` and :user:`Dreamsorcerer`. + + This change allows users to add middleware to the client session and requests, enabling features like + authentication, logging, and request/response modification without modifying the core + request logic. Additionally, the ``session`` attribute was added to ``ClientRequest``, + allowing middleware to access the session for making additional requests. + + + *Related issues and pull requests on GitHub:* + :issue:`9732`, :issue:`10902`. 
+ + + +- Allow user setting zlib compression backend -- by :user:`TimMenninger` + + This change allows the user to call :func:`aiohttp.set_zlib_backend()` with the + zlib compression module of their choice. Default behavior continues to use + the builtin ``zlib`` library. + + + *Related issues and pull requests on GitHub:* + :issue:`9798`. + + + +- Added support for overriding the base URL with an absolute one in client sessions + -- by :user:`vivodi`. + + + *Related issues and pull requests on GitHub:* + :issue:`10074`. + + + +- Added ``host`` parameter to ``aiohttp_server`` fixture -- by :user:`christianwbrock`. + + + *Related issues and pull requests on GitHub:* + :issue:`10120`. + + + +- Detect blocking calls in coroutines using BlockBuster -- by :user:`cbornet`. + + + *Related issues and pull requests on GitHub:* + :issue:`10433`. + + + +- Added ``socket_factory`` to :py:class:`aiohttp.TCPConnector` to allow specifying custom socket options + -- by :user:`TimMenninger`. + + + *Related issues and pull requests on GitHub:* + :issue:`10474`, :issue:`10520`. + + + +- Started building armv7l manylinux wheels -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10797`. + + + +- Implemented shared DNS resolver management to fix excessive resolver object creation + when using multiple client sessions. The new ``_DNSResolverManager`` singleton ensures + only one ``DNSResolver`` object is created for default configurations, significantly + reducing resource usage and improving performance for applications using multiple + client sessions simultaneously -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10847`. + + + + +Packaging updates and notes for downstreams +------------------------------------------- + +- Removed non SPDX-license description from ``setup.cfg`` -- by :user:`devanshu-ziphq`. + + + *Related issues and pull requests on GitHub:* + :issue:`10662`. 
+ + + +- ``aiodns`` is now installed on Windows with speedups extra -- by :user:`bdraco`. + + As of ``aiodns`` 3.3.0, ``SelectorEventLoop`` is no longer required when using ``pycares`` 4.7.0 or later. + + + *Related issues and pull requests on GitHub:* + :issue:`10823`. + + + +- Fixed compatibility issue with Cython 3.1.1 -- by :user:`bdraco` + + + *Related issues and pull requests on GitHub:* + :issue:`10877`. + + + + +Contributor-facing changes +-------------------------- + +- Sped up tests by disabling ``blockbuster`` fixture for ``test_static_file_huge`` and ``test_static_file_huge_cancel`` tests -- by :user:`dikos1337`. + + + *Related issues and pull requests on GitHub:* + :issue:`9705`, :issue:`10761`. + + + +- Updated tests to avoid using deprecated :py:mod:`asyncio` policy APIs and + make it compatible with Python 3.14. + + + *Related issues and pull requests on GitHub:* + :issue:`10851`. + + + + +Miscellaneous internal changes +------------------------------ + +- Added support for the ``partitioned`` attribute in the ``set_cookie`` method. + + + *Related issues and pull requests on GitHub:* + :issue:`9870`. + + + +- Setting :attr:`aiohttp.web.StreamResponse.last_modified` to an unsupported type will now raise :exc:`TypeError` instead of silently failing -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10146`. + + + + +---- + + 3.11.18 (2025-04-20) ==================== diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 4bc6a3a2b22..9ca85c654c5 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.0.dev0" +__version__ = "3.12.0b0" from typing import TYPE_CHECKING, Tuple From 761a16c26a9d840c0018a7afb257ecf21bf04b47 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Tue, 20 May 2025 18:22:30 -0400 Subject: [PATCH 1412/1511] [PR #10910/36a2567 backport][3.12] Remove mocked coro from tests (#10914) --- CONTRIBUTORS.txt | 1 + tests/test_client_request.py | 7 +- tests/test_client_response.py | 3 +- tests/test_client_session.py | 15 ++- tests/test_client_ws.py | 9 +- tests/test_connector.py | 51 +++++----- tests/test_http_writer.py | 21 +++-- tests/test_multipart.py | 9 +- tests/test_proxy.py | 149 ++++++++++++++++-------------- tests/test_run_app.py | 42 +++++---- tests/test_tracing.py | 4 +- tests/test_web_app.py | 5 +- tests/test_web_functional.py | 20 ++-- tests/test_web_request_handler.py | 5 +- tests/test_web_response.py | 12 +-- tests/test_web_sendfile.py | 12 +-- tests/test_web_websocket.py | 25 ++--- tests/test_websocket_writer.py | 3 +- 18 files changed, 202 insertions(+), 191 deletions(-) diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 32e6e119aa7..5ff1eea3da7 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -287,6 +287,7 @@ Pavol Vargovčík Pawel Kowalski Pawel Miech Pepe Osca +Phebe Polk Philipp A. 
Pierre-Louis Peeters Pieter van Beek diff --git a/tests/test_client_request.py b/tests/test_client_request.py index 6454b42c89b..4706c10a588 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -24,7 +24,6 @@ ) from aiohttp.compression_utils import ZLibBackend from aiohttp.http import HttpVersion10, HttpVersion11 -from aiohttp.test_utils import make_mocked_coro class WriterMock(mock.AsyncMock): @@ -806,7 +805,7 @@ async def test_content_encoding(loop, conn) -> None: "post", URL("http://python.org/"), data="foo", compress="deflate", loop=loop ) with mock.patch("aiohttp.client_reqrep.StreamWriter") as m_writer: - m_writer.return_value.write_headers = make_mocked_coro() + m_writer.return_value.write_headers = mock.AsyncMock() resp = await req.send(conn) assert req.headers["TRANSFER-ENCODING"] == "chunked" assert req.headers["CONTENT-ENCODING"] == "deflate" @@ -837,7 +836,7 @@ async def test_content_encoding_header(loop, conn) -> None: loop=loop, ) with mock.patch("aiohttp.client_reqrep.StreamWriter") as m_writer: - m_writer.return_value.write_headers = make_mocked_coro() + m_writer.return_value.write_headers = mock.AsyncMock() resp = await req.send(conn) assert not m_writer.return_value.enable_compression.called @@ -887,7 +886,7 @@ async def test_chunked2(loop, conn) -> None: async def test_chunked_explicit(loop, conn) -> None: req = ClientRequest("post", URL("http://python.org/"), chunked=True, loop=loop) with mock.patch("aiohttp.client_reqrep.StreamWriter") as m_writer: - m_writer.return_value.write_headers = make_mocked_coro() + m_writer.return_value.write_headers = mock.AsyncMock() resp = await req.send(conn) assert "chunked" == req.headers["TRANSFER-ENCODING"] diff --git a/tests/test_client_response.py b/tests/test_client_response.py index 18ba6c5149d..4a8000962d1 100644 --- a/tests/test_client_response.py +++ b/tests/test_client_response.py @@ -14,7 +14,6 @@ from aiohttp import ClientSession, http from aiohttp.client_reqrep import 
ClientResponse, RequestInfo from aiohttp.helpers import TimerNoop -from aiohttp.test_utils import make_mocked_coro class WriterMock(mock.AsyncMock): @@ -1104,7 +1103,7 @@ def test_redirect_history_in_exception() -> None: async def test_response_read_triggers_callback(loop, session) -> None: trace = mock.Mock() - trace.send_response_chunk_received = make_mocked_coro() + trace.send_response_chunk_received = mock.AsyncMock() response_method = "get" response_url = URL("http://def-cl-resp.org") response_body = b"This is response" diff --git a/tests/test_client_session.py b/tests/test_client_session.py index 548af5db551..0656a9ed023 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -23,7 +23,6 @@ from aiohttp.helpers import DEBUG from aiohttp.http import RawResponseMessage from aiohttp.pytest_plugin import AiohttpServer -from aiohttp.test_utils import make_mocked_coro from aiohttp.tracing import Trace @@ -738,10 +737,10 @@ async def handler(request: web.Request) -> web.Response: trace_config_ctx = mock.Mock() trace_request_ctx = {} body = "This is request body" - gathered_req_headers = CIMultiDict() - on_request_start = mock.Mock(side_effect=make_mocked_coro(mock.Mock())) - on_request_redirect = mock.Mock(side_effect=make_mocked_coro(mock.Mock())) - on_request_end = mock.Mock(side_effect=make_mocked_coro(mock.Mock())) + gathered_req_headers: CIMultiDict[str] = CIMultiDict() + on_request_start = mock.AsyncMock() + on_request_redirect = mock.AsyncMock() + on_request_end = mock.AsyncMock() with io.BytesIO() as gathered_req_body, io.BytesIO() as gathered_res_body: @@ -809,7 +808,7 @@ async def redirect_handler(request): app.router.add_get("/", root_handler) app.router.add_get("/redirect", redirect_handler) - mocks = [mock.Mock(side_effect=make_mocked_coro(mock.Mock())) for _ in range(7)] + mocks = [mock.AsyncMock() for _ in range(7)] ( on_request_start, on_request_redirect, @@ -900,8 +899,8 @@ def to_url(path: str) -> URL: async def 
test_request_tracing_exception() -> None: loop = asyncio.get_event_loop() - on_request_end = mock.Mock(side_effect=make_mocked_coro(mock.Mock())) - on_request_exception = mock.Mock(side_effect=make_mocked_coro(mock.Mock())) + on_request_end = mock.AsyncMock() + on_request_exception = mock.AsyncMock() trace_config = aiohttp.TraceConfig() trace_config.on_request_end.append(on_request_end) diff --git a/tests/test_client_ws.py b/tests/test_client_ws.py index 92b5d117db7..48481055a7f 100644 --- a/tests/test_client_ws.py +++ b/tests/test_client_ws.py @@ -11,7 +11,6 @@ from aiohttp import ClientConnectionResetError, ServerDisconnectedError, client, hdrs from aiohttp.http import WS_KEY from aiohttp.streams import EofStream -from aiohttp.test_utils import make_mocked_coro async def test_ws_connect(ws_key: Any, loop: Any, key_data: Any) -> None: @@ -352,7 +351,7 @@ async def test_close(loop, ws_key, key_data) -> None: m_req.return_value.set_result(resp) writer = mock.Mock() WebSocketWriter.return_value = writer - writer.close = make_mocked_coro() + writer.close = mock.AsyncMock() session = aiohttp.ClientSession(loop=loop) resp = await session.ws_connect("http://test.org") @@ -461,7 +460,7 @@ async def test_close_exc( m_req.return_value.set_result(mresp) writer = mock.Mock() WebSocketWriter.return_value = writer - writer.close = make_mocked_coro() + writer.close = mock.AsyncMock() session = aiohttp.ClientSession(loop=loop) resp = await session.ws_connect("http://test.org") @@ -595,7 +594,7 @@ async def test_reader_read_exception(ws_key, key_data, loop) -> None: writer = mock.Mock() WebSocketWriter.return_value = writer - writer.close = make_mocked_coro() + writer.close = mock.AsyncMock() session = aiohttp.ClientSession(loop=loop) resp = await session.ws_connect("http://test.org") @@ -731,7 +730,7 @@ async def test_ws_connect_deflate_per_message(loop, ws_key, key_data) -> None: m_req.return_value = loop.create_future() m_req.return_value.set_result(resp) writer = 
WebSocketWriter.return_value = mock.Mock() - send_frame = writer.send_frame = make_mocked_coro() + send_frame = writer.send_frame = mock.AsyncMock() session = aiohttp.ClientSession(loop=loop) resp = await session.ws_connect("http://test.org") diff --git a/tests/test_connector.py b/tests/test_connector.py index fd2cdac7a94..8128b47f02d 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -41,7 +41,7 @@ _DNSCacheTable, ) from aiohttp.resolver import ResolveResult -from aiohttp.test_utils import make_mocked_coro, unused_port +from aiohttp.test_utils import unused_port from aiohttp.tracing import Trace @@ -1347,10 +1347,10 @@ def exception_handler(loop, context): async def test_tcp_connector_dns_tracing(loop, dns_response) -> None: session = mock.Mock() trace_config_ctx = mock.Mock() - on_dns_resolvehost_start = mock.Mock(side_effect=make_mocked_coro(mock.Mock())) - on_dns_resolvehost_end = mock.Mock(side_effect=make_mocked_coro(mock.Mock())) - on_dns_cache_hit = mock.Mock(side_effect=make_mocked_coro(mock.Mock())) - on_dns_cache_miss = mock.Mock(side_effect=make_mocked_coro(mock.Mock())) + on_dns_resolvehost_start = mock.AsyncMock() + on_dns_resolvehost_end = mock.AsyncMock() + on_dns_cache_hit = mock.AsyncMock() + on_dns_cache_miss = mock.AsyncMock() trace_config = aiohttp.TraceConfig( trace_config_ctx_factory=mock.Mock(return_value=trace_config_ctx) @@ -1392,8 +1392,8 @@ async def test_tcp_connector_dns_tracing(loop, dns_response) -> None: async def test_tcp_connector_dns_tracing_cache_disabled(loop, dns_response) -> None: session = mock.Mock() trace_config_ctx = mock.Mock() - on_dns_resolvehost_start = mock.Mock(side_effect=make_mocked_coro(mock.Mock())) - on_dns_resolvehost_end = mock.Mock(side_effect=make_mocked_coro(mock.Mock())) + on_dns_resolvehost_start = mock.AsyncMock() + on_dns_resolvehost_end = mock.AsyncMock() trace_config = aiohttp.TraceConfig( trace_config_ctx_factory=mock.Mock(return_value=trace_config_ctx) @@ -1447,8 +1447,8 @@ async 
def test_tcp_connector_dns_tracing_cache_disabled(loop, dns_response) -> N async def test_tcp_connector_dns_tracing_throttle_requests(loop, dns_response) -> None: session = mock.Mock() trace_config_ctx = mock.Mock() - on_dns_cache_hit = mock.Mock(side_effect=make_mocked_coro(mock.Mock())) - on_dns_cache_miss = mock.Mock(side_effect=make_mocked_coro(mock.Mock())) + on_dns_cache_hit = mock.AsyncMock() + on_dns_cache_miss = mock.AsyncMock() trace_config = aiohttp.TraceConfig( trace_config_ctx_factory=mock.Mock(return_value=trace_config_ctx) @@ -1477,8 +1477,8 @@ async def test_tcp_connector_dns_tracing_throttle_requests(loop, dns_response) - async def test_dns_error(loop) -> None: connector = aiohttp.TCPConnector(loop=loop) - connector._resolve_host = make_mocked_coro( - raise_exception=OSError("dont take it serious") + connector._resolve_host = mock.AsyncMock( + side_effect=OSError("dont take it serious") ) req = ClientRequest("GET", URL("http://www.python.org"), loop=loop) @@ -1577,8 +1577,8 @@ async def test_connect(loop, key) -> None: async def test_connect_tracing(loop) -> None: session = mock.Mock() trace_config_ctx = mock.Mock() - on_connection_create_start = mock.Mock(side_effect=make_mocked_coro(mock.Mock())) - on_connection_create_end = mock.Mock(side_effect=make_mocked_coro(mock.Mock())) + on_connection_create_start = mock.AsyncMock() + on_connection_create_end = mock.AsyncMock() trace_config = aiohttp.TraceConfig( trace_config_ctx_factory=mock.Mock(return_value=trace_config_ctx) @@ -2573,8 +2573,8 @@ async def f(): async def test_connect_queued_operation_tracing(loop, key) -> None: session = mock.Mock() trace_config_ctx = mock.Mock() - on_connection_queued_start = mock.Mock(side_effect=make_mocked_coro(mock.Mock())) - on_connection_queued_end = mock.Mock(side_effect=make_mocked_coro(mock.Mock())) + on_connection_queued_start = mock.AsyncMock() + on_connection_queued_end = mock.AsyncMock() trace_config = aiohttp.TraceConfig( 
trace_config_ctx_factory=mock.Mock(return_value=trace_config_ctx) @@ -2619,7 +2619,7 @@ async def f(): async def test_connect_reuseconn_tracing(loop, key) -> None: session = mock.Mock() trace_config_ctx = mock.Mock() - on_connection_reuseconn = mock.Mock(side_effect=make_mocked_coro(mock.Mock())) + on_connection_reuseconn = mock.AsyncMock() trace_config = aiohttp.TraceConfig( trace_config_ctx_factory=mock.Mock(return_value=trace_config_ctx) @@ -3111,9 +3111,10 @@ async def test_unix_connector_not_found(loop) -> None: @pytest.mark.skipif(not hasattr(socket, "AF_UNIX"), reason="requires UNIX sockets") -async def test_unix_connector_permission(loop) -> None: - loop.create_unix_connection = make_mocked_coro(raise_exception=PermissionError()) - connector = aiohttp.UnixConnector("/" + uuid.uuid4().hex, loop=loop) +async def test_unix_connector_permission(loop: asyncio.AbstractEventLoop) -> None: + m = mock.AsyncMock(side_effect=PermissionError()) + with mock.patch.object(loop, "create_unix_connection", m): + connector = aiohttp.UnixConnector("/" + uuid.uuid4().hex) req = ClientRequest("GET", URL("http://www.python.org"), loop=loop) with pytest.raises(aiohttp.ClientConnectorError): @@ -3142,11 +3143,13 @@ async def test_named_pipe_connector_not_found(proactor_loop, pipe_name) -> None: @pytest.mark.skipif( platform.system() != "Windows", reason="Proactor Event loop present only in Windows" ) -async def test_named_pipe_connector_permission(proactor_loop, pipe_name) -> None: - proactor_loop.create_pipe_connection = make_mocked_coro( - raise_exception=PermissionError() - ) - connector = aiohttp.NamedPipeConnector(pipe_name, loop=proactor_loop) +async def test_named_pipe_connector_permission( + proactor_loop: asyncio.AbstractEventLoop, pipe_name: str +) -> None: + m = mock.AsyncMock(side_effect=PermissionError()) + with mock.patch.object(proactor_loop, "create_pipe_connection", m): + asyncio.set_event_loop(proactor_loop) + connector = aiohttp.NamedPipeConnector(pipe_name) req 
= ClientRequest("GET", URL("http://www.python.org"), loop=proactor_loop) with pytest.raises(aiohttp.ClientConnectorError): diff --git a/tests/test_http_writer.py b/tests/test_http_writer.py index 7f813692571..ec256275d22 100644 --- a/tests/test_http_writer.py +++ b/tests/test_http_writer.py @@ -12,7 +12,6 @@ from aiohttp.base_protocol import BaseProtocol from aiohttp.compression_utils import ZLibBackend from aiohttp.http_writer import _serialize_headers -from aiohttp.test_utils import make_mocked_coro @pytest.fixture @@ -58,7 +57,7 @@ def writelines(chunks: Iterable[bytes]) -> None: @pytest.fixture def protocol(loop, transport): protocol = mock.Mock(transport=transport) - protocol._drain_helper = make_mocked_coro() + protocol._drain_helper = mock.AsyncMock() return protocol @@ -732,7 +731,7 @@ async def test_write_payload_slicing_long_memoryview(buf, protocol, transport, l async def test_write_drain(protocol, transport, loop) -> None: msg = http.StreamWriter(protocol, loop) - msg.drain = make_mocked_coro() + msg.drain = mock.AsyncMock() await msg.write(b"1" * (64 * 1024 * 2), drain=False) assert not msg.drain.called @@ -741,8 +740,12 @@ async def test_write_drain(protocol, transport, loop) -> None: assert msg.buffer_size == 0 -async def test_write_calls_callback(protocol, transport, loop) -> None: - on_chunk_sent = make_mocked_coro() +async def test_write_calls_callback( + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + on_chunk_sent = mock.AsyncMock() msg = http.StreamWriter(protocol, loop, on_chunk_sent=on_chunk_sent) chunk = b"1" await msg.write(chunk) @@ -750,8 +753,12 @@ async def test_write_calls_callback(protocol, transport, loop) -> None: assert on_chunk_sent.call_args == mock.call(chunk) -async def test_write_eof_calls_callback(protocol, transport, loop) -> None: - on_chunk_sent = make_mocked_coro() +async def test_write_eof_calls_callback( + protocol: BaseProtocol, + transport: asyncio.Transport, + 
loop: asyncio.AbstractEventLoop, +) -> None: + on_chunk_sent = mock.AsyncMock() msg = http.StreamWriter(protocol, loop, on_chunk_sent=on_chunk_sent) chunk = b"1" await msg.write_eof(chunk=chunk) diff --git a/tests/test_multipart.py b/tests/test_multipart.py index b0ca92fde9e..c76d523ca86 100644 --- a/tests/test_multipart.py +++ b/tests/test_multipart.py @@ -19,7 +19,6 @@ from aiohttp.helpers import parse_mimetype from aiohttp.multipart import MultipartResponseWrapper from aiohttp.streams import StreamReader -from aiohttp.test_utils import make_mocked_coro BOUNDARY = b"--:" @@ -97,21 +96,21 @@ def test_at_eof(self) -> None: async def test_next(self) -> None: wrapper = MultipartResponseWrapper(mock.Mock(), mock.Mock()) - wrapper.stream.next = make_mocked_coro(b"") + wrapper.stream.next = mock.AsyncMock(b"") wrapper.stream.at_eof.return_value = False await wrapper.next() assert wrapper.stream.next.called async def test_release(self) -> None: wrapper = MultipartResponseWrapper(mock.Mock(), mock.Mock()) - wrapper.resp.release = make_mocked_coro(None) + wrapper.resp.release = mock.AsyncMock(None) await wrapper.release() assert wrapper.resp.release.called async def test_release_when_stream_at_eof(self) -> None: wrapper = MultipartResponseWrapper(mock.Mock(), mock.Mock()) - wrapper.resp.release = make_mocked_coro(None) - wrapper.stream.next = make_mocked_coro(b"") + wrapper.resp.release = mock.AsyncMock(None) + wrapper.stream.next = mock.AsyncMock(b"") wrapper.stream.at_eof.return_value = True await wrapper.next() assert wrapper.stream.next.called diff --git a/tests/test_proxy.py b/tests/test_proxy.py index 83457de891f..0e73210f58b 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -14,7 +14,6 @@ from aiohttp.client_reqrep import ClientRequest, ClientResponse, Fingerprint from aiohttp.connector import _SSL_CONTEXT_VERIFIED from aiohttp.helpers import TimerNoop -from aiohttp.test_utils import make_mocked_coro pytestmark = pytest.mark.skipif( sys.platform == 
"win32", reason="Proxy tests are unstable on Windows" @@ -27,7 +26,9 @@ class TestProxy(unittest.TestCase): } mocked_response = mock.Mock(**response_mock_attrs) clientrequest_mock_attrs = { - "return_value.send.return_value.start": make_mocked_coro(mocked_response), + "return_value.send.return_value.start": mock.AsyncMock( + return_value=mocked_response + ), } def setUp(self): @@ -61,8 +62,8 @@ async def make_conn(): return aiohttp.TCPConnector() connector = self.loop.run_until_complete(make_conn()) - connector._resolve_host = make_mocked_coro( - [ + connector._resolve_host = mock.AsyncMock( + return_value=[ { "hostname": "hostname", "host": "127.0.0.1", @@ -79,7 +80,9 @@ async def make_conn(): "transport.get_extra_info.return_value": False, } ) - self.loop.create_connection = make_mocked_coro((proto.transport, proto)) + self.loop.create_connection = mock.AsyncMock( + return_value=(proto.transport, proto) + ) conn = self.loop.run_until_complete( connector.connect(req, None, aiohttp.ClientTimeout()) ) @@ -119,8 +122,8 @@ async def make_conn(): return aiohttp.TCPConnector() connector = self.loop.run_until_complete(make_conn()) - connector._resolve_host = make_mocked_coro( - [ + connector._resolve_host = mock.AsyncMock( + return_value=[ { "hostname": "hostname", "host": "127.0.0.1", @@ -137,7 +140,9 @@ async def make_conn(): "transport.get_extra_info.return_value": False, } ) - self.loop.create_connection = make_mocked_coro((proto.transport, proto)) + self.loop.create_connection = mock.AsyncMock( + return_value=(proto.transport, proto) + ) conn = self.loop.run_until_complete( connector.connect(req, None, aiohttp.ClientTimeout()) ) @@ -185,8 +190,8 @@ async def make_conn(): return aiohttp.TCPConnector() connector: aiohttp.TCPConnector = self.loop.run_until_complete(make_conn()) - connector._resolve_host = make_mocked_coro( - raise_exception=OSError("dont take it serious") + connector._resolve_host = mock.AsyncMock( + side_effect=OSError("dont take it serious") ) req = 
ClientRequest( @@ -214,8 +219,8 @@ async def make_conn(): return aiohttp.TCPConnector() connector = self.loop.run_until_complete(make_conn()) - connector._resolve_host = make_mocked_coro( - [ + connector._resolve_host = mock.AsyncMock( + return_value=[ { "hostname": "www.python.org", "host": "127.0.0.1", @@ -226,8 +231,8 @@ async def make_conn(): } ] ) - connector._loop.create_connection = make_mocked_coro( - raise_exception=OSError("dont take it serious") + connector._loop.create_connection = mock.AsyncMock( + side_effect=OSError("dont take it serious") ) req = ClientRequest( @@ -266,15 +271,15 @@ def test_proxy_server_hostname_default( loop=self.loop, session=mock.Mock(), ) - proxy_req.send = make_mocked_coro(proxy_resp) - proxy_resp.start = make_mocked_coro(mock.Mock(status=200)) + proxy_req.send = mock.AsyncMock(return_value=proxy_resp) + proxy_resp.start = mock.AsyncMock(return_value=mock.Mock(status=200)) async def make_conn(): return aiohttp.TCPConnector() connector = self.loop.run_until_complete(make_conn()) - connector._resolve_host = make_mocked_coro( - [ + connector._resolve_host = mock.AsyncMock( + return_value=[ { "hostname": "hostname", "host": "127.0.0.1", @@ -287,8 +292,8 @@ async def make_conn(): ) tr, proto = mock.Mock(), mock.Mock() - self.loop.create_connection = make_mocked_coro((tr, proto)) - self.loop.start_tls = make_mocked_coro(mock.Mock()) + self.loop.create_connection = mock.AsyncMock(return_value=(tr, proto)) + self.loop.start_tls = mock.AsyncMock(return_value=mock.Mock()) req = ClientRequest( "GET", @@ -335,15 +340,15 @@ def test_proxy_server_hostname_override( loop=self.loop, session=mock.Mock(), ) - proxy_req.send = make_mocked_coro(proxy_resp) - proxy_resp.start = make_mocked_coro(mock.Mock(status=200)) + proxy_req.send = mock.AsyncMock(return_value=proxy_resp) + proxy_resp.start = mock.AsyncMock(return_value=mock.Mock(status=200)) async def make_conn(): return aiohttp.TCPConnector() connector = 
self.loop.run_until_complete(make_conn()) - connector._resolve_host = make_mocked_coro( - [ + connector._resolve_host = mock.AsyncMock( + return_value=[ { "hostname": "hostname", "host": "127.0.0.1", @@ -356,8 +361,8 @@ async def make_conn(): ) tr, proto = mock.Mock(), mock.Mock() - self.loop.create_connection = make_mocked_coro((tr, proto)) - self.loop.start_tls = make_mocked_coro(mock.Mock()) + self.loop.create_connection = mock.AsyncMock(return_value=(tr, proto)) + self.loop.start_tls = mock.AsyncMock(return_value=mock.Mock()) req = ClientRequest( "GET", @@ -513,15 +518,15 @@ def test_https_connect( loop=self.loop, session=mock.Mock(), ) - proxy_req.send = make_mocked_coro(proxy_resp) - proxy_resp.start = make_mocked_coro(mock.Mock(status=200)) + proxy_req.send = mock.AsyncMock(return_value=proxy_resp) + proxy_resp.start = mock.AsyncMock(return_value=mock.Mock(status=200)) async def make_conn(): return aiohttp.TCPConnector() connector = self.loop.run_until_complete(make_conn()) - connector._resolve_host = make_mocked_coro( - [ + connector._resolve_host = mock.AsyncMock( + return_value=[ { "hostname": "hostname", "host": "127.0.0.1", @@ -534,8 +539,8 @@ async def make_conn(): ) tr, proto = mock.Mock(), mock.Mock() - self.loop.create_connection = make_mocked_coro((tr, proto)) - self.loop.start_tls = make_mocked_coro(mock.Mock()) + self.loop.create_connection = mock.AsyncMock(return_value=(tr, proto)) + self.loop.start_tls = mock.AsyncMock(return_value=mock.Mock()) req = ClientRequest( "GET", @@ -580,15 +585,15 @@ def test_https_connect_certificate_error( loop=self.loop, session=mock.Mock(), ) - proxy_req.send = make_mocked_coro(proxy_resp) - proxy_resp.start = make_mocked_coro(mock.Mock(status=200)) + proxy_req.send = mock.AsyncMock(return_value=proxy_resp) + proxy_resp.start = mock.AsyncMock(return_value=mock.Mock(status=200)) async def make_conn(): return aiohttp.TCPConnector() connector = self.loop.run_until_complete(make_conn()) - connector._resolve_host = 
make_mocked_coro( - [ + connector._resolve_host = mock.AsyncMock( + return_value=[ { "hostname": "hostname", "host": "127.0.0.1", @@ -601,9 +606,11 @@ async def make_conn(): ) # Called on connection to http://proxy.example.com - self.loop.create_connection = make_mocked_coro((mock.Mock(), mock.Mock())) + self.loop.create_connection = mock.AsyncMock( + return_value=(mock.Mock(), mock.Mock()) + ) # Called on connection to https://www.python.org - self.loop.start_tls = make_mocked_coro(raise_exception=ssl.CertificateError) + self.loop.start_tls = mock.AsyncMock(side_effect=ssl.CertificateError) req = ClientRequest( "GET", @@ -641,15 +648,15 @@ def test_https_connect_ssl_error( loop=self.loop, session=mock.Mock(), ) - proxy_req.send = make_mocked_coro(proxy_resp) - proxy_resp.start = make_mocked_coro(mock.Mock(status=200)) + proxy_req.send = mock.AsyncMock(return_value=proxy_resp) + proxy_resp.start = mock.AsyncMock(return_value=mock.Mock(status=200)) async def make_conn(): return aiohttp.TCPConnector() connector = self.loop.run_until_complete(make_conn()) - connector._resolve_host = make_mocked_coro( - [ + connector._resolve_host = mock.AsyncMock( + return_value=[ { "hostname": "hostname", "host": "127.0.0.1", @@ -662,11 +669,11 @@ async def make_conn(): ) # Called on connection to http://proxy.example.com - self.loop.create_connection = make_mocked_coro( - (mock.Mock(), mock.Mock()), + self.loop.create_connection = mock.AsyncMock( + return_value=(mock.Mock(), mock.Mock()), ) # Called on connection to https://www.python.org - self.loop.start_tls = make_mocked_coro(raise_exception=ssl.SSLError) + self.loop.start_tls = mock.AsyncMock(side_effect=ssl.SSLError) req = ClientRequest( "GET", @@ -704,15 +711,17 @@ def test_https_connect_http_proxy_error( loop=self.loop, session=mock.Mock(), ) - proxy_req.send = make_mocked_coro(proxy_resp) - proxy_resp.start = make_mocked_coro(mock.Mock(status=400, reason="bad request")) + proxy_req.send = 
mock.AsyncMock(return_value=proxy_resp) + proxy_resp.start = mock.AsyncMock( + return_value=mock.Mock(status=400, reason="bad request") + ) async def make_conn(): return aiohttp.TCPConnector() connector = self.loop.run_until_complete(make_conn()) - connector._resolve_host = make_mocked_coro( - [ + connector._resolve_host = mock.AsyncMock( + return_value=[ { "hostname": "hostname", "host": "127.0.0.1", @@ -726,7 +735,7 @@ async def make_conn(): tr, proto = mock.Mock(), mock.Mock() tr.get_extra_info.return_value = None - self.loop.create_connection = make_mocked_coro((tr, proto)) + self.loop.create_connection = mock.AsyncMock(return_value=(tr, proto)) req = ClientRequest( "GET", @@ -770,15 +779,15 @@ def test_https_connect_resp_start_error( loop=self.loop, session=mock.Mock(), ) - proxy_req.send = make_mocked_coro(proxy_resp) - proxy_resp.start = make_mocked_coro(raise_exception=OSError("error message")) + proxy_req.send = mock.AsyncMock(return_value=proxy_resp) + proxy_resp.start = mock.AsyncMock(side_effect=OSError("error message")) async def make_conn(): return aiohttp.TCPConnector() connector = self.loop.run_until_complete(make_conn()) - connector._resolve_host = make_mocked_coro( - [ + connector._resolve_host = mock.AsyncMock( + return_value=[ { "hostname": "hostname", "host": "127.0.0.1", @@ -792,7 +801,7 @@ async def make_conn(): tr, proto = mock.Mock(), mock.Mock() tr.get_extra_info.return_value = None - self.loop.create_connection = make_mocked_coro((tr, proto)) + self.loop.create_connection = mock.AsyncMock(return_value=(tr, proto)) req = ClientRequest( "GET", @@ -821,8 +830,8 @@ async def make_conn(): return aiohttp.TCPConnector() connector = self.loop.run_until_complete(make_conn()) - connector._resolve_host = make_mocked_coro( - [ + connector._resolve_host = mock.AsyncMock( + return_value=[ { "hostname": "hostname", "host": "127.0.0.1", @@ -836,7 +845,7 @@ async def make_conn(): tr, proto = mock.Mock(), mock.Mock() tr.get_extra_info.return_value = None - 
self.loop.create_connection = make_mocked_coro((tr, proto)) + self.loop.create_connection = mock.AsyncMock(return_value=(tr, proto)) req = ClientRequest( "GET", @@ -893,15 +902,15 @@ def test_https_connect_pass_ssl_context( loop=self.loop, session=mock.Mock(), ) - proxy_req.send = make_mocked_coro(proxy_resp) - proxy_resp.start = make_mocked_coro(mock.Mock(status=200)) + proxy_req.send = mock.AsyncMock(return_value=proxy_resp) + proxy_resp.start = mock.AsyncMock(return_value=mock.Mock(status=200)) async def make_conn(): return aiohttp.TCPConnector() connector = self.loop.run_until_complete(make_conn()) - connector._resolve_host = make_mocked_coro( - [ + connector._resolve_host = mock.AsyncMock( + return_value=[ { "hostname": "hostname", "host": "127.0.0.1", @@ -914,8 +923,8 @@ async def make_conn(): ) tr, proto = mock.Mock(), mock.Mock() - self.loop.create_connection = make_mocked_coro((tr, proto)) - self.loop.start_tls = make_mocked_coro(mock.Mock()) + self.loop.create_connection = mock.AsyncMock(return_value=(tr, proto)) + self.loop.start_tls = mock.AsyncMock(return_value=mock.Mock()) req = ClientRequest( "GET", @@ -969,15 +978,15 @@ def test_https_auth(self, start_connection: Any, ClientRequestMock: Any) -> None loop=self.loop, session=mock.Mock(), ) - proxy_req.send = make_mocked_coro(proxy_resp) - proxy_resp.start = make_mocked_coro(mock.Mock(status=200)) + proxy_req.send = mock.AsyncMock(return_value=proxy_resp) + proxy_resp.start = mock.AsyncMock(return_value=mock.Mock(status=200)) async def make_conn(): return aiohttp.TCPConnector() connector = self.loop.run_until_complete(make_conn()) - connector._resolve_host = make_mocked_coro( - [ + connector._resolve_host = mock.AsyncMock( + return_value=[ { "hostname": "hostname", "host": "127.0.0.1", @@ -990,8 +999,8 @@ async def make_conn(): ) tr, proto = mock.Mock(), mock.Mock() - self.loop.create_connection = make_mocked_coro((tr, proto)) - self.loop.start_tls = make_mocked_coro(mock.Mock()) + 
self.loop.create_connection = mock.AsyncMock(return_value=(tr, proto)) + self.loop.start_tls = mock.AsyncMock(return_value=mock.Mock()) self.assertIn("AUTHORIZATION", proxy_req.headers) self.assertNotIn("PROXY-AUTHORIZATION", proxy_req.headers) diff --git a/tests/test_run_app.py b/tests/test_run_app.py index 9332d4aa96c..e269b452f86 100644 --- a/tests/test_run_app.py +++ b/tests/test_run_app.py @@ -14,6 +14,7 @@ Awaitable, Callable, Coroutine, + Iterator, NoReturn, Optional, Set, @@ -25,7 +26,6 @@ import pytest from aiohttp import ClientConnectorError, ClientSession, ClientTimeout, WSCloseCode, web -from aiohttp.test_utils import make_mocked_coro from aiohttp.web_runner import BaseRunner # Test for features of OS' socket support @@ -65,15 +65,25 @@ def skip_if_on_windows(): @pytest.fixture -def patched_loop(loop): - server = mock.Mock() - server.wait_closed = make_mocked_coro(None) - loop.create_server = make_mocked_coro(server) - unix_server = mock.Mock() - unix_server.wait_closed = make_mocked_coro(None) - loop.create_unix_server = make_mocked_coro(unix_server) - asyncio.set_event_loop(loop) - return loop +def patched_loop( + loop: asyncio.AbstractEventLoop, +) -> Iterator[asyncio.AbstractEventLoop]: + server = mock.create_autospec(asyncio.Server, spec_set=True, instance=True) + server.wait_closed.return_value = None + unix_server = mock.create_autospec(asyncio.Server, spec_set=True, instance=True) + unix_server.wait_closed.return_value = None + with mock.patch.object( + loop, "create_server", autospec=True, spec_set=True, return_value=server + ): + with mock.patch.object( + loop, + "create_unix_server", + autospec=True, + spec_set=True, + return_value=unix_server, + ): + asyncio.set_event_loop(loop) + yield loop def stopper(loop): @@ -88,9 +98,9 @@ def f(*args): def test_run_app_http(patched_loop) -> None: app = web.Application() - startup_handler = make_mocked_coro() + startup_handler = mock.AsyncMock() app.on_startup.append(startup_handler) - cleanup_handler = 
make_mocked_coro() + cleanup_handler = mock.AsyncMock() app.on_cleanup.append(cleanup_handler) web.run_app(app, print=stopper(patched_loop), loop=patched_loop) @@ -693,9 +703,9 @@ def test_startup_cleanup_signals_even_on_failure(patched_loop) -> None: patched_loop.create_server = mock.Mock(side_effect=RuntimeError()) app = web.Application() - startup_handler = make_mocked_coro() + startup_handler = mock.AsyncMock() app.on_startup.append(startup_handler) - cleanup_handler = make_mocked_coro() + cleanup_handler = mock.AsyncMock() app.on_cleanup.append(cleanup_handler) with pytest.raises(RuntimeError): @@ -711,9 +721,9 @@ def test_run_app_coro(patched_loop) -> None: async def make_app(): nonlocal startup_handler, cleanup_handler app = web.Application() - startup_handler = make_mocked_coro() + startup_handler = mock.AsyncMock() app.on_startup.append(startup_handler) - cleanup_handler = make_mocked_coro() + cleanup_handler = mock.AsyncMock() app.on_cleanup.append(cleanup_handler) return app diff --git a/tests/test_tracing.py b/tests/test_tracing.py index 809d757f199..845c0ba6ab4 100644 --- a/tests/test_tracing.py +++ b/tests/test_tracing.py @@ -1,9 +1,9 @@ from types import SimpleNamespace +from unittest import mock from unittest.mock import Mock import pytest -from aiohttp.test_utils import make_mocked_coro from aiohttp.tracing import ( Trace, TraceConfig, @@ -104,7 +104,7 @@ class TestTrace: async def test_send(self, signal, params, param_obj) -> None: session = Mock() trace_request_ctx = Mock() - callback = Mock(side_effect=make_mocked_coro(Mock())) + callback = mock.AsyncMock() trace_config = TraceConfig() getattr(trace_config, "on_%s" % signal).append(callback) diff --git a/tests/test_web_app.py b/tests/test_web_app.py index 8c03a6041b2..69655b1a49a 100644 --- a/tests/test_web_app.py +++ b/tests/test_web_app.py @@ -8,7 +8,6 @@ from aiohttp import log, web from aiohttp.abc import AbstractAccessLogger, AbstractRouter from aiohttp.helpers import DEBUG -from 
aiohttp.test_utils import make_mocked_coro from aiohttp.typedefs import Handler @@ -167,8 +166,8 @@ async def test_app_make_handler_raises_deprecation_warning() -> None: async def test_app_register_on_finish() -> None: app = web.Application() - cb1 = make_mocked_coro(None) - cb2 = make_mocked_coro(None) + cb1 = mock.AsyncMock(return_value=None) + cb2 = mock.AsyncMock(return_value=None) app.on_cleanup.append(cb1) app.on_cleanup.append(cb2) app.freeze() diff --git a/tests/test_web_functional.py b/tests/test_web_functional.py index 9cc05a08426..b6caf23df53 100644 --- a/tests/test_web_functional.py +++ b/tests/test_web_functional.py @@ -23,8 +23,7 @@ ) from aiohttp.compression_utils import ZLibBackend, ZLibCompressObjProtocol from aiohttp.hdrs import CONTENT_LENGTH, CONTENT_TYPE, TRANSFER_ENCODING -from aiohttp.pytest_plugin import AiohttpClient -from aiohttp.test_utils import make_mocked_coro +from aiohttp.pytest_plugin import AiohttpClient, AiohttpServer from aiohttp.typedefs import Handler from aiohttp.web_protocol import RequestHandler @@ -2025,15 +2024,14 @@ async def handler(request): assert resp.status == 200 -async def test_request_tracing(aiohttp_server) -> None: - - on_request_start = mock.Mock(side_effect=make_mocked_coro(mock.Mock())) - on_request_end = mock.Mock(side_effect=make_mocked_coro(mock.Mock())) - on_dns_resolvehost_start = mock.Mock(side_effect=make_mocked_coro(mock.Mock())) - on_dns_resolvehost_end = mock.Mock(side_effect=make_mocked_coro(mock.Mock())) - on_request_redirect = mock.Mock(side_effect=make_mocked_coro(mock.Mock())) - on_connection_create_start = mock.Mock(side_effect=make_mocked_coro(mock.Mock())) - on_connection_create_end = mock.Mock(side_effect=make_mocked_coro(mock.Mock())) +async def test_request_tracing(aiohttp_server: AiohttpServer) -> None: + on_request_start = mock.AsyncMock() + on_request_end = mock.AsyncMock() + on_dns_resolvehost_start = mock.AsyncMock() + on_dns_resolvehost_end = mock.AsyncMock() + on_request_redirect = 
mock.AsyncMock() + on_connection_create_start = mock.AsyncMock() + on_connection_create_end = mock.AsyncMock() async def redirector(request): raise web.HTTPFound(location=URL("/redirected")) diff --git a/tests/test_web_request_handler.py b/tests/test_web_request_handler.py index 4837cab030e..ee30e485f1b 100644 --- a/tests/test_web_request_handler.py +++ b/tests/test_web_request_handler.py @@ -1,7 +1,6 @@ from unittest import mock from aiohttp import web -from aiohttp.test_utils import make_mocked_coro async def serve(request: web.BaseRequest) -> web.Response: @@ -37,7 +36,7 @@ async def test_shutdown_no_timeout() -> None: handler = mock.Mock(spec_set=web.RequestHandler) handler._task_handler = None - handler.shutdown = make_mocked_coro(mock.Mock()) + handler.shutdown = mock.AsyncMock(return_value=mock.Mock()) transport = mock.Mock() manager.connection_made(handler, transport) @@ -52,7 +51,7 @@ async def test_shutdown_timeout() -> None: manager = web.Server(serve) handler = mock.Mock() - handler.shutdown = make_mocked_coro(mock.Mock()) + handler.shutdown = mock.AsyncMock(return_value=mock.Mock()) transport = mock.Mock() manager.connection_made(handler, transport) diff --git a/tests/test_web_response.py b/tests/test_web_response.py index 68ffe211f20..7b048970967 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -18,7 +18,7 @@ from aiohttp.http_writer import StreamWriter, _serialize_headers from aiohttp.multipart import BodyPartReader, MultipartWriter from aiohttp.payload import BytesPayload, StringPayload -from aiohttp.test_utils import make_mocked_coro, make_mocked_request +from aiohttp.test_utils import make_mocked_request from aiohttp.web import ContentCoding, Response, StreamResponse, json_response @@ -858,8 +858,8 @@ async def test_cannot_write_eof_twice() -> None: resp = StreamResponse() writer = mock.Mock() resp_impl = await resp.prepare(make_request("GET", "/")) - resp_impl.write = make_mocked_coro(None) - resp_impl.write_eof = 
make_mocked_coro(None) + resp_impl.write = mock.AsyncMock(None) + resp_impl.write_eof = mock.AsyncMock(None) await resp.write(b"data") assert resp_impl.write.called @@ -1065,7 +1065,7 @@ async def test_prepare_twice() -> None: async def test_prepare_calls_signal() -> None: app = mock.Mock() - sig = make_mocked_coro() + sig = mock.AsyncMock() on_response_prepare = aiosignal.Signal(app) on_response_prepare.append(sig) req = make_request("GET", "/", app=app, on_response_prepare=on_response_prepare) @@ -1336,8 +1336,8 @@ async def test_send_set_cookie_header(buf, writer) -> None: async def test_consecutive_write_eof() -> None: writer = mock.Mock() - writer.write_eof = make_mocked_coro() - writer.write_headers = make_mocked_coro() + writer.write_eof = mock.AsyncMock() + writer.write_headers = mock.AsyncMock() req = make_request("GET", "/", writer=writer) data = b"data" resp = Response(body=data) diff --git a/tests/test_web_sendfile.py b/tests/test_web_sendfile.py index 58a46ec602c..1776a3aabd3 100644 --- a/tests/test_web_sendfile.py +++ b/tests/test_web_sendfile.py @@ -3,7 +3,7 @@ from unittest import mock from aiohttp import hdrs -from aiohttp.test_utils import make_mocked_coro, make_mocked_request +from aiohttp.test_utils import make_mocked_request from aiohttp.web_fileresponse import FileResponse MOCK_MODE = S_IFREG | S_IRUSR | S_IWUSR @@ -28,7 +28,7 @@ def test_using_gzip_if_header_present_and_file_available(loop) -> None: file_sender = FileResponse(filepath) file_sender._path = filepath - file_sender._sendfile = make_mocked_coro(None) # type: ignore[assignment] + file_sender._sendfile = mock.AsyncMock(return_value=None) # type: ignore[method-assign] loop.run_until_complete(file_sender.prepare(request)) @@ -53,7 +53,7 @@ def test_gzip_if_header_not_present_and_file_available(loop) -> None: file_sender = FileResponse(filepath) file_sender._path = filepath - file_sender._sendfile = make_mocked_coro(None) # type: ignore[assignment] + file_sender._sendfile = 
mock.AsyncMock(return_value=None) # type: ignore[method-assign] loop.run_until_complete(file_sender.prepare(request)) @@ -76,7 +76,7 @@ def test_gzip_if_header_not_present_and_file_not_available(loop) -> None: file_sender = FileResponse(filepath) file_sender._path = filepath - file_sender._sendfile = make_mocked_coro(None) # type: ignore[assignment] + file_sender._sendfile = mock.AsyncMock(return_value=None) # type: ignore[method-assign] loop.run_until_complete(file_sender.prepare(request)) @@ -101,7 +101,7 @@ def test_gzip_if_header_present_and_file_not_available(loop) -> None: file_sender = FileResponse(filepath) file_sender._path = filepath - file_sender._sendfile = make_mocked_coro(None) # type: ignore[assignment] + file_sender._sendfile = mock.AsyncMock(return_value=None) # type: ignore[method-assign] loop.run_until_complete(file_sender.prepare(request)) @@ -120,7 +120,7 @@ def test_status_controlled_by_user(loop) -> None: file_sender = FileResponse(filepath, status=203) file_sender._path = filepath - file_sender._sendfile = make_mocked_coro(None) # type: ignore[assignment] + file_sender._sendfile = mock.AsyncMock(return_value=None) # type: ignore[method-assign] loop.run_until_complete(file_sender.prepare(request)) diff --git a/tests/test_web_websocket.py b/tests/test_web_websocket.py index f9a92d0587f..390d6224d3d 100644 --- a/tests/test_web_websocket.py +++ b/tests/test_web_websocket.py @@ -10,7 +10,7 @@ from aiohttp import WSMessage, WSMessageTypeError, WSMsgType, web from aiohttp.http import WS_CLOSED_MESSAGE from aiohttp.streams import EofStream -from aiohttp.test_utils import make_mocked_coro, make_mocked_request +from aiohttp.test_utils import make_mocked_request from aiohttp.web import HTTPBadRequest, WebSocketResponse from aiohttp.web_ws import WebSocketReady @@ -420,13 +420,11 @@ async def test_receive_eofstream_in_reader(make_request, loop) -> None: ws._reader = mock.Mock() exc = EofStream() - res = loop.create_future() - res.set_exception(exc) - 
ws._reader.read = make_mocked_coro(res) - ws._payload_writer.drain = mock.Mock() - ws._payload_writer.drain.return_value = loop.create_future() - ws._payload_writer.drain.return_value.set_result(True) - + ws._reader.read = mock.AsyncMock(side_effect=exc) + assert ws._payload_writer is not None + f = loop.create_future() + f.set_result(True) + ws._payload_writer.drain.return_value = f # type: ignore[attr-defined] msg = await ws.receive() assert msg.type == WSMsgType.CLOSED assert ws.closed @@ -439,12 +437,7 @@ async def test_receive_exception_in_reader(make_request: Any, loop: Any) -> None ws._reader = mock.Mock() exc = Exception() - res = loop.create_future() - res.set_exception(exc) - ws._reader.read = make_mocked_coro(res) - ws._payload_writer.drain = mock.Mock() - ws._payload_writer.drain.return_value = loop.create_future() - ws._payload_writer.drain.return_value.set_result(True) + ws._reader.read = mock.AsyncMock(side_effect=exc) msg = await ws.receive() assert msg.type == WSMsgType.ERROR @@ -526,9 +519,7 @@ async def test_receive_timeouterror(make_request: Any, loop: Any) -> None: assert len(ws._req.transport.close.mock_calls) == 0 ws._reader = mock.Mock() - res = loop.create_future() - res.set_exception(asyncio.TimeoutError()) - ws._reader.read = make_mocked_coro(res) + ws._reader.read = mock.AsyncMock(side_effect=asyncio.TimeoutError()) with pytest.raises(asyncio.TimeoutError): await ws.receive() diff --git a/tests/test_websocket_writer.py b/tests/test_websocket_writer.py index b39e411f90d..f5125dde361 100644 --- a/tests/test_websocket_writer.py +++ b/tests/test_websocket_writer.py @@ -8,13 +8,12 @@ from aiohttp import WSMsgType from aiohttp._websocket.reader import WebSocketDataQueue from aiohttp.http import WebSocketReader, WebSocketWriter -from aiohttp.test_utils import make_mocked_coro @pytest.fixture def protocol(): ret = mock.Mock() - ret._drain_helper = make_mocked_coro() + ret._drain_helper = mock.AsyncMock() return ret From 
383323d74f7b73245de86b5e2bcc4723fc50ba91 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 May 2025 10:59:03 +0000 Subject: [PATCH 1413/1511] Bump setuptools from 80.7.1 to 80.8.0 (#10920) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [setuptools](https://github.com/pypa/setuptools) from 80.7.1 to 80.8.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/setuptools/blob/main/NEWS.rst">setuptools's changelog</a>.</em></p> <blockquote> <h1>v80.8.0</h1> <h2>Features</h2> <ul> <li>Replaced more references to pkg_resources with importlib equivalents in wheel odule. (<a href="https://redirect.github.com/pypa/setuptools/issues/3085">#3085</a>)</li> <li>Restore explicit LICENSE file. (<a href="https://redirect.github.com/pypa/setuptools/issues/5001">#5001</a>)</li> <li>Removed no longer used build dependency on <code>coherent.licensed</code>. 
(<a href="https://redirect.github.com/pypa/setuptools/issues/5003">#5003</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/setuptools/commit/b3786cd9b59576907e671d8b22b66c73cfed5dc6"><code>b3786cd</code></a> Bump version: 80.7.1 → 80.8.0</li> <li><a href="https://github.com/pypa/setuptools/commit/9179b7597589713ce565b9f7ba0cc8831a1303a4"><code>9179b75</code></a> Merge pull request <a href="https://redirect.github.com/pypa/setuptools/issues/5003">#5003</a> from abravalheri/issue-5002</li> <li><a href="https://github.com/pypa/setuptools/commit/6f937df223c38eed24645ca32ba71091032c1c18"><code>6f937df</code></a> Merge pull request <a href="https://redirect.github.com/pypa/setuptools/issues/5004">#5004</a> from pypa/feature/remove-more-pkg_resources</li> <li><a href="https://github.com/pypa/setuptools/commit/1bfd8db2d6b4f98cfe181d5f0854f19f1aa27c22"><code>1bfd8db</code></a> Add news fragment.</li> <li><a href="https://github.com/pypa/setuptools/commit/0e19b82062f168f428128d51f7bd12034daebd2b"><code>0e19b82</code></a> Replace pkg_resources with importlib.metadata and packaging.requirements.</li> <li><a href="https://github.com/pypa/setuptools/commit/95145dd0afd94006c158d668bffb1d6ec7f4cacb"><code>95145dd</code></a> Extract a method for converting requires.</li> <li><a href="https://github.com/pypa/setuptools/commit/57d6fcd5854cec806b00ee966b62887d3d13922b"><code>57d6fcd</code></a> Add news fragment</li> <li><a href="https://github.com/pypa/setuptools/commit/62e47935abc3ede4ca5860d0775d70147e6c5635"><code>62e4793</code></a> Comment out unused build dependency</li> <li><a href="https://github.com/pypa/setuptools/commit/ae480ff7c2b40442dc97fff712312549a9ed76e6"><code>ae480ff</code></a> Restore explicit LICENSE file</li> <li>See full diff in <a href="https://github.com/pypa/setuptools/compare/v80.7.1...v80.8.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility 
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=setuptools&package-manager=pip&previous-version=80.7.1&new-version=80.8.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 3c3cf6cfacf..bcb2e81a5e0 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -300,7 +300,7 @@ zlib-ng==0.5.1 # The following packages are considered to be unsafe in a requirements file: pip==25.1.1 # via pip-tools -setuptools==80.7.1 +setuptools==80.8.0 # via # incremental # pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index 82750d218f3..0a992d8a1e1 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -291,7 +291,7 @@ zlib-ng==0.5.1 # The following packages are considered to be unsafe in a requirements file: pip==25.1.1 # via pip-tools -setuptools==80.7.1 +setuptools==80.8.0 # via # incremental # pip-tools diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index e00e4b52226..142aa6d7edb 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -76,5 +76,5 @@ urllib3==2.4.0 
# via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==80.7.1 +setuptools==80.8.0 # via incremental diff --git a/requirements/doc.txt b/requirements/doc.txt index 0ee0b84218e..08f24f4175a 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -69,5 +69,5 @@ urllib3==2.4.0 # via requests # The following packages are considered to be unsafe in a requirements file: -setuptools==80.7.1 +setuptools==80.8.0 # via incremental From 2e51f406bf575c91557eb631ef890bddcc3621f3 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 21 May 2025 23:27:55 +0000 Subject: [PATCH 1414/1511] [PR #10923/19643c9c backport][3.12] Fix weakref garbage collection race condition in DNS resolver manager (#10924) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/10923.feature.rst | 1 + aiohttp/resolver.py | 5 ++++- tests/test_resolver.py | 46 +++++++++++++++++++++++++++++++++++++++ 3 files changed, 51 insertions(+), 1 deletion(-) create mode 120000 CHANGES/10923.feature.rst diff --git a/CHANGES/10923.feature.rst b/CHANGES/10923.feature.rst new file mode 120000 index 00000000000..879a4227358 --- /dev/null +++ b/CHANGES/10923.feature.rst @@ -0,0 +1 @@ +10847.feature.rst \ No newline at end of file diff --git a/aiohttp/resolver.py b/aiohttp/resolver.py index 8e73beb6e1e..05accd19564 100644 --- a/aiohttp/resolver.py +++ b/aiohttp/resolver.py @@ -257,11 +257,14 @@ def release_resolver( loop: The event loop the resolver was using. 
""" # Remove client from its loop's tracking + if loop not in self._loop_data: + return resolver, client_set = self._loop_data[loop] client_set.discard(client) # If no more clients for this loop, cancel and remove its resolver if not client_set: - resolver.cancel() + if resolver is not None: + resolver.cancel() del self._loop_data[loop] diff --git a/tests/test_resolver.py b/tests/test_resolver.py index 9a6a782c06a..f6963121eb7 100644 --- a/tests/test_resolver.py +++ b/tests/test_resolver.py @@ -628,3 +628,49 @@ async def test_dns_resolver_manager_multiple_event_loops( # Verify resolver cleanup resolver1.cancel.assert_called_once() resolver2.cancel.assert_called_once() + + +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +async def test_dns_resolver_manager_weakref_garbage_collection() -> None: + """Test that release_resolver handles None resolver due to weakref garbage collection.""" + manager = _DNSResolverManager() + + # Create a mock resolver that will be None when accessed + mock_resolver = Mock() + mock_resolver.cancel = Mock() + + with patch("aiodns.DNSResolver", return_value=mock_resolver): + # Create an AsyncResolver to get a resolver from the manager + resolver = AsyncResolver() + loop = asyncio.get_running_loop() + + # Manually corrupt the data to simulate garbage collection + # by setting the resolver to None + manager._loop_data[loop] = (None, manager._loop_data[loop][1]) # type: ignore[assignment] + + # This should not raise an AttributeError: 'NoneType' object has no attribute 'cancel' + await resolver.close() + + # Verify no exception was raised and the loop data was cleaned up properly + # Since we set resolver to None and there was one client, the entry should be removed + assert loop not in manager._loop_data + + +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +async def test_dns_resolver_manager_missing_loop_data() -> None: + """Test that release_resolver handles missing loop data gracefully.""" + 
manager = _DNSResolverManager() + + with patch("aiodns.DNSResolver"): + # Create an AsyncResolver + resolver = AsyncResolver() + loop = asyncio.get_running_loop() + + # Manually remove the loop data to simulate race condition + manager._loop_data.clear() + + # This should not raise a KeyError + await resolver.close() + + # Verify no exception was raised + assert loop not in manager._loop_data From d2f682f0aebafaa5923c7024033f2f8037d19619 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 22 May 2025 11:22:40 +0000 Subject: [PATCH 1415/1511] Bump typing-inspection from 0.4.0 to 0.4.1 (#10927) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [typing-inspection](https://github.com/pydantic/typing-inspection) from 0.4.0 to 0.4.1. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pydantic/typing-inspection/blob/main/HISTORY.md">typing-inspection's changelog</a>.</em></p> <blockquote> <h2>v0.4.1 (2025-05-21)</h2> <ul> <li>Use <code>list</code> as a type hint for <code>InspectedAnnotation.metadata</code> by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/typing-inspection/pull/43">#43</a></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pydantic/typing-inspection/commit/3bc3f963f17eb674fa4fbf94cb262dfc4b9033ee"><code>3bc3f96</code></a> Prepare release 0.4.1 (<a href="https://redirect.github.com/pydantic/typing-inspection/issues/44">#44</a>)</li> <li><a href="https://github.com/pydantic/typing-inspection/commit/17b939c7bbe33834e57efe445a1be525db52a078"><code>17b939c</code></a> Bump development dependencies (<a href="https://redirect.github.com/pydantic/typing-inspection/issues/46">#46</a>)</li> <li><a 
href="https://github.com/pydantic/typing-inspection/commit/dcdd318de330db1c2ded8da91e9d288ee5356c64"><code>dcdd318</code></a> Add proper reference to <code>dataclasses.IniVar</code> in <code>AnnotationSource.DATACLASS</code> ...</li> <li><a href="https://github.com/pydantic/typing-inspection/commit/5f86b149b02518566ec7ec17c1749059c6906b60"><code>5f86b14</code></a> Use <code>list</code> as a type hint for <code>InspectedAnnotation.metadata</code> (<a href="https://redirect.github.com/pydantic/typing-inspection/issues/43">#43</a>)</li> <li><a href="https://github.com/pydantic/typing-inspection/commit/b7378550328f395eac9e9d8b30f5e64469543028"><code>b737855</code></a> Add SPDX license identifier (<a href="https://redirect.github.com/pydantic/typing-inspection/issues/42">#42</a>)</li> <li><a href="https://github.com/pydantic/typing-inspection/commit/61c25e5197df56a77eca460d6c5362d76882b242"><code>61c25e5</code></a> Improve test coverage (<a href="https://redirect.github.com/pydantic/typing-inspection/issues/41">#41</a>)</li> <li><a href="https://github.com/pydantic/typing-inspection/commit/a56b8c3a3e7fc54bc84c5816503ee564735bff47"><code>a56b8c3</code></a> Fix implementation of <code>is_union_origin()</code> (<a href="https://redirect.github.com/pydantic/typing-inspection/issues/40">#40</a>)</li> <li><a href="https://github.com/pydantic/typing-inspection/commit/e20451f3290a1e8833098749c766cb167ab83a40"><code>e20451f</code></a> Add <code>typing_objects.is_forwardref()</code> (<a href="https://redirect.github.com/pydantic/typing-inspection/issues/38">#38</a>)</li> <li><a href="https://github.com/pydantic/typing-inspection/commit/eb7654bbe2b311c54b822e38243c61021132b0ea"><code>eb7654b</code></a> Fix some typos (<a href="https://redirect.github.com/pydantic/typing-inspection/issues/36">#36</a>)</li> <li><a href="https://github.com/pydantic/typing-inspection/commit/5cb72572c4c474254a2016ff72a3dc1ec6fbeb63"><code>5cb7257</code></a> Fix compatibility with latest Python 3.14 
release (<a href="https://redirect.github.com/pydantic/typing-inspection/issues/37">#37</a>)</li> <li>Additional commits viewable in <a href="https://github.com/pydantic/typing-inspection/compare/v0.4.0...v0.4.1">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=typing-inspection&package-manager=pip&previous-version=0.4.0&new-version=0.4.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index bcb2e81a5e0..f63a437a276 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -272,7 +272,7 @@ typing-extensions==4.13.2 # python-on-whales # rich # typing-inspection -typing-inspection==0.4.0 +typing-inspection==0.4.1 # via pydantic uritemplate==4.1.1 # via gidgethub diff --git a/requirements/dev.txt b/requirements/dev.txt index 0a992d8a1e1..704649df008 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -263,7 +263,7 @@ typing-extensions==4.13.2 # python-on-whales # rich # typing-inspection -typing-inspection==0.4.0 +typing-inspection==0.4.1 # via pydantic uritemplate==4.1.1 # via gidgethub diff --git a/requirements/lint.txt b/requirements/lint.txt index 28aa349a511..99fcd3969e3 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -106,7 +106,7 @@ typing-extensions==4.13.2 # python-on-whales # rich # typing-inspection 
-typing-inspection==0.4.0 +typing-inspection==0.4.1 # via pydantic uvloop==0.21.0 ; platform_system != "Windows" # via -r requirements/lint.in diff --git a/requirements/test.txt b/requirements/test.txt index 683001e8967..da25768851e 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -135,7 +135,7 @@ typing-extensions==4.13.2 # python-on-whales # rich # typing-inspection -typing-inspection==0.4.0 +typing-inspection==0.4.1 # via pydantic uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in From ee207f530a1c473719f4c1e68e79a1f85baffa98 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 22 May 2025 11:24:42 +0000 Subject: [PATCH 1416/1511] Bump coverage from 7.8.0 to 7.8.1 (#10928) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.8.0 to 7.8.1. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst">coverage's changelog</a>.</em></p> <blockquote> <h2>Version 7.8.1 — 2025-05-21</h2> <ul> <li> <p>A number of EncodingWarnings were fixed that could appear if you've enabled PYTHONWARNDEFAULTENCODING, fixing <code>issue 1966</code><em>. Thanks, <code>Henry Schreiner <pull 1967_></code></em>.</p> </li> <li> <p>Fixed a race condition when using sys.monitoring with free-threading Python, closing <code>issue 1970</code>_.</p> </li> </ul> <p>.. _issue 1966: <a href="https://redirect.github.com/nedbat/coveragepy/issues/1966">nedbat/coveragepy#1966</a> .. _pull 1967: <a href="https://redirect.github.com/nedbat/coveragepy/pull/1967">nedbat/coveragepy#1967</a> .. _issue 1970: <a href="https://redirect.github.com/nedbat/coveragepy/issues/1970">nedbat/coveragepy#1970</a></p> <p>.. 
_changes_7-8-0:</p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/nedbat/coveragepy/commit/ed98b8708ccc380bcb1490cd73b3e476f69c234f"><code>ed98b87</code></a> docs: sample HTML for 7.8.1</li> <li><a href="https://github.com/nedbat/coveragepy/commit/b98bc9b9878ff8c23bcaa0d7c5b2a55269c6783f"><code>b98bc9b</code></a> docs: prep for 7.8.1</li> <li><a href="https://github.com/nedbat/coveragepy/commit/ecbd4daf401664bbabcbb1e954855b0dbe92878e"><code>ecbd4da</code></a> build: make a step more explicit</li> <li><a href="https://github.com/nedbat/coveragepy/commit/277441030e56ff14dc1f6612cd3a5771353ef28a"><code>2774410</code></a> test: simplify skipper, and make it suppressable</li> <li><a href="https://github.com/nedbat/coveragepy/commit/8b9cecc43a8f2491b3d63f549cc3809a9f2b2b24"><code>8b9cecc</code></a> fix: close a sys.monitoring race condition with free-threading. <a href="https://redirect.github.com/nedbat/coveragepy/issues/1970">#1970</a></li> <li><a href="https://github.com/nedbat/coveragepy/commit/66e4f8d9ad61e3100af263011a89cbbe476970eb"><code>66e4f8d</code></a> test: try to unflake a test</li> <li><a href="https://github.com/nedbat/coveragepy/commit/9975d0c8df66ddbf61bf783c11691b59bd377dfe"><code>9975d0c</code></a> build: no need for a separate doc_upgrade target</li> <li><a href="https://github.com/nedbat/coveragepy/commit/6dec28bb8fa0a1875b9c5646f4ca5145cbdfc2fb"><code>6dec28b</code></a> build: delete unused code in igor.py</li> <li><a href="https://github.com/nedbat/coveragepy/commit/6376e358f05d3749dfcb12907904c2761f331195"><code>6376e35</code></a> build: clarify a .ignore rule</li> <li><a href="https://github.com/nedbat/coveragepy/commit/9bdf3665853eb19211e5ca6cb844a426455cdf24"><code>9bdf366</code></a> chore: make upgrade doc_upgrade</li> <li>Additional commits viewable in <a href="https://github.com/nedbat/coveragepy/compare/7.8.0...7.8.1">compare view</a></li> </ul> </details> <br /> [![Dependabot 
compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=coverage&package-manager=pip&previous-version=7.8.0&new-version=7.8.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index f63a437a276..e79f7008a7d 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -54,7 +54,7 @@ click==8.1.8 # slotscheck # towncrier # wait-for-it -coverage==7.8.0 +coverage==7.8.1 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/dev.txt b/requirements/dev.txt index 704649df008..9b2c3ebeab3 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -54,7 +54,7 @@ click==8.1.8 # slotscheck # towncrier # wait-for-it -coverage==7.8.0 +coverage==7.8.1 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/test.txt b/requirements/test.txt index da25768851e..63cb482c5e0 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -27,7 +27,7 @@ cffi==1.17.1 # pytest-codspeed click==8.1.8 # via wait-for-it -coverage==7.8.0 +coverage==7.8.1 # via # -r requirements/test.in # pytest-cov From 40563751adf02b811d5acf95696c096a4dbd9ed4 Mon Sep 17 00:00:00 
2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 22 May 2025 14:47:14 +0000 Subject: [PATCH 1417/1511] [PR #10760/4152a083 backport][3.12] Support using system llhttp library (#10929) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Michał Górny <mgorny@gentoo.org> Co-authored-by: 🇺🇦 Sviatoslav Sydorenko (Святослав Сидоренко) <wk.cvs.github@sydorenko.org.ua> --- CHANGES/10759.packaging.rst | 5 +++++ aiohttp/_cparser.pxd | 2 +- docs/glossary.rst | 11 +++++++++++ docs/spelling_wordlist.txt | 1 + pyproject.toml | 1 + requirements/test.in | 1 + setup.py | 37 +++++++++++++++++++++++++++++++------ 7 files changed, 51 insertions(+), 7 deletions(-) create mode 100644 CHANGES/10759.packaging.rst diff --git a/CHANGES/10759.packaging.rst b/CHANGES/10759.packaging.rst new file mode 100644 index 00000000000..6f41e873229 --- /dev/null +++ b/CHANGES/10759.packaging.rst @@ -0,0 +1,5 @@ +Added support for building against system ``llhttp`` library -- by :user:`mgorny`. + +This change adds support for :envvar:`AIOHTTP_USE_SYSTEM_DEPS` environment variable that +can be used to build aiohttp against the system install of the ``llhttp`` library rather +than the vendored one. diff --git a/aiohttp/_cparser.pxd b/aiohttp/_cparser.pxd index c2cd5a92fda..1b3be6d4efb 100644 --- a/aiohttp/_cparser.pxd +++ b/aiohttp/_cparser.pxd @@ -1,7 +1,7 @@ from libc.stdint cimport int32_t, uint8_t, uint16_t, uint64_t -cdef extern from "../vendor/llhttp/build/llhttp.h": +cdef extern from "llhttp.h": struct llhttp__internal_s: int32_t _index diff --git a/docs/glossary.rst b/docs/glossary.rst index 392ef740cd1..996ea982d58 100644 --- a/docs/glossary.rst +++ b/docs/glossary.rst @@ -151,6 +151,17 @@ Environment Variables ===================== +.. envvar:: AIOHTTP_NO_EXTENSIONS + + If set to a non-empty value while building from source, aiohttp will be built without speedups + written as C extensions. 
This option is primarily useful for debugging. + +.. envvar:: AIOHTTP_USE_SYSTEM_DEPS + + If set to a non-empty value while building from source, aiohttp will be built against + the system installation of llhttp rather than the vendored library. This option is primarily + meant to be used by downstream redistributors. + .. envvar:: NETRC If set, HTTP Basic Auth will be read from the file pointed to by this environment variable, diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index 421ef842678..b7153c68be8 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -175,6 +175,7 @@ kwargs latin lifecycle linux +llhttp localhost Locator login diff --git a/pyproject.toml b/pyproject.toml index 69f8a6b58b6..3ef37b5978b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,6 @@ [build-system] requires = [ + "pkgconfig", "setuptools >= 46.4.0", ] build-backend = "setuptools.build_meta" diff --git a/requirements/test.in b/requirements/test.in index 91b5e115952..1563689deae 100644 --- a/requirements/test.in +++ b/requirements/test.in @@ -5,6 +5,7 @@ coverage freezegun isal mypy; implementation_name == "cpython" +pkgconfig proxy.py >= 2.4.4rc5 pytest pytest-cov diff --git a/setup.py b/setup.py index 2f024e87ef2..fafb7dc7941 100644 --- a/setup.py +++ b/setup.py @@ -8,6 +8,9 @@ raise RuntimeError("aiohttp 3.x requires Python 3.9+") +USE_SYSTEM_DEPS = bool( + os.environ.get("AIOHTTP_USE_SYSTEM_DEPS", os.environ.get("USE_SYSTEM_DEPS")) +) NO_EXTENSIONS: bool = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS")) HERE = pathlib.Path(__file__).parent IS_GIT_REPO = (HERE / ".git").exists() @@ -17,7 +20,11 @@ NO_EXTENSIONS = True -if IS_GIT_REPO and not (HERE / "vendor/llhttp/README.md").exists(): +if ( + not USE_SYSTEM_DEPS + and IS_GIT_REPO + and not (HERE / "vendor/llhttp/README.md").exists() +): print("Install submodules when building from git clone", file=sys.stderr) print("Hint:", file=sys.stderr) print(" git submodule update --init", 
file=sys.stderr) @@ -26,6 +33,27 @@ # NOTE: makefile cythonizes all Cython modules +if USE_SYSTEM_DEPS: + import shlex + + import pkgconfig + + llhttp_sources = [] + llhttp_kwargs = { + "extra_compile_args": shlex.split(pkgconfig.cflags("libllhttp")), + "extra_link_args": shlex.split(pkgconfig.libs("libllhttp")), + } +else: + llhttp_sources = [ + "vendor/llhttp/build/c/llhttp.c", + "vendor/llhttp/src/native/api.c", + "vendor/llhttp/src/native/http.c", + ] + llhttp_kwargs = { + "define_macros": [("LLHTTP_STRICT_MODE", 0)], + "include_dirs": ["vendor/llhttp/build"], + } + extensions = [ Extension("aiohttp._websocket.mask", ["aiohttp/_websocket/mask.c"]), Extension( @@ -33,12 +61,9 @@ [ "aiohttp/_http_parser.c", "aiohttp/_find_header.c", - "vendor/llhttp/build/c/llhttp.c", - "vendor/llhttp/src/native/api.c", - "vendor/llhttp/src/native/http.c", + *llhttp_sources, ], - define_macros=[("LLHTTP_STRICT_MODE", 0)], - include_dirs=["vendor/llhttp/build"], + **llhttp_kwargs, ), Extension("aiohttp._http_writer", ["aiohttp/_http_writer.c"]), Extension("aiohttp._websocket.reader_c", ["aiohttp/_websocket/reader_c.c"]), From 10f0cf8d74d59aa293e2d998256e8d16fad7bd7e Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 22 May 2025 14:51:24 +0000 Subject: [PATCH 1418/1511] [PR #10922/5fac5f19 backport][3.12] Add Winloop to test suite if User is using Windows (#10930) Co-authored-by: Vizonex <114684698+Vizonex@users.noreply.github.com> Co-authored-by: J. Nick Koston <nick@koston.org> Co-authored-by: J. 
Nick Koston <nick+github@koston.org> Co-authored-by: Sam Bull <aa6bs0@sambull.org> --- CHANGES/10922.contrib.rst | 1 + CONTRIBUTORS.txt | 1 + docs/spelling_wordlist.txt | 1 + requirements/base.in | 1 + requirements/base.txt | 1 + tests/conftest.py | 5 ++++- 6 files changed, 9 insertions(+), 1 deletion(-) create mode 100644 CHANGES/10922.contrib.rst diff --git a/CHANGES/10922.contrib.rst b/CHANGES/10922.contrib.rst new file mode 100644 index 00000000000..e5e1cfd8af6 --- /dev/null +++ b/CHANGES/10922.contrib.rst @@ -0,0 +1 @@ +Added Winloop to test suite to support in the future -- by :user:`Vizonex`. diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 5ff1eea3da7..59edfd7ac3f 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -359,6 +359,7 @@ Vincent Maillol Vitalik Verhovodov Vitaly Haritonsky Vitaly Magerya +Vizonex Vladimir Kamarzin Vladimir Kozlovski Vladimir Rutsky diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index b7153c68be8..d0328529cfd 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -368,6 +368,7 @@ websocket’s websockets Websockets wildcard +Winloop Workflow ws wsgi diff --git a/requirements/base.in b/requirements/base.in index 70493b6c83a..816a4e84026 100644 --- a/requirements/base.in +++ b/requirements/base.in @@ -2,3 +2,4 @@ gunicorn uvloop; platform_system != "Windows" and implementation_name == "cpython" # MagicStack/uvloop#14 +winloop; platform_system == "Windows" and implementation_name == "cpython" diff --git a/requirements/base.txt b/requirements/base.txt index 26c18e2f53e..2cd73f52418 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -43,6 +43,7 @@ pycparser==2.22 typing-extensions==4.13.2 # via multidict uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpython" +winloop==0.1.8; platform_system == "Windows" and implementation_name == "cpython" # via -r requirements/base.in yarl==1.20.0 # via -r requirements/runtime-deps.in diff --git 
a/tests/conftest.py b/tests/conftest.py index 27cd5cbd6db..d9831aea523 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -34,7 +34,10 @@ try: - import uvloop + if sys.platform == "win32": + import winloop as uvloop + else: + import uvloop except ImportError: uvloop = None # type: ignore[assignment] From 0c161025b5f0f15f13e66cc1cba906e2428cc276 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Thu, 22 May 2025 10:22:46 -0500 Subject: [PATCH 1419/1511] [PR #10915/545783b backport][3.12] Fix connection reuse for file-like data payloads (#10931) --- CHANGES/10325.bugfix.rst | 1 + CHANGES/10915.bugfix.rst | 3 + aiohttp/client_reqrep.py | 77 +++++- aiohttp/payload.py | 428 ++++++++++++++++++++++++++++---- tests/conftest.py | 16 ++ tests/test_client_functional.py | 170 ++++++++++++- tests/test_client_request.py | 98 +++++++- tests/test_payload.py | 351 ++++++++++++++++++++++++-- 8 files changed, 1060 insertions(+), 84 deletions(-) create mode 120000 CHANGES/10325.bugfix.rst create mode 100644 CHANGES/10915.bugfix.rst diff --git a/CHANGES/10325.bugfix.rst b/CHANGES/10325.bugfix.rst new file mode 120000 index 00000000000..aa085cc590d --- /dev/null +++ b/CHANGES/10325.bugfix.rst @@ -0,0 +1 @@ +10915.bugfix.rst \ No newline at end of file diff --git a/CHANGES/10915.bugfix.rst b/CHANGES/10915.bugfix.rst new file mode 100644 index 00000000000..f564603306b --- /dev/null +++ b/CHANGES/10915.bugfix.rst @@ -0,0 +1,3 @@ +Fixed connection reuse for file-like data payloads by ensuring buffer +truncation respects content-length boundaries and preventing premature +connection closure race -- by :user:`bdraco`. 
diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index ef0dd42b969..a50917150c5 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -370,6 +370,23 @@ def __init__( def __reset_writer(self, _: object = None) -> None: self.__writer = None + def _get_content_length(self) -> Optional[int]: + """Extract and validate Content-Length header value. + + Returns parsed Content-Length value or None if not set. + Raises ValueError if header exists but cannot be parsed as an integer. + """ + if hdrs.CONTENT_LENGTH not in self.headers: + return None + + content_length_hdr = self.headers[hdrs.CONTENT_LENGTH] + try: + return int(content_length_hdr) + except ValueError: + raise ValueError( + f"Invalid Content-Length header: {content_length_hdr}" + ) from None + @property def skip_auto_headers(self) -> CIMultiDict[None]: return self._skip_auto_headers or CIMultiDict() @@ -659,9 +676,37 @@ def update_proxy( self.proxy_headers = proxy_headers async def write_bytes( - self, writer: AbstractStreamWriter, conn: "Connection" + self, + writer: AbstractStreamWriter, + conn: "Connection", + content_length: Optional[int], ) -> None: - """Support coroutines that yields bytes objects.""" + """ + Write the request body to the connection stream. + + This method handles writing different types of request bodies: + 1. Payload objects (using their specialized write_with_length method) + 2. Bytes/bytearray objects + 3. 
Iterable body content + + Args: + writer: The stream writer to write the body to + conn: The connection being used for this request + content_length: Optional maximum number of bytes to write from the body + (None means write the entire body) + + The method properly handles: + - Waiting for 100-Continue responses if required + - Content length constraints for chunked encoding + - Error handling for network issues, cancellation, and other exceptions + - Signaling EOF and timeout management + + Raises: + ClientOSError: When there's an OS-level error writing the body + ClientConnectionError: When there's a general connection error + asyncio.CancelledError: When the operation is cancelled + + """ # 100 response if self._continue is not None: await writer.drain() @@ -671,16 +716,30 @@ async def write_bytes( assert protocol is not None try: if isinstance(self.body, payload.Payload): - await self.body.write(writer) + # Specialized handling for Payload objects that know how to write themselves + await self.body.write_with_length(writer, content_length) else: + # Handle bytes/bytearray by converting to an iterable for consistent handling if isinstance(self.body, (bytes, bytearray)): self.body = (self.body,) - for chunk in self.body: - await writer.write(chunk) + if content_length is None: + # Write the entire body without length constraint + for chunk in self.body: + await writer.write(chunk) + else: + # Write with length constraint, respecting content_length limit + # If the body is larger than content_length, we truncate it + remaining_bytes = content_length + for chunk in self.body: + await writer.write(chunk[:remaining_bytes]) + remaining_bytes -= len(chunk) + if remaining_bytes <= 0: + break except OSError as underlying_exc: reraised_exc = underlying_exc + # Distinguish between timeout and other OS errors for better error reporting exc_is_not_timeout = underlying_exc.errno is not None or not isinstance( underlying_exc, asyncio.TimeoutError ) @@ -692,18 +751,20 @@ async 
def write_bytes( set_exception(protocol, reraised_exc, underlying_exc) except asyncio.CancelledError: - # Body hasn't been fully sent, so connection can't be reused. + # Body hasn't been fully sent, so connection can't be reused conn.close() raise except Exception as underlying_exc: set_exception( protocol, ClientConnectionError( - f"Failed to send bytes into the underlying connection {conn !s}", + "Failed to send bytes into the underlying connection " + f"{conn !s}: {underlying_exc!r}", ), underlying_exc, ) else: + # Successfully wrote the body, signal EOF and start response timeout await writer.write_eof() protocol.start_timeout() @@ -768,7 +829,7 @@ async def send(self, conn: "Connection") -> "ClientResponse": await writer.write_headers(status_line, self.headers) task: Optional["asyncio.Task[None]"] if self.body or self._continue is not None or protocol.writing_paused: - coro = self.write_bytes(writer, conn) + coro = self.write_bytes(writer, conn, self._get_content_length()) if sys.version_info >= (3, 12): # Optimization for Python 3.12, try to write # bytes immediately to avoid having to schedule diff --git a/aiohttp/payload.py b/aiohttp/payload.py index 3f6d3672db2..823940902f5 100644 --- a/aiohttp/payload.py +++ b/aiohttp/payload.py @@ -16,6 +16,7 @@ Final, Iterable, Optional, + Set, TextIO, Tuple, Type, @@ -53,6 +54,9 @@ ) TOO_LARGE_BYTES_BODY: Final[int] = 2**20 # 1 MB +READ_SIZE: Final[int] = 2**16 # 64 KB +_CLOSE_FUTURES: Set[asyncio.Future[None]] = set() + if TYPE_CHECKING: from typing import List @@ -238,10 +242,46 @@ def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: @abstractmethod async def write(self, writer: AbstractStreamWriter) -> None: - """Write payload. + """Write payload to the writer stream. + + Args: + writer: An AbstractStreamWriter instance that handles the actual writing + + This is a legacy method that writes the entire payload without length constraints. 
+ + Important: + For new implementations, use write_with_length() instead of this method. + This method is maintained for backwards compatibility and will eventually + delegate to write_with_length(writer, None) in all implementations. + + All payload subclasses must override this method for backwards compatibility, + but new code should use write_with_length for more flexibility and control. + """ + + # write_with_length is new in aiohttp 3.12 + # it should be overridden by subclasses + async def write_with_length( + self, writer: AbstractStreamWriter, content_length: Optional[int] + ) -> None: + """ + Write payload with a specific content length constraint. + + Args: + writer: An AbstractStreamWriter instance that handles the actual writing + content_length: Maximum number of bytes to write (None for unlimited) + + This method allows writing payload content with a specific length constraint, + which is particularly useful for HTTP responses with Content-Length header. + + Note: + This is the base implementation that provides backwards compatibility + for subclasses that don't override this method. Specific payload types + should override this method to implement proper length-constrained writing. - writer is an AbstractStreamWriter instance: """ + # Backwards compatibility for subclasses that don't override this method + # and for the default implementation + await self.write(writer) class BytesPayload(Payload): @@ -276,8 +316,40 @@ def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: return self._value.decode(encoding, errors) async def write(self, writer: AbstractStreamWriter) -> None: + """Write the entire bytes payload to the writer stream. + + Args: + writer: An AbstractStreamWriter instance that handles the actual writing + + This method writes the entire bytes content without any length constraint. + + Note: + For new implementations that need length control, use write_with_length(). 
+ This method is maintained for backwards compatibility and is equivalent + to write_with_length(writer, None). + """ await writer.write(self._value) + async def write_with_length( + self, writer: AbstractStreamWriter, content_length: Optional[int] + ) -> None: + """ + Write bytes payload with a specific content length constraint. + + Args: + writer: An AbstractStreamWriter instance that handles the actual writing + content_length: Maximum number of bytes to write (None for unlimited) + + This method writes either the entire byte sequence or a slice of it + up to the specified content_length. For BytesPayload, this operation + is performed efficiently using array slicing. + + """ + if content_length is not None: + await writer.write(self._value[:content_length]) + else: + await writer.write(self._value) + class StringPayload(BytesPayload): def __init__( @@ -330,15 +402,165 @@ def __init__( if hdrs.CONTENT_DISPOSITION not in self.headers: self.set_content_disposition(disposition, filename=self._filename) + def _read_and_available_len( + self, remaining_content_len: Optional[int] + ) -> Tuple[Optional[int], bytes]: + """ + Read the file-like object and return both its total size and the first chunk. + + Args: + remaining_content_len: Optional limit on how many bytes to read in this operation. + If None, READ_SIZE will be used as the default chunk size. + + Returns: + A tuple containing: + - The total size of the remaining unread content (None if size cannot be determined) + - The first chunk of bytes read from the file object + + This method is optimized to perform both size calculation and initial read + in a single operation, which is executed in a single executor job to minimize + context switches and file operations when streaming content. 
+ + """ + size = self.size # Call size only once since it does I/O + return size, self._value.read( + min(size or READ_SIZE, remaining_content_len or READ_SIZE) + ) + + def _read(self, remaining_content_len: Optional[int]) -> bytes: + """ + Read a chunk of data from the file-like object. + + Args: + remaining_content_len: Optional maximum number of bytes to read. + If None, READ_SIZE will be used as the default chunk size. + + Returns: + A chunk of bytes read from the file object, respecting the + remaining_content_len limit if specified. + + This method is used for subsequent reads during streaming after + the initial _read_and_available_len call has been made. + + """ + return self._value.read(remaining_content_len or READ_SIZE) # type: ignore[no-any-return] + + @property + def size(self) -> Optional[int]: + try: + return os.fstat(self._value.fileno()).st_size - self._value.tell() + except (AttributeError, OSError): + return None + async def write(self, writer: AbstractStreamWriter) -> None: - loop = asyncio.get_event_loop() + """ + Write the entire file-like payload to the writer stream. + + Args: + writer: An AbstractStreamWriter instance that handles the actual writing + + This method writes the entire file content without any length constraint. + It delegates to write_with_length() with no length limit for implementation + consistency. + + Note: + For new implementations that need length control, use write_with_length() directly. + This method is maintained for backwards compatibility with existing code. + + """ + await self.write_with_length(writer, None) + + async def write_with_length( + self, writer: AbstractStreamWriter, content_length: Optional[int] + ) -> None: + """ + Write file-like payload with a specific content length constraint. 
+ + Args: + writer: An AbstractStreamWriter instance that handles the actual writing + content_length: Maximum number of bytes to write (None for unlimited) + + This method implements optimized streaming of file content with length constraints: + + 1. File reading is performed in a thread pool to avoid blocking the event loop + 2. Content is read and written in chunks to maintain memory efficiency + 3. Writing stops when either: + - All available file content has been written (when size is known) + - The specified content_length has been reached + 4. File resources are properly closed even if the operation is cancelled + + The implementation carefully handles both known-size and unknown-size payloads, + as well as constrained and unconstrained content lengths. + + """ + loop = asyncio.get_running_loop() + total_written_len = 0 + remaining_content_len = content_length + try: - chunk = await loop.run_in_executor(None, self._value.read, 2**16) + # Get initial data and available length + available_len, chunk = await loop.run_in_executor( + None, self._read_and_available_len, remaining_content_len + ) + # Process data chunks until done while chunk: - await writer.write(chunk) - chunk = await loop.run_in_executor(None, self._value.read, 2**16) + chunk_len = len(chunk) + + # Write data with or without length constraint + if remaining_content_len is None: + await writer.write(chunk) + else: + await writer.write(chunk[:remaining_content_len]) + remaining_content_len -= chunk_len + + total_written_len += chunk_len + + # Check if we're done writing + if self._should_stop_writing( + available_len, total_written_len, remaining_content_len + ): + return + + # Read next chunk + chunk = await loop.run_in_executor( + None, self._read, remaining_content_len + ) finally: - await loop.run_in_executor(None, self._value.close) + # Handle closing the file without awaiting to prevent cancellation issues + # when the StreamReader reaches EOF + self._schedule_file_close(loop) + + def 
_should_stop_writing( + self, + available_len: Optional[int], + total_written_len: int, + remaining_content_len: Optional[int], + ) -> bool: + """ + Determine if we should stop writing data. + + Args: + available_len: Known size of the payload if available (None if unknown) + total_written_len: Number of bytes already written + remaining_content_len: Remaining bytes to be written for content-length limited responses + + Returns: + True if we should stop writing data, based on either: + - Having written all available data (when size is known) + - Having written all requested content (when content-length is specified) + + """ + return (available_len is not None and total_written_len >= available_len) or ( + remaining_content_len is not None and remaining_content_len <= 0 + ) + + def _schedule_file_close(self, loop: asyncio.AbstractEventLoop) -> None: + """Schedule file closing without awaiting to prevent cancellation issues.""" + close_future = loop.run_in_executor(None, self._value.close) + # Hold a strong reference to the future to prevent it from being + # garbage collected before it completes. + _CLOSE_FUTURES.add(close_future) + close_future.add_done_callback(_CLOSE_FUTURES.remove) def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: return "".join(r.decode(encoding, errors) for r in self._value.readlines()) @@ -375,31 +597,60 @@ def __init__( **kwargs, ) - @property - def size(self) -> Optional[int]: - try: - return os.fstat(self._value.fileno()).st_size - self._value.tell() - except OSError: - return None + def _read_and_available_len( + self, remaining_content_len: Optional[int] + ) -> Tuple[Optional[int], bytes]: + """ + Read the text file-like object and return both its total size and the first chunk. + + Args: + remaining_content_len: Optional limit on how many bytes to read in this operation. + If None, READ_SIZE will be used as the default chunk size. 
+ + Returns: + A tuple containing: + - The total size of the remaining unread content (None if size cannot be determined) + - The first chunk of bytes read from the file object, encoded using the payload's encoding + + This method is optimized to perform both size calculation and initial read + in a single operation, which is executed in a single executor job to minimize + context switches and file operations when streaming content. + + Note: + TextIOPayload handles encoding of the text content before writing it + to the stream. If no encoding is specified, UTF-8 is used as the default. + + """ + size = self.size + chunk = self._value.read( + min(size or READ_SIZE, remaining_content_len or READ_SIZE) + ) + return size, chunk.encode(self._encoding) if self._encoding else chunk.encode() + + def _read(self, remaining_content_len: Optional[int]) -> bytes: + """ + Read a chunk of data from the text file-like object. + + Args: + remaining_content_len: Optional maximum number of bytes to read. + If None, READ_SIZE will be used as the default chunk size. + + Returns: + A chunk of bytes read from the file object and encoded using the payload's + encoding. The data is automatically converted from text to bytes. + + This method is used for subsequent reads during streaming after + the initial _read_and_available_len call has been made. It properly + handles text encoding, converting the text content to bytes using + the specified encoding (or UTF-8 if none was provided). 
+ + """ + chunk = self._value.read(remaining_content_len or READ_SIZE) + return chunk.encode(self._encoding) if self._encoding else chunk.encode() def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: return self._value.read() - async def write(self, writer: AbstractStreamWriter) -> None: - loop = asyncio.get_event_loop() - try: - chunk = await loop.run_in_executor(None, self._value.read, 2**16) - while chunk: - data = ( - chunk.encode(encoding=self._encoding) - if self._encoding - else chunk.encode() - ) - await writer.write(data) - chunk = await loop.run_in_executor(None, self._value.read, 2**16) - finally: - await loop.run_in_executor(None, self._value.close) - class BytesIOPayload(IOBasePayload): _value: io.BytesIO @@ -414,20 +665,55 @@ def size(self) -> int: def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: return self._value.read().decode(encoding, errors) + async def write(self, writer: AbstractStreamWriter) -> None: + return await self.write_with_length(writer, None) -class BufferedReaderPayload(IOBasePayload): - _value: io.BufferedIOBase + async def write_with_length( + self, writer: AbstractStreamWriter, content_length: Optional[int] + ) -> None: + """ + Write BytesIO payload with a specific content length constraint. - @property - def size(self) -> Optional[int]: + Args: + writer: An AbstractStreamWriter instance that handles the actual writing + content_length: Maximum number of bytes to write (None for unlimited) + + This implementation is specifically optimized for BytesIO objects: + + 1. Reads content in chunks to maintain memory efficiency + 2. Yields control back to the event loop periodically to prevent blocking + when dealing with large BytesIO objects + 3. Respects content_length constraints when specified + 4. 
Properly cleans up by closing the BytesIO object when done or on error + + The periodic yielding to the event loop is important for maintaining + responsiveness when processing large in-memory buffers. + + """ + loop_count = 0 + remaining_bytes = content_length try: - return os.fstat(self._value.fileno()).st_size - self._value.tell() - except (OSError, AttributeError): - # data.fileno() is not supported, e.g. - # io.BufferedReader(io.BytesIO(b'data')) - # For some file-like objects (e.g. tarfile), the fileno() attribute may - # not exist at all, and will instead raise an AttributeError. - return None + while chunk := self._value.read(READ_SIZE): + if loop_count > 0: + # Avoid blocking the event loop + # if they pass a large BytesIO object + # and we are not in the first iteration + # of the loop + await asyncio.sleep(0) + if remaining_bytes is None: + await writer.write(chunk) + else: + await writer.write(chunk[:remaining_bytes]) + remaining_bytes -= len(chunk) + if remaining_bytes <= 0: + return + loop_count += 1 + finally: + self._value.close() + + +class BufferedReaderPayload(IOBasePayload): + _value: io.BufferedIOBase def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: return self._value.read().decode(encoding, errors) @@ -486,15 +772,63 @@ def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None: self._iter = value.__aiter__() async def write(self, writer: AbstractStreamWriter) -> None: - if self._iter: - try: - # iter is not None check prevents rare cases - # when the case iterable is used twice - while True: - chunk = await self._iter.__anext__() + """ + Write the entire async iterable payload to the writer stream. + + Args: + writer: An AbstractStreamWriter instance that handles the actual writing + + This method iterates through the async iterable and writes each chunk + to the writer without any length constraint. + + Note: + For new implementations that need length control, use write_with_length() directly. 
+ This method is maintained for backwards compatibility with existing code. + + """ + await self.write_with_length(writer, None) + + async def write_with_length( + self, writer: AbstractStreamWriter, content_length: Optional[int] + ) -> None: + """ + Write async iterable payload with a specific content length constraint. + + Args: + writer: An AbstractStreamWriter instance that handles the actual writing + content_length: Maximum number of bytes to write (None for unlimited) + + This implementation handles streaming of async iterable content with length constraints: + + 1. Iterates through the async iterable one chunk at a time + 2. Respects content_length constraints when specified + 3. Handles the case when the iterable might be used twice + + Since async iterables are consumed as they're iterated, there is no way to + restart the iteration if it's already in progress or completed. + + """ + if self._iter is None: + return + + remaining_bytes = content_length + + try: + while True: + chunk = await self._iter.__anext__() + if remaining_bytes is None: await writer.write(chunk) - except StopAsyncIteration: - self._iter = None + # If we have a content length limit + elif remaining_bytes > 0: + await writer.write(chunk[:remaining_bytes]) + remaining_bytes -= len(chunk) + # We still want to exhaust the iterator even + # if we have reached the content length limit + # since the file handle may not get closed by + # the iterator if we don't do this + except StopAsyncIteration: + # Iterator is exhausted + self._iter = None def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: raise TypeError("Unable to decode.") diff --git a/tests/conftest.py b/tests/conftest.py index d9831aea523..696f5d0d035 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -17,6 +17,7 @@ import zlib_ng.zlib_ng from blockbuster import blockbuster_ctx +from aiohttp import payload from aiohttp.client_proto import ResponseHandler from aiohttp.compression_utils import ZLibBackend, 
ZLibBackendProtocol, set_zlib_backend from aiohttp.http import WS_KEY @@ -332,3 +333,18 @@ def parametrize_zlib_backend( yield set_zlib_backend(original_backend) + + +@pytest.fixture() +def cleanup_payload_pending_file_closes( + loop: asyncio.AbstractEventLoop, +) -> Generator[None, None, None]: + """Ensure all pending file close operations complete during test teardown.""" + yield + if payload._CLOSE_FUTURES: + # Only wait for futures from the current loop + loop_futures = [f for f in payload._CLOSE_FUTURES if f.get_loop() is loop] + if loop_futures: + loop.run_until_complete( + asyncio.gather(*loop_futures, return_exceptions=True) + ) diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 1154c7e5805..bb4d70ef530 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -12,7 +12,16 @@ import tarfile import time import zipfile -from typing import Any, AsyncIterator, Awaitable, Callable, List, NoReturn, Type +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + List, + NoReturn, + Optional, + Type, +) from unittest import mock import pytest @@ -41,6 +50,13 @@ from aiohttp.typedefs import Handler +@pytest.fixture(autouse=True) +def cleanup( + cleanup_payload_pending_file_closes: None, +) -> None: + """Ensure all pending file close operations complete during test teardown.""" + + @pytest.fixture def here(): return pathlib.Path(__file__).parent @@ -1560,7 +1576,10 @@ async def handler(request: web.Request) -> web.Response: original_write_bytes = ClientRequest.write_bytes async def write_bytes( - self: ClientRequest, writer: StreamWriter, conn: Connection + self: ClientRequest, + writer: StreamWriter, + conn: Connection, + content_length: Optional[int] = None, ) -> None: nonlocal write_mock original_write = writer._write @@ -1568,7 +1587,7 @@ async def write_bytes( with mock.patch.object( writer, "_write", autospec=True, spec_set=True, side_effect=original_write ) as write_mock: - await 
original_write_bytes(self, writer, conn) + await original_write_bytes(self, writer, conn, content_length) with mock.patch.object(ClientRequest, "write_bytes", write_bytes): app = web.Application() @@ -1940,8 +1959,7 @@ async def handler(request): app.router.add_post("/", handler) client = await aiohttp_client(app) - with fname.open("rb") as f: - data_size = len(f.read()) + data_size = len(expected) with pytest.warns(DeprecationWarning): @@ -4146,3 +4164,145 @@ async def handler(request: web.Request) -> web.Response: with pytest.raises(RuntimeError, match="Connection closed"): await resp.read() + + +async def test_content_length_limit_enforced(aiohttp_server: AiohttpServer) -> None: + """Test that Content-Length header value limits the amount of data sent to the server.""" + received_data = bytearray() + + async def handler(request: web.Request) -> web.Response: + # Read all data from the request and store it + data = await request.read() + received_data.extend(data) + return web.Response(text="OK") + + app = web.Application() + app.router.add_post("/", handler) + + server = await aiohttp_server(app) + + # Create data larger than what we'll limit with Content-Length + data = b"X" * 1000 + # Only send 500 bytes even though data is 1000 bytes + headers = {"Content-Length": "500"} + + async with aiohttp.ClientSession() as session: + await session.post(server.make_url("/"), data=data, headers=headers) + + # Verify only 500 bytes (not the full 1000) were received by the server + assert len(received_data) == 500 + assert received_data == b"X" * 500 + + +async def test_content_length_limit_with_multiple_reads( + aiohttp_server: AiohttpServer, +) -> None: + """Test that Content-Length header value limits multi read data properly.""" + received_data = bytearray() + + async def handler(request: web.Request) -> web.Response: + # Read all data from the request and store it + data = await request.read() + received_data.extend(data) + return web.Response(text="OK") + + app = 
web.Application() + app.router.add_post("/", handler) + + server = await aiohttp_server(app) + + # Create an async generator of data + async def data_generator() -> AsyncIterator[bytes]: + yield b"Chunk1" * 100 # 600 bytes + yield b"Chunk2" * 100 # another 600 bytes + + # Limit to 800 bytes even though we'd generate 1200 bytes + headers = {"Content-Length": "800"} + + async with aiohttp.ClientSession() as session: + await session.post(server.make_url("/"), data=data_generator(), headers=headers) + + # Verify only 800 bytes (not the full 1200) were received by the server + assert len(received_data) == 800 + # First chunk fully sent (600 bytes) + assert received_data.startswith(b"Chunk1" * 100) + + # The rest should be from the second chunk (the exact split might vary by implementation) + assert b"Chunk2" in received_data # Some part of the second chunk was sent + # 200 bytes from the second chunk + assert len(received_data) - len(b"Chunk1" * 100) == 200 + + +async def test_post_connection_cleanup_with_bytesio( + aiohttp_client: AiohttpClient, +) -> None: + """Test that connections are properly cleaned up when using BytesIO data.""" + + async def handler(request: web.Request) -> web.Response: + return web.Response(body=b"") + + app = web.Application() + app.router.add_post("/hello", handler) + client = await aiohttp_client(app) + + # Test with direct bytes and BytesIO multiple times to ensure connection cleanup + for _ in range(10): + async with client.post( + "/hello", + data=b"x", + headers={"Content-Length": "1"}, + ) as response: + response.raise_for_status() + + assert client._session.connector is not None + assert len(client._session.connector._conns) == 1 + + x = io.BytesIO(b"x") + async with client.post( + "/hello", + data=x, + headers={"Content-Length": "1"}, + ) as response: + response.raise_for_status() + + assert len(client._session.connector._conns) == 1 + + +async def test_post_connection_cleanup_with_file( + aiohttp_client: AiohttpClient, here: 
pathlib.Path +) -> None: + """Test that connections are properly cleaned up when using file data.""" + + async def handler(request: web.Request) -> web.Response: + await request.read() + return web.Response(body=b"") + + app = web.Application() + app.router.add_post("/hello", handler) + client = await aiohttp_client(app) + + test_file = here / "data.unknown_mime_type" + + # Test with direct bytes and file multiple times to ensure connection cleanup + for _ in range(10): + async with client.post( + "/hello", + data=b"xx", + headers={"Content-Length": "2"}, + ) as response: + response.raise_for_status() + + assert client._session.connector is not None + assert len(client._session.connector._conns) == 1 + fh = await asyncio.get_running_loop().run_in_executor( + None, open, test_file, "rb" + ) + + async with client.post( + "/hello", + data=fh, + headers={"Content-Length": str(test_file.stat().st_size)}, + ) as response: + response.raise_for_status() + + assert len(client._session.connector._conns) == 1 diff --git a/tests/test_client_request.py b/tests/test_client_request.py index 4706c10a588..70b30dd14f2 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -4,8 +4,9 @@ import pathlib import sys import urllib.parse +from collections.abc import Callable, Iterable from http.cookies import BaseCookie, Morsel, SimpleCookie -from typing import Any, Callable, Dict, Iterable, Optional +from typing import Any, Optional, Protocol, Union from unittest import mock import pytest @@ -14,6 +15,7 @@ import aiohttp from aiohttp import BaseConnector, hdrs, helpers, payload +from aiohttp.abc import AbstractStreamWriter from aiohttp.client_exceptions import ClientConnectionError from aiohttp.client_reqrep import ( ClientRequest, @@ -23,7 +25,11 @@ _merge_ssl_params, ) from aiohttp.compression_utils import ZLibBackend -from aiohttp.http import HttpVersion10, HttpVersion11 +from aiohttp.http import HttpVersion10, HttpVersion11, StreamWriter + + +class 
_RequestMaker(Protocol): + def __call__(self, method: str, url: str, **kwargs: Any) -> ClientRequest: ... class WriterMock(mock.AsyncMock): @@ -309,7 +315,7 @@ def test_default_loop(loop) -> None: ), ) def test_host_header_fqdn( - make_request: Any, url: str, headers: Dict[str, str], expected: str + make_request: Any, url: str, headers: dict[str, str], expected: str ) -> None: req = make_request("get", url, headers=headers) assert req.headers["HOST"] == expected @@ -995,10 +1001,12 @@ async def gen(): assert req.headers["TRANSFER-ENCODING"] == "chunked" original_write_bytes = req.write_bytes - async def _mock_write_bytes(*args, **kwargs): + async def _mock_write_bytes( + writer: AbstractStreamWriter, conn: mock.Mock, content_length: Optional[int] + ) -> None: # Ensure the task is scheduled await asyncio.sleep(0) - return await original_write_bytes(*args, **kwargs) + await original_write_bytes(writer, conn, content_length) with mock.patch.object(req, "write_bytes", _mock_write_bytes): resp = await req.send(conn) @@ -1197,7 +1205,7 @@ async def test_oserror_on_write_bytes(loop, conn) -> None: writer = WriterMock() writer.write.side_effect = OSError - await req.write_bytes(writer, conn) + await req.write_bytes(writer, conn, None) assert conn.protocol.set_exception.called exc = conn.protocol.set_exception.call_args[0][0] @@ -1522,3 +1530,81 @@ def test_request_info_tuple_new() -> None: ).real_url is url ) + + +def test_get_content_length(make_request: _RequestMaker) -> None: + """Test _get_content_length method extracts Content-Length correctly.""" + req = make_request("get", "http://python.org/") + + # No Content-Length header + assert req._get_content_length() is None + + # Valid Content-Length header + req.headers["Content-Length"] = "42" + assert req._get_content_length() == 42 + + # Invalid Content-Length header + req.headers["Content-Length"] = "invalid" + with pytest.raises(ValueError, match="Invalid Content-Length header: invalid"): + req._get_content_length() 
+ + +async def test_write_bytes_with_content_length_limit( + loop: asyncio.AbstractEventLoop, buf: bytearray, conn: mock.Mock +) -> None: + """Test that write_bytes respects content_length limit for different body types.""" + # Test with bytes data + data = b"Hello World" + req = ClientRequest("post", URL("http://python.org/"), loop=loop) + + req.body = data + + writer = StreamWriter(protocol=conn.protocol, loop=loop) + # Use content_length=5 to truncate data + await req.write_bytes(writer, conn, 5) + + # Verify only the first 5 bytes were written + assert buf == b"Hello" + await req.close() + + +@pytest.mark.parametrize( + "data", + [ + [b"Part1", b"Part2", b"Part3"], + b"Part1Part2Part3", + ], +) +async def test_write_bytes_with_iterable_content_length_limit( + loop: asyncio.AbstractEventLoop, + buf: bytearray, + conn: mock.Mock, + data: Union[list[bytes], bytes], +) -> None: + """Test that write_bytes respects content_length limit for iterable data.""" + # Test with iterable data + req = ClientRequest("post", URL("http://python.org/"), loop=loop) + req.body = data + + writer = StreamWriter(protocol=conn.protocol, loop=loop) + # Use content_length=7 to truncate at the middle of Part2 + await req.write_bytes(writer, conn, 7) + assert len(buf) == 7 + assert buf == b"Part1Pa" + await req.close() + + +async def test_write_bytes_empty_iterable_with_content_length( + loop: asyncio.AbstractEventLoop, buf: bytearray, conn: mock.Mock +) -> None: + """Test that write_bytes handles empty iterable body with content_length.""" + req = ClientRequest("post", URL("http://python.org/"), loop=loop) + req.body = [] # Empty iterable + + writer = StreamWriter(protocol=conn.protocol, loop=loop) + # Use content_length=10 with empty body + await req.write_bytes(writer, conn, 10) + + # Verify nothing was written + assert len(buf) == 0 + await req.close() diff --git a/tests/test_payload.py b/tests/test_payload.py index 0e2db91135b..af0230776e5 100644 --- a/tests/test_payload.py +++ 
b/tests/test_payload.py @@ -1,11 +1,22 @@ import array -import asyncio +import io +import unittest.mock +from collections.abc import AsyncIterator from io import StringIO -from unittest import mock +from typing import Optional, Union import pytest +from multidict import CIMultiDict -from aiohttp import payload, streams +from aiohttp import payload +from aiohttp.abc import AbstractStreamWriter + + +@pytest.fixture(autouse=True) +def cleanup( + cleanup_payload_pending_file_closes: None, +) -> None: + """Ensure all pending file close operations complete during test teardown.""" @pytest.fixture @@ -121,22 +132,326 @@ async def gen(): def test_async_iterable_payload_not_async_iterable() -> None: with pytest.raises(TypeError): - payload.AsyncIterablePayload(object()) + payload.AsyncIterablePayload(object()) # type: ignore[arg-type] + + +class MockStreamWriter(AbstractStreamWriter): + """Mock stream writer for testing payload writes.""" + + def __init__(self) -> None: + self.written: list[bytes] = [] + + async def write( + self, chunk: Union[bytes, bytearray, "memoryview[int]", "memoryview[bytes]"] + ) -> None: + """Store the chunk in the written list.""" + self.written.append(bytes(chunk)) + + async def write_eof(self, chunk: Optional[bytes] = None) -> None: + """write_eof implementation - no-op for tests.""" + + async def drain(self) -> None: + """Drain implementation - no-op for tests.""" + + def enable_compression( + self, encoding: str = "deflate", strategy: Optional[int] = None + ) -> None: + """Enable compression - no-op for tests.""" + + def enable_chunking(self) -> None: + """Enable chunking - no-op for tests.""" + + async def write_headers(self, status_line: str, headers: CIMultiDict[str]) -> None: + """Write headers - no-op for tests.""" + + def get_written_bytes(self) -> bytes: + """Return all written bytes as a single bytes object.""" + return b"".join(self.written) + + +async def test_bytes_payload_write_with_length_no_limit() -> None: + """Test BytesPayload 
writing with no content length limit.""" + data = b"0123456789" + p = payload.BytesPayload(data) + writer = MockStreamWriter() + + await p.write_with_length(writer, None) + assert writer.get_written_bytes() == data + assert len(writer.get_written_bytes()) == 10 + + +async def test_bytes_payload_write_with_length_exact() -> None: + """Test BytesPayload writing with exact content length.""" + data = b"0123456789" + p = payload.BytesPayload(data) + writer = MockStreamWriter() + + await p.write_with_length(writer, 10) + assert writer.get_written_bytes() == data + assert len(writer.get_written_bytes()) == 10 + + +async def test_bytes_payload_write_with_length_truncated() -> None: + """Test BytesPayload writing with truncated content length.""" + data = b"0123456789" + p = payload.BytesPayload(data) + writer = MockStreamWriter() + + await p.write_with_length(writer, 5) + assert writer.get_written_bytes() == b"01234" + assert len(writer.get_written_bytes()) == 5 + + +async def test_iobase_payload_write_with_length_no_limit() -> None: + """Test IOBasePayload writing with no content length limit.""" + data = b"0123456789" + p = payload.IOBasePayload(io.BytesIO(data)) + writer = MockStreamWriter() + + await p.write_with_length(writer, None) + assert writer.get_written_bytes() == data + assert len(writer.get_written_bytes()) == 10 + + +async def test_iobase_payload_write_with_length_exact() -> None: + """Test IOBasePayload writing with exact content length.""" + data = b"0123456789" + p = payload.IOBasePayload(io.BytesIO(data)) + writer = MockStreamWriter() + + await p.write_with_length(writer, 10) + assert writer.get_written_bytes() == data + assert len(writer.get_written_bytes()) == 10 + + +async def test_iobase_payload_write_with_length_truncated() -> None: + """Test IOBasePayload writing with truncated content length.""" + data = b"0123456789" + p = payload.IOBasePayload(io.BytesIO(data)) + writer = MockStreamWriter() + + await p.write_with_length(writer, 5) + assert 
writer.get_written_bytes() == b"01234" + assert len(writer.get_written_bytes()) == 5 + + +async def test_bytesio_payload_write_with_length_no_limit() -> None: + """Test BytesIOPayload writing with no content length limit.""" + data = b"0123456789" + p = payload.BytesIOPayload(io.BytesIO(data)) + writer = MockStreamWriter() + + await p.write_with_length(writer, None) + assert writer.get_written_bytes() == data + assert len(writer.get_written_bytes()) == 10 + + +async def test_bytesio_payload_write_with_length_exact() -> None: + """Test BytesIOPayload writing with exact content length.""" + data = b"0123456789" + p = payload.BytesIOPayload(io.BytesIO(data)) + writer = MockStreamWriter() + + await p.write_with_length(writer, 10) + assert writer.get_written_bytes() == data + assert len(writer.get_written_bytes()) == 10 + + +async def test_bytesio_payload_write_with_length_truncated() -> None: + """Test BytesIOPayload writing with truncated content length.""" + data = b"0123456789" + payload_bytesio = payload.BytesIOPayload(io.BytesIO(data)) + writer = MockStreamWriter() + + await payload_bytesio.write_with_length(writer, 5) + assert writer.get_written_bytes() == b"01234" + assert len(writer.get_written_bytes()) == 5 + + +async def test_bytesio_payload_write_with_length_remaining_zero() -> None: + """Test BytesIOPayload with content_length smaller than first read chunk.""" + data = b"0123456789" * 10 # 100 bytes + bio = io.BytesIO(data) + payload_bytesio = payload.BytesIOPayload(bio) + writer = MockStreamWriter() + + # Mock the read method to return smaller chunks + original_read = bio.read + read_calls = 0 + + def mock_read(size: Optional[int] = None) -> bytes: + nonlocal read_calls + read_calls += 1 + if read_calls == 1: + # First call: return 3 bytes (less than content_length=5) + return original_read(3) + else: + # Subsequent calls return remaining data normally + return original_read(size) + + with unittest.mock.patch.object(bio, "read", mock_read): + await 
payload_bytesio.write_with_length(writer, 5) + + assert len(writer.get_written_bytes()) == 5 + assert writer.get_written_bytes() == b"01234" + + +async def test_bytesio_payload_large_data_multiple_chunks() -> None: + """Test BytesIOPayload with large data requiring multiple read chunks.""" + chunk_size = 2**16 # 64KB (READ_SIZE) + data = b"x" * (chunk_size + 1000) # Slightly larger than READ_SIZE + payload_bytesio = payload.BytesIOPayload(io.BytesIO(data)) + writer = MockStreamWriter() + + await payload_bytesio.write_with_length(writer, None) + assert writer.get_written_bytes() == data + assert len(writer.get_written_bytes()) == chunk_size + 1000 -async def test_stream_reader_long_lines() -> None: - loop = asyncio.get_event_loop() - DATA = b"0" * 1024**3 +async def test_bytesio_payload_remaining_bytes_exhausted() -> None: + """Test BytesIOPayload when remaining_bytes becomes <= 0.""" + data = b"0123456789abcdef" * 1000 # 16000 bytes + payload_bytesio = payload.BytesIOPayload(io.BytesIO(data)) + writer = MockStreamWriter() - stream = streams.StreamReader(mock.Mock(), 2**16, loop=loop) - stream.feed_data(DATA) - stream.feed_eof() - body = payload.get_payload(stream) + await payload_bytesio.write_with_length(writer, 8000) # Exactly half the data + written = writer.get_written_bytes() + assert len(written) == 8000 + assert written == data[:8000] + + +async def test_iobase_payload_exact_chunk_size_limit() -> None: + """Test IOBasePayload with content length matching exactly one read chunk.""" + chunk_size = 2**16 # 65536 bytes (READ_SIZE) + data = b"x" * chunk_size + b"extra" # Slightly larger than one read chunk + p = payload.IOBasePayload(io.BytesIO(data)) + writer = MockStreamWriter() + + await p.write_with_length(writer, chunk_size) + written = writer.get_written_bytes() + assert len(written) == chunk_size + assert written == data[:chunk_size] + + +async def test_async_iterable_payload_write_with_length_no_limit() -> None: + """Test AsyncIterablePayload writing with 
no content length limit.""" + + async def gen() -> AsyncIterator[bytes]: + yield b"0123" + yield b"4567" + yield b"89" + + p = payload.AsyncIterablePayload(gen()) + writer = MockStreamWriter() + + await p.write_with_length(writer, None) + assert writer.get_written_bytes() == b"0123456789" + assert len(writer.get_written_bytes()) == 10 + + +async def test_async_iterable_payload_write_with_length_exact() -> None: + """Test AsyncIterablePayload writing with exact content length.""" + + async def gen() -> AsyncIterator[bytes]: + yield b"0123" + yield b"4567" + yield b"89" + + p = payload.AsyncIterablePayload(gen()) + writer = MockStreamWriter() + + await p.write_with_length(writer, 10) + assert writer.get_written_bytes() == b"0123456789" + assert len(writer.get_written_bytes()) == 10 + + +async def test_async_iterable_payload_write_with_length_truncated_mid_chunk() -> None: + """Test AsyncIterablePayload writing with content length truncating mid-chunk.""" + + async def gen() -> AsyncIterator[bytes]: + yield b"0123" + yield b"4567" + yield b"89" # pragma: no cover + + p = payload.AsyncIterablePayload(gen()) + writer = MockStreamWriter() + + await p.write_with_length(writer, 6) + assert writer.get_written_bytes() == b"012345" + assert len(writer.get_written_bytes()) == 6 + + +async def test_async_iterable_payload_write_with_length_truncated_at_chunk() -> None: + """Test AsyncIterablePayload writing with content length truncating at chunk boundary.""" + + async def gen() -> AsyncIterator[bytes]: + yield b"0123" + yield b"4567" # pragma: no cover + yield b"89" # pragma: no cover + + p = payload.AsyncIterablePayload(gen()) + writer = MockStreamWriter() + + await p.write_with_length(writer, 4) + assert writer.get_written_bytes() == b"0123" + assert len(writer.get_written_bytes()) == 4 + + +async def test_bytes_payload_backwards_compatibility() -> None: + """Test BytesPayload.write() backwards compatibility delegates to write_with_length().""" + p = 
payload.BytesPayload(b"1234567890") + writer = MockStreamWriter() + + await p.write(writer) + assert writer.get_written_bytes() == b"1234567890" + + +async def test_textio_payload_with_encoding() -> None: + """Test TextIOPayload reading with encoding and size constraints.""" + data = io.StringIO("hello world") + p = payload.TextIOPayload(data, encoding="utf-8") + writer = MockStreamWriter() + + await p.write_with_length(writer, 8) + # Should write exactly 8 bytes: "hello wo" + assert writer.get_written_bytes() == b"hello wo" + + +async def test_bytesio_payload_backwards_compatibility() -> None: + """Test BytesIOPayload.write() backwards compatibility delegates to write_with_length().""" + data = io.BytesIO(b"test data") + p = payload.BytesIOPayload(data) + writer = MockStreamWriter() + + await p.write(writer) + assert writer.get_written_bytes() == b"test data" + + +async def test_async_iterable_payload_backwards_compatibility() -> None: + """Test AsyncIterablePayload.write() backwards compatibility delegates to write_with_length().""" + + async def gen() -> AsyncIterator[bytes]: + yield b"chunk1" + yield b"chunk2" # pragma: no cover + + p = payload.AsyncIterablePayload(gen()) + writer = MockStreamWriter() + + await p.write(writer) + assert writer.get_written_bytes() == b"chunk1chunk2" + + +async def test_async_iterable_payload_with_none_iterator() -> None: + """Test AsyncIterablePayload with None iterator returns early without writing.""" + + async def gen() -> AsyncIterator[bytes]: + yield b"test" # pragma: no cover + + p = payload.AsyncIterablePayload(gen()) + # Manually set _iter to None to test the guard clause + p._iter = None + writer = MockStreamWriter() - writer = mock.Mock() - writer.write.return_value = loop.create_future() - writer.write.return_value.set_result(None) - await body.write(writer) - writer.write.assert_called_once_with(mock.ANY) - (chunk,), _ = writer.write.call_args - assert len(chunk) == len(DATA) + # Should return early without writing 
anything + await p.write_with_length(writer, 10) + assert writer.get_written_bytes() == b"" From 11aaa23d5b3716114730cb90a81983d1110cae14 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 22 May 2025 11:33:27 -0500 Subject: [PATCH 1420/1511] [PR #10932/6b3672f0 backport][3.12] Fix flakey test_normal_closure_while_client_sends_msg test (#10935) Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_web_websocket_functional.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index 945096a2af3..0229809592a 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -1246,13 +1246,13 @@ async def handler(request: web.Request) -> web.WebSocketResponse: async def test_normal_closure_while_client_sends_msg( aiohttp_client: AiohttpClient, ) -> None: - """Test abnormal closure when the server closes and the client doesn't respond.""" + """Test normal closure when the server closes and the client responds properly.""" close_code: Optional[WSCloseCode] = None got_close_code = asyncio.Event() async def handler(request: web.Request) -> web.WebSocketResponse: - # Setting a short close timeout - ws = web.WebSocketResponse(timeout=0.2) + # Setting a longer close timeout to avoid race conditions + ws = web.WebSocketResponse(timeout=1.0) await ws.prepare(request) await ws.close() From 38c23ede00245bcc875746a82aa9635d112781c4 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Thu, 22 May 2025 11:46:08 -0500 Subject: [PATCH 1421/1511] [PR #10933/597161d backport][3.12] Fix flakey client functional keep alive tests (#10937) --- tests/test_client_functional.py | 30 ++++++++++++++++-------------- 1 file changed, 16 insertions(+), 14 deletions(-) diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index bb4d70ef530..c9d62184ef6 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -248,8 +248,8 @@ async def handler(request): assert 0 == len(client._session.connector._conns) -async def test_keepalive_timeout_async_sleep() -> None: - async def handler(request): +async def test_keepalive_timeout_async_sleep(unused_port_socket: socket.socket) -> None: + async def handler(request: web.Request) -> web.Response: body = await request.read() assert b"" == body return web.Response(body=b"OK") @@ -260,17 +260,18 @@ async def handler(request): runner = web.AppRunner(app, tcp_keepalive=True, keepalive_timeout=0.001) await runner.setup() - port = unused_port() - site = web.TCPSite(runner, host="localhost", port=port) + site = web.SockSite(runner, unused_port_socket) await site.start() + host, port = unused_port_socket.getsockname()[:2] + try: - async with aiohttp.client.ClientSession() as sess: - resp1 = await sess.get(f"http://localhost:{port}/") + async with aiohttp.ClientSession() as sess: + resp1 = await sess.get(f"http://{host}:{port}/") await resp1.read() # wait for server keepalive_timeout await asyncio.sleep(0.01) - resp2 = await sess.get(f"http://localhost:{port}/") + resp2 = await sess.get(f"http://{host}:{port}/") await resp2.read() finally: await asyncio.gather(runner.shutdown(), site.stop()) @@ -280,8 +281,8 @@ async def handler(request): sys.version_info[:2] == (3, 11), reason="https://github.com/pytest-dev/pytest/issues/10763", ) -async def test_keepalive_timeout_sync_sleep() -> None: - async def handler(request): +async def 
test_keepalive_timeout_sync_sleep(unused_port_socket: socket.socket) -> None: + async def handler(request: web.Request) -> web.Response: body = await request.read() assert b"" == body return web.Response(body=b"OK") @@ -292,18 +293,19 @@ async def handler(request): runner = web.AppRunner(app, tcp_keepalive=True, keepalive_timeout=0.001) await runner.setup() - port = unused_port() - site = web.TCPSite(runner, host="localhost", port=port) + site = web.SockSite(runner, unused_port_socket) await site.start() + host, port = unused_port_socket.getsockname()[:2] + try: - async with aiohttp.client.ClientSession() as sess: - resp1 = await sess.get(f"http://localhost:{port}/") + async with aiohttp.ClientSession() as sess: + resp1 = await sess.get(f"http://{host}:{port}/") await resp1.read() # wait for server keepalive_timeout # time.sleep is a more challenging scenario than asyncio.sleep time.sleep(0.01) - resp2 = await sess.get(f"http://localhost:{port}/") + resp2 = await sess.get(f"http://{host}:{port}/") await resp2.read() finally: await asyncio.gather(runner.shutdown(), site.stop()) From 69182c7b1b7d8a5ba0830b2f370c64464e94512a Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 22 May 2025 12:17:17 -0500 Subject: [PATCH 1422/1511] [PR #10938/77c0115e backport][3.12] Fix flakey test_content_length_limit_with_multiple_reads test (#10939) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- tests/test_client_functional.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index c9d62184ef6..5e1a4d2ddb5 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -4222,7 +4222,10 @@ async def data_generator() -> AsyncIterator[bytes]: headers = {"Content-Length": "800"} async with aiohttp.ClientSession() as session: - await session.post(server.make_url("/"), data=data_generator(), headers=headers) + async with session.post( + server.make_url("/"), data=data_generator(), headers=headers + ) as resp: + await resp.read() # Ensure response is fully read and connection cleaned up # Verify only 800 bytes (not the full 1200) were received by the server assert len(received_data) == 800 From 69a7fd782d937f4ad6f1e92f46b245791e38b264 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Thu, 22 May 2025 12:48:26 -0500 Subject: [PATCH 1423/1511] Release 3.12.0b1 (#10940) --- CHANGES.rst | 223 ++++++++++++++++++++++++++++++++++++++++++++ aiohttp/__init__.py | 2 +- 2 files changed, 224 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index 651437c90bd..b455c45f7a9 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,229 @@ .. towncrier release notes start +3.12.0b1 (2025-05-22) +===================== + +Bug fixes +--------- + +- Response is now always True, instead of using MutableMapping behaviour (False when map is empty) + + + *Related issues and pull requests on GitHub:* + :issue:`10119`. + + + +- Fixed connection reuse for file-like data payloads by ensuring buffer + truncation respects content-length boundaries and preventing premature + connection closure race -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10325`, :issue:`10915`. + + + +- Fixed pytest plugin to not use deprecated :py:mod:`asyncio` policy APIs. 
+ + + *Related issues and pull requests on GitHub:* + :issue:`10851`. + + + + +Features +-------- + +- Added a comprehensive HTTP Digest Authentication client middleware (DigestAuthMiddleware) + that implements RFC 7616. The middleware supports all standard hash algorithms + (MD5, SHA, SHA-256, SHA-512) with session variants, handles both 'auth' and + 'auth-int' quality of protection options, and automatically manages the + authentication flow by intercepting 401 responses and retrying with proper + credentials -- by :user:`feus4177`, :user:`TimMenninger`, and :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`2213`, :issue:`10725`. + + + +- Added client middleware support -- by :user:`bdraco` and :user:`Dreamsorcerer`. + + This change allows users to add middleware to the client session and requests, enabling features like + authentication, logging, and request/response modification without modifying the core + request logic. Additionally, the ``session`` attribute was added to ``ClientRequest``, + allowing middleware to access the session for making additional requests. + + + *Related issues and pull requests on GitHub:* + :issue:`9732`, :issue:`10902`. + + + +- Allow user setting zlib compression backend -- by :user:`TimMenninger` + + This change allows the user to call :func:`aiohttp.set_zlib_backend()` with the + zlib compression module of their choice. Default behavior continues to use + the builtin ``zlib`` library. + + + *Related issues and pull requests on GitHub:* + :issue:`9798`. + + + +- Added support for overriding the base URL with an absolute one in client sessions + -- by :user:`vivodi`. + + + *Related issues and pull requests on GitHub:* + :issue:`10074`. + + + +- Added ``host`` parameter to ``aiohttp_server`` fixture -- by :user:`christianwbrock`. + + + *Related issues and pull requests on GitHub:* + :issue:`10120`. + + + +- Detect blocking calls in coroutines using BlockBuster -- by :user:`cbornet`. 
+ + + *Related issues and pull requests on GitHub:* + :issue:`10433`. + + + +- Added ``socket_factory`` to :py:class:`aiohttp.TCPConnector` to allow specifying custom socket options + -- by :user:`TimMenninger`. + + + *Related issues and pull requests on GitHub:* + :issue:`10474`, :issue:`10520`. + + + +- Started building armv7l manylinux wheels -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10797`. + + + +- Implemented shared DNS resolver management to fix excessive resolver object creation + when using multiple client sessions. The new ``_DNSResolverManager`` singleton ensures + only one ``DNSResolver`` object is created for default configurations, significantly + reducing resource usage and improving performance for applications using multiple + client sessions simultaneously -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10847`, :issue:`10923`. + + + + +Packaging updates and notes for downstreams +------------------------------------------- + +- Removed non SPDX-license description from ``setup.cfg`` -- by :user:`devanshu-ziphq`. + + + *Related issues and pull requests on GitHub:* + :issue:`10662`. + + + +- Added support for building against system ``llhttp`` library -- by :user:`mgorny`. + + This change adds support for :envvar:`AIOHTTP_USE_SYSTEM_DEPS` environment variable that + can be used to build aiohttp against the system install of the ``llhttp`` library rather + than the vendored one. + + + *Related issues and pull requests on GitHub:* + :issue:`10759`. + + + +- ``aiodns`` is now installed on Windows with speedups extra -- by :user:`bdraco`. + + As of ``aiodns`` 3.3.0, ``SelectorEventLoop`` is no longer required when using ``pycares`` 4.7.0 or later. + + + *Related issues and pull requests on GitHub:* + :issue:`10823`. + + + +- Fixed compatibility issue with Cython 3.1.1 -- by :user:`bdraco` + + + *Related issues and pull requests on GitHub:* + :issue:`10877`. 
+ + + + +Contributor-facing changes +-------------------------- + +- Sped up tests by disabling ``blockbuster`` fixture for ``test_static_file_huge`` and ``test_static_file_huge_cancel`` tests -- by :user:`dikos1337`. + + + *Related issues and pull requests on GitHub:* + :issue:`9705`, :issue:`10761`. + + + +- Updated tests to avoid using deprecated :py:mod:`asyncio` policy APIs and + make it compatible with Python 3.14. + + + *Related issues and pull requests on GitHub:* + :issue:`10851`. + + + +- Added Winloop to test suite to support in the future -- by :user:`Vizonex`. + + + *Related issues and pull requests on GitHub:* + :issue:`10922`. + + + + +Miscellaneous internal changes +------------------------------ + +- Added support for the ``partitioned`` attribute in the ``set_cookie`` method. + + + *Related issues and pull requests on GitHub:* + :issue:`9870`. + + + +- Setting :attr:`aiohttp.web.StreamResponse.last_modified` to an unsupported type will now raise :exc:`TypeError` instead of silently failing -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10146`. + + + + +---- + + 3.12.0b0 (2025-05-20) ===================== diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 9ca85c654c5..972eabeab7e 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.0b0" +__version__ = "3.12.0b1" from typing import TYPE_CHECKING, Tuple From e9808c36da4968ff3ff6596a038599ec48a2e045 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 22 May 2025 18:29:51 +0000 Subject: [PATCH 1424/1511] [PR #10941/6512aaa4 backport][3.12] Use anext in AsyncIterablePayload on Python 3.10+ (#10942) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/10941.bugfix.rst | 1 + aiohttp/payload.py | 5 ++++- 2 files changed, 5 insertions(+), 1 deletion(-) create mode 120000 CHANGES/10941.bugfix.rst diff --git a/CHANGES/10941.bugfix.rst b/CHANGES/10941.bugfix.rst new file mode 120000 index 00000000000..aa085cc590d --- /dev/null +++ b/CHANGES/10941.bugfix.rst @@ -0,0 +1 @@ +10915.bugfix.rst \ No newline at end of file diff --git a/aiohttp/payload.py b/aiohttp/payload.py index 823940902f5..c954091adad 100644 --- a/aiohttp/payload.py +++ b/aiohttp/payload.py @@ -815,7 +815,10 @@ async def write_with_length( try: while True: - chunk = await self._iter.__anext__() + if sys.version_info >= (3, 10): + chunk = await anext(self._iter) + else: + chunk = await self._iter.__anext__() if remaining_bytes is None: await writer.write(chunk) # If we have a content length limit From 9bd43ed9d283425ede643b2ff575e3d5a229b6ed Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Thu, 22 May 2025 13:58:00 -0500 Subject: [PATCH 1425/1511] [PR #10943/b1e9462 backport][3.12] Small improvements to payload cleanup fixture (#10944) --- CHANGES/10943.bugfix.rst | 1 + tests/conftest.py | 10 ++++------ 2 files changed, 5 insertions(+), 6 deletions(-) create mode 120000 CHANGES/10943.bugfix.rst diff --git a/CHANGES/10943.bugfix.rst b/CHANGES/10943.bugfix.rst new file mode 120000 index 00000000000..aa085cc590d --- /dev/null +++ b/CHANGES/10943.bugfix.rst @@ -0,0 +1 @@ +10915.bugfix.rst \ No newline at end of file diff --git a/tests/conftest.py b/tests/conftest.py index 696f5d0d035..69469b3c793 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -8,7 +8,7 @@ from hashlib import md5, sha1, sha256 from pathlib import Path from tempfile import TemporaryDirectory -from typing import Any, Generator, Iterator +from typing import Any, AsyncIterator, Generator, Iterator from unittest import mock from uuid import uuid4 @@ -336,15 +336,13 @@ def parametrize_zlib_backend( @pytest.fixture() -def 
cleanup_payload_pending_file_closes( +async def cleanup_payload_pending_file_closes( loop: asyncio.AbstractEventLoop, -) -> Generator[None, None, None]: +) -> AsyncIterator[None]: """Ensure all pending file close operations complete during test teardown.""" yield if payload._CLOSE_FUTURES: # Only wait for futures from the current loop loop_futures = [f for f in payload._CLOSE_FUTURES if f.get_loop() is loop] if loop_futures: - loop.run_until_complete( - asyncio.gather(*loop_futures, return_exceptions=True) - ) + await asyncio.gather(*loop_futures, return_exceptions=True) From 1f1bc8f7fa9d59454a03fde35a89cea315db5f41 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 22 May 2025 19:57:52 +0000 Subject: [PATCH 1426/1511] [PR #10946/3c88f811 backport][3.12] Ensure AsyncResolver.close() can be called multiple times (#10947) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/10946.feature.rst | 1 + aiohttp/resolver.py | 3 ++- tests/test_resolver.py | 37 +++++++++++++++++++++++++++++++++++++ 3 files changed, 40 insertions(+), 1 deletion(-) create mode 120000 CHANGES/10946.feature.rst diff --git a/CHANGES/10946.feature.rst b/CHANGES/10946.feature.rst new file mode 120000 index 00000000000..879a4227358 --- /dev/null +++ b/CHANGES/10946.feature.rst @@ -0,0 +1 @@ +10847.feature.rst \ No newline at end of file diff --git a/aiohttp/resolver.py b/aiohttp/resolver.py index 05accd19564..1dcfca48153 100644 --- a/aiohttp/resolver.py +++ b/aiohttp/resolver.py @@ -198,7 +198,8 @@ async def close(self) -> None: self._resolver = None # type: ignore[assignment] # Clear reference to resolver return # Otherwise cancel our dedicated resolver - self._resolver.cancel() + if self._resolver is not None: + self._resolver.cancel() self._resolver = None # type: ignore[assignment] # Clear reference diff --git a/tests/test_resolver.py b/tests/test_resolver.py index f6963121eb7..17f1227cc72 100644 --- a/tests/test_resolver.py +++ 
b/tests/test_resolver.py @@ -674,3 +674,40 @@ async def test_dns_resolver_manager_missing_loop_data() -> None: # Verify no exception was raised assert loop not in manager._loop_data + + +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +@pytest.mark.usefixtures("check_no_lingering_resolvers") +async def test_async_resolver_close_multiple_times() -> None: + """Test that AsyncResolver.close() can be called multiple times without error.""" + with patch("aiodns.DNSResolver") as mock_dns_resolver: + mock_resolver = Mock() + mock_resolver.cancel = Mock() + mock_dns_resolver.return_value = mock_resolver + + # Create a resolver with custom args (dedicated resolver) + resolver = AsyncResolver(nameservers=["8.8.8.8"]) + + # Close it once + await resolver.close() + mock_resolver.cancel.assert_called_once() + + # Close it again - should not raise AttributeError + await resolver.close() + # cancel should still only be called once + mock_resolver.cancel.assert_called_once() + + +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +@pytest.mark.usefixtures("check_no_lingering_resolvers") +async def test_async_resolver_close_with_none_resolver() -> None: + """Test that AsyncResolver.close() handles None resolver gracefully.""" + with patch("aiodns.DNSResolver"): + # Create a resolver with custom args (dedicated resolver) + resolver = AsyncResolver(nameservers=["8.8.8.8"]) + + # Manually set resolver to None to simulate edge case + resolver._resolver = None # type: ignore[assignment] + + # This should not raise AttributeError + await resolver.close() From 31d363823d1096b29772c9703d742be83373d6ce Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Thu, 22 May 2025 15:13:23 -0500 Subject: [PATCH 1427/1511] Release 3.12.0b2 (#10948) --- CHANGES.rst | 223 ++++++++++++++++++++++++++++++++++++++++++++ aiohttp/__init__.py | 2 +- 2 files changed, 224 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index b455c45f7a9..a4b4886d291 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,229 @@ .. towncrier release notes start +3.12.0b2 (2025-05-22) +===================== + +Bug fixes +--------- + +- Response is now always True, instead of using MutableMapping behaviour (False when map is empty) + + + *Related issues and pull requests on GitHub:* + :issue:`10119`. + + + +- Fixed connection reuse for file-like data payloads by ensuring buffer + truncation respects content-length boundaries and preventing premature + connection closure race -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10325`, :issue:`10915`, :issue:`10941`, :issue:`10943`. + + + +- Fixed pytest plugin to not use deprecated :py:mod:`asyncio` policy APIs. + + + *Related issues and pull requests on GitHub:* + :issue:`10851`. + + + + +Features +-------- + +- Added a comprehensive HTTP Digest Authentication client middleware (DigestAuthMiddleware) + that implements RFC 7616. The middleware supports all standard hash algorithms + (MD5, SHA, SHA-256, SHA-512) with session variants, handles both 'auth' and + 'auth-int' quality of protection options, and automatically manages the + authentication flow by intercepting 401 responses and retrying with proper + credentials -- by :user:`feus4177`, :user:`TimMenninger`, and :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`2213`, :issue:`10725`. + + + +- Added client middleware support -- by :user:`bdraco` and :user:`Dreamsorcerer`. 
+ + This change allows users to add middleware to the client session and requests, enabling features like + authentication, logging, and request/response modification without modifying the core + request logic. Additionally, the ``session`` attribute was added to ``ClientRequest``, + allowing middleware to access the session for making additional requests. + + + *Related issues and pull requests on GitHub:* + :issue:`9732`, :issue:`10902`. + + + +- Allow user setting zlib compression backend -- by :user:`TimMenninger` + + This change allows the user to call :func:`aiohttp.set_zlib_backend()` with the + zlib compression module of their choice. Default behavior continues to use + the builtin ``zlib`` library. + + + *Related issues and pull requests on GitHub:* + :issue:`9798`. + + + +- Added support for overriding the base URL with an absolute one in client sessions + -- by :user:`vivodi`. + + + *Related issues and pull requests on GitHub:* + :issue:`10074`. + + + +- Added ``host`` parameter to ``aiohttp_server`` fixture -- by :user:`christianwbrock`. + + + *Related issues and pull requests on GitHub:* + :issue:`10120`. + + + +- Detect blocking calls in coroutines using BlockBuster -- by :user:`cbornet`. + + + *Related issues and pull requests on GitHub:* + :issue:`10433`. + + + +- Added ``socket_factory`` to :py:class:`aiohttp.TCPConnector` to allow specifying custom socket options + -- by :user:`TimMenninger`. + + + *Related issues and pull requests on GitHub:* + :issue:`10474`, :issue:`10520`. + + + +- Started building armv7l manylinux wheels -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10797`. + + + +- Implemented shared DNS resolver management to fix excessive resolver object creation + when using multiple client sessions. 
The new ``_DNSResolverManager`` singleton ensures + only one ``DNSResolver`` object is created for default configurations, significantly + reducing resource usage and improving performance for applications using multiple + client sessions simultaneously -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10847`, :issue:`10923`, :issue:`10946`. + + + + +Packaging updates and notes for downstreams +------------------------------------------- + +- Removed non SPDX-license description from ``setup.cfg`` -- by :user:`devanshu-ziphq`. + + + *Related issues and pull requests on GitHub:* + :issue:`10662`. + + + +- Added support for building against system ``llhttp`` library -- by :user:`mgorny`. + + This change adds support for :envvar:`AIOHTTP_USE_SYSTEM_DEPS` environment variable that + can be used to build aiohttp against the system install of the ``llhttp`` library rather + than the vendored one. + + + *Related issues and pull requests on GitHub:* + :issue:`10759`. + + + +- ``aiodns`` is now installed on Windows with speedups extra -- by :user:`bdraco`. + + As of ``aiodns`` 3.3.0, ``SelectorEventLoop`` is no longer required when using ``pycares`` 4.7.0 or later. + + + *Related issues and pull requests on GitHub:* + :issue:`10823`. + + + +- Fixed compatibility issue with Cython 3.1.1 -- by :user:`bdraco` + + + *Related issues and pull requests on GitHub:* + :issue:`10877`. + + + + +Contributor-facing changes +-------------------------- + +- Sped up tests by disabling ``blockbuster`` fixture for ``test_static_file_huge`` and ``test_static_file_huge_cancel`` tests -- by :user:`dikos1337`. + + + *Related issues and pull requests on GitHub:* + :issue:`9705`, :issue:`10761`. + + + +- Updated tests to avoid using deprecated :py:mod:`asyncio` policy APIs and + make it compatible with Python 3.14. + + + *Related issues and pull requests on GitHub:* + :issue:`10851`. 
+ + + +- Added Winloop to test suite to support in the future -- by :user:`Vizonex`. + + + *Related issues and pull requests on GitHub:* + :issue:`10922`. + + + + +Miscellaneous internal changes +------------------------------ + +- Added support for the ``partitioned`` attribute in the ``set_cookie`` method. + + + *Related issues and pull requests on GitHub:* + :issue:`9870`. + + + +- Setting :attr:`aiohttp.web.StreamResponse.last_modified` to an unsupported type will now raise :exc:`TypeError` instead of silently failing -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10146`. + + + + +---- + + 3.12.0b1 (2025-05-22) ===================== diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 972eabeab7e..2ab58f23574 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.0b1" +__version__ = "3.12.0b2" from typing import TYPE_CHECKING, Tuple From b00739bce116ab5aa6b7064dcfb51196b710b2a2 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Thu, 22 May 2025 16:56:01 -0500 Subject: [PATCH 1428/1511] [PR #10949/06e3b36 backport][3.12] Improve connection reuse test coverage (#10950) --- tests/test_client_functional.py | 125 ++++++++++++++++++++++++++++++++ tests/test_web_functional.py | 4 + 2 files changed, 129 insertions(+) diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 5e1a4d2ddb5..ff9a33bda1b 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -4311,3 +4311,128 @@ async def handler(request: web.Request) -> web.Response: response.raise_for_status() assert len(client._session.connector._conns) == 1 + + +async def test_post_content_exception_connection_kept( + aiohttp_client: AiohttpClient, +) -> None: + """Test that connections are kept after content.set_exception() with POST.""" + + async def handler(request: web.Request) -> web.Response: + await request.read() + return web.Response( + body=b"x" * 1000 + ) # Larger response to ensure it's not pre-buffered + + app = web.Application() + app.router.add_post("/", handler) + client = await aiohttp_client(app) + + # POST request with body - connection should be closed after content exception + resp = await client.post("/", data=b"request body") + + with pytest.raises(RuntimeError): + async with resp: + assert resp.status == 200 + resp.content.set_exception(RuntimeError("Simulated error")) + await resp.read() + + assert resp.closed + + # Wait for any pending operations to complete + await resp.wait_for_close() + + assert client._session.connector is not None + # Connection is kept because content.set_exception() is a client-side operation + # that doesn't affect the underlying connection state + assert len(client._session.connector._conns) == 1 + + +async def test_network_error_connection_closed( + aiohttp_client: AiohttpClient, +) -> None: + """Test that connections are closed after network errors.""" + + async def handler(request: web.Request) -> 
NoReturn: + # Read the request body + await request.read() + + # Start sending response but close connection before completing + response = web.StreamResponse() + response.content_length = 1000 # Promise 1000 bytes + await response.prepare(request) + + # Send partial data then force close the connection + await response.write(b"x" * 100) # Only send 100 bytes + # Force close the transport to simulate network error + assert request.transport is not None + request.transport.close() + assert False, "Will not return" + + app = web.Application() + app.router.add_post("/", handler) + client = await aiohttp_client(app) + + # POST request that will fail due to network error + with pytest.raises(aiohttp.ClientPayloadError): + resp = await client.post("/", data=b"request body") + async with resp: + await resp.read() # This should fail + + # Give event loop a chance to process connection cleanup + await asyncio.sleep(0) + + assert client._session.connector is not None + # Connection should be closed due to network error + assert len(client._session.connector._conns) == 0 + + +async def test_client_side_network_error_connection_closed( + aiohttp_client: AiohttpClient, +) -> None: + """Test that connections are closed after client-side network errors.""" + handler_done = asyncio.Event() + + async def handler(request: web.Request) -> NoReturn: + # Read the request body + await request.read() + + # Start sending a large response + response = web.StreamResponse() + response.content_length = 10000 # Promise 10KB + await response.prepare(request) + + # Send some data + await response.write(b"x" * 1000) + + # Keep the response open - we'll interrupt from client side + await asyncio.wait_for(handler_done.wait(), timeout=5.0) + assert False, "Will not return" + + app = web.Application() + app.router.add_post("/", handler) + client = await aiohttp_client(app) + + # POST request that will fail due to client-side network error + with pytest.raises(aiohttp.ClientPayloadError): + resp = 
await client.post("/", data=b"request body") + async with resp: + # Simulate client-side network error by closing the transport + # This simulates connection reset, network failure, etc. + assert resp.connection is not None + assert resp.connection.protocol is not None + assert resp.connection.protocol.transport is not None + resp.connection.protocol.transport.close() + + # This should fail with connection error + await resp.read() + + # Signal handler to finish + handler_done.set() + + # Give event loop a chance to process connection cleanup + await asyncio.sleep(0) + + assert client._session.connector is not None + # Connection should be closed due to client-side network error + assert len(client._session.connector._conns) == 0 diff --git a/tests/test_web_functional.py b/tests/test_web_functional.py index b6caf23df53..c33b3cec1ff 100644 --- a/tests/test_web_functional.py +++ b/tests/test_web_functional.py @@ -1956,6 +1956,10 @@ async def handler(request): await resp.read() assert resp.closed + # Wait for any pending operations to complete + await resp.wait_for_close() + + assert session._connector is not None assert len(session._connector._conns) == 1 await session.close() From 12ff66d5312bd9df894e506f8802b133d0293b91 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Thu, 22 May 2025 17:00:11 -0500 Subject: [PATCH 1429/1511] [3.12] Fix AsyncResolver not using the loop argument (#10951) fixes #10787 --- CHANGES/10951.bugfix.rst | 1 + aiohttp/resolver.py | 2 +- tests/test_resolver.py | 34 ++++++++++++++++++++++++++++++++++ 3 files changed, 36 insertions(+), 1 deletion(-) create mode 100644 CHANGES/10951.bugfix.rst diff --git a/CHANGES/10951.bugfix.rst b/CHANGES/10951.bugfix.rst new file mode 100644 index 00000000000..d539fc1a52d --- /dev/null +++ b/CHANGES/10951.bugfix.rst @@ -0,0 +1 @@ +Fixed :py:class:`~aiohttp.resolver.AsyncResolver` not using the ``loop`` argument in versions 3.x where it should still be supported -- by :user:`bdraco`. 
diff --git a/aiohttp/resolver.py b/aiohttp/resolver.py index 1dcfca48153..118bf8cbff7 100644 --- a/aiohttp/resolver.py +++ b/aiohttp/resolver.py @@ -94,7 +94,7 @@ def __init__( if aiodns is None: raise RuntimeError("Resolver requires aiodns library") - self._loop = asyncio.get_running_loop() + self._loop = loop or asyncio.get_running_loop() self._manager: Optional[_DNSResolverManager] = None # If custom args are provided, create a dedicated resolver instance # This means each AsyncResolver with custom args gets its own diff --git a/tests/test_resolver.py b/tests/test_resolver.py index 17f1227cc72..1866939ba6b 100644 --- a/tests/test_resolver.py +++ b/tests/test_resolver.py @@ -711,3 +711,37 @@ async def test_async_resolver_close_with_none_resolver() -> None: # This should not raise AttributeError await resolver.close() + + +@pytest.mark.skipif(aiodns is None, reason="aiodns required") +def test_async_resolver_uses_provided_loop() -> None: + """Test that AsyncResolver uses the loop parameter when provided.""" + # Create a custom event loop + custom_loop = asyncio.new_event_loop() + + try: + # Need to set the loop as current for get_running_loop() to work + asyncio.set_event_loop(custom_loop) + + # Create resolver with explicit loop parameter + resolver = AsyncResolver(loop=custom_loop) + + # Check that the resolver uses the provided loop + assert resolver._loop is custom_loop + finally: + asyncio.set_event_loop(None) + custom_loop.close() + + +@pytest.mark.skipif(aiodns is None, reason="aiodns required") +@pytest.mark.usefixtures("check_no_lingering_resolvers") +async def test_async_resolver_uses_running_loop_when_none_provided() -> None: + """Test that AsyncResolver uses get_running_loop() when no loop is provided.""" + # Create resolver without loop parameter + resolver = AsyncResolver() + + # Check that the resolver uses the current running loop + assert resolver._loop is asyncio.get_running_loop() + + # Clean up + await resolver.close() From 
2eb3f6ca1420a2784a95c06f45eba6f73a0434f1 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 22 May 2025 17:42:31 -0500 Subject: [PATCH 1430/1511] [PR #10952/45b74cfc backport][3.12] Remove manual release call in middleware (#10953) Co-authored-by: J. Nick Koston <nick@koston.org> closes #10901 --- CHANGES/10952.feature.rst | 1 + aiohttp/client_middleware_digest_auth.py | 2 -- tests/test_client_middleware.py | 1 - 3 files changed, 1 insertion(+), 3 deletions(-) create mode 120000 CHANGES/10952.feature.rst diff --git a/CHANGES/10952.feature.rst b/CHANGES/10952.feature.rst new file mode 120000 index 00000000000..b565aa68ee0 --- /dev/null +++ b/CHANGES/10952.feature.rst @@ -0,0 +1 @@ +9732.feature.rst \ No newline at end of file diff --git a/aiohttp/client_middleware_digest_auth.py b/aiohttp/client_middleware_digest_auth.py index e9eb3ba82e2..b63efaf0142 100644 --- a/aiohttp/client_middleware_digest_auth.py +++ b/aiohttp/client_middleware_digest_auth.py @@ -408,8 +408,6 @@ async def __call__( # Check if we need to authenticate if not self._authenticate(response): break - elif retry_count < 1: - response.release() # Release the response to enable connection reuse on retry # At this point, response is guaranteed to be defined assert response is not None diff --git a/tests/test_client_middleware.py b/tests/test_client_middleware.py index 5894795dc21..883d853d2e8 100644 --- a/tests/test_client_middleware.py +++ b/tests/test_client_middleware.py @@ -891,7 +891,6 @@ async def __call__( response = await handler(request) if retry_count == 0: retry_count += 1 - response.release() # Release the response to enable connection reuse continue return response From b5a061bb556e2e2ab1d16603c9b0fc492eccad6f Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Thu, 22 May 2025 18:05:58 -0500 Subject: [PATCH 1431/1511] Release 3.12.0b3 (#10955) --- CHANGES.rst | 231 ++++++++++++++++++++++++++++++++++++++++++++ aiohttp/__init__.py | 2 +- 2 files changed, 232 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index a4b4886d291..c0a9b20f200 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,237 @@ .. towncrier release notes start +3.12.0b3 (2025-05-22) +===================== + +Bug fixes +--------- + +- Response is now always True, instead of using MutableMapping behaviour (False when map is empty) + + + *Related issues and pull requests on GitHub:* + :issue:`10119`. + + + +- Fixed connection reuse for file-like data payloads by ensuring buffer + truncation respects content-length boundaries and preventing premature + connection closure race -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10325`, :issue:`10915`, :issue:`10941`, :issue:`10943`. + + + +- Fixed pytest plugin to not use deprecated :py:mod:`asyncio` policy APIs. + + + *Related issues and pull requests on GitHub:* + :issue:`10851`. + + + +- Fixed :py:class:`~aiohttp.resolver.AsyncResolver` not using the ``loop`` argument in versions 3.x where it should still be supported -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10951`. + + + + +Features +-------- + +- Added a comprehensive HTTP Digest Authentication client middleware (DigestAuthMiddleware) + that implements RFC 7616. The middleware supports all standard hash algorithms + (MD5, SHA, SHA-256, SHA-512) with session variants, handles both 'auth' and + 'auth-int' quality of protection options, and automatically manages the + authentication flow by intercepting 401 responses and retrying with proper + credentials -- by :user:`feus4177`, :user:`TimMenninger`, and :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`2213`, :issue:`10725`. 
+ + + +- Added client middleware support -- by :user:`bdraco` and :user:`Dreamsorcerer`. + + This change allows users to add middleware to the client session and requests, enabling features like + authentication, logging, and request/response modification without modifying the core + request logic. Additionally, the ``session`` attribute was added to ``ClientRequest``, + allowing middleware to access the session for making additional requests. + + + *Related issues and pull requests on GitHub:* + :issue:`9732`, :issue:`10902`, :issue:`10952`. + + + +- Allow user setting zlib compression backend -- by :user:`TimMenninger` + + This change allows the user to call :func:`aiohttp.set_zlib_backend()` with the + zlib compression module of their choice. Default behavior continues to use + the builtin ``zlib`` library. + + + *Related issues and pull requests on GitHub:* + :issue:`9798`. + + + +- Added support for overriding the base URL with an absolute one in client sessions + -- by :user:`vivodi`. + + + *Related issues and pull requests on GitHub:* + :issue:`10074`. + + + +- Added ``host`` parameter to ``aiohttp_server`` fixture -- by :user:`christianwbrock`. + + + *Related issues and pull requests on GitHub:* + :issue:`10120`. + + + +- Detect blocking calls in coroutines using BlockBuster -- by :user:`cbornet`. + + + *Related issues and pull requests on GitHub:* + :issue:`10433`. + + + +- Added ``socket_factory`` to :py:class:`aiohttp.TCPConnector` to allow specifying custom socket options + -- by :user:`TimMenninger`. + + + *Related issues and pull requests on GitHub:* + :issue:`10474`, :issue:`10520`. + + + +- Started building armv7l manylinux wheels -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10797`. + + + +- Implemented shared DNS resolver management to fix excessive resolver object creation + when using multiple client sessions. 
The new ``_DNSResolverManager`` singleton ensures + only one ``DNSResolver`` object is created for default configurations, significantly + reducing resource usage and improving performance for applications using multiple + client sessions simultaneously -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10847`, :issue:`10923`, :issue:`10946`. + + + + +Packaging updates and notes for downstreams +------------------------------------------- + +- Removed non SPDX-license description from ``setup.cfg`` -- by :user:`devanshu-ziphq`. + + + *Related issues and pull requests on GitHub:* + :issue:`10662`. + + + +- Added support for building against system ``llhttp`` library -- by :user:`mgorny`. + + This change adds support for :envvar:`AIOHTTP_USE_SYSTEM_DEPS` environment variable that + can be used to build aiohttp against the system install of the ``llhttp`` library rather + than the vendored one. + + + *Related issues and pull requests on GitHub:* + :issue:`10759`. + + + +- ``aiodns`` is now installed on Windows with speedups extra -- by :user:`bdraco`. + + As of ``aiodns`` 3.3.0, ``SelectorEventLoop`` is no longer required when using ``pycares`` 4.7.0 or later. + + + *Related issues and pull requests on GitHub:* + :issue:`10823`. + + + +- Fixed compatibility issue with Cython 3.1.1 -- by :user:`bdraco` + + + *Related issues and pull requests on GitHub:* + :issue:`10877`. + + + + +Contributor-facing changes +-------------------------- + +- Sped up tests by disabling ``blockbuster`` fixture for ``test_static_file_huge`` and ``test_static_file_huge_cancel`` tests -- by :user:`dikos1337`. + + + *Related issues and pull requests on GitHub:* + :issue:`9705`, :issue:`10761`. + + + +- Updated tests to avoid using deprecated :py:mod:`asyncio` policy APIs and + make it compatible with Python 3.14. + + + *Related issues and pull requests on GitHub:* + :issue:`10851`. 
+ + + +- Added Winloop to test suite to support in the future -- by :user:`Vizonex`. + + + *Related issues and pull requests on GitHub:* + :issue:`10922`. + + + + +Miscellaneous internal changes +------------------------------ + +- Added support for the ``partitioned`` attribute in the ``set_cookie`` method. + + + *Related issues and pull requests on GitHub:* + :issue:`9870`. + + + +- Setting :attr:`aiohttp.web.StreamResponse.last_modified` to an unsupported type will now raise :exc:`TypeError` instead of silently failing -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10146`. + + + + +---- + + 3.12.0b2 (2025-05-22) ===================== diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 2ab58f23574..0ca44564e46 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.0b2" +__version__ = "3.12.0b3" from typing import TYPE_CHECKING, Tuple From 6ccd3d5b91a9d0b3003a7c15e19542aa25f46a00 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 22 May 2025 23:53:40 +0000 Subject: [PATCH 1432/1511] [PR #10956/5dcb36a4 backport][3.12] Fix some missing connector closes in tests (#10957) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- tests/test_client_middleware.py | 2 ++ tests/test_proxy_functional.py | 3 +++ tests/test_web_sendfile_functional.py | 1 + 3 files changed, 6 insertions(+) diff --git a/tests/test_client_middleware.py b/tests/test_client_middleware.py index 883d853d2e8..9d49b750333 100644 --- a/tests/test_client_middleware.py +++ b/tests/test_client_middleware.py @@ -793,6 +793,8 @@ async def blocking_middleware( # Check that no connections were leaked assert len(connector._conns) == 0 + await connector.close() + async def test_client_middleware_blocks_connection_without_dns_lookup( aiohttp_server: AiohttpServer, diff --git a/tests/test_proxy_functional.py b/tests/test_proxy_functional.py index 78521ae6008..5b33ed6ca3b 100644 --- a/tests/test_proxy_functional.py +++ b/tests/test_proxy_functional.py @@ -418,6 +418,7 @@ async def test_proxy_http_acquired_cleanup(proxy_test_server, loop) -> None: assert 0 == len(conn._acquired) await sess.close() + await conn.close() @pytest.mark.skip("we need to reconsider how we test this") @@ -439,6 +440,7 @@ async def request(): assert 0 == len(conn._acquired) await sess.close() + await conn.close() @pytest.mark.skip("we need to reconsider how we test this") @@ -470,6 +472,7 @@ async def request(pid): assert {resp.status for resp in responses} == {200} await sess.close() + await conn.close() @pytest.mark.xfail diff --git a/tests/test_web_sendfile_functional.py b/tests/test_web_sendfile_functional.py index 0c3e9ba68b5..0325a4658e2 100644 --- a/tests/test_web_sendfile_functional.py +++ b/tests/test_web_sendfile_functional.py @@ -614,6 +614,7 @@ async def test_static_file_ssl( await resp.release() await client.close() + await conn.close() async def test_static_file_directory_traversal_attack(aiohttp_client) -> None: From 2c55e880b9131da8ea3f6787904937f5fc3f52ef Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 22 May 2025 21:34:35 -0500 Subject: [PATCH 
1433/1511] [PR #10959/cc234c6d backport][3.12] Change ClientSession middlewares default to be an empty tuple (#10960) Co-authored-by: J. Nick Koston <nick@koston.org> closes #10905 --- CHANGES/10959.feature.rst | 1 + aiohttp/client.py | 2 +- docs/client_reference.rst | 4 ++-- 3 files changed, 4 insertions(+), 3 deletions(-) create mode 120000 CHANGES/10959.feature.rst diff --git a/CHANGES/10959.feature.rst b/CHANGES/10959.feature.rst new file mode 120000 index 00000000000..b565aa68ee0 --- /dev/null +++ b/CHANGES/10959.feature.rst @@ -0,0 +1 @@ +9732.feature.rst \ No newline at end of file diff --git a/aiohttp/client.py b/aiohttp/client.py index bea1c6f61e7..811c8f97588 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -302,7 +302,7 @@ def __init__( max_line_size: int = 8190, max_field_size: int = 8190, fallback_charset_resolver: _CharsetResolver = lambda r, b: "utf-8", - middlewares: Optional[Sequence[ClientMiddlewareType]] = None, + middlewares: Sequence[ClientMiddlewareType] = (), ) -> None: # We initialise _connector to None immediately, as it's referenced in __del__() # and could cause issues if an exception occurs during initialisation. diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 97933ada1ed..cd825b403a0 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -53,7 +53,7 @@ The client session supports the context manager protocol for self closing. trust_env=False, \ requote_redirect_url=True, \ trace_configs=None, \ - middlewares=None, \ + middlewares=(), \ read_bufsize=2**16, \ max_line_size=8190, \ max_field_size=8190, \ @@ -232,7 +232,7 @@ The client session supports the context manager protocol for self closing. :param middlewares: A sequence of middleware instances to apply to all session requests. Each middleware must match the :type:`ClientMiddlewareType` signature. - ``None`` (default) is used when no middleware is needed. + ``()`` (empty tuple, default) is used when no middleware is needed. 
See :ref:`aiohttp-client-middleware` for more information. .. versionadded:: 3.12 From 15bef6ed99cd99d067eaa65566731fd0c01a3da1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 23 May 2025 10:59:04 +0000 Subject: [PATCH 1434/1511] Bump pydantic from 2.11.4 to 2.11.5 (#10963) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pydantic](https://github.com/pydantic/pydantic) from 2.11.4 to 2.11.5. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pydantic/pydantic/releases">pydantic's releases</a>.</em></p> <blockquote> <h2>v2.11.5 2025-05-22</h2> <!-- raw HTML omitted --> <h2>What's Changed</h2> <h3>Fixes</h3> <ul> <li>Check if <code>FieldInfo</code> is complete after applying type variable map by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11855">#11855</a></li> <li>Do not delete mock validator/serializer in <code>model_rebuild()</code> by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11890">#11890</a></li> <li>Do not duplicate metadata on model rebuild by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11902">#11902</a></li> </ul> <p><strong>Full Changelog</strong>: <a href="https://github.com/pydantic/pydantic/compare/v2.11.4...v2.11.5">https://github.com/pydantic/pydantic/compare/v2.11.4...v2.11.5</a></p> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pydantic/pydantic/blob/main/HISTORY.md">pydantic's changelog</a>.</em></p> <blockquote> <h2>v2.11.5 (2025-05-22)</h2> <p><a href="https://github.com/pydantic/pydantic/releases/tag/v2.11.5">GitHub release</a></p> <h3>What's Changed</h3> <h4>Fixes</h4> <ul> <li>Check if 
<code>FieldInfo</code> is complete after applying type variable map by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11855">#11855</a></li> <li>Do not delete mock validator/serializer in <code>model_rebuild()</code> by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11890">#11890</a></li> <li>Do not duplicate metadata on model rebuild by <a href="https://github.com/Viicos"><code>@​Viicos</code></a> in <a href="https://redirect.github.com/pydantic/pydantic/pull/11902">#11902</a></li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pydantic/pydantic/commit/5e6d1dc71fe9bd832635cb2e9b4af92286fd00b8"><code>5e6d1dc</code></a> Prepare release v2.11.5</li> <li><a href="https://github.com/pydantic/pydantic/commit/1b63218c42b515bd1f6b0dd323190236ead14bdb"><code>1b63218</code></a> Do not duplicate metadata on model rebuild (<a href="https://redirect.github.com/pydantic/pydantic/issues/11902">#11902</a>)</li> <li><a href="https://github.com/pydantic/pydantic/commit/5aefad873b3dfd60c419bd081ffaf0ac197c7b60"><code>5aefad8</code></a> Do not delete mock validator/serializer in <code>model_rebuild()</code></li> <li><a href="https://github.com/pydantic/pydantic/commit/8fbe6585f4d6179e5234ab61de00059c52e57975"><code>8fbe658</code></a> Check if <code>FieldInfo</code> is complete after applying type variable map</li> <li><a href="https://github.com/pydantic/pydantic/commit/12b371a0f7f800bf65daa3eaada1b4348348d9c4"><code>12b371a</code></a> Update documentation about <code>@dataclass_transform</code> support</li> <li><a href="https://github.com/pydantic/pydantic/commit/3a6aef4400afe6ac1fcaab4f31774c1ee4aadcb3"><code>3a6aef4</code></a> Fix missing link in documentation</li> <li><a 
href="https://github.com/pydantic/pydantic/commit/0506b9cd8b3d544f135c624f4a7584dd53098cb7"><code>0506b9c</code></a> Fix light/dark mode documentation toggle</li> <li><a href="https://github.com/pydantic/pydantic/commit/58078c8b5624d800ec80dff295972737149f8080"><code>58078c8</code></a> Fix typo in documentation</li> <li>See full diff in <a href="https://github.com/pydantic/pydantic/compare/v2.11.4...v2.11.5">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pydantic&package-manager=pip&previous-version=2.11.4&new-version=2.11.5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. 
You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 4 +++- requirements/dev.txt | 4 +++- requirements/lint.txt | 2 +- requirements/test.txt | 4 +++- 4 files changed, 10 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index e79f7008a7d..9bcdeb5ff8b 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -136,6 +136,8 @@ packaging==25.0 # sphinx pip-tools==7.4.1 # via -r requirements/dev.in +pkgconfig==1.5.5 + # via -r requirements/test.in platformdirs==4.3.8 # via virtualenv pluggy==1.6.0 @@ -152,7 +154,7 @@ pycares==4.8.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.11.4 +pydantic==2.11.5 # via python-on-whales pydantic-core==2.33.2 # via pydantic diff --git a/requirements/dev.txt b/requirements/dev.txt index 9b2c3ebeab3..26728928cee 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -133,6 +133,8 @@ packaging==25.0 # sphinx pip-tools==7.4.1 # via -r requirements/dev.in +pkgconfig==1.5.5 + # via -r requirements/test.in platformdirs==4.3.8 # via virtualenv pluggy==1.6.0 @@ -149,7 +151,7 @@ pycares==4.8.0 # via aiodns pycparser==2.22 # via cffi 
-pydantic==2.11.4 +pydantic==2.11.5 # via python-on-whales pydantic-core==2.33.2 # via pydantic diff --git a/requirements/lint.txt b/requirements/lint.txt index 99fcd3969e3..57729254937 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -63,7 +63,7 @@ pycares==4.8.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.11.4 +pydantic==2.11.5 # via python-on-whales pydantic-core==2.33.2 # via pydantic diff --git a/requirements/test.txt b/requirements/test.txt index 63cb482c5e0..007852dbcaa 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -71,6 +71,8 @@ packaging==25.0 # via # gunicorn # pytest +pkgconfig==1.5.5 + # via -r requirements/test.in pluggy==1.6.0 # via pytest propcache==0.3.1 @@ -83,7 +85,7 @@ pycares==4.8.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.11.4 +pydantic==2.11.5 # via python-on-whales pydantic-core==2.33.2 # via pydantic From 11c7c433df6453cd1d06467d0e325a8256308249 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 23 May 2025 14:00:51 +0000 Subject: [PATCH 1435/1511] [PR #10962/84decfe5 backport][3.12] add example of setting network interface in custom socket creation (#10966) Co-authored-by: Cycloctane <Cycloctane@outlook.com> Co-authored-by: J. 
Nick Koston <nick@koston.org> closes #7132 --- CHANGES/10962.feature.rst | 1 + docs/client_advanced.rst | 13 +++++++++++++ 2 files changed, 14 insertions(+) create mode 120000 CHANGES/10962.feature.rst diff --git a/CHANGES/10962.feature.rst b/CHANGES/10962.feature.rst new file mode 120000 index 00000000000..7c4f9a7b83b --- /dev/null +++ b/CHANGES/10962.feature.rst @@ -0,0 +1 @@ +10520.feature.rst \ No newline at end of file diff --git a/docs/client_advanced.rst b/docs/client_advanced.rst index d598a40c6ab..033b5f5705d 100644 --- a/docs/client_advanced.rst +++ b/docs/client_advanced.rst @@ -714,6 +714,19 @@ make all sockets respect 9*7200 = 18 hours:: return sock conn = aiohttp.TCPConnector(socket_factory=socket_factory) +``socket_factory`` may also be used for binding to the specific network +interface on supported platforms:: + + def socket_factory(addr_info): + family, type_, proto, _, _ = addr_info + sock = socket.socket(family=family, type=type_, proto=proto) + sock.setsockopt( + socket.SOL_SOCKET, socket.SO_BINDTODEVICE, b'eth0' + ) + return sock + + conn = aiohttp.TCPConnector(socket_factory=socket_factory) + Named pipes in Windows ^^^^^^^^^^^^^^^^^^^^^^ From 82497a690746f6c81455f1dc879d33545b07ad99 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 23 May 2025 14:02:04 +0000 Subject: [PATCH 1436/1511] [PR #10961/5e68276c backport][3.12] fix example in socket_factory docs (#10967) Co-authored-by: Cycloctane <Cycloctane@outlook.com> Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/10961.feature.rst | 1 + docs/client_advanced.rst | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) create mode 120000 CHANGES/10961.feature.rst diff --git a/CHANGES/10961.feature.rst b/CHANGES/10961.feature.rst new file mode 120000 index 00000000000..7c4f9a7b83b --- /dev/null +++ b/CHANGES/10961.feature.rst @@ -0,0 +1 @@ +10520.feature.rst \ No newline at end of file diff --git a/docs/client_advanced.rst b/docs/client_advanced.rst index 033b5f5705d..c5b542e82fd 100644 --- a/docs/client_advanced.rst +++ b/docs/client_advanced.rst @@ -706,12 +706,13 @@ make all sockets respect 9*7200 = 18 hours:: import socket def socket_factory(addr_info): - family, type_, proto, _, _, _ = addr_info + family, type_, proto, _, _ = addr_info sock = socket.socket(family=family, type=type_, proto=proto) sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, True) sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 7200) sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 9) return sock + conn = aiohttp.TCPConnector(socket_factory=socket_factory) ``socket_factory`` may also be used for binding to the specific network From 6f4f83f04bd98e8b736b767fdd98323fdd578185 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 23 May 2025 17:39:13 +0200 Subject: [PATCH 1437/1511] [PR #10945/18785096 backport][3.12] Add Client Middleware Cookbook (#10969) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/10945.feature.rst | 1 + docs/client.rst | 1 + docs/client_advanced.rst | 2 + docs/client_middleware_cookbook.rst | 358 +++++++++++++++++++++++++++ docs/spelling_wordlist.txt | 1 + examples/basic_auth_middleware.py | 190 ++++++++++++++ examples/combined_middleware.py | 320 ++++++++++++++++++++++++ examples/logging_middleware.py | 169 +++++++++++++ examples/retry_middleware.py | 245 ++++++++++++++++++ examples/token_refresh_middleware.py | 336 +++++++++++++++++++++++++ 10 files changed, 1623 insertions(+) create mode 120000 CHANGES/10945.feature.rst create mode 100644 docs/client_middleware_cookbook.rst create mode 100644 examples/basic_auth_middleware.py create mode 100644 examples/combined_middleware.py create mode 100644 examples/logging_middleware.py create mode 100644 examples/retry_middleware.py create mode 100644 examples/token_refresh_middleware.py diff --git a/CHANGES/10945.feature.rst b/CHANGES/10945.feature.rst new file mode 120000 index 00000000000..b565aa68ee0 --- /dev/null +++ b/CHANGES/10945.feature.rst @@ -0,0 +1 @@ +9732.feature.rst \ No newline at end of file diff --git a/docs/client.rst b/docs/client.rst index 78fbeae4ded..9109c3772da 100644 --- a/docs/client.rst +++ b/docs/client.rst @@ -14,6 +14,7 @@ The page contains all information about aiohttp Client API: Quickstart <client_quickstart> Advanced Usage <client_advanced> + Client Middleware Cookbook <client_middleware_cookbook> Reference <client_reference> Tracing Reference <tracing_reference> The aiohttp Request Lifecycle <http_request_lifecycle> diff --git a/docs/client_advanced.rst b/docs/client_advanced.rst index c5b542e82fd..5a94e68ec1f 100644 --- a/docs/client_advanced.rst +++ b/docs/client_advanced.rst @@ -126,6 +126,8 @@ Client Middleware The client supports middleware to intercept requests and responses. This can be useful for authentication, logging, request/response modification, and retries. 
+For practical examples and common middleware patterns, see the :ref:`aiohttp-client-middleware-cookbook`. + Creating Middleware ^^^^^^^^^^^^^^^^^^^ diff --git a/docs/client_middleware_cookbook.rst b/docs/client_middleware_cookbook.rst new file mode 100644 index 00000000000..4b8d6ddd5f8 --- /dev/null +++ b/docs/client_middleware_cookbook.rst @@ -0,0 +1,358 @@ +.. currentmodule:: aiohttp + +.. _aiohttp-client-middleware-cookbook: + +Client Middleware Cookbook +========================== + +This cookbook provides practical examples of implementing client middleware for common use cases. + +.. note:: + + All examples in this cookbook are also available as complete, runnable scripts in the + ``examples/`` directory of the aiohttp repository. Look for files named ``*_middleware.py``. + +.. _cookbook-basic-auth-middleware: + +Basic Authentication Middleware +------------------------------- + +Basic authentication is a simple authentication scheme built into the HTTP protocol. +Here's a middleware that automatically adds Basic Auth headers to all requests: + +.. 
code-block:: python + + import base64 + from aiohttp import ClientRequest, ClientResponse, ClientHandlerType, hdrs + + class BasicAuthMiddleware: + """Middleware that adds Basic Authentication to all requests.""" + + def __init__(self, username: str, password: str) -> None: + self.username = username + self.password = password + self._auth_header = self._encode_credentials() + + def _encode_credentials(self) -> str: + """Encode username and password to base64.""" + credentials = f"{self.username}:{self.password}" + encoded = base64.b64encode(credentials.encode()).decode() + return f"Basic {encoded}" + + async def __call__( + self, + request: ClientRequest, + handler: ClientHandlerType + ) -> ClientResponse: + """Add Basic Auth header to the request.""" + # Only add auth if not already present + if hdrs.AUTHORIZATION not in request.headers: + request.headers[hdrs.AUTHORIZATION] = self._auth_header + + # Proceed with the request + return await handler(request) + +Usage example: + +.. code-block:: python + + import aiohttp + import asyncio + import logging + + _LOGGER = logging.getLogger(__name__) + + async def main(): + # Create middleware instance + auth_middleware = BasicAuthMiddleware("user", "pass") + + # Use middleware in session + async with aiohttp.ClientSession(middlewares=(auth_middleware,)) as session: + async with session.get("https://httpbin.org/basic-auth/user/pass") as resp: + _LOGGER.debug("Status: %s", resp.status) + data = await resp.json() + _LOGGER.debug("Response: %s", data) + + asyncio.run(main()) + +.. _cookbook-retry-middleware: + +Simple Retry Middleware +----------------------- + +A retry middleware that automatically retries failed requests with exponential backoff: + +.. 
code-block:: python + + import asyncio + import logging + from http import HTTPStatus + from typing import Union, Set + from aiohttp import ClientRequest, ClientResponse, ClientHandlerType + + _LOGGER = logging.getLogger(__name__) + + DEFAULT_RETRY_STATUSES = { + HTTPStatus.TOO_MANY_REQUESTS, + HTTPStatus.INTERNAL_SERVER_ERROR, + HTTPStatus.BAD_GATEWAY, + HTTPStatus.SERVICE_UNAVAILABLE, + HTTPStatus.GATEWAY_TIMEOUT + } + + class RetryMiddleware: + """Middleware that retries failed requests with exponential backoff.""" + + def __init__( + self, + max_retries: int = 3, + retry_statuses: Union[Set[int], None] = None, + initial_delay: float = 1.0, + backoff_factor: float = 2.0 + ) -> None: + self.max_retries = max_retries + self.retry_statuses = retry_statuses or DEFAULT_RETRY_STATUSES + self.initial_delay = initial_delay + self.backoff_factor = backoff_factor + + async def __call__( + self, + request: ClientRequest, + handler: ClientHandlerType + ) -> ClientResponse: + """Execute request with retry logic.""" + last_response = None + delay = self.initial_delay + + for attempt in range(self.max_retries + 1): + if attempt > 0: + _LOGGER.info( + "Retrying request to %s (attempt %s/%s)", + request.url, + attempt + 1, + self.max_retries + 1 + ) + + # Execute the request + response = await handler(request) + last_response = response + + # Check if we should retry + if response.status not in self.retry_statuses: + return response + + # Don't retry if we've exhausted attempts + if attempt >= self.max_retries: + _LOGGER.warning( + "Max retries (%s) exceeded for %s", + self.max_retries, + request.url + ) + return response + + # Wait before retrying + _LOGGER.debug("Waiting %ss before retry...", delay) + await asyncio.sleep(delay) + delay *= self.backoff_factor + + # Return the last response + return last_response + +Usage example: + +.. 
code-block:: python + + import aiohttp + import asyncio + import logging + from http import HTTPStatus + + _LOGGER = logging.getLogger(__name__) + + RETRY_STATUSES = { + HTTPStatus.TOO_MANY_REQUESTS, + HTTPStatus.INTERNAL_SERVER_ERROR, + HTTPStatus.BAD_GATEWAY, + HTTPStatus.SERVICE_UNAVAILABLE, + HTTPStatus.GATEWAY_TIMEOUT + } + + async def main(): + # Create retry middleware with custom settings + retry_middleware = RetryMiddleware( + max_retries=3, + retry_statuses=RETRY_STATUSES, + initial_delay=0.5, + backoff_factor=2.0 + ) + + async with aiohttp.ClientSession(middlewares=(retry_middleware,)) as session: + # This will automatically retry on server errors + async with session.get("https://httpbin.org/status/500") as resp: + _LOGGER.debug("Final status: %s", resp.status) + + asyncio.run(main()) + +.. _cookbook-combining-middleware: + +Combining Multiple Middleware +----------------------------- + +You can combine multiple middleware to create powerful request pipelines: + +.. code-block:: python + + import time + import logging + from aiohttp import ClientRequest, ClientResponse, ClientHandlerType + + _LOGGER = logging.getLogger(__name__) + + class LoggingMiddleware: + """Middleware that logs request timing and response status.""" + + async def __call__( + self, + request: ClientRequest, + handler: ClientHandlerType + ) -> ClientResponse: + start_time = time.monotonic() + + # Log request + _LOGGER.debug("[REQUEST] %s %s", request.method, request.url) + + # Execute request + response = await handler(request) + + # Log response + duration = time.monotonic() - start_time + _LOGGER.debug("[RESPONSE] %s in %.2fs", response.status, duration) + + return response + + # Combine multiple middleware + async def main(): + # Middleware are applied in order: logging -> auth -> retry -> request + logging_middleware = LoggingMiddleware() + auth_middleware = BasicAuthMiddleware("user", "pass") + retry_middleware = RetryMiddleware(max_retries=2) + + async with 
aiohttp.ClientSession( + middlewares=(logging_middleware, auth_middleware, retry_middleware) + ) as session: + async with session.get("https://httpbin.org/basic-auth/user/pass") as resp: + text = await resp.text() + _LOGGER.debug("Response text: %s", text) + +.. _cookbook-token-refresh-middleware: + +Token Refresh Middleware +------------------------ + +A more advanced example showing JWT token refresh: + +.. code-block:: python + + import asyncio + import time + from http import HTTPStatus + from typing import Union + from aiohttp import ClientRequest, ClientResponse, ClientHandlerType, hdrs + + class TokenRefreshMiddleware: + """Middleware that handles JWT token refresh automatically.""" + + def __init__(self, token_endpoint: str, refresh_token: str) -> None: + self.token_endpoint = token_endpoint + self.refresh_token = refresh_token + self.access_token: Union[str, None] = None + self.token_expires_at: Union[float, None] = None + self._refresh_lock = asyncio.Lock() + + async def _refresh_access_token(self, session) -> str: + """Refresh the access token using the refresh token.""" + async with self._refresh_lock: + # Check if another coroutine already refreshed the token + if self.token_expires_at and time.time() < self.token_expires_at: + return self.access_token + + # Make refresh request without middleware to avoid recursion + async with session.post( + self.token_endpoint, + json={"refresh_token": self.refresh_token}, + middlewares=() # Disable middleware for this request + ) as resp: + resp.raise_for_status() + data = await resp.json() + + if "access_token" not in data: + raise ValueError("No access_token in refresh response") + + self.access_token = data["access_token"] + # Token expires in 1 hour for demo, refresh 5 min early + expires_in = data.get("expires_in", 3600) + self.token_expires_at = time.time() + expires_in - 300 + return self.access_token + + async def __call__( + self, + request: ClientRequest, + handler: ClientHandlerType + ) -> 
ClientResponse: + """Add auth token to request, refreshing if needed.""" + # Skip token for refresh endpoint + if str(request.url).endswith('/token/refresh'): + return await handler(request) + + # Refresh token if needed + if not self.access_token or ( + self.token_expires_at and time.time() >= self.token_expires_at + ): + await self._refresh_access_token(request.session) + + # Add token to request + request.headers[hdrs.AUTHORIZATION] = f"Bearer {self.access_token}" + + # Execute request + response = await handler(request) + + # If we get 401, try refreshing token once + if response.status == HTTPStatus.UNAUTHORIZED: + await self._refresh_access_token(request.session) + request.headers[hdrs.AUTHORIZATION] = f"Bearer {self.access_token}" + response = await handler(request) + + return response + +Best Practices +-------------- + +1. **Keep middleware focused**: Each middleware should have a single responsibility. + +2. **Order matters**: Middleware execute in the order they're listed. Place logging first, + authentication before retry, etc. + +3. **Avoid infinite recursion**: When making HTTP requests inside middleware, either: + + - Use ``middlewares=()`` to disable middleware for internal requests + - Check the request URL/host to skip middleware for specific endpoints + - Use a separate session for internal requests + +4. **Handle errors gracefully**: Don't let middleware errors break the request flow unless + absolutely necessary. + +5. **Use bounded loops**: Always use ``for`` loops with a maximum iteration count instead + of unbounded ``while`` loops to prevent infinite retries. + +6. **Consider performance**: Each middleware adds overhead. For simple cases like adding + static headers, consider using session or request parameters instead. + +7. **Test thoroughly**: Middleware can affect all requests in subtle ways. Test edge cases + like network errors, timeouts, and concurrent requests. 
+ +See Also +-------- + +- :ref:`aiohttp-client-middleware` - Core middleware documentation +- :ref:`aiohttp-client-advanced` - Advanced client usage +- :class:`DigestAuthMiddleware` - Built-in digest authentication middleware diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index d0328529cfd..c22e584cadf 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -28,6 +28,7 @@ autoformatters autogenerates autogeneration awaitable +backoff backend backends backport diff --git a/examples/basic_auth_middleware.py b/examples/basic_auth_middleware.py new file mode 100644 index 00000000000..4c30f477505 --- /dev/null +++ b/examples/basic_auth_middleware.py @@ -0,0 +1,190 @@ +#!/usr/bin/env python3 +""" +Example of using basic authentication middleware with aiohttp client. + +This example shows how to implement a middleware that automatically adds +Basic Authentication headers to all requests. The middleware encodes the +username and password in base64 format as required by the HTTP Basic Auth +specification. + +This example includes a test server that validates basic auth credentials. 
+""" + +import asyncio +import base64 +import binascii +import logging + +from aiohttp import ( + ClientHandlerType, + ClientRequest, + ClientResponse, + ClientSession, + hdrs, + web, +) + +logging.basicConfig(level=logging.DEBUG) +_LOGGER = logging.getLogger(__name__) + + +class BasicAuthMiddleware: + """Middleware that adds Basic Authentication to all requests.""" + + def __init__(self, username: str, password: str) -> None: + self.username = username + self.password = password + self._auth_header = self._encode_credentials() + + def _encode_credentials(self) -> str: + """Encode username and password to base64.""" + credentials = f"{self.username}:{self.password}" + encoded = base64.b64encode(credentials.encode()).decode() + return f"Basic {encoded}" + + async def __call__( + self, + request: ClientRequest, + handler: ClientHandlerType, + ) -> ClientResponse: + """Add Basic Auth header to the request.""" + # Only add auth if not already present + if hdrs.AUTHORIZATION not in request.headers: + request.headers[hdrs.AUTHORIZATION] = self._auth_header + + # Proceed with the request + return await handler(request) + + +class TestServer: + """Test server for basic auth endpoints.""" + + async def handle_basic_auth(self, request: web.Request) -> web.Response: + """Handle basic auth validation.""" + # Get expected credentials from path + expected_user = request.match_info["user"] + expected_pass = request.match_info["pass"] + + # Check if Authorization header is present + auth_header = request.headers.get(hdrs.AUTHORIZATION, "") + + if not auth_header.startswith("Basic "): + return web.Response( + status=401, + text="Unauthorized", + headers={hdrs.WWW_AUTHENTICATE: 'Basic realm="test"'}, + ) + + # Decode the credentials + encoded_creds = auth_header[6:] # Remove "Basic " + try: + decoded = base64.b64decode(encoded_creds).decode() + username, password = decoded.split(":", 1) + except (ValueError, binascii.Error): + return web.Response( + status=401, + text="Invalid 
credentials format", + headers={hdrs.WWW_AUTHENTICATE: 'Basic realm="test"'}, + ) + + # Validate credentials + if username != expected_user or password != expected_pass: + return web.Response( + status=401, + text="Invalid username or password", + headers={hdrs.WWW_AUTHENTICATE: 'Basic realm="test"'}, + ) + + return web.json_response({"authenticated": True, "user": username}) + + async def handle_protected_resource(self, request: web.Request) -> web.Response: + """A protected resource that requires any valid auth.""" + auth_header = request.headers.get(hdrs.AUTHORIZATION, "") + + if not auth_header.startswith("Basic "): + return web.Response( + status=401, + text="Authentication required", + headers={hdrs.WWW_AUTHENTICATE: 'Basic realm="protected"'}, + ) + + return web.json_response( + { + "message": "Access granted to protected resource", + "auth_provided": True, + } + ) + + +async def run_test_server() -> web.AppRunner: + """Run a simple test server with basic auth endpoints.""" + app = web.Application() + server = TestServer() + + app.router.add_get("/basic-auth/{user}/{pass}", server.handle_basic_auth) + app.router.add_get("/protected", server.handle_protected_resource) + + runner = web.AppRunner(app) + await runner.setup() + site = web.TCPSite(runner, "localhost", 8080) + await site.start() + return runner + + +async def run_tests() -> None: + """Run all basic auth middleware tests.""" + # Create middleware instance + auth_middleware = BasicAuthMiddleware("user", "pass") + + # Use middleware in session + async with ClientSession(middlewares=(auth_middleware,)) as session: + # Test 1: Correct credentials endpoint + print("=== Test 1: Correct credentials ===") + async with session.get("http://localhost:8080/basic-auth/user/pass") as resp: + _LOGGER.info("Status: %s", resp.status) + + if resp.status == 200: + data = await resp.json() + _LOGGER.info("Response: %s", data) + print("Authentication successful!") + print(f"Authenticated: {data.get('authenticated')}") + 
print(f"User: {data.get('user')}") + else: + print("Authentication failed!") + print(f"Status: {resp.status}") + text = await resp.text() + print(f"Response: {text}") + + # Test 2: Wrong credentials endpoint + print("\n=== Test 2: Wrong credentials endpoint ===") + async with session.get("http://localhost:8080/basic-auth/other/secret") as resp: + if resp.status == 401: + print("Authentication failed as expected (wrong credentials)") + text = await resp.text() + print(f"Response: {text}") + else: + print(f"Unexpected status: {resp.status}") + + # Test 3: Protected resource + print("\n=== Test 3: Access protected resource ===") + async with session.get("http://localhost:8080/protected") as resp: + if resp.status == 200: + data = await resp.json() + print("Successfully accessed protected resource!") + print(f"Response: {data}") + else: + print(f"Failed to access protected resource: {resp.status}") + + +async def main() -> None: + # Start test server + server = await run_test_server() + + try: + await run_tests() + finally: + await server.cleanup() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/combined_middleware.py b/examples/combined_middleware.py new file mode 100644 index 00000000000..8646a182b98 --- /dev/null +++ b/examples/combined_middleware.py @@ -0,0 +1,320 @@ +#!/usr/bin/env python3 +""" +Example of combining multiple middleware with aiohttp client. + +This example shows how to chain multiple middleware together to create +a powerful request pipeline. Middleware are applied in order, demonstrating +how logging, authentication, and retry logic can work together. + +The order of middleware matters: +1. Logging (outermost) - logs all attempts including retries +2. Authentication - adds auth headers before retry logic +3. 
Retry (innermost) - retries requests on failure +""" + +import asyncio +import base64 +import binascii +import logging +import time +from http import HTTPStatus +from typing import TYPE_CHECKING, Set, Union + +from aiohttp import ( + ClientHandlerType, + ClientRequest, + ClientResponse, + ClientSession, + hdrs, + web, +) + +logging.basicConfig( + level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s" +) +_LOGGER = logging.getLogger(__name__) + + +class LoggingMiddleware: + """Middleware that logs request timing and response status.""" + + async def __call__( + self, + request: ClientRequest, + handler: ClientHandlerType, + ) -> ClientResponse: + start_time = time.monotonic() + + # Log request + _LOGGER.info("[REQUEST] %s %s", request.method, request.url) + + # Execute request + response = await handler(request) + + # Log response + duration = time.monotonic() - start_time + _LOGGER.info( + "[RESPONSE] %s in %.2fs - Status: %s", + request.url.path, + duration, + response.status, + ) + + return response + + +class BasicAuthMiddleware: + """Middleware that adds Basic Authentication to all requests.""" + + def __init__(self, username: str, password: str) -> None: + self.username = username + self.password = password + self._auth_header = self._encode_credentials() + + def _encode_credentials(self) -> str: + """Encode username and password to base64.""" + credentials = f"{self.username}:{self.password}" + encoded = base64.b64encode(credentials.encode()).decode() + return f"Basic {encoded}" + + async def __call__( + self, + request: ClientRequest, + handler: ClientHandlerType, + ) -> ClientResponse: + """Add Basic Auth header to the request.""" + # Only add auth if not already present + if hdrs.AUTHORIZATION not in request.headers: + request.headers[hdrs.AUTHORIZATION] = self._auth_header + _LOGGER.debug("Added Basic Auth header") + + # Proceed with the request + return await handler(request) + + +DEFAULT_RETRY_STATUSES: Set[HTTPStatus] = { + 
HTTPStatus.TOO_MANY_REQUESTS, + HTTPStatus.INTERNAL_SERVER_ERROR, + HTTPStatus.BAD_GATEWAY, + HTTPStatus.SERVICE_UNAVAILABLE, + HTTPStatus.GATEWAY_TIMEOUT, +} + + +class RetryMiddleware: + """Middleware that retries failed requests with exponential backoff.""" + + def __init__( + self, + max_retries: int = 3, + retry_statuses: Union[Set[HTTPStatus], None] = None, + initial_delay: float = 1.0, + backoff_factor: float = 2.0, + ) -> None: + self.max_retries = max_retries + self.retry_statuses = retry_statuses or DEFAULT_RETRY_STATUSES + self.initial_delay = initial_delay + self.backoff_factor = backoff_factor + + async def __call__( + self, + request: ClientRequest, + handler: ClientHandlerType, + ) -> ClientResponse: + """Execute request with retry logic.""" + last_response: Union[ClientResponse, None] = None + delay = self.initial_delay + + for attempt in range(self.max_retries + 1): + if attempt > 0: + _LOGGER.info( + "Retrying request (attempt %s/%s)", + attempt + 1, + self.max_retries + 1, + ) + + # Execute the request + response = await handler(request) + last_response = response + + # Check if we should retry + if response.status not in self.retry_statuses: + return response + + # Don't retry if we've exhausted attempts + if attempt >= self.max_retries: + _LOGGER.warning("Max retries exceeded") + return response + + # Wait before retrying + _LOGGER.debug("Waiting %ss before retry...", delay) + await asyncio.sleep(delay) + delay *= self.backoff_factor + + if TYPE_CHECKING: + assert last_response is not None # Always set since we loop at least once + return last_response + + +class TestServer: + """Test server with stateful endpoints for middleware testing.""" + + def __init__(self) -> None: + self.flaky_counter = 0 + self.protected_counter = 0 + + async def handle_protected(self, request: web.Request) -> web.Response: + """Protected endpoint that requires authentication and is flaky on first attempt.""" + auth_header = request.headers.get(hdrs.AUTHORIZATION, "") 
+ + if not auth_header.startswith("Basic "): + return web.Response( + status=401, + text="Unauthorized", + headers={hdrs.WWW_AUTHENTICATE: 'Basic realm="test"'}, + ) + + # Decode the credentials + encoded_creds = auth_header[6:] # Remove "Basic " + try: + decoded = base64.b64decode(encoded_creds).decode() + username, password = decoded.split(":", 1) + except (ValueError, binascii.Error): + return web.Response( + status=401, + text="Invalid credentials format", + headers={hdrs.WWW_AUTHENTICATE: 'Basic realm="test"'}, + ) + + # Validate credentials + if username != "user" or password != "pass": + return web.Response(status=401, text="Invalid credentials") + + # Fail with 500 on first attempt to test retry + auth combination + self.protected_counter += 1 + if self.protected_counter == 1: + return web.Response( + status=500, text="Internal server error (first attempt)" + ) + + return web.json_response( + { + "message": "Access granted", + "user": username, + "resource": "protected data", + } + ) + + async def handle_flaky(self, request: web.Request) -> web.Response: + """Endpoint that fails a few times before succeeding.""" + self.flaky_counter += 1 + + # Fail the first 2 requests, succeed on the 3rd + if self.flaky_counter <= 2: + return web.Response( + status=503, + text=f"Service temporarily unavailable (attempt {self.flaky_counter})", + ) + + # Reset counter and return success + self.flaky_counter = 0 + return web.json_response( + { + "message": "Success after retries!", + "data": "Important information retrieved", + } + ) + + async def handle_always_fail(self, request: web.Request) -> web.Response: + """Endpoint that always returns an error.""" + return web.Response(status=500, text="Internal server error") + + async def handle_status(self, request: web.Request) -> web.Response: + """Return the status code specified in the path.""" + status = int(request.match_info["status"]) + return web.Response(status=status, text=f"Status: {status}") + + +async def 
run_test_server() -> web.AppRunner: + """Run a test server with various endpoints.""" + app = web.Application() + server = TestServer() + + app.router.add_get("/protected", server.handle_protected) + app.router.add_get("/flaky", server.handle_flaky) + app.router.add_get("/always-fail", server.handle_always_fail) + app.router.add_get("/status/{status}", server.handle_status) + + runner = web.AppRunner(app) + await runner.setup() + site = web.TCPSite(runner, "localhost", 8080) + await site.start() + return runner + + +async def run_tests() -> None: + """Run all the middleware tests.""" + # Create middleware instances + logging_middleware = LoggingMiddleware() + auth_middleware = BasicAuthMiddleware("user", "pass") + retry_middleware = RetryMiddleware(max_retries=2, initial_delay=0.5) + + # Combine middleware - order matters! + # Applied in order: logging -> auth -> retry -> request + async with ClientSession( + middlewares=(logging_middleware, auth_middleware, retry_middleware) + ) as session: + + print( + "=== Test 1: Protected endpoint with auth (fails once, then succeeds) ===" + ) + print("This tests retry + auth working together...") + async with session.get("http://localhost:8080/protected") as resp: + if resp.status == 200: + data = await resp.json() + print(f"Success after retry! Response: {data}") + else: + print(f"Failed with status: {resp.status}") + + print("\n=== Test 2: Flaky endpoint (fails twice, then succeeds) ===") + print("Watch the logs to see retries in action...") + async with session.get("http://localhost:8080/flaky") as resp: + if resp.status == 200: + data = await resp.json() + print(f"Success after retries! 
Response: {data}") + else: + text = await resp.text() + print(f"Failed with status {resp.status}: {text}") + + print("\n=== Test 3: Always failing endpoint ===") + async with session.get("http://localhost:8080/always-fail") as resp: + print(f"Final status after retries: {resp.status}") + + print("\n=== Test 4: Non-retryable status (404) ===") + async with session.get("http://localhost:8080/status/404") as resp: + print(f"Status: {resp.status} (no retries for 404)") + + # Test without middleware for comparison + print("\n=== Test 5: Request without middleware ===") + print("Making a request to protected endpoint without middleware...") + async with session.get( + "http://localhost:8080/protected", middlewares=() + ) as resp: + print(f"Status without middleware: {resp.status}") + if resp.status == 401: + print("Failed as expected - no auth header added") + + +async def main() -> None: + # Start test server + server = await run_test_server() + + try: + await run_tests() + + finally: + await server.cleanup() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/logging_middleware.py b/examples/logging_middleware.py new file mode 100644 index 00000000000..b6345953db2 --- /dev/null +++ b/examples/logging_middleware.py @@ -0,0 +1,169 @@ +#!/usr/bin/env python3 +""" +Example of using logging middleware with aiohttp client. + +This example shows how to implement a middleware that logs request timing +and response status. This is useful for debugging, monitoring, and +understanding the flow of HTTP requests in your application. + +This example includes a test server with various endpoints. 
+""" + +import asyncio +import json +import logging +import time +from typing import Any, Coroutine, List + +from aiohttp import ClientHandlerType, ClientRequest, ClientResponse, ClientSession, web + +logging.basicConfig( + level=logging.DEBUG, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s" +) +_LOGGER = logging.getLogger(__name__) + + +class LoggingMiddleware: + """Middleware that logs request timing and response status.""" + + async def __call__( + self, + request: ClientRequest, + handler: ClientHandlerType, + ) -> ClientResponse: + start_time = time.monotonic() + + # Log request + _LOGGER.info("[REQUEST] %s %s", request.method, request.url) + if request.headers: + _LOGGER.debug("[REQUEST HEADERS] %s", request.headers) + + # Execute request + response = await handler(request) + + # Log response + duration = time.monotonic() - start_time + _LOGGER.info( + "[RESPONSE] %s %s - Status: %s - Duration: %.3fs", + request.method, + request.url, + response.status, + duration, + ) + _LOGGER.debug("[RESPONSE HEADERS] %s", response.headers) + + return response + + +class TestServer: + """Test server for logging middleware demo.""" + + async def handle_hello(self, request: web.Request) -> web.Response: + """Simple hello endpoint.""" + name = request.match_info.get("name", "World") + return web.json_response({"message": f"Hello, {name}!"}) + + async def handle_slow(self, request: web.Request) -> web.Response: + """Endpoint that simulates slow response.""" + delay = float(request.match_info.get("delay", 1)) + await asyncio.sleep(delay) + return web.json_response({"message": "Slow response completed", "delay": delay}) + + async def handle_error(self, request: web.Request) -> web.Response: + """Endpoint that returns an error.""" + status = int(request.match_info.get("status", 500)) + return web.Response(status=status, text=f"Error response with status {status}") + + async def handle_json_data(self, request: web.Request) -> web.Response: + """Endpoint that echoes 
JSON data.""" + try: + data = await request.json() + return web.json_response({"echo": data, "received_at": time.time()}) + except json.JSONDecodeError: + return web.json_response({"error": "Invalid JSON"}, status=400) + + +async def run_test_server() -> web.AppRunner: + """Run a simple test server.""" + app = web.Application() + server = TestServer() + + app.router.add_get("/hello", server.handle_hello) + app.router.add_get("/hello/{name}", server.handle_hello) + app.router.add_get("/slow/{delay}", server.handle_slow) + app.router.add_get("/error/{status}", server.handle_error) + app.router.add_post("/echo", server.handle_json_data) + + runner = web.AppRunner(app) + await runner.setup() + site = web.TCPSite(runner, "localhost", 8080) + await site.start() + return runner + + +async def run_tests() -> None: + """Run all the middleware tests.""" + # Create logging middleware + logging_middleware = LoggingMiddleware() + + # Use middleware in session + async with ClientSession(middlewares=(logging_middleware,)) as session: + # Test 1: Simple GET request + print("\n=== Test 1: Simple GET request ===") + async with session.get("http://localhost:8080/hello") as resp: + data = await resp.json() + print(f"Response: {data}") + + # Test 2: GET with parameter + print("\n=== Test 2: GET with parameter ===") + async with session.get("http://localhost:8080/hello/Alice") as resp: + data = await resp.json() + print(f"Response: {data}") + + # Test 3: Slow request + print("\n=== Test 3: Slow request (2 seconds) ===") + async with session.get("http://localhost:8080/slow/2") as resp: + data = await resp.json() + print(f"Response: {data}") + + # Test 4: Error response + print("\n=== Test 4: Error response ===") + async with session.get("http://localhost:8080/error/404") as resp: + text = await resp.text() + print(f"Response: {text}") + + # Test 5: POST with JSON data + print("\n=== Test 5: POST with JSON data ===") + payload = {"name": "Bob", "age": 30, "city": "New York"} + async with 
session.post("http://localhost:8080/echo", json=payload) as resp: + data = await resp.json() + print(f"Response: {data}") + + # Test 6: Multiple concurrent requests + print("\n=== Test 6: Multiple concurrent requests ===") + coros: List[Coroutine[Any, Any, ClientResponse]] = [] + for i in range(3): + coro = session.get(f"http://localhost:8080/hello/User{i}") + coros.append(coro) + + responses = await asyncio.gather(*coros) + for i, resp in enumerate(responses): + async with resp: + data = await resp.json() + print(f"Concurrent request {i}: {data}") + + +async def main() -> None: + # Start test server + server = await run_test_server() + + try: + await run_tests() + + finally: + # Cleanup server + await server.cleanup() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/retry_middleware.py b/examples/retry_middleware.py new file mode 100644 index 00000000000..c8fa829455a --- /dev/null +++ b/examples/retry_middleware.py @@ -0,0 +1,245 @@ +#!/usr/bin/env python3 +""" +Example of using retry middleware with aiohttp client. + +This example shows how to implement a middleware that automatically retries +failed requests with exponential backoff. The middleware can be configured +with custom retry statuses, maximum retries, and backoff parameters. + +This example includes a test server that simulates various HTTP responses +and can return different status codes on sequential requests. 
+""" + +import asyncio +import logging +from http import HTTPStatus +from typing import TYPE_CHECKING, Dict, List, Set, Union + +from aiohttp import ClientHandlerType, ClientRequest, ClientResponse, ClientSession, web + +logging.basicConfig(level=logging.INFO) +_LOGGER = logging.getLogger(__name__) + +DEFAULT_RETRY_STATUSES: Set[HTTPStatus] = { + HTTPStatus.TOO_MANY_REQUESTS, + HTTPStatus.INTERNAL_SERVER_ERROR, + HTTPStatus.BAD_GATEWAY, + HTTPStatus.SERVICE_UNAVAILABLE, + HTTPStatus.GATEWAY_TIMEOUT, +} + + +class RetryMiddleware: + """Middleware that retries failed requests with exponential backoff.""" + + def __init__( + self, + max_retries: int = 3, + retry_statuses: Union[Set[HTTPStatus], None] = None, + initial_delay: float = 1.0, + backoff_factor: float = 2.0, + ) -> None: + self.max_retries = max_retries + self.retry_statuses = retry_statuses or DEFAULT_RETRY_STATUSES + self.initial_delay = initial_delay + self.backoff_factor = backoff_factor + + async def __call__( + self, + request: ClientRequest, + handler: ClientHandlerType, + ) -> ClientResponse: + """Execute request with retry logic.""" + last_response: Union[ClientResponse, None] = None + delay = self.initial_delay + + for attempt in range(self.max_retries + 1): + if attempt > 0: + _LOGGER.info( + "Retrying request to %s (attempt %s/%s)", + request.url, + attempt + 1, + self.max_retries + 1, + ) + + # Execute the request + response = await handler(request) + last_response = response + + # Check if we should retry + if response.status not in self.retry_statuses: + return response + + # Don't retry if we've exhausted attempts + if attempt >= self.max_retries: + _LOGGER.warning( + "Max retries (%s) exceeded for %s", self.max_retries, request.url + ) + return response + + # Wait before retrying + _LOGGER.debug("Waiting %ss before retry...", delay) + await asyncio.sleep(delay) + delay *= self.backoff_factor + + # Return the last response + if TYPE_CHECKING: + assert last_response is not None # Always set 
since we loop at least once + return last_response + + +class TestServer: + """Test server with stateful endpoints for retry testing.""" + + def __init__(self) -> None: + self.request_counters: Dict[str, int] = {} + self.status_sequences: Dict[str, List[int]] = { + "eventually-ok": [500, 503, 502, 200], # Fails 3 times, then succeeds + "always-error": [500, 500, 500, 500], # Always fails + "immediate-ok": [200], # Succeeds immediately + "flaky": [503, 200], # Fails once, then succeeds + } + + async def handle_status(self, request: web.Request) -> web.Response: + """Return the status code specified in the path.""" + status = int(request.match_info["status"]) + return web.Response(status=status, text=f"Status: {status}") + + async def handle_status_sequence(self, request: web.Request) -> web.Response: + """Return different status codes on sequential requests.""" + path = request.path + + # Initialize counter for this path if needed + if path not in self.request_counters: + self.request_counters[path] = 0 + + # Get the status sequence for this path + sequence_name = request.match_info["name"] + if sequence_name not in self.status_sequences: + return web.Response(status=404, text="Sequence not found") + + sequence = self.status_sequences[sequence_name] + + # Get the current status based on request count + count = self.request_counters[path] + if count < len(sequence): + status = sequence[count] + else: + # After sequence ends, always return the last status + status = sequence[-1] + + # Increment counter for next request + self.request_counters[path] += 1 + + return web.Response( + status=status, text=f"Request #{count + 1}: Status {status}" + ) + + async def handle_delay(self, request: web.Request) -> web.Response: + """Delay response by specified seconds.""" + delay = float(request.match_info["delay"]) + await asyncio.sleep(delay) + return web.json_response({"delay": delay, "message": "Response after delay"}) + + async def handle_reset(self, request: web.Request) -> 
web.Response: + """Reset request counters.""" + self.request_counters = {} + return web.Response(text="Counters reset") + + +async def run_test_server() -> web.AppRunner: + """Run a simple test server.""" + app = web.Application() + server = TestServer() + + app.router.add_get("/status/{status}", server.handle_status) + app.router.add_get("/sequence/{name}", server.handle_status_sequence) + app.router.add_get("/delay/{delay}", server.handle_delay) + app.router.add_post("/reset", server.handle_reset) + + runner = web.AppRunner(app) + await runner.setup() + site = web.TCPSite(runner, "localhost", 8080) + await site.start() + return runner + + +async def run_tests() -> None: + """Run all retry middleware tests.""" + # Create retry middleware with custom settings + retry_middleware = RetryMiddleware( + max_retries=3, + retry_statuses=DEFAULT_RETRY_STATUSES, + initial_delay=0.5, + backoff_factor=2.0, + ) + + async with ClientSession(middlewares=(retry_middleware,)) as session: + # Reset counters before tests + await session.post("http://localhost:8080/reset") + + # Test 1: Request that succeeds immediately + print("=== Test 1: Immediate success ===") + async with session.get("http://localhost:8080/sequence/immediate-ok") as resp: + text = await resp.text() + print(f"Final status: {resp.status}") + print(f"Response: {text}") + print("Success - no retries needed\n") + + # Test 2: Request that eventually succeeds after retries + print("=== Test 2: Eventually succeeds (500->503->502->200) ===") + async with session.get("http://localhost:8080/sequence/eventually-ok") as resp: + text = await resp.text() + print(f"Final status: {resp.status}") + print(f"Response: {text}") + if resp.status == 200: + print("Success after retries!\n") + else: + print("Failed after retries\n") + + # Test 3: Request that always fails + print("=== Test 3: Always fails (500->500->500->500) ===") + async with session.get("http://localhost:8080/sequence/always-error") as resp: + text = await 
resp.text() + print(f"Final status: {resp.status}") + print(f"Response: {text}") + print("Failed after exhausting all retries\n") + + # Test 4: Flaky service (fails once then succeeds) + print("=== Test 4: Flaky service (503->200) ===") + await session.post("http://localhost:8080/reset") # Reset counters + async with session.get("http://localhost:8080/sequence/flaky") as resp: + text = await resp.text() + print(f"Final status: {resp.status}") + print(f"Response: {text}") + print("Success after one retry!\n") + + # Test 5: Non-retryable status + print("=== Test 5: Non-retryable status (404) ===") + async with session.get("http://localhost:8080/status/404") as resp: + print(f"Final status: {resp.status}") + print("Failed immediately - not a retryable status\n") + + # Test 6: Delayed response + print("=== Test 6: Testing with delay endpoint ===") + try: + async with session.get("http://localhost:8080/delay/0.5") as resp: + print(f"Status: {resp.status}") + data = await resp.json() + print(f"Response received after delay: {data}\n") + except asyncio.TimeoutError: + print("Request timed out\n") + + +async def main() -> None: + # Start test server + server = await run_test_server() + + try: + await run_tests() + finally: + await server.cleanup() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/token_refresh_middleware.py b/examples/token_refresh_middleware.py new file mode 100644 index 00000000000..8a7ff963850 --- /dev/null +++ b/examples/token_refresh_middleware.py @@ -0,0 +1,336 @@ +#!/usr/bin/env python3 +""" +Example of using token refresh middleware with aiohttp client. + +This example shows how to implement a middleware that handles JWT token +refresh automatically. The middleware: +- Adds bearer tokens to requests +- Detects when tokens are expired +- Automatically refreshes tokens when needed +- Handles concurrent requests during token refresh + +This example includes a test server that simulates a JWT auth system. 
+Note: This is a simplified example for demonstration purposes. +In production, use proper JWT libraries and secure token storage. +""" + +import asyncio +import hashlib +import json +import logging +import secrets +import time +from http import HTTPStatus +from typing import TYPE_CHECKING, Any, Coroutine, Dict, List, Union + +from aiohttp import ( + ClientHandlerType, + ClientRequest, + ClientResponse, + ClientSession, + hdrs, + web, +) + +logging.basicConfig(level=logging.INFO) +_LOGGER = logging.getLogger(__name__) + + +class TokenRefreshMiddleware: + """Middleware that handles JWT token refresh automatically.""" + + def __init__(self, token_endpoint: str, refresh_token: str) -> None: + self.token_endpoint = token_endpoint + self.refresh_token = refresh_token + self.access_token: Union[str, None] = None + self.token_expires_at: Union[float, None] = None + self._refresh_lock = asyncio.Lock() + + async def _refresh_access_token(self, session: ClientSession) -> str: + """Refresh the access token using the refresh token.""" + async with self._refresh_lock: + # Check if another coroutine already refreshed the token + if ( + self.token_expires_at + and time.time() < self.token_expires_at + and self.access_token + ): + _LOGGER.debug("Token already refreshed by another request") + return self.access_token + + _LOGGER.info("Refreshing access token...") + + # Make refresh request without middleware to avoid recursion + async with session.post( + self.token_endpoint, + json={"refresh_token": self.refresh_token}, + middlewares=(), # Disable middleware for this request + ) as resp: + resp.raise_for_status() + data = await resp.json() + + if "access_token" not in data: + raise ValueError("No access_token in refresh response") + + self.access_token = data["access_token"] + # Token expires in 5 minutes for demo, refresh 30 seconds early + expires_in = data.get("expires_in", 300) + self.token_expires_at = time.time() + expires_in - 30 + + _LOGGER.info( + "Token refreshed 
successfully, expires in %s seconds", expires_in + ) + if TYPE_CHECKING: + assert self.access_token is not None # Just assigned above + return self.access_token + + async def __call__( + self, + request: ClientRequest, + handler: ClientHandlerType, + ) -> ClientResponse: + """Add auth token to request, refreshing if needed.""" + # Skip token for refresh endpoint to avoid recursion + if str(request.url).endswith("/token/refresh"): + return await handler(request) + + # Refresh token if needed + if not self.access_token or ( + self.token_expires_at and time.time() >= self.token_expires_at + ): + await self._refresh_access_token(request.session) + + # Add token to request + request.headers[hdrs.AUTHORIZATION] = f"Bearer {self.access_token}" + _LOGGER.debug("Added Bearer token to request") + + # Execute request + response = await handler(request) + + # If we get 401, try refreshing token once + if response.status == HTTPStatus.UNAUTHORIZED: + _LOGGER.info("Got 401, attempting token refresh...") + await self._refresh_access_token(request.session) + request.headers[hdrs.AUTHORIZATION] = f"Bearer {self.access_token}" + response = await handler(request) + + return response + + +class TestServer: + """Test server with JWT-like token authentication.""" + + def __init__(self) -> None: + self.tokens_db: Dict[str, Dict[str, Union[str, float]]] = {} + self.refresh_tokens_db: Dict[str, Dict[str, Union[str, float]]] = { + # Hash of refresh token -> user data + hashlib.sha256(b"demo_refresh_token_12345").hexdigest(): { + "user_id": "user123", + "username": "testuser", + "issued_at": time.time(), + } + } + + def generate_access_token(self) -> str: + """Generate a secure random access token.""" + return secrets.token_urlsafe(32) + + async def _process_token_refresh(self, data: Dict[str, str]) -> web.Response: + """Process the token refresh request.""" + refresh_token = data.get("refresh_token") + + if not refresh_token: + return web.json_response({"error": "refresh_token required"}, 
status=400) + + # Hash the refresh token to look it up + refresh_token_hash = hashlib.sha256(refresh_token.encode()).hexdigest() + + if refresh_token_hash not in self.refresh_tokens_db: + return web.json_response({"error": "Invalid refresh token"}, status=401) + + user_data = self.refresh_tokens_db[refresh_token_hash] + + # Generate new access token + access_token = self.generate_access_token() + expires_in = 300 # 5 minutes for demo + + # Store the access token with expiry + token_hash = hashlib.sha256(access_token.encode()).hexdigest() + self.tokens_db[token_hash] = { + "user_id": user_data["user_id"], + "username": user_data["username"], + "expires_at": time.time() + expires_in, + "issued_at": time.time(), + } + + # Clean up expired tokens periodically + current_time = time.time() + self.tokens_db = { + k: v + for k, v in self.tokens_db.items() + if isinstance(v["expires_at"], float) and v["expires_at"] > current_time + } + + return web.json_response( + { + "access_token": access_token, + "token_type": "Bearer", + "expires_in": expires_in, + } + ) + + async def handle_token_refresh(self, request: web.Request) -> web.Response: + """Handle token refresh requests.""" + try: + data = await request.json() + return await self._process_token_refresh(data) + except json.JSONDecodeError: + return web.json_response({"error": "Invalid request"}, status=400) + + async def verify_bearer_token( + self, request: web.Request + ) -> Union[Dict[str, Union[str, float]], None]: + """Verify bearer token and return user data if valid.""" + auth_header = request.headers.get(hdrs.AUTHORIZATION, "") + + if not auth_header.startswith("Bearer "): + return None + + token = auth_header[7:] # Remove "Bearer " + token_hash = hashlib.sha256(token.encode()).hexdigest() + + # Check if token exists and is not expired + if token_hash in self.tokens_db: + token_data = self.tokens_db[token_hash] + if ( + isinstance(token_data["expires_at"], float) + and token_data["expires_at"] > time.time() + ): + 
return token_data + + return None + + async def handle_protected_resource(self, request: web.Request) -> web.Response: + """Protected endpoint that requires valid bearer token.""" + user_data = await self.verify_bearer_token(request) + + if not user_data: + return web.json_response({"error": "Invalid or expired token"}, status=401) + + return web.json_response( + { + "message": "Access granted to protected resource", + "user": user_data["username"], + "data": "Secret information", + } + ) + + async def handle_user_info(self, request: web.Request) -> web.Response: + """Another protected endpoint.""" + user_data = await self.verify_bearer_token(request) + + if not user_data: + return web.json_response({"error": "Invalid or expired token"}, status=401) + + return web.json_response( + { + "user_id": user_data["user_id"], + "username": user_data["username"], + "email": f"{user_data['username']}@example.com", + "roles": ["user", "admin"], + } + ) + + +async def run_test_server() -> web.AppRunner: + """Run a test server with JWT auth endpoints.""" + test_server = TestServer() + app = web.Application() + app.router.add_post("/token/refresh", test_server.handle_token_refresh) + app.router.add_get("/api/protected", test_server.handle_protected_resource) + app.router.add_get("/api/user", test_server.handle_user_info) + + runner = web.AppRunner(app) + await runner.setup() + site = web.TCPSite(runner, "localhost", 8080) + await site.start() + return runner + + +async def run_tests() -> None: + """Run all token refresh middleware tests.""" + # Create token refresh middleware + # In a real app, this refresh token would be securely stored + token_middleware = TokenRefreshMiddleware( + token_endpoint="http://localhost:8080/token/refresh", + refresh_token="demo_refresh_token_12345", + ) + + async with ClientSession(middlewares=(token_middleware,)) as session: + print("=== Test 1: First request (will trigger token refresh) ===") + async with 
session.get("http://localhost:8080/api/protected") as resp: + if resp.status == 200: + data = await resp.json() + print(f"Success! Response: {data}") + else: + print(f"Failed with status: {resp.status}") + + print("\n=== Test 2: Second request (uses cached token) ===") + async with session.get("http://localhost:8080/api/user") as resp: + if resp.status == 200: + data = await resp.json() + print(f"User info: {data}") + else: + print(f"Failed with status: {resp.status}") + + print("\n=== Test 3: Multiple concurrent requests ===") + print("(Should only refresh token once)") + coros: List[Coroutine[Any, Any, ClientResponse]] = [] + for i in range(3): + coro = session.get("http://localhost:8080/api/protected") + coros.append(coro) + + responses = await asyncio.gather(*coros) + for i, resp in enumerate(responses): + async with resp: + if resp.status == 200: + print(f"Request {i + 1}: Success") + else: + print(f"Request {i + 1}: Failed with {resp.status}") + + print("\n=== Test 4: Simulate token expiry ===") + # For demo purposes, force token expiry + token_middleware.token_expires_at = time.time() - 1 + + print("Token expired, next request should trigger refresh...") + async with session.get("http://localhost:8080/api/protected") as resp: + if resp.status == 200: + data = await resp.json() + print(f"Success after token refresh! 
Response: {data}") + else: + print(f"Failed with status: {resp.status}") + + print("\n=== Test 5: Request without middleware (no auth) ===") + # Make a request without any middleware to show the difference + async with session.get( + "http://localhost:8080/api/protected", + middlewares=(), # Bypass all middleware for this request + ) as resp: + print(f"Status: {resp.status}") + if resp.status == 401: + error = await resp.json() + print(f"Failed as expected without auth: {error}") + + +async def main() -> None: + # Start test server + server = await run_test_server() + + try: + await run_tests() + finally: + await server.cleanup() + + +if __name__ == "__main__": + asyncio.run(main()) From ca98b978f09324429fc87d6df74db905a68af71c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 23 May 2025 17:39:01 +0100 Subject: [PATCH 1438/1511] [PR #10972/a023a245 backport][3.12] Upgrade to llhttp 3.9 (#10973) **This is a backport of PR #10972 as merged into master (a023a245f675b77c746d4cac37ac5289e4196070).** Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/10972.feature.rst | 1 + vendor/llhttp | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 CHANGES/10972.feature.rst diff --git a/CHANGES/10972.feature.rst b/CHANGES/10972.feature.rst new file mode 100644 index 00000000000..1d3779a3969 --- /dev/null +++ b/CHANGES/10972.feature.rst @@ -0,0 +1 @@ +Upgraded to LLHTTP 9.3.0 -- by :user:`Dreamsorcerer`. 
diff --git a/vendor/llhttp b/vendor/llhttp index b0b279fb5a6..36151b9a7d6 160000 --- a/vendor/llhttp +++ b/vendor/llhttp @@ -1 +1 @@ -Subproject commit b0b279fb5a617ab3bc2fc11c5f8bd937aac687c1 +Subproject commit 36151b9a7d6320072e24e472a769a5e09f9e969d From 8368069d3f7ac363d15f9312eb1a7edbfdd66736 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 23 May 2025 12:04:09 -0500 Subject: [PATCH 1439/1511] [PR #10968/ff7feaf4 backport][3.12] Update Key Features to mention client middleware (#10975) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/10968.feature.rst | 1 + docs/client_reference.rst | 1 - docs/index.rst | 2 ++ 3 files changed, 3 insertions(+), 1 deletion(-) create mode 120000 CHANGES/10968.feature.rst diff --git a/CHANGES/10968.feature.rst b/CHANGES/10968.feature.rst new file mode 120000 index 00000000000..b565aa68ee0 --- /dev/null +++ b/CHANGES/10968.feature.rst @@ -0,0 +1 @@ +9732.feature.rst \ No newline at end of file diff --git a/docs/client_reference.rst b/docs/client_reference.rst index cd825b403a0..fa0a50425af 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -2051,7 +2051,6 @@ Utilities :return: encoded authentication data, :class:`str`. - .. class:: DigestAuthMiddleware(login, password) HTTP digest authentication client middleware. diff --git a/docs/index.rst b/docs/index.rst index 4ce20aca643..f9c4a4b2c54 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -23,6 +23,8 @@ Key Features without the Callback Hell. - Web-server has :ref:`aiohttp-web-middlewares`, :ref:`aiohttp-web-signals` and pluggable routing. +- Client supports :ref:`middleware <aiohttp-client-middleware>` for + customizing request/response processing. .. 
_aiohttp-installation: From bfe0bd18eb3df343f054c9259346d126d1742436 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 23 May 2025 13:47:30 -0500 Subject: [PATCH 1440/1511] [PR #10977/48f5324b backport][3.12] Fix flakey test_aiohttp_request_ctx_manager_close_sess_on_error test (#10980) Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/test_client_functional.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index ff9a33bda1b..6a031de6a35 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -3393,6 +3393,9 @@ async def handler(request): pass assert cm._session.closed + # Allow event loop to process transport cleanup + # on Python < 3.11 + await asyncio.sleep(0) async def test_aiohttp_request_ctx_manager_not_found() -> None: From b21ae981269fe344bcc570ed443093f2f5f4d4ef Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 23 May 2025 19:25:37 +0000 Subject: [PATCH 1441/1511] [PR #10971/1ee187c0 backport][3.12] Fix `WebSocketResponse.prepared` not correctly reflect the WebSocket's prepared state (#10983) Co-authored-by: J. 
Nick Koston <nick@koston.org> fixes #6009 --- CHANGES/6009.bugfix.rst | 1 + aiohttp/web_ws.py | 4 + tests/test_web_websocket.py | 2 +- tests/test_web_websocket_functional.py | 113 +++++++++++++++++++++++++ 4 files changed, 119 insertions(+), 1 deletion(-) create mode 100644 CHANGES/6009.bugfix.rst diff --git a/CHANGES/6009.bugfix.rst b/CHANGES/6009.bugfix.rst new file mode 100644 index 00000000000..6462da31869 --- /dev/null +++ b/CHANGES/6009.bugfix.rst @@ -0,0 +1 @@ +Fixed ``WebSocketResponse.prepared`` property to correctly reflect the prepared state, especially during timeout scenarios -- by :user:`bdraco` diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index 439b8049987..575f9a3dc85 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -354,6 +354,10 @@ def can_prepare(self, request: BaseRequest) -> WebSocketReady: else: return WebSocketReady(True, protocol) + @property + def prepared(self) -> bool: + return self._writer is not None + @property def closed(self) -> bool: return self._closed diff --git a/tests/test_web_websocket.py b/tests/test_web_websocket.py index 390d6224d3d..3a285b76aad 100644 --- a/tests/test_web_websocket.py +++ b/tests/test_web_websocket.py @@ -639,4 +639,4 @@ async def test_get_extra_info( await ws.prepare(req) ws._writer = ws_transport - assert ws.get_extra_info(valid_key, default_value) == expected_result + assert expected_result == ws.get_extra_info(valid_key, default_value) diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index 0229809592a..f7f5c31356c 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -1281,3 +1281,116 @@ async def handler(request: web.Request) -> web.WebSocketResponse: ) await client.server.close() assert close_code == WSCloseCode.OK + + +async def test_websocket_prepare_timeout_close_issue( + loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient +) -> None: + """Test that WebSocket can handle prepare with 
early returns. + + This is a regression test for issue #6009 where the prepared property + incorrectly checked _payload_writer instead of _writer. + """ + + async def handler(request: web.Request) -> web.WebSocketResponse: + ws = web.WebSocketResponse() + assert ws.can_prepare(request) + await ws.prepare(request) + await ws.send_str("test") + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/ws", handler) + client = await aiohttp_client(app) + + # Connect via websocket + ws = await client.ws_connect("/ws") + msg = await ws.receive() + assert msg.type is WSMsgType.TEXT + assert msg.data == "test" + await ws.close() + + +async def test_websocket_prepare_timeout_from_issue_reproducer( + loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient +) -> None: + """Test websocket behavior when prepare is interrupted. + + This test verifies the fix for issue #6009 where close() would + fail after prepare() was interrupted. + """ + prepare_complete = asyncio.Event() + close_complete = asyncio.Event() + + async def handler(request: web.Request) -> web.WebSocketResponse: + ws = web.WebSocketResponse() + + # Prepare the websocket + await ws.prepare(request) + prepare_complete.set() + + # Send a message to confirm connection works + await ws.send_str("connected") + + # Wait for client to close + msg = await ws.receive() + assert msg.type is WSMsgType.CLOSE + await ws.close() + close_complete.set() + + return ws + + app = web.Application() + app.router.add_route("GET", "/ws", handler) + client = await aiohttp_client(app) + + # Connect and verify the connection works + ws = await client.ws_connect("/ws") + await prepare_complete.wait() + + msg = await ws.receive() + assert msg.type is WSMsgType.TEXT + assert msg.data == "connected" + + # Close the connection + await ws.close() + await close_complete.wait() + + +async def test_websocket_prepared_property( + loop: asyncio.AbstractEventLoop, aiohttp_client: AiohttpClient +) -> None: + 
"""Test that WebSocketResponse.prepared property correctly reflects state.""" + prepare_called = asyncio.Event() + + async def handler(request: web.Request) -> web.WebSocketResponse: + ws = web.WebSocketResponse() + + # Initially not prepared + initial_state = ws.prepared + assert not initial_state + + # After prepare() is called, should be prepared + await ws.prepare(request) + prepare_called.set() + + # Check prepared state + prepared_state = ws.prepared + assert prepared_state + + # Send a message to verify the connection works + await ws.send_str("test") + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + + ws = await client.ws_connect("/") + await prepare_called.wait() + msg = await ws.receive() + assert msg.type is WSMsgType.TEXT + assert msg.data == "test" + await ws.close() From 12ce8115da48f7db7f61fb2c267afffc3814ac8b Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 23 May 2025 19:43:40 +0000 Subject: [PATCH 1442/1511] [PR #10981/2617ab23 backport][3.12] Fix flakey test_client_middleware_retry_reuses_connection test (#10986) --- tests/test_client_middleware.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/test_client_middleware.py b/tests/test_client_middleware.py index 9d49b750333..e698e8ee825 100644 --- a/tests/test_client_middleware.py +++ b/tests/test_client_middleware.py @@ -863,8 +863,13 @@ async def test_client_middleware_retry_reuses_connection( aiohttp_server: AiohttpServer, ) -> None: """Test that connections are reused when middleware performs retries.""" + request_count = 0 async def handler(request: web.Request) -> web.Response: + nonlocal request_count + request_count += 1 + if request_count == 1: + return web.Response(status=400) # First request returns 400 with no body return web.Response(text="OK") class TrackingConnector(TCPConnector): @@ -891,7 +896,7 @@ 
async def __call__( while True: self.attempt_count += 1 response = await handler(request) - if retry_count == 0: + if response.status == 400 and retry_count == 0: retry_count += 1 continue return response From 6a60fb79b78b5b5f64494ff34d7db81390d1d46d Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Fri, 23 May 2025 14:44:10 -0500 Subject: [PATCH 1443/1511] [PR #10970/bb5fc59 backport][3.12] Add warning about consuming the payload in middleware (#10984) --- CHANGES/2914.doc.rst | 4 ++ docs/web_advanced.rst | 99 ++++++++++++++++++++++++++++++++++++++++--- 2 files changed, 97 insertions(+), 6 deletions(-) create mode 100644 CHANGES/2914.doc.rst diff --git a/CHANGES/2914.doc.rst b/CHANGES/2914.doc.rst new file mode 100644 index 00000000000..25592bf79bc --- /dev/null +++ b/CHANGES/2914.doc.rst @@ -0,0 +1,4 @@ +Improved documentation for middleware by adding warnings and examples about +request body stream consumption. The documentation now clearly explains that +request body streams can only be read once and provides best practices for +sharing parsed request data between middleware and handlers -- by :user:`bdraco`. diff --git a/docs/web_advanced.rst b/docs/web_advanced.rst index 070bae34f10..a4ca513b572 100644 --- a/docs/web_advanced.rst +++ b/docs/web_advanced.rst @@ -569,10 +569,14 @@ A *middleware* is a coroutine that can modify either the request or response. For example, here's a simple *middleware* which appends ``' wink'`` to the response:: - from aiohttp.web import middleware + from aiohttp import web + from typing import Callable, Awaitable - @middleware - async def middleware(request, handler): + @web.middleware + async def middleware( + request: web.Request, + handler: Callable[[web.Request], Awaitable[web.StreamResponse]] + ) -> web.StreamResponse: resp = await handler(request) resp.text = resp.text + ' wink' return resp @@ -614,20 +618,27 @@ post-processing like handling *CORS* and so on. 
The following code demonstrates middlewares execution order:: from aiohttp import web + from typing import Callable, Awaitable - async def test(request): + async def test(request: web.Request) -> web.Response: print('Handler function called') return web.Response(text="Hello") @web.middleware - async def middleware1(request, handler): + async def middleware1( + request: web.Request, + handler: Callable[[web.Request], Awaitable[web.StreamResponse]] + ) -> web.StreamResponse: print('Middleware 1 called') response = await handler(request) print('Middleware 1 finished') return response @web.middleware - async def middleware2(request, handler): + async def middleware2( + request: web.Request, + handler: Callable[[web.Request], Awaitable[web.StreamResponse]] + ) -> web.StreamResponse: print('Middleware 2 called') response = await handler(request) print('Middleware 2 finished') @@ -646,6 +657,82 @@ Produced output:: Middleware 2 finished Middleware 1 finished +Request Body Stream Consumption +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. warning:: + + When middleware reads the request body (using :meth:`~aiohttp.web.BaseRequest.read`, + :meth:`~aiohttp.web.BaseRequest.text`, :meth:`~aiohttp.web.BaseRequest.json`, or + :meth:`~aiohttp.web.BaseRequest.post`), the body stream is consumed. However, these + high-level methods cache their result, so subsequent calls from the handler or other + middleware will return the same cached value. + + The important distinction is: + + - High-level methods (:meth:`~aiohttp.web.BaseRequest.read`, :meth:`~aiohttp.web.BaseRequest.text`, + :meth:`~aiohttp.web.BaseRequest.json`, :meth:`~aiohttp.web.BaseRequest.post`) cache their + results internally, so they can be called multiple times and will return the same value. + - Direct stream access via :attr:`~aiohttp.web.BaseRequest.content` does NOT have this + caching behavior. 
Once you read from ``request.content`` directly (e.g., using + ``await request.content.read()``), subsequent reads will return empty bytes. + +Consider this middleware that logs request bodies:: + + from aiohttp import web + from typing import Callable, Awaitable + + async def logging_middleware( + request: web.Request, + handler: Callable[[web.Request], Awaitable[web.StreamResponse]] + ) -> web.StreamResponse: + # This consumes the request body stream + body = await request.text() + print(f"Request body: {body}") + return await handler(request) + + async def handler(request: web.Request) -> web.Response: + # This will return the same value that was read in the middleware + # (i.e., the cached result, not an empty string) + body = await request.text() + return web.Response(text=f"Received: {body}") + +In contrast, when accessing the stream directly (not recommended in middleware):: + + async def stream_middleware( + request: web.Request, + handler: Callable[[web.Request], Awaitable[web.StreamResponse]] + ) -> web.StreamResponse: + # Reading directly from the stream - this consumes it! 
+ data = await request.content.read() + print(f"Stream data: {data}") + return await handler(request) + + async def handler(request: web.Request) -> web.Response: + # This will return empty bytes because the stream was already consumed + data = await request.content.read() + # data will be b'' (empty bytes) + + # However, high-level methods would still work if called for the first time: + # body = await request.text() # This would read from internal cache if available + return web.Response(text=f"Received: {data}") + +When working with raw stream data that needs to be shared between middleware and handlers:: + + async def stream_parsing_middleware( + request: web.Request, + handler: Callable[[web.Request], Awaitable[web.StreamResponse]] + ) -> web.StreamResponse: + # Read stream once and store the data + raw_data = await request.content.read() + request['raw_body'] = raw_data + return await handler(request) + + async def handler(request: web.Request) -> web.Response: + # Access the stored data instead of reading the stream again + raw_data = request.get('raw_body', b'') + return web.Response(body=raw_data) + Example ^^^^^^^ From fa088d0782d6a63ac3a1c182626692696b82eea7 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 23 May 2025 15:44:30 -0500 Subject: [PATCH 1444/1511] [PR #10988/54f1a84f backport][3.12] Add missing prepared method for WebSocketResponse to docs (#10990) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/10988.bugfix.rst | 1 + CHANGES/6009.bugfix.rst | 2 +- docs/web_reference.rst | 5 +++++ 3 files changed, 7 insertions(+), 1 deletion(-) create mode 120000 CHANGES/10988.bugfix.rst diff --git a/CHANGES/10988.bugfix.rst b/CHANGES/10988.bugfix.rst new file mode 120000 index 00000000000..6e737bb336c --- /dev/null +++ b/CHANGES/10988.bugfix.rst @@ -0,0 +1 @@ +6009.bugfix.rst \ No newline at end of file diff --git a/CHANGES/6009.bugfix.rst b/CHANGES/6009.bugfix.rst index 6462da31869..a530832c8a9 100644 --- a/CHANGES/6009.bugfix.rst +++ b/CHANGES/6009.bugfix.rst @@ -1 +1 @@ -Fixed ``WebSocketResponse.prepared`` property to correctly reflect the prepared state, especially during timeout scenarios -- by :user:`bdraco` +Fixed :py:attr:`~aiohttp.web.WebSocketResponse.prepared` property to correctly reflect the prepared state, especially during timeout scenarios -- by :user:`bdraco` diff --git a/docs/web_reference.rst b/docs/web_reference.rst index f2954b06b51..bcf20817aab 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -1076,6 +1076,11 @@ and :ref:`aiohttp-web-signals` handlers:: of closing. :const:`~aiohttp.WSMsgType.CLOSE` message has been received from peer. + .. attribute:: prepared + + Read-only :class:`bool` property, ``True`` if :meth:`prepare` has + been called, ``False`` otherwise. + .. attribute:: close_code Read-only property, close code from peer. It is set to ``None`` on From 560ffbfaaaab89d0148ce572cd130782dd662f32 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Fri, 23 May 2025 18:33:06 -0500 Subject: [PATCH 1445/1511] Release 3.12.0rc0 (#10987) --- CHANGES.rst | 262 ++++++++++++++++++++++++++++++++++++++++++++ aiohttp/__init__.py | 2 +- 2 files changed, 263 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index c0a9b20f200..3ea3455294d 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,268 @@ .. 
towncrier release notes start +3.12.0rc0 (2025-05-23) +====================== + +Bug fixes +--------- + +- Fixed :py:attr:`~aiohttp.web.WebSocketResponse.prepared` property to correctly reflect the prepared state, especially during timeout scenarios -- by :user:`bdraco` + + + *Related issues and pull requests on GitHub:* + :issue:`6009`, :issue:`10988`. + + + +- Response is now always True, instead of using MutableMapping behaviour (False when map is empty) + + + *Related issues and pull requests on GitHub:* + :issue:`10119`. + + + +- Fixed connection reuse for file-like data payloads by ensuring buffer + truncation respects content-length boundaries and preventing premature + connection closure race -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10325`, :issue:`10915`, :issue:`10941`, :issue:`10943`. + + + +- Fixed pytest plugin to not use deprecated :py:mod:`asyncio` policy APIs. + + + *Related issues and pull requests on GitHub:* + :issue:`10851`. + + + +- Fixed :py:class:`~aiohttp.resolver.AsyncResolver` not using the ``loop`` argument in versions 3.x where it should still be supported -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10951`. + + + + +Features +-------- + +- Added a comprehensive HTTP Digest Authentication client middleware (DigestAuthMiddleware) + that implements RFC 7616. The middleware supports all standard hash algorithms + (MD5, SHA, SHA-256, SHA-512) with session variants, handles both 'auth' and + 'auth-int' quality of protection options, and automatically manages the + authentication flow by intercepting 401 responses and retrying with proper + credentials -- by :user:`feus4177`, :user:`TimMenninger`, and :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`2213`, :issue:`10725`. + + + +- Added client middleware support -- by :user:`bdraco` and :user:`Dreamsorcerer`. 
+ + This change allows users to add middleware to the client session and requests, enabling features like + authentication, logging, and request/response modification without modifying the core + request logic. Additionally, the ``session`` attribute was added to ``ClientRequest``, + allowing middleware to access the session for making additional requests. + + + *Related issues and pull requests on GitHub:* + :issue:`9732`, :issue:`10902`, :issue:`10945`, :issue:`10952`, :issue:`10959`, :issue:`10968`. + + + +- Allow user setting zlib compression backend -- by :user:`TimMenninger` + + This change allows the user to call :func:`aiohttp.set_zlib_backend()` with the + zlib compression module of their choice. Default behavior continues to use + the builtin ``zlib`` library. + + + *Related issues and pull requests on GitHub:* + :issue:`9798`. + + + +- Added support for overriding the base URL with an absolute one in client sessions + -- by :user:`vivodi`. + + + *Related issues and pull requests on GitHub:* + :issue:`10074`. + + + +- Added ``host`` parameter to ``aiohttp_server`` fixture -- by :user:`christianwbrock`. + + + *Related issues and pull requests on GitHub:* + :issue:`10120`. + + + +- Detect blocking calls in coroutines using BlockBuster -- by :user:`cbornet`. + + + *Related issues and pull requests on GitHub:* + :issue:`10433`. + + + +- Added ``socket_factory`` to :py:class:`aiohttp.TCPConnector` to allow specifying custom socket options + -- by :user:`TimMenninger`. + + + *Related issues and pull requests on GitHub:* + :issue:`10474`, :issue:`10520`, :issue:`10961`, :issue:`10962`. + + + +- Started building armv7l manylinux wheels -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10797`. + + + +- Implemented shared DNS resolver management to fix excessive resolver object creation + when using multiple client sessions. 
The new ``_DNSResolverManager`` singleton ensures + only one ``DNSResolver`` object is created for default configurations, significantly + reducing resource usage and improving performance for applications using multiple + client sessions simultaneously -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10847`, :issue:`10923`, :issue:`10946`. + + + +- Upgraded to LLHTTP 9.3.0 -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`10972`. + + + + +Improved documentation +---------------------- + +- Improved documentation for middleware by adding warnings and examples about + request body stream consumption. The documentation now clearly explains that + request body streams can only be read once and provides best practices for + sharing parsed request data between middleware and handlers -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`2914`. + + + + +Packaging updates and notes for downstreams +------------------------------------------- + +- Removed non SPDX-license description from ``setup.cfg`` -- by :user:`devanshu-ziphq`. + + + *Related issues and pull requests on GitHub:* + :issue:`10662`. + + + +- Added support for building against system ``llhttp`` library -- by :user:`mgorny`. + + This change adds support for :envvar:`AIOHTTP_USE_SYSTEM_DEPS` environment variable that + can be used to build aiohttp against the system install of the ``llhttp`` library rather + than the vendored one. + + + *Related issues and pull requests on GitHub:* + :issue:`10759`. + + + +- ``aiodns`` is now installed on Windows with speedups extra -- by :user:`bdraco`. + + As of ``aiodns`` 3.3.0, ``SelectorEventLoop`` is no longer required when using ``pycares`` 4.7.0 or later. + + + *Related issues and pull requests on GitHub:* + :issue:`10823`. 
+ + + +- Fixed compatibility issue with Cython 3.1.1 -- by :user:`bdraco` + + + *Related issues and pull requests on GitHub:* + :issue:`10877`. + + + + +Contributor-facing changes +-------------------------- + +- Sped up tests by disabling ``blockbuster`` fixture for ``test_static_file_huge`` and ``test_static_file_huge_cancel`` tests -- by :user:`dikos1337`. + + + *Related issues and pull requests on GitHub:* + :issue:`9705`, :issue:`10761`. + + + +- Updated tests to avoid using deprecated :py:mod:`asyncio` policy APIs and + make it compatible with Python 3.14. + + + *Related issues and pull requests on GitHub:* + :issue:`10851`. + + + +- Added Winloop to test suite to support in the future -- by :user:`Vizonex`. + + + *Related issues and pull requests on GitHub:* + :issue:`10922`. + + + + +Miscellaneous internal changes +------------------------------ + +- Added support for the ``partitioned`` attribute in the ``set_cookie`` method. + + + *Related issues and pull requests on GitHub:* + :issue:`9870`. + + + +- Setting :attr:`aiohttp.web.StreamResponse.last_modified` to an unsupported type will now raise :exc:`TypeError` instead of silently failing -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10146`. + + + + +---- + + 3.12.0b3 (2025-05-22) ===================== diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 0ca44564e46..0de2fb48b1b 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.0b3" +__version__ = "3.12.0rc0" from typing import TYPE_CHECKING, Tuple From c265228701988a0357ecf6f863b460335a161b74 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sat, 24 May 2025 12:23:32 -0500 Subject: [PATCH 1446/1511] [PR #10991/452458a backport][3.12] Optimize small HTTP requests/responses by coalescing headers and body into a single packet (#10992) --- CHANGES/10991.feature.rst | 7 + aiohttp/abc.py | 7 + aiohttp/client_reqrep.py | 5 + aiohttp/http_writer.py | 158 ++++++- aiohttp/web_response.py | 5 + docs/spelling_wordlist.txt | 1 + tests/test_client_functional.py | 79 +++- tests/test_http_writer.py | 786 +++++++++++++++++++++++++++++++- tests/test_web_response.py | 43 ++ tests/test_web_sendfile.py | 30 ++ tests/test_web_server.py | 5 +- 11 files changed, 1099 insertions(+), 27 deletions(-) create mode 100644 CHANGES/10991.feature.rst diff --git a/CHANGES/10991.feature.rst b/CHANGES/10991.feature.rst new file mode 100644 index 00000000000..687a1a752f6 --- /dev/null +++ b/CHANGES/10991.feature.rst @@ -0,0 +1,7 @@ +Optimized small HTTP requests/responses by coalescing headers and body into a single TCP packet -- by :user:`bdraco`. + +This change enhances network efficiency by reducing the number of packets sent for small HTTP payloads, improving latency and reducing overhead. Most importantly, this fixes compatibility with memory-constrained IoT devices that can only perform a single read operation and expect HTTP requests in one packet. The optimization uses zero-copy ``writelines`` when coalescing data and works with both regular and chunked transfer encoding. + +When ``aiohttp`` uses client middleware to communicate with an ``aiohttp`` server, connection reuse is more likely to occur since complete responses arrive in a single packet for small payloads. + +This aligns ``aiohttp`` with other popular HTTP clients that already coalesce small requests. 
diff --git a/aiohttp/abc.py b/aiohttp/abc.py index 3c4f8c61b00..c1bf5032d0d 100644 --- a/aiohttp/abc.py +++ b/aiohttp/abc.py @@ -232,6 +232,13 @@ async def write_headers( ) -> None: """Write HTTP headers""" + def send_headers(self) -> None: + """Force sending buffered headers if not already sent. + + Required only if write_headers() buffers headers instead of sending immediately. + For backwards compatibility, this method does nothing by default. + """ + class AbstractAccessLogger(ABC): """Abstract writer to access log.""" diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index a50917150c5..fb83eefd51f 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -709,6 +709,8 @@ async def write_bytes( """ # 100 response if self._continue is not None: + # Force headers to be sent before waiting for 100-continue + writer.send_headers() await writer.drain() await self._continue @@ -826,7 +828,10 @@ async def send(self, conn: "Connection") -> "ClientResponse": # status + headers status_line = f"{self.method} {path} HTTP/{v.major}.{v.minor}" + + # Buffer headers for potential coalescing with body await writer.write_headers(status_line, self.headers) + task: Optional["asyncio.Task[None]"] if self.body or self._continue is not None or protocol.writing_paused: coro = self.write_bytes(writer, conn, self._get_content_length()) diff --git a/aiohttp/http_writer.py b/aiohttp/http_writer.py index 3e05628238d..a140b218b25 100644 --- a/aiohttp/http_writer.py +++ b/aiohttp/http_writer.py @@ -3,6 +3,7 @@ import asyncio import sys from typing import ( # noqa + TYPE_CHECKING, Any, Awaitable, Callable, @@ -66,6 +67,8 @@ def __init__( self.loop = loop self._on_chunk_sent: _T_OnChunkSent = on_chunk_sent self._on_headers_sent: _T_OnHeadersSent = on_headers_sent + self._headers_buf: Optional[bytes] = None + self._headers_written: bool = False @property def transport(self) -> Optional[asyncio.Transport]: @@ -106,6 +109,49 @@ def _writelines(self, chunks: 
Iterable[bytes]) -> None: else: transport.writelines(chunks) + def _write_chunked_payload( + self, chunk: Union[bytes, bytearray, "memoryview[int]", "memoryview[bytes]"] + ) -> None: + """Write a chunk with proper chunked encoding.""" + chunk_len_pre = f"{len(chunk):x}\r\n".encode("ascii") + self._writelines((chunk_len_pre, chunk, b"\r\n")) + + def _send_headers_with_payload( + self, + chunk: Union[bytes, bytearray, "memoryview[int]", "memoryview[bytes]"], + is_eof: bool, + ) -> None: + """Send buffered headers with payload, coalescing into single write.""" + # Mark headers as written + self._headers_written = True + headers_buf = self._headers_buf + self._headers_buf = None + + if TYPE_CHECKING: + # Safe because callers (write() and write_eof()) only invoke this method + # after checking that self._headers_buf is truthy + assert headers_buf is not None + + if not self.chunked: + # Non-chunked: coalesce headers with body + if chunk: + self._writelines((headers_buf, chunk)) + else: + self._write(headers_buf) + return + + # Coalesce headers with chunked data + if chunk: + chunk_len_pre = f"{len(chunk):x}\r\n".encode("ascii") + if is_eof: + self._writelines((headers_buf, chunk_len_pre, chunk, b"\r\n0\r\n\r\n")) + else: + self._writelines((headers_buf, chunk_len_pre, chunk, b"\r\n")) + elif is_eof: + self._writelines((headers_buf, b"0\r\n\r\n")) + else: + self._write(headers_buf) + async def write( self, chunk: Union[bytes, bytearray, memoryview], @@ -113,7 +159,8 @@ async def write( drain: bool = True, LIMIT: int = 0x10000, ) -> None: - """Writes chunk of data to a stream. + """ + Writes chunk of data to a stream. write_eof() indicates end of stream. writer can't be used after write_eof() method being called. 
@@ -142,31 +189,75 @@ async def write( if not chunk: return + # Handle buffered headers for small payload optimization + if self._headers_buf and not self._headers_written: + self._send_headers_with_payload(chunk, False) + if drain and self.buffer_size > LIMIT: + self.buffer_size = 0 + await self.drain() + return + if chunk: if self.chunked: - self._writelines( - (f"{len(chunk):x}\r\n".encode("ascii"), chunk, b"\r\n") - ) + self._write_chunked_payload(chunk) else: self._write(chunk) - if self.buffer_size > LIMIT and drain: + if drain and self.buffer_size > LIMIT: self.buffer_size = 0 await self.drain() async def write_headers( self, status_line: str, headers: "CIMultiDict[str]" ) -> None: - """Write request/response status and headers.""" + """Write headers to the stream.""" if self._on_headers_sent is not None: await self._on_headers_sent(headers) - # status + headers buf = _serialize_headers(status_line, headers) - self._write(buf) + self._headers_written = False + self._headers_buf = buf + + def send_headers(self) -> None: + """Force sending buffered headers if not already sent.""" + if not self._headers_buf or self._headers_written: + return + + self._headers_written = True + headers_buf = self._headers_buf + self._headers_buf = None + + if TYPE_CHECKING: + # Safe because we only enter this block when self._headers_buf is truthy + assert headers_buf is not None + + self._write(headers_buf) def set_eof(self) -> None: """Indicate that the message is complete.""" + if self._eof: + return + + # If headers haven't been sent yet, send them now + # This handles the case where there's no body at all + if self._headers_buf and not self._headers_written: + self._headers_written = True + headers_buf = self._headers_buf + self._headers_buf = None + + if TYPE_CHECKING: + # Safe because we only enter this block when self._headers_buf is truthy + assert headers_buf is not None + + # Combine headers and chunked EOF marker in a single write + if self.chunked: + 
self._writelines((headers_buf, b"0\r\n\r\n")) + else: + self._write(headers_buf) + elif self.chunked and self._headers_written: + # Headers already sent, just send the final chunk marker + self._write(b"0\r\n\r\n") + self._eof = True async def write_eof(self, chunk: bytes = b"") -> None: @@ -176,6 +267,7 @@ async def write_eof(self, chunk: bytes = b"") -> None: if chunk and self._on_chunk_sent is not None: await self._on_chunk_sent(chunk) + # Handle body/compression if self._compress: chunks: List[bytes] = [] chunks_len = 0 @@ -188,6 +280,26 @@ async def write_eof(self, chunk: bytes = b"") -> None: chunks.append(flush_chunk) assert chunks_len + # Send buffered headers with compressed data if not yet sent + if self._headers_buf and not self._headers_written: + self._headers_written = True + headers_buf = self._headers_buf + self._headers_buf = None + + if self.chunked: + # Coalesce headers with compressed chunked data + chunk_len_pre = f"{chunks_len:x}\r\n".encode("ascii") + self._writelines( + (headers_buf, chunk_len_pre, *chunks, b"\r\n0\r\n\r\n") + ) + else: + # Coalesce headers with compressed data + self._writelines((headers_buf, *chunks)) + await self.drain() + self._eof = True + return + + # Headers already sent, just write compressed data if self.chunked: chunk_len_pre = f"{chunks_len:x}\r\n".encode("ascii") self._writelines((chunk_len_pre, *chunks, b"\r\n0\r\n\r\n")) @@ -195,16 +307,34 @@ async def write_eof(self, chunk: bytes = b"") -> None: self._writelines(chunks) else: self._write(chunks[0]) - elif self.chunked: + await self.drain() + self._eof = True + return + + # No compression - send buffered headers if not yet sent + if self._headers_buf and not self._headers_written: + # Use helper to send headers with payload + self._send_headers_with_payload(chunk, True) + await self.drain() + self._eof = True + return + + # Handle remaining body + if self.chunked: if chunk: - chunk_len_pre = f"{len(chunk):x}\r\n".encode("ascii") - 
self._writelines((chunk_len_pre, chunk, b"\r\n0\r\n\r\n")) + # Write final chunk with EOF marker + self._writelines( + (f"{len(chunk):x}\r\n".encode("ascii"), chunk, b"\r\n0\r\n\r\n") + ) else: self._write(b"0\r\n\r\n") - elif chunk: - self._write(chunk) + await self.drain() + self._eof = True + return - await self.drain() + if chunk: + self._write(chunk) + await self.drain() self._eof = True diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 8a940ef43bf..84ad18e8b4f 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -89,6 +89,7 @@ class StreamResponse(BaseClass, HeadersMixin): _must_be_empty_body: Optional[bool] = None _body_length = 0 _cookies: Optional[SimpleCookie] = None + _send_headers_immediately = True def __init__( self, @@ -542,6 +543,9 @@ async def _write_headers(self) -> None: version = request.version status_line = f"HTTP/{version[0]}.{version[1]} {self._status} {self._reason}" await writer.write_headers(status_line, self._headers) + # Send headers immediately if not opted into buffering + if self._send_headers_immediately: + writer.send_headers() async def write(self, data: Union[bytes, bytearray, memoryview]) -> None: assert isinstance( @@ -619,6 +623,7 @@ def __bool__(self) -> bool: class Response(StreamResponse): _compressed_body: Optional[bytes] = None + _send_headers_immediately = False def __init__( self, diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index c22e584cadf..3f67df33159 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -153,6 +153,7 @@ initializer inline intaking io +IoT ip IP ipdb diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 6a031de6a35..29838c39a71 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -1575,6 +1575,7 @@ async def handler(request: web.Request) -> web.Response: return web.json_response({"ok": True}) write_mock = None + writelines_mock = None original_write_bytes 
= ClientRequest.write_bytes async def write_bytes( @@ -1583,12 +1584,26 @@ async def write_bytes( conn: Connection, content_length: Optional[int] = None, ) -> None: - nonlocal write_mock + nonlocal write_mock, writelines_mock original_write = writer._write - - with mock.patch.object( - writer, "_write", autospec=True, spec_set=True, side_effect=original_write - ) as write_mock: + original_writelines = writer._writelines + + with ( + mock.patch.object( + writer, + "_write", + autospec=True, + spec_set=True, + side_effect=original_write, + ) as write_mock, + mock.patch.object( + writer, + "_writelines", + autospec=True, + spec_set=True, + side_effect=original_writelines, + ) as writelines_mock, + ): await original_write_bytes(self, writer, conn, content_length) with mock.patch.object(ClientRequest, "write_bytes", write_bytes): @@ -1601,9 +1616,20 @@ async def write_bytes( content = await resp.json() assert content == {"ok": True} - assert write_mock is not None - # No chunks should have been sent for an empty body. - write_mock.assert_not_called() + # With packet coalescing, headers are buffered and may be written + # during write_bytes if there's an empty body to process. + # The test should verify no body chunks are written, but headers + # may be written as part of the coalescing optimization. 
+ # If _write was called, it should only be for headers ending with \r\n\r\n + # and not any body content + for call in write_mock.call_args_list: # type: ignore[union-attr] + data = call[0][0] + assert data.endswith( + b"\r\n\r\n" + ), "Only headers should be written, not body chunks" + + # No body data should be written via writelines either + writelines_mock.assert_not_called() # type: ignore[union-attr] async def test_GET_DEFLATE_no_body(aiohttp_client: AiohttpClient) -> None: @@ -4439,3 +4465,40 @@ async def handler(request: web.Request) -> NoReturn: assert client._session.connector is not None # Connection should be closed due to client-side network error assert len(client._session.connector._conns) == 0 + + +async def test_empty_response_non_chunked(aiohttp_client: AiohttpClient) -> None: + """Test non-chunked response with empty body.""" + + async def handler(request: web.Request) -> web.Response: + # Return empty response with Content-Length: 0 + return web.Response(body=b"", headers={"Content-Length": "0"}) + + app = web.Application() + app.router.add_get("/empty", handler) + client = await aiohttp_client(app) + + resp = await client.get("/empty") + assert resp.status == 200 + assert resp.headers.get("Content-Length") == "0" + data = await resp.read() + assert data == b"" + resp.close() + + +async def test_set_eof_on_empty_response(aiohttp_client: AiohttpClient) -> None: + """Test that triggers set_eof() method.""" + + async def handler(request: web.Request) -> web.Response: + # Return response that completes immediately + return web.Response(status=204) # No Content + + app = web.Application() + app.router.add_get("/no-content", handler) + client = await aiohttp_client(app) + + resp = await client.get("/no-content") + assert resp.status == 204 + data = await resp.read() + assert data == b"" + resp.close() diff --git a/tests/test_http_writer.py b/tests/test_http_writer.py index ec256275d22..ffd20a0d677 100644 --- a/tests/test_http_writer.py +++ 
b/tests/test_http_writer.py @@ -87,7 +87,100 @@ def test_payloadwriter_properties(transport, protocol, loop) -> None: assert writer.transport == transport -async def test_write_payload_eof(transport, protocol, loop) -> None: +async def test_write_headers_buffered_small_payload( + buf: bytearray, + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + msg = http.StreamWriter(protocol, loop) + headers = CIMultiDict({"Content-Length": "11", "Host": "example.com"}) + + # Write headers - should be buffered + await msg.write_headers("GET / HTTP/1.1", headers) + assert len(buf) == 0 # Headers not sent yet + + # Write small body - should coalesce with headers + await msg.write(b"Hello World", drain=False) + + # Verify content + assert b"GET / HTTP/1.1\r\n" in buf + assert b"Host: example.com\r\n" in buf + assert b"Content-Length: 11\r\n" in buf + assert b"\r\n\r\nHello World" in buf + + +async def test_write_headers_chunked_coalescing( + buf: bytearray, + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + msg = http.StreamWriter(protocol, loop) + msg.enable_chunking() + headers = CIMultiDict({"Transfer-Encoding": "chunked", "Host": "example.com"}) + + # Write headers - should be buffered + await msg.write_headers("POST /upload HTTP/1.1", headers) + assert len(buf) == 0 # Headers not sent yet + + # Write first chunk - should coalesce with headers + await msg.write(b"First chunk", drain=False) + + # Verify content + assert b"POST /upload HTTP/1.1\r\n" in buf + assert b"Transfer-Encoding: chunked\r\n" in buf + # "b" is hex for 11 (length of "First chunk") + assert b"\r\n\r\nb\r\nFirst chunk\r\n" in buf + + +async def test_write_eof_with_buffered_headers( + buf: bytearray, + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + msg = http.StreamWriter(protocol, loop) + headers = CIMultiDict({"Content-Length": "9", "Host": 
"example.com"}) + + # Write headers - should be buffered + await msg.write_headers("POST /data HTTP/1.1", headers) + assert len(buf) == 0 + + # Call write_eof with body - should coalesce + await msg.write_eof(b"Last data") + + # Verify content + assert b"POST /data HTTP/1.1\r\n" in buf + assert b"\r\n\r\nLast data" in buf + + +async def test_set_eof_sends_buffered_headers( + buf: bytearray, + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + msg = http.StreamWriter(protocol, loop) + headers = CIMultiDict({"Host": "example.com"}) + + # Write headers - should be buffered + await msg.write_headers("GET /empty HTTP/1.1", headers) + assert len(buf) == 0 + + # Call set_eof without body - headers should be sent + msg.set_eof() + + # Headers should be sent + assert len(buf) > 0 + assert b"GET /empty HTTP/1.1\r\n" in buf + + +async def test_write_payload_eof( + transport: asyncio.Transport, + protocol: BaseProtocol, + loop: asyncio.AbstractEventLoop, +) -> None: write = transport.write = mock.Mock() msg = http.StreamWriter(protocol, loop) @@ -825,14 +918,66 @@ async def test_set_eof_after_write_headers( msg = http.StreamWriter(protocol, loop) status_line = "HTTP/1.1 200 OK" good_headers = CIMultiDict({"Set-Cookie": "abc=123"}) + + # Write headers - should be buffered await msg.write_headers(status_line, good_headers) + assert not transport.write.called # Headers are buffered + + # set_eof should send the buffered headers + msg.set_eof() assert transport.write.called + + # Subsequent write_eof should do nothing transport.write.reset_mock() - msg.set_eof() await msg.write_eof() assert not transport.write.called +async def test_write_headers_does_not_write_immediately( + protocol: BaseProtocol, + transport: mock.Mock, + loop: asyncio.AbstractEventLoop, +) -> None: + msg = http.StreamWriter(protocol, loop) + status_line = "HTTP/1.1 200 OK" + headers = CIMultiDict({"Content-Type": "text/plain"}) + + # write_headers should 
buffer, not write immediately + await msg.write_headers(status_line, headers) + assert not transport.write.called + assert not transport.writelines.called + + # Headers should be sent when set_eof is called + msg.set_eof() + assert transport.write.called + + +async def test_write_headers_with_compression_coalescing( + buf: bytearray, + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + msg = http.StreamWriter(protocol, loop) + msg.enable_compression("deflate") + headers = CIMultiDict({"Content-Encoding": "deflate", "Host": "example.com"}) + + # Write headers - should be buffered + await msg.write_headers("POST /data HTTP/1.1", headers) + assert len(buf) == 0 + + # Write compressed data via write_eof - should coalesce + await msg.write_eof(b"Hello World") + + # Verify headers are present + assert b"POST /data HTTP/1.1\r\n" in buf + assert b"Content-Encoding: deflate\r\n" in buf + + # Verify compressed data is present + # The data should contain headers + compressed payload + assert len(buf) > 50 # Should have headers + some compressed data + + @pytest.mark.parametrize( "char", [ @@ -857,3 +1002,640 @@ def test_serialize_headers_raises_on_new_line_or_carriage_return(char: str) -> N ), ): _serialize_headers(status_line, headers) + + +async def test_write_compressed_data_with_headers_coalescing( + buf: bytearray, + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + """Test that headers are coalesced with compressed data in write() method.""" + msg = http.StreamWriter(protocol, loop) + msg.enable_compression("deflate") + headers = CIMultiDict({"Content-Encoding": "deflate", "Host": "example.com"}) + + # Write headers - should be buffered + await msg.write_headers("POST /data HTTP/1.1", headers) + assert len(buf) == 0 + + # Write compressed data - should coalesce with headers + await msg.write(b"Hello World") + + # Headers and compressed data should be written 
together + assert b"POST /data HTTP/1.1\r\n" in buf + assert b"Content-Encoding: deflate\r\n" in buf + assert len(buf) > 50 # Headers + compressed data + + +async def test_write_compressed_chunked_with_headers_coalescing( + buf: bytearray, + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + """Test headers coalescing with compressed chunked data.""" + msg = http.StreamWriter(protocol, loop) + msg.enable_compression("deflate") + msg.enable_chunking() + headers = CIMultiDict( + {"Content-Encoding": "deflate", "Transfer-Encoding": "chunked"} + ) + + # Write headers - should be buffered + await msg.write_headers("POST /data HTTP/1.1", headers) + assert len(buf) == 0 + + # Write compressed chunked data - should coalesce + await msg.write(b"Hello World") + + # Check headers are present + assert b"POST /data HTTP/1.1\r\n" in buf + assert b"Transfer-Encoding: chunked\r\n" in buf + + # Should have chunk size marker for compressed data + output = buf.decode("latin-1", errors="ignore") + assert "\r\n" in output # Should have chunk markers + + +async def test_write_multiple_compressed_chunks_after_headers_sent( + buf: bytearray, + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + """Test multiple compressed writes after headers are already sent.""" + msg = http.StreamWriter(protocol, loop) + msg.enable_compression("deflate") + headers = CIMultiDict({"Content-Encoding": "deflate"}) + + # Write headers and send them immediately by writing first chunk + await msg.write_headers("POST /data HTTP/1.1", headers) + assert len(buf) == 0 # Headers buffered + + # Write first chunk - this will send headers + compressed data + await msg.write(b"First chunk of data that should compress") + len_after_first = len(buf) + assert len_after_first > 0 # Headers + first chunk written + + # Write second chunk and force flush via EOF + await msg.write(b"Second chunk of data that should also 
compress well") + await msg.write_eof() + + # After EOF, all compressed data should be flushed + final_len = len(buf) + assert final_len > len_after_first + + +async def test_write_eof_empty_compressed_with_buffered_headers( + buf: bytearray, + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + """Test write_eof with no data but compression enabled and buffered headers.""" + msg = http.StreamWriter(protocol, loop) + msg.enable_compression("deflate") + headers = CIMultiDict({"Content-Encoding": "deflate"}) + + # Write headers - should be buffered + await msg.write_headers("GET /data HTTP/1.1", headers) + assert len(buf) == 0 + + # Write EOF with no data - should still coalesce headers with compression flush + await msg.write_eof() + + # Headers should be present + assert b"GET /data HTTP/1.1\r\n" in buf + assert b"Content-Encoding: deflate\r\n" in buf + # Should have compression flush data + assert len(buf) > 40 + + +async def test_write_compressed_gzip_with_headers_coalescing( + buf: bytearray, + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + """Test gzip compression with header coalescing.""" + msg = http.StreamWriter(protocol, loop) + msg.enable_compression("gzip") + headers = CIMultiDict({"Content-Encoding": "gzip"}) + + # Write headers - should be buffered + await msg.write_headers("POST /data HTTP/1.1", headers) + assert len(buf) == 0 + + # Write gzip compressed data via write_eof + await msg.write_eof(b"Test gzip compression") + + # Verify coalescing happened + assert b"POST /data HTTP/1.1\r\n" in buf + assert b"Content-Encoding: gzip\r\n" in buf + # Gzip typically produces more overhead than deflate + assert len(buf) > 60 + + +async def test_compression_with_content_length_constraint( + buf: bytearray, + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + """Test compression respects content 
length constraints.""" + msg = http.StreamWriter(protocol, loop) + msg.enable_compression("deflate") + msg.length = 5 # Set small content length + headers = CIMultiDict({"Content-Length": "5"}) + + await msg.write_headers("POST /data HTTP/1.1", headers) + # Write some initial data to trigger headers to be sent + await msg.write(b"12345") # This matches our content length of 5 + headers_and_first_chunk_len = len(buf) + + # Try to write more data than content length allows + await msg.write(b"This is a longer message") + + # The second write should not add any data since content length is exhausted + # After writing 5 bytes, length becomes 0, so additional writes are ignored + assert len(buf) == headers_and_first_chunk_len # No additional data written + + +async def test_write_compressed_zero_length_chunk( + buf: bytearray, + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + """Test writing empty chunk with compression.""" + msg = http.StreamWriter(protocol, loop) + msg.enable_compression("deflate") + + await msg.write_headers("POST /data HTTP/1.1", CIMultiDict()) + # Force headers to be sent by writing something + await msg.write(b"x") # Write something to trigger header send + buf.clear() + + # Write empty chunk - compression may still produce output + await msg.write(b"") + + # With compression, even empty input might produce small output + # due to compression state, but it should be minimal + assert len(buf) < 10 # Should be very small if anything + + +async def test_chunked_compressed_eof_coalescing( + buf: bytearray, + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + """Test chunked compressed data with EOF marker coalescing.""" + msg = http.StreamWriter(protocol, loop) + msg.enable_compression("deflate") + msg.enable_chunking() + headers = CIMultiDict( + {"Content-Encoding": "deflate", "Transfer-Encoding": "chunked"} + ) + + # Buffer headers + await 
msg.write_headers("POST /data HTTP/1.1", headers) + assert len(buf) == 0 + + # Write compressed chunked data with EOF + await msg.write_eof(b"Final compressed chunk") + + # Should have headers + assert b"POST /data HTTP/1.1\r\n" in buf + + # Should end with chunked EOF marker + assert buf.endswith(b"0\r\n\r\n") + + # Should have chunk size in hex before the compressed data + output = buf + # Verify we have chunk markers - look for \r\n followed by hex digits + # The chunk size should be between the headers and the compressed data + assert b"\r\n\r\n" in output # End of headers + # After headers, we should have a hex chunk size + headers_end = output.find(b"\r\n\r\n") + 4 + chunk_data = output[headers_end:] + # Should start with hex digits followed by \r\n + assert ( + chunk_data[:10] + .strip() + .decode("ascii", errors="ignore") + .replace("\r\n", "") + .isalnum() + ) + + +async def test_compression_different_strategies( + buf: bytearray, + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + """Test compression with different strategies.""" + # Test with best speed strategy (default) + msg1 = http.StreamWriter(protocol, loop) + msg1.enable_compression("deflate") # Default strategy + + await msg1.write_headers("POST /fast HTTP/1.1", CIMultiDict()) + await msg1.write_eof(b"Test data for compression test data for compression") + + buf1_len = len(buf) + + # Both should produce output + assert buf1_len > 0 + # Headers should be present + assert b"POST /fast HTTP/1.1\r\n" in buf + + # Since we can't easily test different compression strategies + # (the compressor initialization might not support strategy parameter), + # we just verify that compression works + + +async def test_chunked_headers_single_write_with_set_eof( + buf: bytearray, + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + """Test that set_eof combines headers and chunked EOF in single write.""" + 
msg = http.StreamWriter(protocol, loop) + msg.enable_chunking() + + # Write headers - should be buffered + headers = CIMultiDict({"Transfer-Encoding": "chunked", "Host": "example.com"}) + await msg.write_headers("GET /test HTTP/1.1", headers) + assert len(buf) == 0 # Headers not sent yet + assert not transport.writelines.called # type: ignore[attr-defined] # No writelines calls yet + + # Call set_eof - should send headers + chunked EOF in single write call + msg.set_eof() + + # Should have exactly one write call (since payload is small, writelines falls back to write) + assert transport.write.call_count == 1 # type: ignore[attr-defined] + assert transport.writelines.call_count == 0 # type: ignore[attr-defined] # Not called for small payloads + + # The write call should have the combined headers and chunked EOF marker + write_data = transport.write.call_args[0][0] # type: ignore[attr-defined] + assert write_data.startswith(b"GET /test HTTP/1.1\r\n") + assert b"Transfer-Encoding: chunked\r\n" in write_data + assert write_data.endswith(b"\r\n\r\n0\r\n\r\n") # Headers end + chunked EOF + + # Verify final output + assert b"GET /test HTTP/1.1\r\n" in buf + assert b"Transfer-Encoding: chunked\r\n" in buf + assert buf.endswith(b"0\r\n\r\n") + + +async def test_send_headers_forces_header_write( + buf: bytearray, + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + """Test that send_headers() forces writing buffered headers.""" + msg = http.StreamWriter(protocol, loop) + headers = CIMultiDict({"Content-Length": "10", "Host": "example.com"}) + + # Write headers (should be buffered) + await msg.write_headers("GET /test HTTP/1.1", headers) + assert len(buf) == 0 # Headers buffered + + # Force send headers + msg.send_headers() + + # Headers should now be written + assert b"GET /test HTTP/1.1\r\n" in buf + assert b"Content-Length: 10\r\n" in buf + assert b"Host: example.com\r\n" in buf + + # Writing body should not resend 
headers + buf.clear() + await msg.write(b"0123456789") + assert b"GET /test" not in buf # Headers not repeated + assert buf == b"0123456789" # Just the body + + +async def test_send_headers_idempotent( + buf: bytearray, + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + """Test that send_headers() is idempotent and safe to call multiple times.""" + msg = http.StreamWriter(protocol, loop) + headers = CIMultiDict({"Content-Length": "5", "Host": "example.com"}) + + # Write headers (should be buffered) + await msg.write_headers("GET /test HTTP/1.1", headers) + assert len(buf) == 0 # Headers buffered + + # Force send headers + msg.send_headers() + headers_output = bytes(buf) + + # Call send_headers again - should be no-op + msg.send_headers() + assert buf == headers_output # No additional output + + # Call send_headers after headers already sent - should be no-op + await msg.write(b"hello") + msg.send_headers() + assert buf[len(headers_output) :] == b"hello" # Only body added + + +async def test_send_headers_no_buffered_headers( + buf: bytearray, + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + """Test that send_headers() is safe when no headers are buffered.""" + msg = http.StreamWriter(protocol, loop) + + # Call send_headers without writing headers first + msg.send_headers() # Should not crash + assert len(buf) == 0 # No output + + +async def test_write_drain_condition_with_small_buffer( + buf: bytearray, + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + """Test that drain is not called when buffer_size <= LIMIT.""" + msg = http.StreamWriter(protocol, loop) + + # Write headers first + await msg.write_headers("GET /test HTTP/1.1", CIMultiDict()) + msg.send_headers() # Send headers to start with clean state + + # Reset buffer size manually since send_headers doesn't do it + msg.buffer_size = 0 + + # 
Reset drain helper mock + protocol._drain_helper.reset_mock() # type: ignore[attr-defined] + + # Write small amount of data with drain=True but buffer under limit + small_data = b"x" * 100 # Much less than LIMIT (2**16) + await msg.write(small_data, drain=True) + + # Drain should NOT be called because buffer_size <= LIMIT + assert not protocol._drain_helper.called # type: ignore[attr-defined] + assert msg.buffer_size == 100 + assert small_data in buf + + +async def test_write_drain_condition_with_large_buffer( + buf: bytearray, + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + """Test that drain is called only when drain=True AND buffer_size > LIMIT.""" + msg = http.StreamWriter(protocol, loop) + + # Write headers first + await msg.write_headers("GET /test HTTP/1.1", CIMultiDict()) + msg.send_headers() # Send headers to start with clean state + + # Reset buffer size manually since send_headers doesn't do it + msg.buffer_size = 0 + + # Reset drain helper mock + protocol._drain_helper.reset_mock() # type: ignore[attr-defined] + + # Write large amount of data with drain=True + large_data = b"x" * (2**16 + 1) # Just over LIMIT + await msg.write(large_data, drain=True) + + # Drain should be called because drain=True AND buffer_size > LIMIT + assert protocol._drain_helper.called # type: ignore[attr-defined] + assert msg.buffer_size == 0 # Buffer reset after drain + assert large_data in buf + + +async def test_write_no_drain_with_large_buffer( + buf: bytearray, + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + """Test that drain is not called when drain=False even with large buffer.""" + msg = http.StreamWriter(protocol, loop) + + # Write headers first + await msg.write_headers("GET /test HTTP/1.1", CIMultiDict()) + msg.send_headers() # Send headers to start with clean state + + # Reset buffer size manually since send_headers doesn't do it + msg.buffer_size = 
0 + + # Reset drain helper mock + protocol._drain_helper.reset_mock() # type: ignore[attr-defined] + + # Write large amount of data with drain=False + large_data = b"x" * (2**16 + 1) # Just over LIMIT + await msg.write(large_data, drain=False) + + # Drain should NOT be called because drain=False + assert not protocol._drain_helper.called # type: ignore[attr-defined] + assert msg.buffer_size == (2**16 + 1) # Buffer not reset + assert large_data in buf + + +async def test_set_eof_idempotent( + buf: bytearray, + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + """Test that set_eof() is idempotent and can be called multiple times safely.""" + msg = http.StreamWriter(protocol, loop) + + # Test 1: Multiple set_eof calls with buffered headers + headers = CIMultiDict({"Content-Length": "0"}) + await msg.write_headers("GET /test HTTP/1.1", headers) + + # First set_eof should send headers + msg.set_eof() + first_output = buf + assert b"GET /test HTTP/1.1\r\n" in first_output + assert b"Content-Length: 0\r\n" in first_output + + # Second set_eof should be no-op + msg.set_eof() + assert bytes(buf) == first_output # No additional output + + # Third set_eof should also be no-op + msg.set_eof() + assert bytes(buf) == first_output # Still no additional output + + # Test 2: set_eof with chunked encoding + buf.clear() + msg2 = http.StreamWriter(protocol, loop) + msg2.enable_chunking() + + headers2 = CIMultiDict({"Transfer-Encoding": "chunked"}) + await msg2.write_headers("POST /data HTTP/1.1", headers2) + + # First set_eof should send headers + chunked EOF + msg2.set_eof() + chunked_output = buf + assert b"POST /data HTTP/1.1\r\n" in buf + assert b"Transfer-Encoding: chunked\r\n" in buf + assert b"0\r\n\r\n" in buf # Chunked EOF marker + + # Second set_eof should be no-op + msg2.set_eof() + assert buf == chunked_output # No additional output + + # Test 3: set_eof after headers already sent + buf.clear() + msg3 = 
http.StreamWriter(protocol, loop) + + headers3 = CIMultiDict({"Content-Length": "5"}) + await msg3.write_headers("PUT /update HTTP/1.1", headers3) + + # Send headers by writing some data + await msg3.write(b"hello") + headers_and_body = buf + + # set_eof after headers sent should be no-op + msg3.set_eof() + assert buf == headers_and_body # No additional output + + # Another set_eof should still be no-op + msg3.set_eof() + assert buf == headers_and_body # Still no additional output + + +async def test_non_chunked_write_empty_body( + buf: bytearray, + protocol: BaseProtocol, + transport: mock.Mock, + loop: asyncio.AbstractEventLoop, +) -> None: + """Test non-chunked response with empty body.""" + msg = http.StreamWriter(protocol, loop) + + # Non-chunked response with Content-Length: 0 + headers = CIMultiDict({"Content-Length": "0"}) + await msg.write_headers("GET /empty HTTP/1.1", headers) + + # Write empty body + await msg.write(b"") + + # Check the output + assert b"GET /empty HTTP/1.1\r\n" in buf + assert b"Content-Length: 0\r\n" in buf + + +async def test_chunked_headers_sent_with_empty_chunk_not_eof( + buf: bytearray, + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + """Test chunked encoding where headers are sent without data and not EOF.""" + msg = http.StreamWriter(protocol, loop) + msg.enable_chunking() + + headers = CIMultiDict({"Transfer-Encoding": "chunked"}) + await msg.write_headers("POST /upload HTTP/1.1", headers) + + # This should trigger the else case in _send_headers_with_payload + # by having no chunk data and is_eof=False + await msg.write(b"") + + # Headers should be sent alone + assert b"POST /upload HTTP/1.1\r\n" in buf + assert b"Transfer-Encoding: chunked\r\n" in buf + # Should not have any chunk markers yet + assert b"0\r\n" not in buf + + +async def test_chunked_set_eof_after_headers_sent( + buf: bytearray, + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: 
asyncio.AbstractEventLoop, +) -> None: + """Test chunked encoding where set_eof is called after headers already sent.""" + msg = http.StreamWriter(protocol, loop) + msg.enable_chunking() + + headers = CIMultiDict({"Transfer-Encoding": "chunked"}) + await msg.write_headers("POST /data HTTP/1.1", headers) + + # Send headers by writing some data + await msg.write(b"test data") + buf.clear() # Clear buffer to check only what set_eof writes + + # This should trigger writing chunked EOF when headers already sent + msg.set_eof() + + # Should only have the chunked EOF marker + assert buf == b"0\r\n\r\n" + + +@pytest.mark.usefixtures("enable_writelines") +@pytest.mark.usefixtures("force_writelines_small_payloads") +async def test_write_eof_chunked_with_data_using_writelines( + buf: bytearray, + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: asyncio.AbstractEventLoop, +) -> None: + """Test write_eof with chunked data that uses writelines (line 336).""" + msg = http.StreamWriter(protocol, loop) + msg.enable_chunking() + + headers = CIMultiDict({"Transfer-Encoding": "chunked"}) + await msg.write_headers("POST /data HTTP/1.1", headers) + + # Send headers first + await msg.write(b"initial") + transport.writelines.reset_mock() # type: ignore[attr-defined] + + # This should trigger writelines for final chunk with EOF + await msg.write_eof(b"final chunk data") + + # Should have used writelines + assert transport.writelines.called # type: ignore[attr-defined] + # Get the data from writelines call + writelines_data = transport.writelines.call_args[0][0] # type: ignore[attr-defined] + combined = b"".join(writelines_data) + + # Should have chunk size, data, and EOF marker + assert b"10\r\n" in combined # hex for 16 (length of "final chunk data") + assert b"final chunk data" in combined + assert b"0\r\n\r\n" in combined + + +async def test_send_headers_with_payload_chunked_eof_no_data( + buf: bytearray, + protocol: BaseProtocol, + transport: asyncio.Transport, + loop: 
asyncio.AbstractEventLoop, +) -> None: + """Test _send_headers_with_payload with chunked, is_eof=True but no chunk data.""" + msg = http.StreamWriter(protocol, loop) + msg.enable_chunking() + + headers = CIMultiDict({"Transfer-Encoding": "chunked"}) + await msg.write_headers("GET /test HTTP/1.1", headers) + + # This triggers the elif is_eof branch in _send_headers_with_payload + # by calling write_eof with empty chunk + await msg.write_eof(b"") + + # Should have headers and chunked EOF marker together + assert b"GET /test HTTP/1.1\r\n" in buf + assert b"Transfer-Encoding: chunked\r\n" in buf + assert buf.endswith(b"0\r\n\r\n") diff --git a/tests/test_web_response.py b/tests/test_web_response.py index 7b048970967..c07bf671d8c 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -1540,3 +1540,46 @@ async def test_passing_cimultidict_to_web_response_not_mutated( await resp.prepare(req) assert resp.content_length == 6 assert not headers + + +async def test_stream_response_sends_headers_immediately() -> None: + """Test that StreamResponse sends headers immediately.""" + writer = mock.create_autospec(StreamWriter, spec_set=True) + writer.write_headers = mock.AsyncMock() + writer.send_headers = mock.Mock() + writer.write_eof = mock.AsyncMock() + + req = make_request("GET", "/", writer=writer) + resp = StreamResponse() + + # StreamResponse should have _send_headers_immediately = True + assert resp._send_headers_immediately is True + + # Prepare the response + await resp.prepare(req) + + # Headers should be sent immediately + writer.send_headers.assert_called_once() + + +async def test_response_buffers_headers() -> None: + """Test that Response buffers headers for packet coalescing.""" + writer = mock.create_autospec(StreamWriter, spec_set=True) + writer.write_headers = mock.AsyncMock() + writer.send_headers = mock.Mock() + writer.write_eof = mock.AsyncMock() + + req = make_request("GET", "/", writer=writer) + resp = Response(body=b"hello") + + # 
Response should have _send_headers_immediately = False + assert resp._send_headers_immediately is False + + # Prepare the response + await resp.prepare(req) + + # Headers should NOT be sent immediately + writer.send_headers.assert_not_called() + + # But write_headers should have been called + writer.write_headers.assert_called_once() diff --git a/tests/test_web_sendfile.py b/tests/test_web_sendfile.py index 1776a3aabd3..61c3b49834f 100644 --- a/tests/test_web_sendfile.py +++ b/tests/test_web_sendfile.py @@ -3,6 +3,7 @@ from unittest import mock from aiohttp import hdrs +from aiohttp.http_writer import StreamWriter from aiohttp.test_utils import make_mocked_request from aiohttp.web_fileresponse import FileResponse @@ -125,3 +126,32 @@ def test_status_controlled_by_user(loop) -> None: loop.run_until_complete(file_sender.prepare(request)) assert file_sender._status == 203 + + +async def test_file_response_sends_headers_immediately() -> None: + """Test that FileResponse sends headers immediately (inherits from StreamResponse).""" + writer = mock.create_autospec(StreamWriter, spec_set=True) + writer.write_headers = mock.AsyncMock() + writer.send_headers = mock.Mock() + writer.write_eof = mock.AsyncMock() + + request = make_mocked_request("GET", "http://python.org/logo.png", writer=writer) + + filepath = mock.create_autospec(Path, spec_set=True) + filepath.name = "logo.png" + filepath.stat.return_value.st_size = 1024 + filepath.stat.return_value.st_mtime_ns = 1603733507222449291 + filepath.stat.return_value.st_mode = MOCK_MODE + + file_sender = FileResponse(filepath) + file_sender._path = filepath + file_sender._sendfile = mock.AsyncMock(return_value=None) # type: ignore[method-assign] + + # FileResponse inherits from StreamResponse, so should send immediately + assert file_sender._send_headers_immediately is True + + # Prepare the response + await file_sender.prepare(request) + + # Headers should be sent immediately + writer.send_headers.assert_called_once() diff --git 
a/tests/test_web_server.py b/tests/test_web_server.py index d2f1341afe0..09b7d0bc71b 100644 --- a/tests/test_web_server.py +++ b/tests/test_web_server.py @@ -261,9 +261,8 @@ async def handler(request): server = await aiohttp_raw_server(handler, logger=logger) cli = await aiohttp_client(server) - resp = await cli.get("/path/to") - with pytest.raises(client.ClientPayloadError): - await resp.read() + with pytest.raises(client.ServerDisconnectedError): + await cli.get("/path/to") logger.debug.assert_called_with("Ignored premature client disconnection") From 1b5b0d9f0d825c68de0f75bc786cd0af14c99a77 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sat, 24 May 2025 12:38:09 -0500 Subject: [PATCH 1447/1511] Release 3.12.0rc1 (#10993) --- CHANGES.rst | 276 ++++++++++++++++++++++++++++++++++++++++++++ aiohttp/__init__.py | 2 +- 2 files changed, 277 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index 3ea3455294d..176dcf88179 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,282 @@ .. towncrier release notes start +3.12.0rc1 (2025-05-24) +====================== + +Bug fixes +--------- + +- Fixed :py:attr:`~aiohttp.web.WebSocketResponse.prepared` property to correctly reflect the prepared state, especially during timeout scenarios -- by :user:`bdraco` + + + *Related issues and pull requests on GitHub:* + :issue:`6009`, :issue:`10988`. + + + +- Response is now always True, instead of using MutableMapping behaviour (False when map is empty) + + + *Related issues and pull requests on GitHub:* + :issue:`10119`. + + + +- Fixed connection reuse for file-like data payloads by ensuring buffer + truncation respects content-length boundaries and preventing premature + connection closure race -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10325`, :issue:`10915`, :issue:`10941`, :issue:`10943`. + + + +- Fixed pytest plugin to not use deprecated :py:mod:`asyncio` policy APIs. 
+ + + *Related issues and pull requests on GitHub:* + :issue:`10851`. + + + +- Fixed :py:class:`~aiohttp.resolver.AsyncResolver` not using the ``loop`` argument in versions 3.x where it should still be supported -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10951`. + + + + +Features +-------- + +- Added a comprehensive HTTP Digest Authentication client middleware (DigestAuthMiddleware) + that implements RFC 7616. The middleware supports all standard hash algorithms + (MD5, SHA, SHA-256, SHA-512) with session variants, handles both 'auth' and + 'auth-int' quality of protection options, and automatically manages the + authentication flow by intercepting 401 responses and retrying with proper + credentials -- by :user:`feus4177`, :user:`TimMenninger`, and :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`2213`, :issue:`10725`. + + + +- Added client middleware support -- by :user:`bdraco` and :user:`Dreamsorcerer`. + + This change allows users to add middleware to the client session and requests, enabling features like + authentication, logging, and request/response modification without modifying the core + request logic. Additionally, the ``session`` attribute was added to ``ClientRequest``, + allowing middleware to access the session for making additional requests. + + + *Related issues and pull requests on GitHub:* + :issue:`9732`, :issue:`10902`, :issue:`10945`, :issue:`10952`, :issue:`10959`, :issue:`10968`. + + + +- Allow user setting zlib compression backend -- by :user:`TimMenninger` + + This change allows the user to call :func:`aiohttp.set_zlib_backend()` with the + zlib compression module of their choice. Default behavior continues to use + the builtin ``zlib`` library. + + + *Related issues and pull requests on GitHub:* + :issue:`9798`. + + + +- Added support for overriding the base URL with an absolute one in client sessions + -- by :user:`vivodi`. 
+ + + *Related issues and pull requests on GitHub:* + :issue:`10074`. + + + +- Added ``host`` parameter to ``aiohttp_server`` fixture -- by :user:`christianwbrock`. + + + *Related issues and pull requests on GitHub:* + :issue:`10120`. + + + +- Detect blocking calls in coroutines using BlockBuster -- by :user:`cbornet`. + + + *Related issues and pull requests on GitHub:* + :issue:`10433`. + + + +- Added ``socket_factory`` to :py:class:`aiohttp.TCPConnector` to allow specifying custom socket options + -- by :user:`TimMenninger`. + + + *Related issues and pull requests on GitHub:* + :issue:`10474`, :issue:`10520`, :issue:`10961`, :issue:`10962`. + + + +- Started building armv7l manylinux wheels -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10797`. + + + +- Implemented shared DNS resolver management to fix excessive resolver object creation + when using multiple client sessions. The new ``_DNSResolverManager`` singleton ensures + only one ``DNSResolver`` object is created for default configurations, significantly + reducing resource usage and improving performance for applications using multiple + client sessions simultaneously -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10847`, :issue:`10923`, :issue:`10946`. + + + +- Upgraded to LLHTTP 9.3.0 -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`10972`. + + + +- Optimized small HTTP requests/responses by coalescing headers and body into a single TCP packet -- by :user:`bdraco`. + + This change enhances network efficiency by reducing the number of packets sent for small HTTP payloads, improving latency and reducing overhead. Most importantly, this fixes compatibility with memory-constrained IoT devices that can only perform a single read operation and expect HTTP requests in one packet. 
The optimization uses zero-copy ``writelines`` when coalescing data and works with both regular and chunked transfer encoding. + + When ``aiohttp`` uses client middleware to communicate with an ``aiohttp`` server, connection reuse is more likely to occur since complete responses arrive in a single packet for small payloads. + + This aligns ``aiohttp`` with other popular HTTP clients that already coalesce small requests. + + + *Related issues and pull requests on GitHub:* + :issue:`10991`. + + + + +Improved documentation +---------------------- + +- Improved documentation for middleware by adding warnings and examples about + request body stream consumption. The documentation now clearly explains that + request body streams can only be read once and provides best practices for + sharing parsed request data between middleware and handlers -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`2914`. + + + + +Packaging updates and notes for downstreams +------------------------------------------- + +- Removed non SPDX-license description from ``setup.cfg`` -- by :user:`devanshu-ziphq`. + + + *Related issues and pull requests on GitHub:* + :issue:`10662`. + + + +- Added support for building against system ``llhttp`` library -- by :user:`mgorny`. + + This change adds support for :envvar:`AIOHTTP_USE_SYSTEM_DEPS` environment variable that + can be used to build aiohttp against the system install of the ``llhttp`` library rather + than the vendored one. + + + *Related issues and pull requests on GitHub:* + :issue:`10759`. + + + +- ``aiodns`` is now installed on Windows with speedups extra -- by :user:`bdraco`. + + As of ``aiodns`` 3.3.0, ``SelectorEventLoop`` is no longer required when using ``pycares`` 4.7.0 or later. + + + *Related issues and pull requests on GitHub:* + :issue:`10823`. + + + +- Fixed compatibility issue with Cython 3.1.1 -- by :user:`bdraco` + + + *Related issues and pull requests on GitHub:* + :issue:`10877`. 
+ + + + +Contributor-facing changes +-------------------------- + +- Sped up tests by disabling ``blockbuster`` fixture for ``test_static_file_huge`` and ``test_static_file_huge_cancel`` tests -- by :user:`dikos1337`. + + + *Related issues and pull requests on GitHub:* + :issue:`9705`, :issue:`10761`. + + + +- Updated tests to avoid using deprecated :py:mod:`asyncio` policy APIs and + make it compatible with Python 3.14. + + + *Related issues and pull requests on GitHub:* + :issue:`10851`. + + + +- Added Winloop to test suite to support in the future -- by :user:`Vizonex`. + + + *Related issues and pull requests on GitHub:* + :issue:`10922`. + + + + +Miscellaneous internal changes +------------------------------ + +- Added support for the ``partitioned`` attribute in the ``set_cookie`` method. + + + *Related issues and pull requests on GitHub:* + :issue:`9870`. + + + +- Setting :attr:`aiohttp.web.StreamResponse.last_modified` to an unsupported type will now raise :exc:`TypeError` instead of silently failing -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10146`. + + + + +---- + + 3.12.0rc0 (2025-05-23) ====================== diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 0de2fb48b1b..fdad4aac495 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.0rc0" +__version__ = "3.12.0rc1" from typing import TYPE_CHECKING, Tuple From cfe3d219d0016571a2fe239a24da1421a1f947da Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sat, 24 May 2025 16:38:56 -0500 Subject: [PATCH 1448/1511] [PR #10978/df30c55 backport][3.12] Cookbook changes (#10995) Co-authored-by: Sam Bull <git@sambull.org> Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .mypy.ini | 2 +- docs/client_advanced.rst | 195 ++----------- docs/client_middleware_cookbook.rst | 351 ++++-------------------- docs/client_reference.rst | 127 +++++++++ docs/code/client_middleware_cookbook.py | 143 ++++++++++ docs/conf.py | 3 +- docs/spelling_wordlist.txt | 1 + setup.cfg | 1 + 8 files changed, 363 insertions(+), 460 deletions(-) create mode 100644 docs/code/client_middleware_cookbook.py diff --git a/.mypy.ini b/.mypy.ini index 78001c36e8f..e91bd30d58f 100644 --- a/.mypy.ini +++ b/.mypy.ini @@ -1,5 +1,5 @@ [mypy] -files = aiohttp, examples +files = aiohttp, docs/code, examples check_untyped_defs = True follow_imports_for_stubs = True #disallow_any_decorated = True diff --git a/docs/client_advanced.rst b/docs/client_advanced.rst index 5a94e68ec1f..18c274ca7f5 100644 --- a/docs/client_advanced.rst +++ b/docs/client_advanced.rst @@ -124,29 +124,33 @@ Client Middleware ----------------- The client supports middleware to intercept requests and responses. This can be -useful for authentication, logging, request/response modification, and retries. +useful for authentication, logging, request/response modification, retries etc. -For practical examples and common middleware patterns, see the :ref:`aiohttp-client-middleware-cookbook`. +For more examples and common middleware patterns, see the :ref:`aiohttp-client-middleware-cookbook`. -Creating Middleware -^^^^^^^^^^^^^^^^^^^ +Creating a middleware +^^^^^^^^^^^^^^^^^^^^^ -To create a middleware, define an async function (or callable class) that accepts a request -and a handler function, and returns a response. 
Middleware must follow the -:type:`ClientMiddlewareType` signature (see :ref:`aiohttp-client-reference` for details). +To create a middleware, define an async function (or callable class) that accepts a request object +and a handler function, and returns a response. Middlewares must follow the +:type:`ClientMiddlewareType` signature:: -Using Middleware -^^^^^^^^^^^^^^^^ + async def auth_middleware(req: ClientRequest, handler: ClientHandlerType) -> ClientResponse: + req.headers["Authorization"] = get_auth_header() + return await handler(req) + +Using Middlewares +^^^^^^^^^^^^^^^^^ -You can apply middleware to a client session or to individual requests:: +You can apply middlewares to a client session or to individual requests:: # Apply to all requests in a session async with ClientSession(middlewares=(my_middleware,)) as session: - resp = await session.get('http://example.com') + resp = await session.get("http://example.com") # Apply to a specific request async with ClientSession() as session: - resp = await session.get('http://example.com', middlewares=(my_middleware,)) + resp = await session.get("http://example.com", middlewares=(my_middleware,)) Middleware Chaining ^^^^^^^^^^^^^^^^^^^ @@ -155,13 +159,14 @@ Multiple middlewares are applied in the order they are listed:: # Middlewares are applied in order: logging -> auth -> request async with ClientSession(middlewares=(logging_middleware, auth_middleware)) as session: - resp = await session.get('http://example.com') + async with session.get("http://example.com") as resp: + ... -A key aspect to understand about the flat middleware structure is that the execution flow follows this pattern: +A key aspect to understand about the middleware sequence is that the execution flow follows this pattern: 1. The first middleware in the list is called first and executes its code before calling the handler -2. The handler is the next middleware in the chain (or the actual request handler if there are no more middleware) -3. 
When the handler returns a response, execution continues in the first middleware after the handler call +2. The handler is the next middleware in the chain (or the request handler if there are no more middlewares) +3. When the handler returns a response, execution continues from the last middleware right after the handler call 4. This creates a nested "onion-like" pattern for execution For example, with ``middlewares=(middleware1, middleware2)``, the execution order would be: @@ -172,7 +177,12 @@ For example, with ``middlewares=(middleware1, middleware2)``, the execution orde 4. Exit ``middleware2`` (post-response code) 5. Exit ``middleware1`` (post-response code) -This flat structure means that middleware is applied on each retry attempt inside the client's retry loop, not just once before all retries. This allows middleware to modify requests freshly on each retry attempt. +This flat structure means that a middleware is applied on each retry attempt inside the client's retry loop, +not just once before all retries. This allows middleware to modify requests freshly on each retry attempt. + +For example, if we had a retry middleware and a logging middleware, and we want every retried request to be +logged separately, then we'd need to specify ``middlewares=(retry_mw, logging_mw)``. If we reversed the order +to ``middlewares=(logging_mw, retry_mw)``, then we'd only log once regardless of how many retries are done. .. note:: @@ -181,157 +191,6 @@ This flat structure means that middleware is applied on each retry attempt insid like adding static headers, you can often use request parameters (e.g., ``headers``) or session configuration instead. -Common Middleware Patterns -^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. _client-middleware-retry: - -Authentication and Retry -"""""""""""""""""""""""" - -There are two recommended approaches for implementing retry logic: - -1. 
**For Loop Pattern (Simple Cases)** - - Use a bounded ``for`` loop when the number of retry attempts is known and fixed:: - - import hashlib - from aiohttp import ClientSession, ClientRequest, ClientResponse, ClientHandlerType - - async def auth_retry_middleware( - request: ClientRequest, - handler: ClientHandlerType - ) -> ClientResponse: - # Try up to 3 authentication methods - for attempt in range(3): - if attempt == 0: - # First attempt: use API key - request.headers["X-API-Key"] = "my-api-key" - elif attempt == 1: - # Second attempt: use Bearer token - request.headers["Authorization"] = "Bearer fallback-token" - else: - # Third attempt: use hash-based signature - secret_key = "my-secret-key" - url_path = str(request.url.path) - signature = hashlib.sha256(f"{url_path}{secret_key}".encode()).hexdigest() - request.headers["X-Signature"] = signature - - # Send the request - response = await handler(request) - - # If successful or not an auth error, return immediately - if response.status != 401: - return response - - # Return the last response if all retries are exhausted - return response - -2. 
**While Loop Pattern (Complex Cases)** - - For more complex scenarios, use a ``while`` loop with strict exit conditions:: - - import logging - - _LOGGER = logging.getLogger(__name__) - - class RetryMiddleware: - def __init__(self, max_retries: int = 3): - self.max_retries = max_retries - - async def __call__( - self, - request: ClientRequest, - handler: ClientHandlerType - ) -> ClientResponse: - retry_count = 0 - - # Always have clear exit conditions - while retry_count <= self.max_retries: - # Send the request - response = await handler(request) - - # Exit conditions - if 200 <= response.status < 400 or retry_count >= self.max_retries: - return response - - # Retry logic for different status codes - if response.status in (401, 429, 500, 502, 503, 504): - retry_count += 1 - _LOGGER.debug(f"Retrying request (attempt {retry_count}/{self.max_retries})") - continue - - # For any other status code, don't retry - return response - - # Safety return (should never reach here) - return response - -Request Modification -"""""""""""""""""""" - -Modify request properties based on request content:: - - async def content_type_middleware( - request: ClientRequest, - handler: ClientHandlerType - ) -> ClientResponse: - # Examine URL path to determine content-type - if request.url.path.endswith('.json'): - request.headers['Content-Type'] = 'application/json' - elif request.url.path.endswith('.xml'): - request.headers['Content-Type'] = 'application/xml' - - # Add custom headers based on HTTP method - if request.method == 'POST': - request.headers['X-Request-ID'] = f"post-{id(request)}" - - return await handler(request) - -Avoiding Infinite Recursion -^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. warning:: - - Using the same session from within middleware can cause infinite recursion if - the middleware makes HTTP requests using the same session that has the middleware - applied. This is especially risky in token refresh middleware or retry logic. 
- - When implementing retry or refresh logic, always use bounded loops - (e.g., ``for _ in range(2):`` instead of ``while True:``) to prevent infinite recursion. - -To avoid recursion when making requests inside middleware, use one of these approaches: - -**Option 1:** Disable middleware for internal requests:: - - async def log_middleware( - request: ClientRequest, - handler: ClientHandlerType - ) -> ClientResponse: - async with request.session.post( - "https://logapi.example/log", - json={"url": str(request.url)}, - middlewares=() # This prevents infinite recursion - ) as resp: - pass - - return await handler(request) - -**Option 2:** Check request details to avoid recursive application:: - - async def log_middleware( - request: ClientRequest, - handler: ClientHandlerType - ) -> ClientResponse: - if request.url.host != "logapi.example": # Avoid infinite recursion - async with request.session.post( - "https://logapi.example/log", - json={"url": str(request.url)} - ) as resp: - pass - - return await handler(request) - Custom Cookies -------------- diff --git a/docs/client_middleware_cookbook.rst b/docs/client_middleware_cookbook.rst index 4b8d6ddd5f8..33994160fba 100644 --- a/docs/client_middleware_cookbook.rst +++ b/docs/client_middleware_cookbook.rst @@ -5,331 +5,102 @@ Client Middleware Cookbook ========================== -This cookbook provides practical examples of implementing client middleware for common use cases. +This cookbook provides examples of how client middlewares can be used for common use cases. -.. note:: +Simple Retry Middleware +----------------------- - All examples in this cookbook are also available as complete, runnable scripts in the - ``examples/`` directory of the aiohttp repository. Look for files named ``*_middleware.py``. +It's very easy to create middlewares that can retry a connection on a given condition: -.. _cookbook-basic-auth-middleware: +.. 
literalinclude:: code/client_middleware_cookbook.py + :pyobject: retry_middleware -Basic Authentication Middleware -------------------------------- +.. warning:: -Basic authentication is a simple authentication scheme built into the HTTP protocol. -Here's a middleware that automatically adds Basic Auth headers to all requests: + It is recommended to ensure loops are bounded (e.g. using a ``for`` loop) to avoid + creating an infinite loop. -.. code-block:: python +Logging to an external service +------------------------------ - import base64 - from aiohttp import ClientRequest, ClientResponse, ClientHandlerType, hdrs +If we needed to log our requests via an API call to an external server or similar, we could +create a simple middleware like this: - class BasicAuthMiddleware: - """Middleware that adds Basic Authentication to all requests.""" +.. literalinclude:: code/client_middleware_cookbook.py + :pyobject: api_logging_middleware - def __init__(self, username: str, password: str) -> None: - self.username = username - self.password = password - self._auth_header = self._encode_credentials() +.. warning:: - def _encode_credentials(self) -> str: - """Encode username and password to base64.""" - credentials = f"{self.username}:{self.password}" - encoded = base64.b64encode(credentials.encode()).decode() - return f"Basic {encoded}" + Using the same session from within a middleware can cause infinite recursion if + that request gets processed again by the middleware. 
- async def __call__( - self, - request: ClientRequest, - handler: ClientHandlerType - ) -> ClientResponse: - """Add Basic Auth header to the request.""" - # Only add auth if not already present - if hdrs.AUTHORIZATION not in request.headers: - request.headers[hdrs.AUTHORIZATION] = self._auth_header + To avoid such recursion a middleware should typically make requests with + ``middlewares=()`` or else contain some condition to stop the request triggering + the same logic when it is processed again by the middleware (e.g. by whitelisting + the API domain of the request). - # Proceed with the request - return await handler(request) +Token Refresh Middleware +------------------------ -Usage example: +If you need to refresh access tokens to continue accessing an API, this is also a good +candidate for a middleware. For example, you could check for a 401 response, then +refresh the token and retry: -.. code-block:: python +.. literalinclude:: code/client_middleware_cookbook.py + :pyobject: TokenRefresh401Middleware - import aiohttp - import asyncio - import logging +If you have an expiry time for the token, you could refresh at the expiry time, to avoid the +failed request: - _LOGGER = logging.getLogger(__name__) +.. literalinclude:: code/client_middleware_cookbook.py + :pyobject: TokenRefreshExpiryMiddleware - async def main(): - # Create middleware instance - auth_middleware = BasicAuthMiddleware("user", "pass") +Or you could even refresh preemptively in a background task to avoid any API delays. This is probably more +efficient to implement without a middleware: 
literalinclude:: code/client_middleware_cookbook.py + :pyobject: token_refresh_preemptively_example - asyncio.run(main()) +Or combine the above approaches to create a more robust solution. -.. _cookbook-retry-middleware: +.. note:: -Simple Retry Middleware ------------------------ + These can also be adjusted to handle proxy auth by modifying + :attr:`ClientRequest.proxy_headers`. -A retry middleware that automatically retries failed requests with exponential backoff: - -.. code-block:: python - - import asyncio - import logging - from http import HTTPStatus - from typing import Union, Set - from aiohttp import ClientRequest, ClientResponse, ClientHandlerType - - _LOGGER = logging.getLogger(__name__) - - DEFAULT_RETRY_STATUSES = { - HTTPStatus.TOO_MANY_REQUESTS, - HTTPStatus.INTERNAL_SERVER_ERROR, - HTTPStatus.BAD_GATEWAY, - HTTPStatus.SERVICE_UNAVAILABLE, - HTTPStatus.GATEWAY_TIMEOUT - } - - class RetryMiddleware: - """Middleware that retries failed requests with exponential backoff.""" - - def __init__( - self, - max_retries: int = 3, - retry_statuses: Union[Set[int], None] = None, - initial_delay: float = 1.0, - backoff_factor: float = 2.0 - ) -> None: - self.max_retries = max_retries - self.retry_statuses = retry_statuses or DEFAULT_RETRY_STATUSES - self.initial_delay = initial_delay - self.backoff_factor = backoff_factor - - async def __call__( - self, - request: ClientRequest, - handler: ClientHandlerType - ) -> ClientResponse: - """Execute request with retry logic.""" - last_response = None - delay = self.initial_delay - - for attempt in range(self.max_retries + 1): - if attempt > 0: - _LOGGER.info( - "Retrying request to %s (attempt %s/%s)", - request.url, - attempt + 1, - self.max_retries + 1 - ) - - # Execute the request - response = await handler(request) - last_response = response - - # Check if we should retry - if response.status not in self.retry_statuses: - return response - - # Don't retry if we've exhausted attempts - if attempt >= 
self.max_retries: - _LOGGER.warning( - "Max retries (%s) exceeded for %s", - self.max_retries, - request.url - ) - return response - - # Wait before retrying - _LOGGER.debug("Waiting %ss before retry...", delay) - await asyncio.sleep(delay) - delay *= self.backoff_factor - - # Return the last response - return last_response - -Usage example: - -.. code-block:: python - - import aiohttp - import asyncio - import logging - from http import HTTPStatus - - _LOGGER = logging.getLogger(__name__) - - RETRY_STATUSES = { - HTTPStatus.TOO_MANY_REQUESTS, - HTTPStatus.INTERNAL_SERVER_ERROR, - HTTPStatus.BAD_GATEWAY, - HTTPStatus.SERVICE_UNAVAILABLE, - HTTPStatus.GATEWAY_TIMEOUT - } - - async def main(): - # Create retry middleware with custom settings - retry_middleware = RetryMiddleware( - max_retries=3, - retry_statuses=RETRY_STATUSES, - initial_delay=0.5, - backoff_factor=2.0 - ) - - async with aiohttp.ClientSession(middlewares=(retry_middleware,)) as session: - # This will automatically retry on server errors - async with session.get("https://httpbin.org/status/500") as resp: - _LOGGER.debug("Final status: %s", resp.status) - - asyncio.run(main()) - -.. _cookbook-combining-middleware: - -Combining Multiple Middleware ------------------------------ - -You can combine multiple middleware to create powerful request pipelines: - -.. 
code-block:: python - - import time - import logging - from aiohttp import ClientRequest, ClientResponse, ClientHandlerType - - _LOGGER = logging.getLogger(__name__) - - class LoggingMiddleware: - """Middleware that logs request timing and response status.""" - - async def __call__( - self, - request: ClientRequest, - handler: ClientHandlerType - ) -> ClientResponse: - start_time = time.monotonic() - - # Log request - _LOGGER.debug("[REQUEST] %s %s", request.method, request.url) - - # Execute request - response = await handler(request) +Server-side Request Forgery Protection +-------------------------------------- - # Log response - duration = time.monotonic() - start_time - _LOGGER.debug("[RESPONSE] %s in %.2fs", response.status, duration) +To provide protection against server-side request forgery, we could blacklist any internal +IPs or domains. We could create a middleware that rejects requests made to a blacklist: - return response +.. literalinclude:: code/client_middleware_cookbook.py + :pyobject: ssrf_middleware - # Combine multiple middleware - async def main(): - # Middleware are applied in order: logging -> auth -> retry -> request - logging_middleware = LoggingMiddleware() - auth_middleware = BasicAuthMiddleware("user", "pass") - retry_middleware = RetryMiddleware(max_retries=2) +.. warning:: - async with aiohttp.ClientSession( - middlewares=(logging_middleware, auth_middleware, retry_middleware) - ) as session: - async with session.get("https://httpbin.org/basic-auth/user/pass") as resp: - text = await resp.text() - _LOGGER.debug("Response text: %s", text) + The above example is simplified for demonstration purposes. A production-ready + implementation should also check IPv6 addresses (``::1``), private IP ranges, + link-local addresses, and other internal hostnames. Consider using a well-tested + library for SSRF protection in production environments. -.. 
_cookbook-token-refresh-middleware: +If you know that your services correctly reject requests with an incorrect ``Host`` header, then +that may provide sufficient protection. Otherwise, we still have a concern with an attacker's +own domain resolving to a blacklisted IP. To provide complete protection, we can also +create a custom resolver: -Token Refresh Middleware ------------------------- +.. literalinclude:: code/client_middleware_cookbook.py + :pyobject: SSRFConnector + +Using both of these together in a session should provide full SSRF protection. -A more advanced example showing JWT token refresh: - -.. code-block:: python - - import asyncio - import time - from http import HTTPStatus - from typing import Union - from aiohttp import ClientRequest, ClientResponse, ClientHandlerType, hdrs - - class TokenRefreshMiddleware: - """Middleware that handles JWT token refresh automatically.""" - - def __init__(self, token_endpoint: str, refresh_token: str) -> None: - self.token_endpoint = token_endpoint - self.refresh_token = refresh_token - self.access_token: Union[str, None] = None - self.token_expires_at: Union[float, None] = None - self._refresh_lock = asyncio.Lock() - - async def _refresh_access_token(self, session) -> str: - """Refresh the access token using the refresh token.""" - async with self._refresh_lock: - # Check if another coroutine already refreshed the token - if self.token_expires_at and time.time() < self.token_expires_at: - return self.access_token - - # Make refresh request without middleware to avoid recursion - async with session.post( - self.token_endpoint, - json={"refresh_token": self.refresh_token}, - middlewares=() # Disable middleware for this request - ) as resp: - resp.raise_for_status() - data = await resp.json() - - if "access_token" not in data: - raise ValueError("No access_token in refresh response") - - self.access_token = data["access_token"] - # Token expires in 1 hour for demo, refresh 5 min early - expires_in = 
data.get("expires_in", 3600) - self.token_expires_at = time.time() + expires_in - 300 - return self.access_token - - async def __call__( - self, - request: ClientRequest, - handler: ClientHandlerType - ) -> ClientResponse: - """Add auth token to request, refreshing if needed.""" - # Skip token for refresh endpoint - if str(request.url).endswith('/token/refresh'): - return await handler(request) - - # Refresh token if needed - if not self.access_token or ( - self.token_expires_at and time.time() >= self.token_expires_at - ): - await self._refresh_access_token(request.session) - - # Add token to request - request.headers[hdrs.AUTHORIZATION] = f"Bearer {self.access_token}" - - # Execute request - response = await handler(request) - - # If we get 401, try refreshing token once - if response.status == HTTPStatus.UNAUTHORIZED: - await self._refresh_access_token(request.session) - request.headers[hdrs.AUTHORIZATION] = f"Bearer {self.access_token}" - response = await handler(request) - - return response Best Practices -------------- 1. **Keep middleware focused**: Each middleware should have a single responsibility. -2. **Order matters**: Middleware execute in the order they're listed. Place logging first, +2. **Order matters**: Middlewares execute in the order they're listed. Place logging first, authentication before retry, etc. 3. **Avoid infinite recursion**: When making HTTP requests inside middleware, either: diff --git a/docs/client_reference.rst b/docs/client_reference.rst index fa0a50425af..606df6acc0a 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -1864,6 +1864,133 @@ manually. :raise TypeError: if message is :const:`~aiohttp.WSMsgType.BINARY`. :raise ValueError: if message is not valid JSON. +ClientRequest +------------- + +.. class:: ClientRequest + + Represents an HTTP request to be sent by the client. + + This object encapsulates all the details of an HTTP request before it is sent. 
+ It is primarily used within client middleware to inspect or modify requests. + + .. note:: + + You typically don't create ``ClientRequest`` instances directly. They are + created internally by :class:`ClientSession` methods and passed to middleware. + + For more information about using middleware, see :ref:`aiohttp-client-middleware`. + + .. attribute:: body + :type: Payload | FormData + + The request body payload. This can be: + + - A :class:`Payload` object for raw data (default is empty bytes ``b""``) + - A :class:`FormData` object for form submissions + + .. attribute:: chunked + :type: bool | None + + Whether to use chunked transfer encoding: + + - ``True``: Use chunked encoding + - ``False``: Don't use chunked encoding + - ``None``: Automatically determine based on body + + .. attribute:: compress + :type: str | None + + The compression encoding for the request body. Common values include + ``'gzip'`` and ``'deflate'``, but any string value is technically allowed. + ``None`` means no compression. + + .. attribute:: headers + :type: multidict.CIMultiDict + + The HTTP headers that will be sent with the request. This is a case-insensitive + multidict that can be modified by middleware. + + .. code-block:: python + + # Add or modify headers + request.headers['X-Custom-Header'] = 'value' + request.headers['User-Agent'] = 'MyApp/1.0' + + .. attribute:: is_ssl + :type: bool + + ``True`` if the request uses a secure scheme (e.g., HTTPS, WSS), ``False`` otherwise. + + .. attribute:: method + :type: str + + The HTTP method of the request (e.g., ``'GET'``, ``'POST'``, ``'PUT'``, etc.). + + .. attribute:: original_url + :type: yarl.URL + + The original URL passed to the request method, including any fragment. + This preserves the exact URL as provided by the user. + + .. attribute:: proxy + :type: yarl.URL | None + + The proxy URL if the request will be sent through a proxy, ``None`` otherwise. + + .. 
attribute:: proxy_headers + :type: multidict.CIMultiDict | None + + Headers to be sent to the proxy server (e.g., ``Proxy-Authorization``). + Only set when :attr:`proxy` is not ``None``. + + .. attribute:: response_class + :type: type[ClientResponse] + + The class to use for creating the response object. Defaults to + :class:`ClientResponse` but can be customized for special handling. + + .. attribute:: server_hostname + :type: str | None + + Override the hostname for SSL certificate verification. Useful when + connecting through proxies or to IP addresses. + + .. attribute:: session + :type: ClientSession + + The client session that created this request. Useful for accessing + session-level configuration or making additional requests within middleware. + + .. warning:: + Be careful when making requests with the same session inside middleware + to avoid infinite recursion. Use ``middlewares=()`` parameter when needed. + + .. attribute:: ssl + :type: ssl.SSLContext | bool | Fingerprint + + SSL validation configuration for this request: + + - ``True``: Use default SSL verification + - ``False``: Skip SSL verification + - :class:`ssl.SSLContext`: Custom SSL context + - :class:`Fingerprint`: Verify specific certificate fingerprint + + .. attribute:: url + :type: yarl.URL + + The target URL of the request with the fragment (``#...``) part stripped. + This is the actual URL that will be used for the connection. + + .. note:: + To access the original URL with fragment, use :attr:`original_url`. + + .. attribute:: version + :type: HttpVersion + + The HTTP version to use for the request (e.g., ``HttpVersion(1, 1)`` for HTTP/1.1). 
+ + Utilities --------- diff --git a/docs/code/client_middleware_cookbook.py b/docs/code/client_middleware_cookbook.py new file mode 100644 index 00000000000..5bd84c68ac7 --- /dev/null +++ b/docs/code/client_middleware_cookbook.py @@ -0,0 +1,143 @@ +"""This is a collection of semi-complete examples that get included into the cookbook page.""" + +import asyncio +import logging +import time +from collections.abc import AsyncIterator, Sequence +from contextlib import asynccontextmanager, suppress + +from aiohttp import ( + ClientError, + ClientHandlerType, + ClientRequest, + ClientResponse, + ClientSession, + TCPConnector, +) +from aiohttp.abc import ResolveResult +from aiohttp.tracing import Trace + + +class SSRFError(ClientError): + """A request was made to a blacklisted host.""" + + +async def retry_middleware( + req: ClientRequest, handler: ClientHandlerType +) -> ClientResponse: + for _ in range(3): # Try up to 3 times + resp = await handler(req) + if resp.ok: + return resp + return resp + + +async def api_logging_middleware( + req: ClientRequest, handler: ClientHandlerType +) -> ClientResponse: + # We use middlewares=() to avoid infinite recursion. 
+ async with req.session.post("/log", data=req.url.host, middlewares=()) as resp: + if not resp.ok: + logging.warning("Log endpoint failed") + + return await handler(req) + + +class TokenRefresh401Middleware: + def __init__(self, refresh_token: str, access_token: str): + self.access_token = access_token + self.refresh_token = refresh_token + self.lock = asyncio.Lock() + + async def __call__( + self, req: ClientRequest, handler: ClientHandlerType + ) -> ClientResponse: + for _ in range(2): # Retry at most one time + token = self.access_token + req.headers["Authorization"] = f"Bearer {token}" + resp = await handler(req) + if resp.status != 401: + return resp + async with self.lock: + if token != self.access_token: # Already refreshed + continue + url = "https://api.example/refresh" + async with req.session.post(url, data=self.refresh_token) as resp: + # Add error handling as needed + data = await resp.json() + self.access_token = data["access_token"] + return resp + + +class TokenRefreshExpiryMiddleware: + def __init__(self, refresh_token: str): + self.access_token = "" + self.expires_at = 0 + self.refresh_token = refresh_token + self.lock = asyncio.Lock() + + async def __call__( + self, req: ClientRequest, handler: ClientHandlerType + ) -> ClientResponse: + if self.expires_at <= time.time(): + token = self.access_token + async with self.lock: + if token == self.access_token: # Still not refreshed + url = "https://api.example/refresh" + async with req.session.post(url, data=self.refresh_token) as resp: + # Add error handling as needed + data = await resp.json() + self.access_token = data["access_token"] + self.expires_at = data["expires_at"] + + req.headers["Authorization"] = f"Bearer {self.access_token}" + return await handler(req) + + +async def token_refresh_preemptively_example() -> None: + async def set_token(session: ClientSession, event: asyncio.Event) -> None: + while True: + async with session.post("/refresh") as resp: + token = await resp.json() + 
session.headers["Authorization"] = f"Bearer {token['auth']}" + event.set() + await asyncio.sleep(token["valid_duration"]) + + @asynccontextmanager + async def auto_refresh_client() -> AsyncIterator[ClientSession]: + async with ClientSession() as session: + ready = asyncio.Event() + t = asyncio.create_task(set_token(session, ready)) + await ready.wait() + yield session + t.cancel() + with suppress(asyncio.CancelledError): + await t + + async with auto_refresh_client() as sess: + ... + + +async def ssrf_middleware( + req: ClientRequest, handler: ClientHandlerType +) -> ClientResponse: + # WARNING: This is a simplified example for demonstration purposes only. + # A complete implementation should also check: + # - IPv6 loopback (::1) + # - Private IP ranges (10.x.x.x, 192.168.x.x, 172.16-31.x.x) + # - Link-local addresses (169.254.x.x, fe80::/10) + # - Other internal hostnames and aliases + if req.url.host in {"127.0.0.1", "localhost"}: + raise SSRFError(req.url.host) + return await handler(req) + + +class SSRFConnector(TCPConnector): + async def _resolve_host( + self, host: str, port: int, traces: Sequence[Trace] | None = None + ) -> list[ResolveResult]: + res = await super()._resolve_host(host, port, traces) + # WARNING: This is a simplified example - should also check ::1, private ranges, etc. 
+ if any(r["host"] in {"127.0.0.1"} for r in res): + raise SSRFError() + return res diff --git a/docs/conf.py b/docs/conf.py index 84dadfc8442..a449f223e1d 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -397,8 +397,9 @@ ("py:class", "aiohttp.web.RequestHandler"), # undocumented ("py:class", "aiohttp.NamedPipeConnector"), # undocumented ("py:class", "aiohttp.protocol.HttpVersion"), # undocumented - ("py:class", "aiohttp.ClientRequest"), # undocumented + ("py:class", "HttpVersion"), # undocumented ("py:class", "aiohttp.payload.Payload"), # undocumented + ("py:class", "Payload"), # undocumented ("py:class", "aiohttp.resolver.AsyncResolver"), # undocumented ("py:class", "aiohttp.resolver.ThreadedResolver"), # undocumented ("py:func", "aiohttp.ws_connect"), # undocumented diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index 3f67df33159..8b389cc11f6 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -145,6 +145,7 @@ HTTPException HttpProcessingError httpretty https +hostname impl incapsulates Indices diff --git a/setup.cfg b/setup.cfg index 23e56d61d00..4adfde579a0 100644 --- a/setup.cfg +++ b/setup.cfg @@ -99,6 +99,7 @@ max-line-length = 88 per-file-ignores = # I900: Shouldn't appear in requirements for examples. examples/*:I900 + docs/code/*:F841 # flake8-requirements known-modules = proxy.py:[proxy] From 7a6ee687e945902d55eb08027987ac34c5f02840 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sat, 24 May 2025 16:53:33 -0500 Subject: [PATCH 1449/1511] Release 3.12.0 (#10996) --- CHANGES.rst | 276 ++++++++++++++++++++++++++++++++++++ CHANGES/10074.feature.rst | 2 - CHANGES/10119.bugfix.rst | 1 - CHANGES/10120.feature.rst | 1 - CHANGES/10146.misc.rst | 1 - CHANGES/10325.bugfix.rst | 1 - CHANGES/10433.feature.rst | 1 - CHANGES/10474.feature.rst | 1 - CHANGES/10520.feature.rst | 2 - CHANGES/10662.packaging.rst | 1 - CHANGES/10725.feature.rst | 6 - CHANGES/10759.packaging.rst | 5 - CHANGES/10761.contrib.rst | 1 - CHANGES/10797.feature.rst | 1 - CHANGES/10823.packaging.rst | 3 - CHANGES/10847.feature.rst | 5 - CHANGES/10851.bugfix.rst | 1 - CHANGES/10851.contrib.rst | 2 - CHANGES/10877.packaging.rst | 1 - CHANGES/10902.feature.rst | 1 - CHANGES/10915.bugfix.rst | 3 - CHANGES/10922.contrib.rst | 1 - CHANGES/10923.feature.rst | 1 - CHANGES/10941.bugfix.rst | 1 - CHANGES/10943.bugfix.rst | 1 - CHANGES/10945.feature.rst | 1 - CHANGES/10946.feature.rst | 1 - CHANGES/10951.bugfix.rst | 1 - CHANGES/10952.feature.rst | 1 - CHANGES/10959.feature.rst | 1 - CHANGES/10961.feature.rst | 1 - CHANGES/10962.feature.rst | 1 - CHANGES/10968.feature.rst | 1 - CHANGES/10972.feature.rst | 1 - CHANGES/10988.bugfix.rst | 1 - CHANGES/10991.feature.rst | 7 - CHANGES/2213.feature.rst | 1 - CHANGES/2914.doc.rst | 4 - CHANGES/6009.bugfix.rst | 1 - CHANGES/9705.contrib.rst | 1 - CHANGES/9732.feature.rst | 6 - CHANGES/9798.feature.rst | 5 - CHANGES/9870.misc.rst | 1 - aiohttp/__init__.py | 2 +- 44 files changed, 277 insertions(+), 81 deletions(-) delete mode 100644 CHANGES/10074.feature.rst delete mode 100644 CHANGES/10119.bugfix.rst delete mode 100644 CHANGES/10120.feature.rst delete mode 100644 CHANGES/10146.misc.rst delete mode 120000 CHANGES/10325.bugfix.rst delete mode 100644 CHANGES/10433.feature.rst delete mode 120000 CHANGES/10474.feature.rst delete mode 100644 CHANGES/10520.feature.rst delete mode 100644 CHANGES/10662.packaging.rst delete mode 
100644 CHANGES/10725.feature.rst delete mode 100644 CHANGES/10759.packaging.rst delete mode 120000 CHANGES/10761.contrib.rst delete mode 100644 CHANGES/10797.feature.rst delete mode 100644 CHANGES/10823.packaging.rst delete mode 100644 CHANGES/10847.feature.rst delete mode 100644 CHANGES/10851.bugfix.rst delete mode 100644 CHANGES/10851.contrib.rst delete mode 100644 CHANGES/10877.packaging.rst delete mode 120000 CHANGES/10902.feature.rst delete mode 100644 CHANGES/10915.bugfix.rst delete mode 100644 CHANGES/10922.contrib.rst delete mode 120000 CHANGES/10923.feature.rst delete mode 120000 CHANGES/10941.bugfix.rst delete mode 120000 CHANGES/10943.bugfix.rst delete mode 120000 CHANGES/10945.feature.rst delete mode 120000 CHANGES/10946.feature.rst delete mode 100644 CHANGES/10951.bugfix.rst delete mode 120000 CHANGES/10952.feature.rst delete mode 120000 CHANGES/10959.feature.rst delete mode 120000 CHANGES/10961.feature.rst delete mode 120000 CHANGES/10962.feature.rst delete mode 120000 CHANGES/10968.feature.rst delete mode 100644 CHANGES/10972.feature.rst delete mode 120000 CHANGES/10988.bugfix.rst delete mode 100644 CHANGES/10991.feature.rst delete mode 120000 CHANGES/2213.feature.rst delete mode 100644 CHANGES/2914.doc.rst delete mode 100644 CHANGES/6009.bugfix.rst delete mode 100644 CHANGES/9705.contrib.rst delete mode 100644 CHANGES/9732.feature.rst delete mode 100644 CHANGES/9798.feature.rst delete mode 100644 CHANGES/9870.misc.rst diff --git a/CHANGES.rst b/CHANGES.rst index 176dcf88179..ddbebd82369 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,282 @@ .. towncrier release notes start +3.12.0 (2025-05-24) +=================== + +Bug fixes +--------- + +- Fixed :py:attr:`~aiohttp.web.WebSocketResponse.prepared` property to correctly reflect the prepared state, especially during timeout scenarios -- by :user:`bdraco` + + + *Related issues and pull requests on GitHub:* + :issue:`6009`, :issue:`10988`. 
+ + + +- Response is now always True, instead of using MutableMapping behaviour (False when map is empty) + + + *Related issues and pull requests on GitHub:* + :issue:`10119`. + + + +- Fixed connection reuse for file-like data payloads by ensuring buffer + truncation respects content-length boundaries and preventing premature + connection closure race -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10325`, :issue:`10915`, :issue:`10941`, :issue:`10943`. + + + +- Fixed pytest plugin to not use deprecated :py:mod:`asyncio` policy APIs. + + + *Related issues and pull requests on GitHub:* + :issue:`10851`. + + + +- Fixed :py:class:`~aiohttp.resolver.AsyncResolver` not using the ``loop`` argument in versions 3.x where it should still be supported -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10951`. + + + + +Features +-------- + +- Added a comprehensive HTTP Digest Authentication client middleware (DigestAuthMiddleware) + that implements RFC 7616. The middleware supports all standard hash algorithms + (MD5, SHA, SHA-256, SHA-512) with session variants, handles both 'auth' and + 'auth-int' quality of protection options, and automatically manages the + authentication flow by intercepting 401 responses and retrying with proper + credentials -- by :user:`feus4177`, :user:`TimMenninger`, and :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`2213`, :issue:`10725`. + + + +- Added client middleware support -- by :user:`bdraco` and :user:`Dreamsorcerer`. + + This change allows users to add middleware to the client session and requests, enabling features like + authentication, logging, and request/response modification without modifying the core + request logic. Additionally, the ``session`` attribute was added to ``ClientRequest``, + allowing middleware to access the session for making additional requests. 
+ + + *Related issues and pull requests on GitHub:* + :issue:`9732`, :issue:`10902`, :issue:`10945`, :issue:`10952`, :issue:`10959`, :issue:`10968`. + + + +- Allow user setting zlib compression backend -- by :user:`TimMenninger` + + This change allows the user to call :func:`aiohttp.set_zlib_backend()` with the + zlib compression module of their choice. Default behavior continues to use + the builtin ``zlib`` library. + + + *Related issues and pull requests on GitHub:* + :issue:`9798`. + + + +- Added support for overriding the base URL with an absolute one in client sessions + -- by :user:`vivodi`. + + + *Related issues and pull requests on GitHub:* + :issue:`10074`. + + + +- Added ``host`` parameter to ``aiohttp_server`` fixture -- by :user:`christianwbrock`. + + + *Related issues and pull requests on GitHub:* + :issue:`10120`. + + + +- Detect blocking calls in coroutines using BlockBuster -- by :user:`cbornet`. + + + *Related issues and pull requests on GitHub:* + :issue:`10433`. + + + +- Added ``socket_factory`` to :py:class:`aiohttp.TCPConnector` to allow specifying custom socket options + -- by :user:`TimMenninger`. + + + *Related issues and pull requests on GitHub:* + :issue:`10474`, :issue:`10520`, :issue:`10961`, :issue:`10962`. + + + +- Started building armv7l manylinux wheels -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10797`. + + + +- Implemented shared DNS resolver management to fix excessive resolver object creation + when using multiple client sessions. The new ``_DNSResolverManager`` singleton ensures + only one ``DNSResolver`` object is created for default configurations, significantly + reducing resource usage and improving performance for applications using multiple + client sessions simultaneously -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10847`, :issue:`10923`, :issue:`10946`. + + + +- Upgraded to LLHTTP 9.3.0 -- by :user:`Dreamsorcerer`. 
+ + + *Related issues and pull requests on GitHub:* + :issue:`10972`. + + + +- Optimized small HTTP requests/responses by coalescing headers and body into a single TCP packet -- by :user:`bdraco`. + + This change enhances network efficiency by reducing the number of packets sent for small HTTP payloads, improving latency and reducing overhead. Most importantly, this fixes compatibility with memory-constrained IoT devices that can only perform a single read operation and expect HTTP requests in one packet. The optimization uses zero-copy ``writelines`` when coalescing data and works with both regular and chunked transfer encoding. + + When ``aiohttp`` uses client middleware to communicate with an ``aiohttp`` server, connection reuse is more likely to occur since complete responses arrive in a single packet for small payloads. + + This aligns ``aiohttp`` with other popular HTTP clients that already coalesce small requests. + + + *Related issues and pull requests on GitHub:* + :issue:`10991`. + + + + +Improved documentation +---------------------- + +- Improved documentation for middleware by adding warnings and examples about + request body stream consumption. The documentation now clearly explains that + request body streams can only be read once and provides best practices for + sharing parsed request data between middleware and handlers -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`2914`. + + + + +Packaging updates and notes for downstreams +------------------------------------------- + +- Removed non SPDX-license description from ``setup.cfg`` -- by :user:`devanshu-ziphq`. + + + *Related issues and pull requests on GitHub:* + :issue:`10662`. + + + +- Added support for building against system ``llhttp`` library -- by :user:`mgorny`. 
+ + This change adds support for :envvar:`AIOHTTP_USE_SYSTEM_DEPS` environment variable that + can be used to build aiohttp against the system install of the ``llhttp`` library rather + than the vendored one. + + + *Related issues and pull requests on GitHub:* + :issue:`10759`. + + + +- ``aiodns`` is now installed on Windows with speedups extra -- by :user:`bdraco`. + + As of ``aiodns`` 3.3.0, ``SelectorEventLoop`` is no longer required when using ``pycares`` 4.7.0 or later. + + + *Related issues and pull requests on GitHub:* + :issue:`10823`. + + + +- Fixed compatibility issue with Cython 3.1.1 -- by :user:`bdraco` + + + *Related issues and pull requests on GitHub:* + :issue:`10877`. + + + + +Contributor-facing changes +-------------------------- + +- Sped up tests by disabling ``blockbuster`` fixture for ``test_static_file_huge`` and ``test_static_file_huge_cancel`` tests -- by :user:`dikos1337`. + + + *Related issues and pull requests on GitHub:* + :issue:`9705`, :issue:`10761`. + + + +- Updated tests to avoid using deprecated :py:mod:`asyncio` policy APIs and + make it compatible with Python 3.14. + + + *Related issues and pull requests on GitHub:* + :issue:`10851`. + + + +- Added Winloop to test suite to support in the future -- by :user:`Vizonex`. + + + *Related issues and pull requests on GitHub:* + :issue:`10922`. + + + + +Miscellaneous internal changes +------------------------------ + +- Added support for the ``partitioned`` attribute in the ``set_cookie`` method. + + + *Related issues and pull requests on GitHub:* + :issue:`9870`. + + + +- Setting :attr:`aiohttp.web.StreamResponse.last_modified` to an unsupported type will now raise :exc:`TypeError` instead of silently failing -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10146`. 
+ + + + +---- + + 3.12.0rc1 (2025-05-24) ====================== diff --git a/CHANGES/10074.feature.rst b/CHANGES/10074.feature.rst deleted file mode 100644 index d956c38af57..00000000000 --- a/CHANGES/10074.feature.rst +++ /dev/null @@ -1,2 +0,0 @@ -Added support for overriding the base URL with an absolute one in client sessions --- by :user:`vivodi`. diff --git a/CHANGES/10119.bugfix.rst b/CHANGES/10119.bugfix.rst deleted file mode 100644 index 86d2511f5b5..00000000000 --- a/CHANGES/10119.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Response is now always True, instead of using MutableMapping behaviour (False when map is empty) diff --git a/CHANGES/10120.feature.rst b/CHANGES/10120.feature.rst deleted file mode 100644 index 98cee5650d6..00000000000 --- a/CHANGES/10120.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Added ``host`` parameter to ``aiohttp_server`` fixture -- by :user:`christianwbrock`. diff --git a/CHANGES/10146.misc.rst b/CHANGES/10146.misc.rst deleted file mode 100644 index bee4ef68fb3..00000000000 --- a/CHANGES/10146.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Setting :attr:`aiohttp.web.StreamResponse.last_modified` to an unsupported type will now raise :exc:`TypeError` instead of silently failing -- by :user:`bdraco`. diff --git a/CHANGES/10325.bugfix.rst b/CHANGES/10325.bugfix.rst deleted file mode 120000 index aa085cc590d..00000000000 --- a/CHANGES/10325.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -10915.bugfix.rst \ No newline at end of file diff --git a/CHANGES/10433.feature.rst b/CHANGES/10433.feature.rst deleted file mode 100644 index 11a29d6e368..00000000000 --- a/CHANGES/10433.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Detect blocking calls in coroutines using BlockBuster -- by :user:`cbornet`. 
diff --git a/CHANGES/10474.feature.rst b/CHANGES/10474.feature.rst deleted file mode 120000 index 7c4f9a7b83b..00000000000 --- a/CHANGES/10474.feature.rst +++ /dev/null @@ -1 +0,0 @@ -10520.feature.rst \ No newline at end of file diff --git a/CHANGES/10520.feature.rst b/CHANGES/10520.feature.rst deleted file mode 100644 index 3d2877b5c09..00000000000 --- a/CHANGES/10520.feature.rst +++ /dev/null @@ -1,2 +0,0 @@ -Added ``socket_factory`` to :py:class:`aiohttp.TCPConnector` to allow specifying custom socket options --- by :user:`TimMenninger`. diff --git a/CHANGES/10662.packaging.rst b/CHANGES/10662.packaging.rst deleted file mode 100644 index 2ed3a69cb56..00000000000 --- a/CHANGES/10662.packaging.rst +++ /dev/null @@ -1 +0,0 @@ -Removed non SPDX-license description from ``setup.cfg`` -- by :user:`devanshu-ziphq`. diff --git a/CHANGES/10725.feature.rst b/CHANGES/10725.feature.rst deleted file mode 100644 index 2cb096a58e7..00000000000 --- a/CHANGES/10725.feature.rst +++ /dev/null @@ -1,6 +0,0 @@ -Added a comprehensive HTTP Digest Authentication client middleware (DigestAuthMiddleware) -that implements RFC 7616. The middleware supports all standard hash algorithms -(MD5, SHA, SHA-256, SHA-512) with session variants, handles both 'auth' and -'auth-int' quality of protection options, and automatically manages the -authentication flow by intercepting 401 responses and retrying with proper -credentials -- by :user:`feus4177`, :user:`TimMenninger`, and :user:`bdraco`. diff --git a/CHANGES/10759.packaging.rst b/CHANGES/10759.packaging.rst deleted file mode 100644 index 6f41e873229..00000000000 --- a/CHANGES/10759.packaging.rst +++ /dev/null @@ -1,5 +0,0 @@ -Added support for building against system ``llhttp`` library -- by :user:`mgorny`. - -This change adds support for :envvar:`AIOHTTP_USE_SYSTEM_DEPS` environment variable that -can be used to build aiohttp against the system install of the ``llhttp`` library rather -than the vendored one. 
diff --git a/CHANGES/10761.contrib.rst b/CHANGES/10761.contrib.rst deleted file mode 120000 index 3d35184e09d..00000000000 --- a/CHANGES/10761.contrib.rst +++ /dev/null @@ -1 +0,0 @@ -9705.contrib.rst \ No newline at end of file diff --git a/CHANGES/10797.feature.rst b/CHANGES/10797.feature.rst deleted file mode 100644 index fc68d09f34e..00000000000 --- a/CHANGES/10797.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Started building armv7l manylinux wheels -- by :user:`bdraco`. diff --git a/CHANGES/10823.packaging.rst b/CHANGES/10823.packaging.rst deleted file mode 100644 index c65f8bea795..00000000000 --- a/CHANGES/10823.packaging.rst +++ /dev/null @@ -1,3 +0,0 @@ -``aiodns`` is now installed on Windows with speedups extra -- by :user:`bdraco`. - -As of ``aiodns`` 3.3.0, ``SelectorEventLoop`` is no longer required when using ``pycares`` 4.7.0 or later. diff --git a/CHANGES/10847.feature.rst b/CHANGES/10847.feature.rst deleted file mode 100644 index bfa7f6d498a..00000000000 --- a/CHANGES/10847.feature.rst +++ /dev/null @@ -1,5 +0,0 @@ -Implemented shared DNS resolver management to fix excessive resolver object creation -when using multiple client sessions. The new ``_DNSResolverManager`` singleton ensures -only one ``DNSResolver`` object is created for default configurations, significantly -reducing resource usage and improving performance for applications using multiple -client sessions simultaneously -- by :user:`bdraco`. diff --git a/CHANGES/10851.bugfix.rst b/CHANGES/10851.bugfix.rst deleted file mode 100644 index 9c47cc95905..00000000000 --- a/CHANGES/10851.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed pytest plugin to not use deprecated :py:mod:`asyncio` policy APIs. diff --git a/CHANGES/10851.contrib.rst b/CHANGES/10851.contrib.rst deleted file mode 100644 index 623f96bc227..00000000000 --- a/CHANGES/10851.contrib.rst +++ /dev/null @@ -1,2 +0,0 @@ -Updated tests to avoid using deprecated :py:mod:`asyncio` policy APIs and -make it compatible with Python 3.14. 
diff --git a/CHANGES/10877.packaging.rst b/CHANGES/10877.packaging.rst deleted file mode 100644 index 0bc2ee03984..00000000000 --- a/CHANGES/10877.packaging.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed compatibility issue with Cython 3.1.1 -- by :user:`bdraco` diff --git a/CHANGES/10902.feature.rst b/CHANGES/10902.feature.rst deleted file mode 120000 index b565aa68ee0..00000000000 --- a/CHANGES/10902.feature.rst +++ /dev/null @@ -1 +0,0 @@ -9732.feature.rst \ No newline at end of file diff --git a/CHANGES/10915.bugfix.rst b/CHANGES/10915.bugfix.rst deleted file mode 100644 index f564603306b..00000000000 --- a/CHANGES/10915.bugfix.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fixed connection reuse for file-like data payloads by ensuring buffer -truncation respects content-length boundaries and preventing premature -connection closure race -- by :user:`bdraco`. diff --git a/CHANGES/10922.contrib.rst b/CHANGES/10922.contrib.rst deleted file mode 100644 index e5e1cfd8af6..00000000000 --- a/CHANGES/10922.contrib.rst +++ /dev/null @@ -1 +0,0 @@ -Added Winloop to test suite to support in the future -- by :user:`Vizonex`. 
diff --git a/CHANGES/10923.feature.rst b/CHANGES/10923.feature.rst deleted file mode 120000 index 879a4227358..00000000000 --- a/CHANGES/10923.feature.rst +++ /dev/null @@ -1 +0,0 @@ -10847.feature.rst \ No newline at end of file diff --git a/CHANGES/10941.bugfix.rst b/CHANGES/10941.bugfix.rst deleted file mode 120000 index aa085cc590d..00000000000 --- a/CHANGES/10941.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -10915.bugfix.rst \ No newline at end of file diff --git a/CHANGES/10943.bugfix.rst b/CHANGES/10943.bugfix.rst deleted file mode 120000 index aa085cc590d..00000000000 --- a/CHANGES/10943.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -10915.bugfix.rst \ No newline at end of file diff --git a/CHANGES/10945.feature.rst b/CHANGES/10945.feature.rst deleted file mode 120000 index b565aa68ee0..00000000000 --- a/CHANGES/10945.feature.rst +++ /dev/null @@ -1 +0,0 @@ -9732.feature.rst \ No newline at end of file diff --git a/CHANGES/10946.feature.rst b/CHANGES/10946.feature.rst deleted file mode 120000 index 879a4227358..00000000000 --- a/CHANGES/10946.feature.rst +++ /dev/null @@ -1 +0,0 @@ -10847.feature.rst \ No newline at end of file diff --git a/CHANGES/10951.bugfix.rst b/CHANGES/10951.bugfix.rst deleted file mode 100644 index d539fc1a52d..00000000000 --- a/CHANGES/10951.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed :py:class:`~aiohttp.resolver.AsyncResolver` not using the ``loop`` argument in versions 3.x where it should still be supported -- by :user:`bdraco`. 
diff --git a/CHANGES/10952.feature.rst b/CHANGES/10952.feature.rst deleted file mode 120000 index b565aa68ee0..00000000000 --- a/CHANGES/10952.feature.rst +++ /dev/null @@ -1 +0,0 @@ -9732.feature.rst \ No newline at end of file diff --git a/CHANGES/10959.feature.rst b/CHANGES/10959.feature.rst deleted file mode 120000 index b565aa68ee0..00000000000 --- a/CHANGES/10959.feature.rst +++ /dev/null @@ -1 +0,0 @@ -9732.feature.rst \ No newline at end of file diff --git a/CHANGES/10961.feature.rst b/CHANGES/10961.feature.rst deleted file mode 120000 index 7c4f9a7b83b..00000000000 --- a/CHANGES/10961.feature.rst +++ /dev/null @@ -1 +0,0 @@ -10520.feature.rst \ No newline at end of file diff --git a/CHANGES/10962.feature.rst b/CHANGES/10962.feature.rst deleted file mode 120000 index 7c4f9a7b83b..00000000000 --- a/CHANGES/10962.feature.rst +++ /dev/null @@ -1 +0,0 @@ -10520.feature.rst \ No newline at end of file diff --git a/CHANGES/10968.feature.rst b/CHANGES/10968.feature.rst deleted file mode 120000 index b565aa68ee0..00000000000 --- a/CHANGES/10968.feature.rst +++ /dev/null @@ -1 +0,0 @@ -9732.feature.rst \ No newline at end of file diff --git a/CHANGES/10972.feature.rst b/CHANGES/10972.feature.rst deleted file mode 100644 index 1d3779a3969..00000000000 --- a/CHANGES/10972.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Upgraded to LLHTTP 9.3.0 -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/10988.bugfix.rst b/CHANGES/10988.bugfix.rst deleted file mode 120000 index 6e737bb336c..00000000000 --- a/CHANGES/10988.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -6009.bugfix.rst \ No newline at end of file diff --git a/CHANGES/10991.feature.rst b/CHANGES/10991.feature.rst deleted file mode 100644 index 687a1a752f6..00000000000 --- a/CHANGES/10991.feature.rst +++ /dev/null @@ -1,7 +0,0 @@ -Optimized small HTTP requests/responses by coalescing headers and body into a single TCP packet -- by :user:`bdraco`. 
- -This change enhances network efficiency by reducing the number of packets sent for small HTTP payloads, improving latency and reducing overhead. Most importantly, this fixes compatibility with memory-constrained IoT devices that can only perform a single read operation and expect HTTP requests in one packet. The optimization uses zero-copy ``writelines`` when coalescing data and works with both regular and chunked transfer encoding. - -When ``aiohttp`` uses client middleware to communicate with an ``aiohttp`` server, connection reuse is more likely to occur since complete responses arrive in a single packet for small payloads. - -This aligns ``aiohttp`` with other popular HTTP clients that already coalesce small requests. diff --git a/CHANGES/2213.feature.rst b/CHANGES/2213.feature.rst deleted file mode 120000 index d118975e478..00000000000 --- a/CHANGES/2213.feature.rst +++ /dev/null @@ -1 +0,0 @@ -10725.feature.rst \ No newline at end of file diff --git a/CHANGES/2914.doc.rst b/CHANGES/2914.doc.rst deleted file mode 100644 index 25592bf79bc..00000000000 --- a/CHANGES/2914.doc.rst +++ /dev/null @@ -1,4 +0,0 @@ -Improved documentation for middleware by adding warnings and examples about -request body stream consumption. The documentation now clearly explains that -request body streams can only be read once and provides best practices for -sharing parsed request data between middleware and handlers -- by :user:`bdraco`. 
diff --git a/CHANGES/6009.bugfix.rst b/CHANGES/6009.bugfix.rst deleted file mode 100644 index a530832c8a9..00000000000 --- a/CHANGES/6009.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed :py:attr:`~aiohttp.web.WebSocketResponse.prepared` property to correctly reflect the prepared state, especially during timeout scenarios -- by :user:`bdraco` diff --git a/CHANGES/9705.contrib.rst b/CHANGES/9705.contrib.rst deleted file mode 100644 index 5d23e964fa1..00000000000 --- a/CHANGES/9705.contrib.rst +++ /dev/null @@ -1 +0,0 @@ -Sped up tests by disabling ``blockbuster`` fixture for ``test_static_file_huge`` and ``test_static_file_huge_cancel`` tests -- by :user:`dikos1337`. diff --git a/CHANGES/9732.feature.rst b/CHANGES/9732.feature.rst deleted file mode 100644 index bf6dd8ebde3..00000000000 --- a/CHANGES/9732.feature.rst +++ /dev/null @@ -1,6 +0,0 @@ -Added client middleware support -- by :user:`bdraco` and :user:`Dreamsorcerer`. - -This change allows users to add middleware to the client session and requests, enabling features like -authentication, logging, and request/response modification without modifying the core -request logic. Additionally, the ``session`` attribute was added to ``ClientRequest``, -allowing middleware to access the session for making additional requests. diff --git a/CHANGES/9798.feature.rst b/CHANGES/9798.feature.rst deleted file mode 100644 index c1584b04491..00000000000 --- a/CHANGES/9798.feature.rst +++ /dev/null @@ -1,5 +0,0 @@ -Allow user setting zlib compression backend -- by :user:`TimMenninger` - -This change allows the user to call :func:`aiohttp.set_zlib_backend()` with the -zlib compression module of their choice. Default behavior continues to use -the builtin ``zlib`` library. diff --git a/CHANGES/9870.misc.rst b/CHANGES/9870.misc.rst deleted file mode 100644 index caa8f45e522..00000000000 --- a/CHANGES/9870.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Added support for the ``partitioned`` attribute in the ``set_cookie`` method. 
diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index fdad4aac495..bd797bcf6ef 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.0rc1" +__version__ = "3.12.0" from typing import TYPE_CHECKING, Tuple From 2e554627b4b6e99aef131a90c808ac8a8e6d13bf Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sat, 24 May 2025 17:59:43 -0500 Subject: [PATCH 1450/1511] Increment version to 3.12.0.dev0 (#10998) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index bd797bcf6ef..4bc6a3a2b22 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.0" +__version__ = "3.12.0.dev0" from typing import TYPE_CHECKING, Tuple From ee8f1c414e39001499f03b0a64510a4ae02cbf8e Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 26 May 2025 01:15:47 -0500 Subject: [PATCH 1451/1511] [PR #11017/1c01726 backport][3.12] Support Reusable Request Bodies and Improve Payload Handling (#11018) --- CHANGES/11017.feature.rst | 3 + CHANGES/5530.feature.rst | 1 + CHANGES/5577.feature.rst | 1 + CHANGES/9201.feature.rst | 1 + CONTRIBUTORS.txt | 1 + aiohttp/client.py | 12 + aiohttp/client_middleware_digest_auth.py | 14 +- aiohttp/client_reqrep.py | 194 ++++-- aiohttp/formdata.py | 5 +- aiohttp/multipart.py | 71 ++ aiohttp/payload.py | 377 +++++++++-- aiohttp/web_response.py | 4 + docs/client_reference.rst | 91 +++ tests/test_client_functional.py | 546 +++++++++++++++- tests/test_client_middleware.py | 108 ++++ tests/test_client_middleware_digest_auth.py | 49 +- tests/test_client_request.py | 478 +++++++++++++- tests/test_client_session.py | 10 +- tests/test_formdata.py | 176 ++++- tests/test_multipart.py | 201 +++++- tests/test_payload.py | 675 +++++++++++++++++++- 21 files changed, 2864 insertions(+), 154 deletions(-) create mode 100644 CHANGES/11017.feature.rst create mode 120000 
CHANGES/5530.feature.rst create mode 120000 CHANGES/5577.feature.rst create mode 120000 CHANGES/9201.feature.rst diff --git a/CHANGES/11017.feature.rst b/CHANGES/11017.feature.rst new file mode 100644 index 00000000000..361c56e3fe8 --- /dev/null +++ b/CHANGES/11017.feature.rst @@ -0,0 +1,3 @@ +Added support for reusable request bodies to enable retries, redirects, and digest authentication -- by :user:`bdraco` and :user:`GLGDLY`. + +Most payloads can now be safely reused multiple times, fixing long-standing issues where POST requests with form data or file uploads would fail on redirects with errors like "Form data has been processed already" or "I/O operation on closed file". This also enables digest authentication to work with request bodies and allows retry mechanisms to resend requests without consuming the payload. Note that payloads derived from async iterables may still not be reusable in some cases. diff --git a/CHANGES/5530.feature.rst b/CHANGES/5530.feature.rst new file mode 120000 index 00000000000..63bf4429e55 --- /dev/null +++ b/CHANGES/5530.feature.rst @@ -0,0 +1 @@ +11017.feature.rst \ No newline at end of file diff --git a/CHANGES/5577.feature.rst b/CHANGES/5577.feature.rst new file mode 120000 index 00000000000..63bf4429e55 --- /dev/null +++ b/CHANGES/5577.feature.rst @@ -0,0 +1 @@ +11017.feature.rst \ No newline at end of file diff --git a/CHANGES/9201.feature.rst b/CHANGES/9201.feature.rst new file mode 120000 index 00000000000..63bf4429e55 --- /dev/null +++ b/CHANGES/9201.feature.rst @@ -0,0 +1 @@ +11017.feature.rst \ No newline at end of file diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 59edfd7ac3f..2e2ab140122 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -136,6 +136,7 @@ Frederik Gladhorn Frederik Peter Aalund Gabriel Tremblay Gang Ji +Gary Leung Gary Wilson Jr. 
Gennady Andreyev Georges Dubus diff --git a/aiohttp/client.py b/aiohttp/client.py index 811c8f97588..3b2cd2796cc 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -792,6 +792,8 @@ async def _connect_and_send_request( redirects += 1 history.append(resp) if max_redirects and redirects >= max_redirects: + if req._body is not None: + await req._body.close() resp.close() raise TooManyRedirects( history[0].request_info, tuple(history) @@ -823,6 +825,9 @@ async def _connect_and_send_request( r_url, encoded=not self._requote_redirect_url ) except ValueError as e: + if req._body is not None: + await req._body.close() + resp.close() raise InvalidUrlRedirectClientError( r_url, "Server attempted redirecting to a location that does not look like a URL", @@ -830,6 +835,8 @@ async def _connect_and_send_request( scheme = parsed_redirect_url.scheme if scheme not in HTTP_AND_EMPTY_SCHEMA_SET: + if req._body is not None: + await req._body.close() resp.close() raise NonHttpUrlRedirectClientError(r_url) elif not scheme: @@ -838,6 +845,9 @@ async def _connect_and_send_request( try: redirect_origin = parsed_redirect_url.origin() except ValueError as origin_val_err: + if req._body is not None: + await req._body.close() + resp.close() raise InvalidUrlRedirectClientError( parsed_redirect_url, "Invalid redirect URL origin", @@ -854,6 +864,8 @@ async def _connect_and_send_request( break + if req._body is not None: + await req._body.close() # check response status if raise_for_status is None: raise_for_status = self._raise_for_status diff --git a/aiohttp/client_middleware_digest_auth.py b/aiohttp/client_middleware_digest_auth.py index b63efaf0142..9a8ffc18313 100644 --- a/aiohttp/client_middleware_digest_auth.py +++ b/aiohttp/client_middleware_digest_auth.py @@ -29,6 +29,7 @@ from .client_exceptions import ClientError from .client_middlewares import ClientHandlerType from .client_reqrep import ClientRequest, ClientResponse +from .payload import Payload class 
DigestAuthChallenge(TypedDict, total=False): @@ -192,7 +193,7 @@ def __init__( self._nonce_count = 0 self._challenge: DigestAuthChallenge = {} - def _encode(self, method: str, url: URL, body: Union[bytes, str]) -> str: + async def _encode(self, method: str, url: URL, body: Union[bytes, Payload]) -> str: """ Build digest authorization header for the current challenge. @@ -207,6 +208,7 @@ def _encode(self, method: str, url: URL, body: Union[bytes, str]) -> str: Raises: ClientError: If the challenge is missing required parameters or contains unsupported values + """ challenge = self._challenge if "realm" not in challenge: @@ -272,11 +274,11 @@ def KD(s: bytes, d: bytes) -> bytes: A1 = b":".join((self._login_bytes, realm_bytes, self._password_bytes)) A2 = f"{method.upper()}:{path}".encode() if qop == "auth-int": - if isinstance(body, str): - entity_str = body.encode("utf-8", errors="replace") + if isinstance(body, bytes): # will always be empty bytes unless Payload + entity_bytes = body else: - entity_str = body - entity_hash = H(entity_str) + entity_bytes = await body.as_bytes() # Get bytes from Payload + entity_hash = H(entity_bytes) A2 = b":".join((A2, entity_hash)) HA1 = H(A1) @@ -398,7 +400,7 @@ async def __call__( for retry_count in range(2): # Apply authorization header if we have a challenge (on second attempt) if retry_count > 0: - request.headers[hdrs.AUTHORIZATION] = self._encode( + request.headers[hdrs.AUTHORIZATION] = await self._encode( request.method, request.url, request.body ) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index fb83eefd51f..2322a1d7472 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -252,6 +252,25 @@ def _is_expected_content_type( return expected_content_type in response_content_type +def _warn_if_unclosed_payload(payload: payload.Payload, stacklevel: int = 2) -> None: + """Warn if the payload is not closed. + + Callers must check that the body is a Payload before calling this method. 
+ + Args: + payload: The payload to check + stacklevel: Stack level for the warning (default 2 for direct callers) + """ + if not payload.autoclose and not payload.consumed: + warnings.warn( + "The previous request body contains unclosed resources. " + "Use await request.update_body() instead of setting request.body " + "directly to properly close resources and avoid leaks.", + ResourceWarning, + stacklevel=stacklevel, + ) + + class ClientRequest: GET_METHODS = { hdrs.METH_GET, @@ -268,7 +287,7 @@ class ClientRequest: } # Type of body depends on PAYLOAD_REGISTRY, which is dynamic. - body: Any = b"" + _body: Union[None, payload.Payload] = None auth = None response = None @@ -439,6 +458,36 @@ def host(self) -> str: def port(self) -> Optional[int]: return self.url.port + @property + def body(self) -> Union[bytes, payload.Payload]: + """Request body.""" + # empty body is represented as bytes for backwards compatibility + return self._body or b"" + + @body.setter + def body(self, value: Any) -> None: + """Set request body with warning for non-autoclose payloads. + + WARNING: This setter must be called from within an event loop and is not + thread-safe. Setting body outside of an event loop may raise RuntimeError + when closing file-based payloads. + + DEPRECATED: Direct assignment to body is deprecated and will be removed + in a future version. Use await update_body() instead for proper resource + management. + """ + # Close existing payload if present + if self._body is not None: + # Warn if the payload needs manual closing + # stacklevel=3: user code -> body setter -> _warn_if_unclosed_payload + _warn_if_unclosed_payload(self._body, stacklevel=3) + # NOTE: In the future, when we remove sync close support, + # this setter will need to be removed and only the async + # update_body() method will be available. For now, we call + # _close() for backwards compatibility. 
+ self._body._close() + self._update_body(value) + @property def request_info(self) -> RequestInfo: headers: CIMultiDictProxy[str] = CIMultiDictProxy(self.headers) @@ -590,9 +639,12 @@ def update_transfer_encoding(self) -> None: ) self.headers[hdrs.TRANSFER_ENCODING] = "chunked" - else: - if hdrs.CONTENT_LENGTH not in self.headers: - self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body)) + elif ( + self._body is not None + and hdrs.CONTENT_LENGTH not in self.headers + and (size := self._body.size) is not None + ): + self.headers[hdrs.CONTENT_LENGTH] = str(size) def update_auth(self, auth: Optional[BasicAuth], trust_env: bool = False) -> None: """Set basic auth.""" @@ -610,37 +662,120 @@ def update_auth(self, auth: Optional[BasicAuth], trust_env: bool = False) -> Non self.headers[hdrs.AUTHORIZATION] = auth.encode() - def update_body_from_data(self, body: Any) -> None: + def update_body_from_data(self, body: Any, _stacklevel: int = 3) -> None: + """Update request body from data.""" + if self._body is not None: + _warn_if_unclosed_payload(self._body, stacklevel=_stacklevel) + if body is None: + self._body = None return # FormData - if isinstance(body, FormData): - body = body() + maybe_payload = body() if isinstance(body, FormData) else body try: - body = payload.PAYLOAD_REGISTRY.get(body, disposition=None) + body_payload = payload.PAYLOAD_REGISTRY.get(maybe_payload, disposition=None) except payload.LookupError: - body = FormData(body)() - - self.body = body + body_payload = FormData(maybe_payload)() # type: ignore[arg-type] + self._body = body_payload # enable chunked encoding if needed if not self.chunked and hdrs.CONTENT_LENGTH not in self.headers: - if (size := body.size) is not None: + if (size := body_payload.size) is not None: self.headers[hdrs.CONTENT_LENGTH] = str(size) else: self.chunked = True # copy payload headers - assert body.headers + assert body_payload.headers headers = self.headers skip_headers = self._skip_auto_headers - for key, value in 
body.headers.items(): + for key, value in body_payload.headers.items(): if key in headers or (skip_headers is not None and key in skip_headers): continue headers[key] = value + def _update_body(self, body: Any) -> None: + """Update request body after its already been set.""" + # Remove existing Content-Length header since body is changing + if hdrs.CONTENT_LENGTH in self.headers: + del self.headers[hdrs.CONTENT_LENGTH] + + # Remove existing Transfer-Encoding header to avoid conflicts + if self.chunked and hdrs.TRANSFER_ENCODING in self.headers: + del self.headers[hdrs.TRANSFER_ENCODING] + + # Now update the body using the existing method + # Called from _update_body, add 1 to stacklevel from caller + self.update_body_from_data(body, _stacklevel=4) + + # Update transfer encoding headers if needed (same logic as __init__) + if body is not None or self.method not in self.GET_METHODS: + self.update_transfer_encoding() + + async def update_body(self, body: Any) -> None: + """ + Update request body and close previous payload if needed. + + This method safely updates the request body by first closing any existing + payload to prevent resource leaks, then setting the new body. + + IMPORTANT: Always use this method instead of setting request.body directly. + Direct assignment to request.body will leak resources if the previous body + contains file handles, streams, or other resources that need cleanup. + + Args: + body: The new body content. 
Can be: + - bytes/bytearray: Raw binary data + - str: Text data (will be encoded using charset from Content-Type) + - FormData: Form data that will be encoded as multipart/form-data + - Payload: A pre-configured payload object + - AsyncIterable: An async iterable of bytes chunks + - File-like object: Will be read and sent as binary data + - None: Clears the body + + Usage: + # CORRECT: Use update_body + await request.update_body(b"new request data") + + # WRONG: Don't set body directly + # request.body = b"new request data" # This will leak resources! + + # Update with form data + form_data = FormData() + form_data.add_field('field', 'value') + await request.update_body(form_data) + + # Clear body + await request.update_body(None) + + Note: + This method is async because it may need to close file handles or + other resources associated with the previous payload. Always await + this method to ensure proper cleanup. + + Warning: + Setting request.body directly is highly discouraged and can lead to: + - Resource leaks (unclosed file handles, streams) + - Memory leaks (unreleased buffers) + - Unexpected behavior with streaming payloads + + It is not recommended to change the payload type in middleware. If the + body was already set (e.g., as bytes), it's best to keep the same type + rather than converting it (e.g., to str) as this may result in unexpected + behavior. 
+ + See Also: + - update_body_from_data: Synchronous body update without cleanup + - body property: Direct body access (STRONGLY DISCOURAGED) + + """ + # Close existing payload if it exists and needs closing + if self._body is not None: + await self._body.close() + self._update_body(body) + def update_expect_continue(self, expect: bool = False) -> None: if expect: self.headers[hdrs.EXPECT] = "100-continue" @@ -717,27 +852,14 @@ async def write_bytes( protocol = conn.protocol assert protocol is not None try: - if isinstance(self.body, payload.Payload): - # Specialized handling for Payload objects that know how to write themselves - await self.body.write_with_length(writer, content_length) - else: - # Handle bytes/bytearray by converting to an iterable for consistent handling - if isinstance(self.body, (bytes, bytearray)): - self.body = (self.body,) - - if content_length is None: - # Write the entire body without length constraint - for chunk in self.body: - await writer.write(chunk) - else: - # Write with length constraint, respecting content_length limit - # If the body is larger than content_length, we truncate it - remaining_bytes = content_length - for chunk in self.body: - await writer.write(chunk[:remaining_bytes]) - remaining_bytes -= len(chunk) - if remaining_bytes <= 0: - break + # This should be a rare case but the + # self._body can be set to None while + # the task is being started or we wait above + # for the 100-continue response. + # The more likely case is we have an empty + # payload, but 100-continue is still expected. 
+ if self._body is not None: + await self._body.write_with_length(writer, content_length) except OSError as underlying_exc: reraised_exc = underlying_exc @@ -833,7 +955,7 @@ async def send(self, conn: "Connection") -> "ClientResponse": await writer.write_headers(status_line, self.headers) task: Optional["asyncio.Task[None]"] - if self.body or self._continue is not None or protocol.writing_paused: + if self._body or self._continue is not None or protocol.writing_paused: coro = self.write_bytes(writer, conn, self._get_content_length()) if sys.version_info >= (3, 12): # Optimization for Python 3.12, try to write diff --git a/aiohttp/formdata.py b/aiohttp/formdata.py index 73056f4bc45..bdf591fae7a 100644 --- a/aiohttp/formdata.py +++ b/aiohttp/formdata.py @@ -29,7 +29,6 @@ def __init__( self._writer = multipart.MultipartWriter("form-data") self._fields: List[Any] = [] self._is_multipart = default_to_multipart - self._is_processed = False self._quote_fields = quote_fields self._charset = charset @@ -140,8 +139,6 @@ def _gen_form_urlencoded(self) -> payload.BytesPayload: def _gen_form_data(self) -> multipart.MultipartWriter: """Encode a list of fields using the multipart/form-data MIME format""" - if self._is_processed: - raise RuntimeError("Form data has been processed already") for dispparams, headers, value in self._fields: try: if hdrs.CONTENT_TYPE in headers: @@ -172,7 +169,7 @@ def _gen_form_data(self) -> multipart.MultipartWriter: self._writer.append_payload(part) - self._is_processed = True + self._fields.clear() return self._writer def __call__(self) -> Payload: diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index 459cc321a1d..231c67c7bb7 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -36,6 +36,7 @@ ) from .helpers import CHAR, TOKEN, parse_mimetype, reify from .http import HeadersParser +from .log import internal_logger from .payload import ( JsonPayload, LookupError, @@ -559,6 +560,7 @@ def filename(self) -> Optional[str]: 
@payload_type(BodyPartReader, order=Order.try_first) class BodyPartReaderPayload(Payload): _value: BodyPartReader + # _autoclose = False (inherited) - Streaming reader that may have resources def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None: super().__init__(value, *args, **kwargs) @@ -575,6 +577,16 @@ def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None: def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: raise TypeError("Unable to decode.") + async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: + """Raises TypeError as body parts should be consumed via write(). + + This is intentional: BodyPartReader payloads are designed for streaming + large data (potentially gigabytes) and must be consumed only once via + the write() method to avoid memory exhaustion. They cannot be buffered + in memory for reuse. + """ + raise TypeError("Unable to read body part as bytes. Use write() to consume.") + async def write(self, writer: Any) -> None: field = self._value chunk = await field.read_chunk(size=2**16) @@ -793,6 +805,8 @@ class MultipartWriter(Payload): """Multipart body writer.""" _value: None + # _consumed = False (inherited) - Can be encoded multiple times + _autoclose = True # No file handles, just collects parts in memory def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> None: boundary = boundary if boundary is not None else uuid.uuid4().hex @@ -975,6 +989,11 @@ def size(self) -> Optional[int]: return total def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + """Return string representation of the multipart data. + + WARNING: This method may do blocking I/O if parts contain file payloads. + It should not be called in the event loop. Use as_bytes().decode() instead. 
+ """ return "".join( "--" + self.boundary @@ -984,6 +1003,33 @@ def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: for part, _e, _te in self._parts ) + async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: + """Return bytes representation of the multipart data. + + This method is async-safe and calls as_bytes on underlying payloads. + """ + parts: List[bytes] = [] + + # Process each part + for part, _e, _te in self._parts: + # Add boundary + parts.append(b"--" + self._boundary + b"\r\n") + + # Add headers + parts.append(part._binary_headers) + + # Add payload content using as_bytes for async safety + part_bytes = await part.as_bytes(encoding, errors) + parts.append(part_bytes) + + # Add trailing CRLF + parts.append(b"\r\n") + + # Add closing boundary + parts.append(b"--" + self._boundary + b"--\r\n") + + return b"".join(parts) + async def write(self, writer: Any, close_boundary: bool = True) -> None: """Write body.""" for part, encoding, te_encoding in self._parts: @@ -1011,6 +1057,31 @@ async def write(self, writer: Any, close_boundary: bool = True) -> None: if close_boundary: await writer.write(b"--" + self._boundary + b"--\r\n") + async def close(self) -> None: + """ + Close all part payloads that need explicit closing. + + IMPORTANT: This method must not await anything that might not finish + immediately, as it may be called during cleanup/cancellation. Schedule + any long-running operations without awaiting them. 
+ """ + if self._consumed: + return + self._consumed = True + + # Close all parts that need explicit closing + # We catch and log exceptions to ensure all parts get a chance to close + # we do not use asyncio.gather() here because we are not allowed + # to suspend given we may be called during cleanup + for idx, (part, _, _) in enumerate(self._parts): + if not part.autoclose and not part.consumed: + try: + await part.close() + except Exception as exc: + internal_logger.error( + "Failed to close multipart part %d: %s", idx, exc, exc_info=True + ) + class MultipartPayloadWriter: def __init__(self, writer: Any) -> None: diff --git a/aiohttp/payload.py b/aiohttp/payload.py index c954091adad..4a2c7922337 100644 --- a/aiohttp/payload.py +++ b/aiohttp/payload.py @@ -15,6 +15,7 @@ Dict, Final, Iterable, + List, Optional, Set, TextIO, @@ -58,12 +59,8 @@ _CLOSE_FUTURES: Set[asyncio.Future[None]] = set() -if TYPE_CHECKING: - from typing import List - - class LookupError(Exception): - pass + """Raised when no payload factory is found for the given data type.""" class Order(str, enum.Enum): @@ -155,6 +152,8 @@ class Payload(ABC): _default_content_type: str = "application/octet-stream" _size: Optional[int] = None + _consumed: bool = False # Default: payload has not been consumed yet + _autoclose: bool = False # Default: assume resource needs explicit closing def __init__( self, @@ -189,7 +188,12 @@ def __init__( @property def size(self) -> Optional[int]: - """Size of the payload.""" + """Size of the payload in bytes. + + Returns the number of bytes that will be transmitted when the payload + is written. For string payloads, this is the size after encoding to bytes, + not the length of the string. 
+ """ return self._size @property @@ -221,6 +225,21 @@ def content_type(self) -> str: """Content type""" return self._headers[hdrs.CONTENT_TYPE] + @property + def consumed(self) -> bool: + """Whether the payload has been consumed and cannot be reused.""" + return self._consumed + + @property + def autoclose(self) -> bool: + """ + Whether the payload can close itself automatically. + + Returns True if the payload has no file handles or resources that need + explicit closing. If False, callers must await close() to release resources. + """ + return self._autoclose + def set_content_disposition( self, disptype: str, @@ -235,14 +254,16 @@ def set_content_disposition( @abstractmethod def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: - """Return string representation of the value. + """ + Return string representation of the value. This is named decode() to allow compatibility with bytes objects. """ @abstractmethod async def write(self, writer: AbstractStreamWriter) -> None: - """Write payload to the writer stream. + """ + Write payload to the writer stream. Args: writer: An AbstractStreamWriter instance that handles the actual writing @@ -256,6 +277,7 @@ async def write(self, writer: AbstractStreamWriter) -> None: All payload subclasses must override this method for backwards compatibility, but new code should use write_with_length for more flexibility and control. + """ # write_with_length is new in aiohttp 3.12 @@ -283,9 +305,52 @@ async def write_with_length( # and for the default implementation await self.write(writer) + async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: + """ + Return bytes representation of the value. + + This is a convenience method that calls decode() and encodes the result + to bytes using the specified encoding. 
+ """ + # Use instance encoding if available, otherwise use parameter + actual_encoding = self._encoding or encoding + return self.decode(actual_encoding, errors).encode(actual_encoding) + + def _close(self) -> None: + """ + Async safe synchronous close operations for backwards compatibility. + + This method exists only for backwards compatibility with code that + needs to clean up payloads synchronously. In the future, we will + drop this method and only support the async close() method. + + WARNING: This method must be safe to call from within the event loop + without blocking. Subclasses should not perform any blocking I/O here. + + WARNING: This method must be called from within an event loop for + certain payload types (e.g., IOBasePayload). Calling it outside an + event loop may raise RuntimeError. + """ + # This is a no-op by default, but subclasses can override it + # for non-blocking cleanup operations. + + async def close(self) -> None: + """ + Close the payload if it holds any resources. + + IMPORTANT: This method must not await anything that might not finish + immediately, as it may be called during cleanup/cancellation. Schedule + any long-running operations without awaiting them. + + In the future, this will be the only close method supported. + """ + self._close() + class BytesPayload(Payload): _value: bytes + # _consumed = False (inherited) - Bytes are immutable and can be reused + _autoclose = True # No file handle, just bytes in memory def __init__( self, value: Union[bytes, bytearray, memoryview], *args: Any, **kwargs: Any @@ -315,8 +380,18 @@ def __init__( def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: return self._value.decode(encoding, errors) + async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: + """ + Return bytes representation of the value. + + This method returns the raw bytes content of the payload. + It is equivalent to accessing the _value attribute directly. 
+ """ + return self._value + async def write(self, writer: AbstractStreamWriter) -> None: - """Write the entire bytes payload to the writer stream. + """ + Write the entire bytes payload to the writer stream. Args: writer: An AbstractStreamWriter instance that handles the actual writing @@ -327,6 +402,7 @@ async def write(self, writer: AbstractStreamWriter) -> None: For new implementations that need length control, use write_with_length(). This method is maintained for backwards compatibility and is equivalent to write_with_length(writer, None). + """ await writer.write(self._value) @@ -389,6 +465,9 @@ def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None: class IOBasePayload(Payload): _value: io.IOBase + # _consumed = False (inherited) - File can be re-read from the same position + _start_position: Optional[int] = None + # _autoclose = False (inherited) - Has file handle that needs explicit closing def __init__( self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any @@ -402,6 +481,16 @@ def __init__( if hdrs.CONTENT_DISPOSITION not in self.headers: self.set_content_disposition(disposition, filename=self._filename) + def _set_or_restore_start_position(self) -> None: + """Set or restore the start position of the file-like object.""" + if self._start_position is None: + try: + self._start_position = self._value.tell() + except OSError: + self._consumed = True # Cannot seek, mark as consumed + return + self._value.seek(self._start_position) + def _read_and_available_len( self, remaining_content_len: Optional[int] ) -> Tuple[Optional[int], bytes]: @@ -422,6 +511,7 @@ def _read_and_available_len( context switches and file operations when streaming content. 
""" + self._set_or_restore_start_position() size = self.size # Call size only once since it does I/O return size, self._value.read( min(size or READ_SIZE, remaining_content_len or READ_SIZE) @@ -447,6 +537,12 @@ def _read(self, remaining_content_len: Optional[int]) -> bytes: @property def size(self) -> Optional[int]: + """ + Size of the payload in bytes. + + Returns the number of bytes remaining to be read from the file. + Returns None if the size cannot be determined (e.g., for unseekable streams). + """ try: return os.fstat(self._value.fileno()).st_size - self._value.tell() except (AttributeError, OSError): @@ -497,38 +593,31 @@ async def write_with_length( total_written_len = 0 remaining_content_len = content_length - try: - # Get initial data and available length - available_len, chunk = await loop.run_in_executor( - None, self._read_and_available_len, remaining_content_len - ) - # Process data chunks until done - while chunk: - chunk_len = len(chunk) + # Get initial data and available length + available_len, chunk = await loop.run_in_executor( + None, self._read_and_available_len, remaining_content_len + ) + # Process data chunks until done + while chunk: + chunk_len = len(chunk) - # Write data with or without length constraint - if remaining_content_len is None: - await writer.write(chunk) - else: - await writer.write(chunk[:remaining_content_len]) - remaining_content_len -= chunk_len + # Write data with or without length constraint + if remaining_content_len is None: + await writer.write(chunk) + else: + await writer.write(chunk[:remaining_content_len]) + remaining_content_len -= chunk_len - total_written_len += chunk_len + total_written_len += chunk_len - # Check if we're done writing - if self._should_stop_writing( - available_len, total_written_len, remaining_content_len - ): - return + # Check if we're done writing + if self._should_stop_writing( + available_len, total_written_len, remaining_content_len + ): + return - # Read next chunk - chunk = await 
loop.run_in_executor( - None, self._read, remaining_content_len - ) - finally: - # Handle closing the file without awaiting to prevent cancellation issues - # when the StreamReader reaches EOF - self._schedule_file_close(loop) + # Read next chunk + chunk = await loop.run_in_executor(None, self._read, remaining_content_len) def _should_stop_writing( self, @@ -554,20 +643,67 @@ def _should_stop_writing( remaining_content_len is not None and remaining_content_len <= 0 ) - def _schedule_file_close(self, loop: asyncio.AbstractEventLoop) -> None: - """Schedule file closing without awaiting to prevent cancellation issues.""" + def _close(self) -> None: + """ + Async safe synchronous close operations for backwards compatibility. + + This method exists only for backwards + compatibility. Use the async close() method instead. + + WARNING: This method MUST be called from within an event loop. + Calling it outside an event loop will raise RuntimeError. + """ + # Skip if already consumed + if self._consumed: + return + self._consumed = True # Mark as consumed to prevent further writes + # Schedule file closing without awaiting to prevent cancellation issues + loop = asyncio.get_running_loop() close_future = loop.run_in_executor(None, self._value.close) # Hold a strong reference to the future to prevent it from being # garbage collected before it completes. _CLOSE_FUTURES.add(close_future) close_future.add_done_callback(_CLOSE_FUTURES.remove) + async def close(self) -> None: + """ + Close the payload if it holds any resources. + + IMPORTANT: This method must not await anything that might not finish + immediately, as it may be called during cleanup/cancellation. Schedule + any long-running operations without awaiting them. + """ + self._close() + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: - return "".join(r.decode(encoding, errors) for r in self._value.readlines()) + """ + Return string representation of the value. 
+ + WARNING: This method does blocking I/O and should not be called in the event loop. + """ + return self._read_all().decode(encoding, errors) + + def _read_all(self) -> bytes: + """Read the entire file-like object and return its content as bytes.""" + self._set_or_restore_start_position() + # Use readlines() to ensure we get all content + return b"".join(self._value.readlines()) + + async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: + """ + Return bytes representation of the value. + + This method reads the entire file content and returns it as bytes. + It is equivalent to reading the file-like object directly. + The file reading is performed in an executor to avoid blocking the event loop. + """ + loop = asyncio.get_running_loop() + return await loop.run_in_executor(None, self._read_all) class TextIOPayload(IOBasePayload): _value: io.TextIOBase + # _autoclose = False (inherited) - Has text file handle that needs explicit closing def __init__( self, @@ -621,6 +757,7 @@ def _read_and_available_len( to the stream. If no encoding is specified, UTF-8 is used as the default. """ + self._set_or_restore_start_position() size = self.size chunk = self._value.read( min(size or READ_SIZE, remaining_content_len or READ_SIZE) @@ -649,20 +786,56 @@ def _read(self, remaining_content_len: Optional[int]) -> bytes: return chunk.encode(self._encoding) if self._encoding else chunk.encode() def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + """ + Return string representation of the value. + + WARNING: This method does blocking I/O and should not be called in the event loop. + """ + self._set_or_restore_start_position() return self._value.read() + async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: + """ + Return bytes representation of the value. + + This method reads the entire text file content and returns it as bytes. + It encodes the text content using the specified encoding. 
+ The file reading is performed in an executor to avoid blocking the event loop. + """ + loop = asyncio.get_running_loop() + + # Use instance encoding if available, otherwise use parameter + actual_encoding = self._encoding or encoding + + def _read_and_encode() -> bytes: + self._set_or_restore_start_position() + # TextIO read() always returns the full content + return self._value.read().encode(actual_encoding, errors) + + return await loop.run_in_executor(None, _read_and_encode) + class BytesIOPayload(IOBasePayload): _value: io.BytesIO + _size: int # Always initialized in __init__ + _autoclose = True # BytesIO is in-memory, safe to auto-close + + def __init__(self, value: io.BytesIO, *args: Any, **kwargs: Any) -> None: + super().__init__(value, *args, **kwargs) + # Calculate size once during initialization + self._size = len(self._value.getbuffer()) - self._value.tell() @property def size(self) -> int: - position = self._value.tell() - end = self._value.seek(0, os.SEEK_END) - self._value.seek(position) - return end - position + """Size of the payload in bytes. + + Returns the number of bytes in the BytesIO buffer that will be transmitted. + This is calculated once during initialization for efficiency. + """ + return self._size def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + self._set_or_restore_start_position() return self._value.read().decode(encoding, errors) async def write(self, writer: AbstractStreamWriter) -> None: @@ -690,32 +863,49 @@ async def write_with_length( responsiveness when processing large in-memory buffers. 
""" + self._set_or_restore_start_position() loop_count = 0 remaining_bytes = content_length - try: - while chunk := self._value.read(READ_SIZE): - if loop_count > 0: - # Avoid blocking the event loop - # if they pass a large BytesIO object - # and we are not in the first iteration - # of the loop - await asyncio.sleep(0) - if remaining_bytes is None: - await writer.write(chunk) - else: - await writer.write(chunk[:remaining_bytes]) - remaining_bytes -= len(chunk) - if remaining_bytes <= 0: - return - loop_count += 1 - finally: - self._value.close() + while chunk := self._value.read(READ_SIZE): + if loop_count > 0: + # Avoid blocking the event loop + # if they pass a large BytesIO object + # and we are not in the first iteration + # of the loop + await asyncio.sleep(0) + if remaining_bytes is None: + await writer.write(chunk) + else: + await writer.write(chunk[:remaining_bytes]) + remaining_bytes -= len(chunk) + if remaining_bytes <= 0: + return + loop_count += 1 + + async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: + """ + Return bytes representation of the value. + + This method reads the entire BytesIO content and returns it as bytes. + It is equivalent to accessing the _value attribute directly. + """ + self._set_or_restore_start_position() + return self._value.read() + + async def close(self) -> None: + """ + Close the BytesIO payload. + + This does nothing since BytesIO is in-memory and does not require explicit closing. 
+ """ class BufferedReaderPayload(IOBasePayload): _value: io.BufferedIOBase + # _autoclose = False (inherited) - Has buffered file handle that needs explicit closing def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + self._set_or_restore_start_position() return self._value.read().decode(encoding, errors) @@ -755,6 +945,9 @@ class AsyncIterablePayload(Payload): _iter: Optional[_AsyncIterator] = None _value: _AsyncIterable + _cached_chunks: Optional[List[bytes]] = None + # _consumed stays False to allow reuse with cached content + _autoclose = True # Iterator doesn't need explicit closing def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None: if not isinstance(value, AsyncIterable): @@ -800,17 +993,30 @@ async def write_with_length( This implementation handles streaming of async iterable content with length constraints: - 1. Iterates through the async iterable one chunk at a time - 2. Respects content_length constraints when specified - 3. Handles the case when the iterable might be used twice - - Since async iterables are consumed as they're iterated, there is no way to - restart the iteration if it's already in progress or completed. + 1. If cached chunks are available, writes from them + 2. Otherwise iterates through the async iterable one chunk at a time + 3. Respects content_length constraints when specified + 4. 
Does NOT generate cache - that's done by as_bytes() """ + # If we have cached chunks, use them + if self._cached_chunks is not None: + remaining_bytes = content_length + for chunk in self._cached_chunks: + if remaining_bytes is None: + await writer.write(chunk) + elif remaining_bytes > 0: + await writer.write(chunk[:remaining_bytes]) + remaining_bytes -= len(chunk) + else: + break + return + + # If iterator is exhausted and we don't have cached chunks, nothing to write if self._iter is None: return + # Stream from the iterator remaining_bytes = content_length try: @@ -832,9 +1038,40 @@ async def write_with_length( except StopAsyncIteration: # Iterator is exhausted self._iter = None + self._consumed = True # Mark as consumed when streamed without caching def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: - raise TypeError("Unable to decode.") + """Decode the payload content as a string if cached chunks are available.""" + if self._cached_chunks is not None: + return b"".join(self._cached_chunks).decode(encoding, errors) + raise TypeError("Unable to decode - content not cached. Call as_bytes() first.") + + async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: + """ + Return bytes representation of the value. + + This method reads the entire async iterable content and returns it as bytes. + It generates and caches the chunks for future reuse. 
+ """ + # If we have cached chunks, return them joined + if self._cached_chunks is not None: + return b"".join(self._cached_chunks) + + # If iterator is exhausted and no cache, return empty + if self._iter is None: + return b"" + + # Read all chunks and cache them + chunks: List[bytes] = [] + async for chunk in self._iter: + chunks.append(chunk) + + # Iterator is exhausted, cache the chunks + self._iter = None + self._cached_chunks = chunks + # Keep _consumed as False to allow reuse with cached chunks + + return b"".join(chunks) class StreamReaderPayload(AsyncIterablePayload): @@ -852,5 +1089,5 @@ def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None: PAYLOAD_REGISTRY.register(IOBasePayload, io.IOBase) PAYLOAD_REGISTRY.register(StreamReaderPayload, StreamReader) # try_last for giving a chance to more specialized async interables like -# multidict.BodyPartReaderPayload override the default +# multipart.BodyPartReaderPayload override the default PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable, order=Order.try_last) diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 84ad18e8b4f..cdc90cc4f1c 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -724,6 +724,9 @@ def body(self, body: Any) -> None: def text(self) -> Optional[str]: if self._body is None: return None + # Note: When _body is a Payload (e.g. 
FilePayload), this may do blocking I/O + # This is generally safe as most common payloads (BytesPayload, StringPayload) + # don't do blocking I/O, but be careful with file-based payloads return self._body.decode(self.charset or "utf-8") @text.setter @@ -777,6 +780,7 @@ async def write_eof(self, data: bytes = b"") -> None: await super().write_eof() elif isinstance(self._body, Payload): await self._body.write(self._payload_writer) + await self._body.close() await super().write_eof() else: await super().write_eof(cast(bytes, body)) diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 606df6acc0a..d3c2226aee0 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -1889,6 +1889,26 @@ ClientRequest - A :class:`Payload` object for raw data (default is empty bytes ``b""``) - A :class:`FormData` object for form submissions + .. danger:: + + **DO NOT set this attribute directly!** Direct assignment will cause resource + leaks. Always use :meth:`update_body` instead: + + .. code-block:: python + + # WRONG - This will leak resources! + request.body = b"new data" + + # CORRECT - Use update_body + await request.update_body(b"new data") + + Setting body directly bypasses cleanup of the previous payload, which can + leave file handles open, streams unclosed, and buffers unreleased. + + Additionally, setting body directly must be done from within an event loop + and is not thread-safe. Setting body outside of an event loop may raise + RuntimeError when closing file-based payloads. + .. attribute:: chunked :type: bool | None @@ -1990,6 +2010,77 @@ ClientRequest The HTTP version to use for the request (e.g., ``HttpVersion(1, 1)`` for HTTP/1.1). + .. method:: update_body(body) + + Update the request body and close any existing payload to prevent resource leaks. + + **This is the ONLY correct way to modify a request body.** Never set the + :attr:`body` attribute directly. 
+ + This method is particularly useful in middleware when you need to modify the + request body after the request has been created but before it's sent. + + :param body: The new body content. Can be: + + - ``bytes``/``bytearray``: Raw binary data + - ``str``: Text data (encoded using charset from Content-Type) + - :class:`FormData`: Form data encoded as multipart/form-data + - :class:`Payload`: A pre-configured payload object + - ``AsyncIterable[bytes]``: Async iterable of bytes chunks + - File-like object: Will be read and sent as binary data + - ``None``: Clears the body + + .. code-block:: python + + async def middleware(request, handler): + # Modify request body in middleware + if request.method == 'POST': + # CORRECT: Always use update_body + await request.update_body(b'{"modified": true}') + + # WRONG: Never set body directly! + # request.body = b'{"modified": true}' # This leaks resources! + + # Or add authentication data to form + if isinstance(request.body, FormData): + form = FormData() + # Copy existing fields and add auth token + form.add_field('auth_token', 'secret123') + await request.update_body(form) + + return await handler(request) + + .. note:: + + This method is async because it may need to close file handles or + other resources associated with the previous payload. Always await + this method to ensure proper cleanup. + + .. danger:: + + **Never set :attr:`ClientRequest.body` directly!** Direct assignment will cause resource + leaks. Always use this method instead. Setting the body attribute directly: + + - Bypasses cleanup of the previous payload + - Leaves file handles and streams open + - Can cause memory leaks + - May result in unexpected behavior with async iterables + + .. warning:: + + When updating the body, ensure that the Content-Type header is + appropriate for the new body content. The Content-Length header + will be updated automatically. 
When using :class:`FormData` or + :class:`Payload` objects, headers are updated automatically, + but you may need to set Content-Type manually for raw bytes or text. + + It is not recommended to change the payload type in middleware. If the + body was already set (e.g., as bytes), it's best to keep the same type + rather than converting it (e.g., to str) as this may result in unexpected + behavior. + + .. versionadded:: 3.12 + Utilities diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 29838c39a71..cb4edd3d1e1 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -30,7 +30,7 @@ from yarl import URL import aiohttp -from aiohttp import Fingerprint, ServerFingerprintMismatch, hdrs, web +from aiohttp import Fingerprint, ServerFingerprintMismatch, hdrs, payload, web from aiohttp.abc import AbstractResolver, ResolveResult from aiohttp.client_exceptions import ( ClientResponseError, @@ -45,6 +45,14 @@ from aiohttp.client_reqrep import ClientRequest from aiohttp.connector import Connection from aiohttp.http_writer import StreamWriter +from aiohttp.payload import ( + AsyncIterablePayload, + BufferedReaderPayload, + BytesIOPayload, + BytesPayload, + StringIOPayload, + StringPayload, +) from aiohttp.pytest_plugin import AiohttpClient, AiohttpServer from aiohttp.test_utils import TestClient, TestServer, unused_port from aiohttp.typedefs import Handler @@ -600,6 +608,61 @@ async def handler(request: web.Request) -> web.Response: assert 200 == resp.status +async def test_post_bytes_data_content_length_from_body( + aiohttp_client: AiohttpClient, +) -> None: + """Test that Content-Length is set from body payload size when sending bytes.""" + data = b"test payload data" + + async def handler(request: web.Request) -> web.Response: + # Verify Content-Length header was set correctly + assert request.content_length == len(data) + assert request.headers.get("Content-Length") == str(len(data)) + + # Verify we can read the 
data + val = await request.read() + assert data == val + return web.Response() + + app = web.Application() + app.router.add_route("POST", "/", handler) + client = await aiohttp_client(app) + + # Send bytes data - this should trigger the code path where + # Content-Length is set from body.size in update_transfer_encoding + async with client.post("/", data=data) as resp: + assert resp.status == 200 + + +async def test_post_custom_payload_without_content_length( + aiohttp_client: AiohttpClient, +) -> None: + """Test that Content-Length is set from payload.size when not explicitly provided.""" + data = b"custom payload data" + + async def handler(request: web.Request) -> web.Response: + # Verify Content-Length header was set from payload size + assert request.content_length == len(data) + assert request.headers.get("Content-Length") == str(len(data)) + + # Verify we can read the data + val = await request.read() + assert data == val + return web.Response() + + app = web.Application() + app.router.add_route("POST", "/", handler) + client = await aiohttp_client(app) + + # Create a BytesPayload directly - this ensures we test the path + # where update_transfer_encoding sets Content-Length from body.size + bytes_payload = payload.BytesPayload(data) + + # Don't set Content-Length header explicitly + async with client.post("/", data=bytes_payload) as resp: + assert resp.status == 200 + + async def test_ssl_client( aiohttp_server, ssl_ctx, @@ -2111,6 +2174,51 @@ async def expect_handler(request): assert expect_called +async def test_expect100_with_no_body(aiohttp_client: AiohttpClient) -> None: + """Test expect100 with GET request that has no body.""" + + async def handler(request: web.Request) -> web.Response: + return web.Response(text="OK") + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + # GET request with expect100=True but no body + async with client.get("/", expect100=True) as resp: + assert resp.status == 200 + 
assert await resp.text() == "OK" + + +async def test_expect100_continue_with_none_payload( + aiohttp_client: AiohttpClient, +) -> None: + """Test expect100 continue handling when payload is None from the start.""" + expect_received = False + + async def handler(request: web.Request) -> web.Response: + return web.Response(body=b"OK") + + async def expect_handler(request: web.Request) -> None: + nonlocal expect_received + expect_received = True + # Send 100 Continue + assert request.transport is not None + request.transport.write(b"HTTP/1.1 100 Continue\r\n\r\n") + + app = web.Application() + app.router.add_post("/", handler, expect_handler=expect_handler) + client = await aiohttp_client(app) + + # POST request with expect100=True but no body (data=None) + async with client.post("/", expect100=True, data=None) as resp: + assert resp.status == 200 + assert await resp.read() == b"OK" + + # Expect handler should still be called even with no body + assert expect_received + + @pytest.mark.usefixtures("parametrize_zlib_backend") async def test_encoding_deflate(aiohttp_client) -> None: async def handler(request): @@ -4502,3 +4610,439 @@ async def handler(request: web.Request) -> web.Response: data = await resp.read() assert data == b"" resp.close() + + +async def test_bytes_payload_redirect(aiohttp_client: AiohttpClient) -> None: + """Test that BytesPayload can be reused across redirects.""" + data_received = [] + + async def redirect_handler(request: web.Request) -> web.Response: + data = await request.read() + data_received.append(("redirect", data)) + # Use 307 to preserve POST method + raise web.HTTPTemporaryRedirect("/final_destination") + + async def final_handler(request: web.Request) -> web.Response: + data = await request.read() + data_received.append(("final", data)) + return web.Response(text=f"Received: {data.decode()}") + + app = web.Application() + app.router.add_post("/redirect", redirect_handler) + app.router.add_post("/final_destination", final_handler) + + 
client = await aiohttp_client(app) + + payload_data = b"test payload data" + payload = BytesPayload(payload_data) + + resp = await client.post("/redirect", data=payload) + assert resp.status == 200 + text = await resp.text() + assert text == "Received: test payload data" + # Both endpoints should have received the data + assert data_received == [("redirect", payload_data), ("final", payload_data)] + + +async def test_string_payload_redirect(aiohttp_client: AiohttpClient) -> None: + """Test that StringPayload can be reused across redirects.""" + data_received = [] + + async def redirect_handler(request: web.Request) -> web.Response: + data = await request.text() + data_received.append(("redirect", data)) + # Use 307 to preserve POST method + raise web.HTTPTemporaryRedirect("/final_destination") + + async def final_handler(request: web.Request) -> web.Response: + data = await request.text() + data_received.append(("final", data)) + return web.Response(text=f"Received: {data}") + + app = web.Application() + app.router.add_post("/redirect", redirect_handler) + app.router.add_post("/final_destination", final_handler) + + client = await aiohttp_client(app) + + payload_data = "test string payload" + payload = StringPayload(payload_data) + + resp = await client.post("/redirect", data=payload) + assert resp.status == 200 + text = await resp.text() + assert text == "Received: test string payload" + # Both endpoints should have received the data + assert data_received == [("redirect", payload_data), ("final", payload_data)] + + +async def test_async_iterable_payload_redirect(aiohttp_client: AiohttpClient) -> None: + """Test that AsyncIterablePayload cannot be reused across redirects.""" + data_received = [] + + async def redirect_handler(request: web.Request) -> web.Response: + data = await request.read() + data_received.append(("redirect", data)) + # Use 307 to preserve POST method + raise web.HTTPTemporaryRedirect("/final_destination") + + async def final_handler(request: 
web.Request) -> web.Response: + data = await request.read() + data_received.append(("final", data)) + return web.Response(text=f"Received: {data.decode()}") + + app = web.Application() + app.router.add_post("/redirect", redirect_handler) + app.router.add_post("/final_destination", final_handler) + + client = await aiohttp_client(app) + + chunks = [b"chunk1", b"chunk2", b"chunk3"] + + async def async_gen() -> AsyncIterator[bytes]: + for chunk in chunks: + yield chunk + + payload = AsyncIterablePayload(async_gen()) + + resp = await client.post("/redirect", data=payload) + assert resp.status == 200 + text = await resp.text() + # AsyncIterablePayload is consumed after first use, so redirect gets empty body + assert text == "Received: " + + # Only the first endpoint should have received data + expected_data = b"".join(chunks) + assert len(data_received) == 2 + assert data_received[0] == ("redirect", expected_data) + assert data_received[1] == ("final", b"") # Empty after being consumed + + +async def test_buffered_reader_payload_redirect(aiohttp_client: AiohttpClient) -> None: + """Test that BufferedReaderPayload can be reused across redirects.""" + data_received = [] + + async def redirect_handler(request: web.Request) -> web.Response: + data = await request.read() + data_received.append(("redirect", data)) + # Use 307 to preserve POST method + raise web.HTTPTemporaryRedirect("/final_destination") + + async def final_handler(request: web.Request) -> web.Response: + data = await request.read() + data_received.append(("final", data)) + return web.Response(text=f"Received: {data.decode()}") + + app = web.Application() + app.router.add_post("/redirect", redirect_handler) + app.router.add_post("/final_destination", final_handler) + + client = await aiohttp_client(app) + + payload_data = b"buffered reader payload" + buffer = io.BufferedReader(io.BytesIO(payload_data)) # type: ignore[arg-type] + payload = BufferedReaderPayload(buffer) + + resp = await client.post("/redirect", 
data=payload) + assert resp.status == 200 + text = await resp.text() + assert text == "Received: buffered reader payload" + # Both endpoints should have received the data + assert data_received == [("redirect", payload_data), ("final", payload_data)] + + +async def test_string_io_payload_redirect(aiohttp_client: AiohttpClient) -> None: + """Test that StringIOPayload can be reused across redirects.""" + data_received = [] + + async def redirect_handler(request: web.Request) -> web.Response: + data = await request.text() + data_received.append(("redirect", data)) + # Use 307 to preserve POST method + raise web.HTTPTemporaryRedirect("/final_destination") + + async def final_handler(request: web.Request) -> web.Response: + data = await request.text() + data_received.append(("final", data)) + return web.Response(text=f"Received: {data}") + + app = web.Application() + app.router.add_post("/redirect", redirect_handler) + app.router.add_post("/final_destination", final_handler) + + client = await aiohttp_client(app) + + payload_data = "string io payload" + string_io = io.StringIO(payload_data) + payload = StringIOPayload(string_io) + + resp = await client.post("/redirect", data=payload) + assert resp.status == 200 + text = await resp.text() + assert text == "Received: string io payload" + # Both endpoints should have received the data + assert data_received == [("redirect", payload_data), ("final", payload_data)] + + +async def test_bytes_io_payload_redirect(aiohttp_client: AiohttpClient) -> None: + """Test that BytesIOPayload can be reused across redirects.""" + data_received = [] + + async def redirect_handler(request: web.Request) -> web.Response: + data = await request.read() + data_received.append(("redirect", data)) + # Use 307 to preserve POST method + raise web.HTTPTemporaryRedirect("/final_destination") + + async def final_handler(request: web.Request) -> web.Response: + data = await request.read() + data_received.append(("final", data)) + return 
web.Response(text=f"Received: {data.decode()}") + + app = web.Application() + app.router.add_post("/redirect", redirect_handler) + app.router.add_post("/final_destination", final_handler) + + client = await aiohttp_client(app) + + payload_data = b"bytes io payload" + bytes_io = io.BytesIO(payload_data) + payload = BytesIOPayload(bytes_io) + + resp = await client.post("/redirect", data=payload) + assert resp.status == 200 + text = await resp.text() + assert text == "Received: bytes io payload" + # Both endpoints should have received the data + assert data_received == [("redirect", payload_data), ("final", payload_data)] + + +async def test_multiple_redirects_with_bytes_payload( + aiohttp_client: AiohttpClient, +) -> None: + """Test BytesPayload with multiple redirects.""" + data_received = [] + + async def redirect1_handler(request: web.Request) -> web.Response: + data = await request.read() + data_received.append(("redirect1", data)) + # Use 307 to preserve POST method + raise web.HTTPTemporaryRedirect("/redirect2") + + async def redirect2_handler(request: web.Request) -> web.Response: + data = await request.read() + data_received.append(("redirect2", data)) + # Use 307 to preserve POST method + raise web.HTTPTemporaryRedirect("/final_destination") + + async def final_handler(request: web.Request) -> web.Response: + data = await request.read() + data_received.append(("final", data)) + return web.Response(text=f"Received after 2 redirects: {data.decode()}") + + app = web.Application() + app.router.add_post("/redirect", redirect1_handler) + app.router.add_post("/redirect2", redirect2_handler) + app.router.add_post("/final_destination", final_handler) + + client = await aiohttp_client(app) + + payload_data = b"multi-redirect-test" + payload = BytesPayload(payload_data) + + resp = await client.post("/redirect", data=payload) + assert resp.status == 200 + text = await resp.text() + assert text == f"Received after 2 redirects: {payload_data.decode()}" + # All 3 endpoints 
should have received the same data + assert data_received == [ + ("redirect1", payload_data), + ("redirect2", payload_data), + ("final", payload_data), + ] + + +async def test_redirect_with_empty_payload(aiohttp_client: AiohttpClient) -> None: + """Test redirects with empty payloads.""" + data_received = [] + + async def redirect_handler(request: web.Request) -> web.Response: + data = await request.read() + data_received.append(("redirect", data)) + # Use 307 to preserve POST method + raise web.HTTPTemporaryRedirect("/final_destination") + + async def final_handler(request: web.Request) -> web.Response: + data = await request.read() + data_received.append(("final", data)) + return web.Response(text="Done") + + app = web.Application() + app.router.add_post("/redirect", redirect_handler) + app.router.add_post("/final_destination", final_handler) + + client = await aiohttp_client(app) + + # Test with empty BytesPayload + payload = BytesPayload(b"") + resp = await client.post("/redirect", data=payload) + assert resp.status == 200 + assert data_received == [("redirect", b""), ("final", b"")] + + +async def test_redirect_preserves_content_type(aiohttp_client: AiohttpClient) -> None: + """Test that content-type is preserved across redirects.""" + content_types = [] + + async def redirect_handler(request: web.Request) -> web.Response: + content_types.append(("redirect", request.content_type)) + # Use 307 to preserve POST method + raise web.HTTPTemporaryRedirect("/final_destination") + + async def final_handler(request: web.Request) -> web.Response: + content_types.append(("final", request.content_type)) + return web.Response(text="Done") + + app = web.Application() + app.router.add_post("/redirect", redirect_handler) + app.router.add_post("/final_destination", final_handler) + + client = await aiohttp_client(app) + + # StringPayload should set content-type with charset + payload = StringPayload("test data") + resp = await client.post("/redirect", data=payload) + assert 
resp.status == 200 + # Both requests should have the same content type + assert len(content_types) == 2 + assert content_types[0][1] == "text/plain" + assert content_types[1][1] == "text/plain" + + +class MockedBytesPayload(BytesPayload): + """A BytesPayload that tracks whether close() was called.""" + + def __init__(self, data: bytes) -> None: + super().__init__(data) + self.close_called = False + + async def close(self) -> None: + self.close_called = True + await super().close() + + +async def test_too_many_redirects_closes_payload(aiohttp_client: AiohttpClient) -> None: + """Test that TooManyRedirects exception closes the request payload.""" + + async def redirect_handler(request: web.Request) -> web.Response: + # Read the payload to simulate server processing + await request.read() + count = int(request.match_info.get("count", 0)) + # Use 307 to preserve POST method + return web.Response( + status=307, headers={hdrs.LOCATION: f"/redirect/{count + 1}"} + ) + + app = web.Application() + app.router.add_post(r"/redirect/{count:\d+}", redirect_handler) + + client = await aiohttp_client(app) + + # Create a mocked payload to verify close() is called + payload = MockedBytesPayload(b"test payload") + + with pytest.raises(TooManyRedirects): + await client.post("/redirect/0", data=payload, max_redirects=2) + + assert ( + payload.close_called + ), "Payload.close() was not called when TooManyRedirects was raised" + + +async def test_invalid_url_redirect_closes_payload( + aiohttp_client: AiohttpClient, +) -> None: + """Test that InvalidUrlRedirectClientError exception closes the request payload.""" + + async def redirect_handler(request: web.Request) -> web.Response: + # Read the payload to simulate server processing + await request.read() + # Return an invalid URL that will cause ValueError in URL parsing + # Using a URL with invalid port that's out of range + return web.Response( + status=307, headers={hdrs.LOCATION: "http://example.com:999999/path"} + ) + + app = 
web.Application() + app.router.add_post("/redirect", redirect_handler) + + client = await aiohttp_client(app) + + # Create a mocked payload to verify close() is called + payload = MockedBytesPayload(b"test payload") + + with pytest.raises( + InvalidUrlRedirectClientError, + match="Server attempted redirecting to a location that does not look like a URL", + ): + await client.post("/redirect", data=payload) + + assert ( + payload.close_called + ), "Payload.close() was not called when InvalidUrlRedirectClientError was raised" + + +async def test_non_http_redirect_closes_payload(aiohttp_client: AiohttpClient) -> None: + """Test that NonHttpUrlRedirectClientError exception closes the request payload.""" + + async def redirect_handler(request: web.Request) -> web.Response: + # Read the payload to simulate server processing + await request.read() + # Return a non-HTTP scheme URL + return web.Response( + status=307, headers={hdrs.LOCATION: "ftp://example.com/file"} + ) + + app = web.Application() + app.router.add_post("/redirect", redirect_handler) + + client = await aiohttp_client(app) + + # Create a mocked payload to verify close() is called + payload = MockedBytesPayload(b"test payload") + + with pytest.raises(NonHttpUrlRedirectClientError): + await client.post("/redirect", data=payload) + + assert ( + payload.close_called + ), "Payload.close() was not called when NonHttpUrlRedirectClientError was raised" + + +async def test_invalid_redirect_origin_closes_payload( + aiohttp_client: AiohttpClient, +) -> None: + """Test that InvalidUrlRedirectClientError exception (invalid origin) closes the request payload.""" + + async def redirect_handler(request: web.Request) -> web.Response: + # Read the payload to simulate server processing + await request.read() + # Return a URL that will fail origin() check - using a relative URL without host + return web.Response(status=307, headers={hdrs.LOCATION: "http:///path"}) + + app = web.Application() + app.router.add_post("/redirect", 
redirect_handler) + + client = await aiohttp_client(app) + + # Create a mocked payload to verify close() is called + payload = MockedBytesPayload(b"test payload") + + with pytest.raises( + InvalidUrlRedirectClientError, match="Invalid redirect URL origin" + ): + await client.post("/redirect", data=payload) + + assert ( + payload.close_called + ), "Payload.close() was not called when InvalidUrlRedirectClientError (invalid origin) was raised" diff --git a/tests/test_client_middleware.py b/tests/test_client_middleware.py index e698e8ee825..217877759c0 100644 --- a/tests/test_client_middleware.py +++ b/tests/test_client_middleware.py @@ -1161,3 +1161,111 @@ async def __call__( assert received_bodies[1] == json_str2 assert received_bodies[2] == "" # GET request has no body assert received_bodies[3] == text_data + + +async def test_client_middleware_update_shorter_body( + aiohttp_server: AiohttpServer, +) -> None: + """Test that middleware can update request body using update_body method.""" + + async def handler(request: web.Request) -> web.Response: + body = await request.text() + return web.Response(text=body) + + app = web.Application() + app.router.add_post("/", handler) + server = await aiohttp_server(app) + + async def update_body_middleware( + request: ClientRequest, handler: ClientHandlerType + ) -> ClientResponse: + # Update the request body + await request.update_body(b"short body") + return await handler(request) + + async with ClientSession(middlewares=(update_body_middleware,)) as session: + async with session.post(server.make_url("/"), data=b"original body") as resp: + assert resp.status == 200 + text = await resp.text() + assert text == "short body" + + +async def test_client_middleware_update_longer_body( + aiohttp_server: AiohttpServer, +) -> None: + """Test that middleware can update request body using update_body method.""" + + async def handler(request: web.Request) -> web.Response: + body = await request.text() + return web.Response(text=body) + + 
app = web.Application() + app.router.add_post("/", handler) + server = await aiohttp_server(app) + + async def update_body_middleware( + request: ClientRequest, handler: ClientHandlerType + ) -> ClientResponse: + # Update the request body + await request.update_body(b"much much longer body") + return await handler(request) + + async with ClientSession(middlewares=(update_body_middleware,)) as session: + async with session.post(server.make_url("/"), data=b"original body") as resp: + assert resp.status == 200 + text = await resp.text() + assert text == "much much longer body" + + +async def test_client_middleware_update_string_body( + aiohttp_server: AiohttpServer, +) -> None: + """Test that middleware can update request body using update_body method.""" + + async def handler(request: web.Request) -> web.Response: + body = await request.text() + return web.Response(text=body) + + app = web.Application() + app.router.add_post("/", handler) + server = await aiohttp_server(app) + + async def update_body_middleware( + request: ClientRequest, handler: ClientHandlerType + ) -> ClientResponse: + # Update the request body + await request.update_body("this is a string") + return await handler(request) + + async with ClientSession(middlewares=(update_body_middleware,)) as session: + async with session.post(server.make_url("/"), data="original string") as resp: + assert resp.status == 200 + text = await resp.text() + assert text == "this is a string" + + +async def test_client_middleware_switch_types( + aiohttp_server: AiohttpServer, +) -> None: + """Test that middleware can update request body using update_body method.""" + + async def handler(request: web.Request) -> web.Response: + body = await request.text() + return web.Response(text=body) + + app = web.Application() + app.router.add_post("/", handler) + server = await aiohttp_server(app) + + async def update_body_middleware( + request: ClientRequest, handler: ClientHandlerType + ) -> ClientResponse: + # Update the request 
body + await request.update_body("now a string") + return await handler(request) + + async with ClientSession(middlewares=(update_body_middleware,)) as session: + async with session.post(server.make_url("/"), data=b"original bytes") as resp: + assert resp.status == 200 + text = await resp.text() + assert text == "now a string" diff --git a/tests/test_client_middleware_digest_auth.py b/tests/test_client_middleware_digest_auth.py index 26118288913..6da6850bafc 100644 --- a/tests/test_client_middleware_digest_auth.py +++ b/tests/test_client_middleware_digest_auth.py @@ -1,5 +1,6 @@ """Test digest authentication middleware for aiohttp client.""" +import io from hashlib import md5, sha1 from typing import Generator, Union from unittest import mock @@ -18,6 +19,7 @@ unescape_quotes, ) from aiohttp.client_reqrep import ClientResponse +from aiohttp.payload import BytesIOPayload from aiohttp.pytest_plugin import AiohttpServer from aiohttp.web import Application, Request, Response @@ -154,7 +156,7 @@ async def test_authenticate_scenarios( ), ], ) -def test_encode_validation_errors( +async def test_encode_validation_errors( digest_auth_mw: DigestAuthMiddleware, challenge: DigestAuthChallenge, expected_error: str, @@ -162,12 +164,14 @@ def test_encode_validation_errors( """Test validation errors when encoding digest auth headers.""" digest_auth_mw._challenge = challenge with pytest.raises(ClientError, match=expected_error): - digest_auth_mw._encode("GET", URL("http://example.com/resource"), "") + await digest_auth_mw._encode("GET", URL("http://example.com/resource"), b"") -def test_encode_digest_with_md5(auth_mw_with_challenge: DigestAuthMiddleware) -> None: - header = auth_mw_with_challenge._encode( - "GET", URL("http://example.com/resource"), "" +async def test_encode_digest_with_md5( + auth_mw_with_challenge: DigestAuthMiddleware, +) -> None: + header = await auth_mw_with_challenge._encode( + "GET", URL("http://example.com/resource"), b"" ) assert header.startswith("Digest 
") assert 'username="user"' in header @@ -177,7 +181,7 @@ def test_encode_digest_with_md5(auth_mw_with_challenge: DigestAuthMiddleware) -> @pytest.mark.parametrize( "algorithm", ["MD5-SESS", "SHA-SESS", "SHA-256-SESS", "SHA-512-SESS"] ) -def test_encode_digest_with_sess_algorithms( +async def test_encode_digest_with_sess_algorithms( digest_auth_mw: DigestAuthMiddleware, qop_challenge: DigestAuthChallenge, algorithm: str, @@ -188,11 +192,13 @@ def test_encode_digest_with_sess_algorithms( challenge["algorithm"] = algorithm digest_auth_mw._challenge = challenge - header = digest_auth_mw._encode("GET", URL("http://example.com/resource"), "") + header = await digest_auth_mw._encode( + "GET", URL("http://example.com/resource"), b"" + ) assert f"algorithm={algorithm}" in header -def test_encode_unsupported_algorithm( +async def test_encode_unsupported_algorithm( digest_auth_mw: DigestAuthMiddleware, basic_challenge: DigestAuthChallenge ) -> None: """Test that unsupported algorithm raises ClientError.""" @@ -202,10 +208,10 @@ def test_encode_unsupported_algorithm( digest_auth_mw._challenge = challenge with pytest.raises(ClientError, match="Unsupported hash algorithm"): - digest_auth_mw._encode("GET", URL("http://example.com/resource"), "") + await digest_auth_mw._encode("GET", URL("http://example.com/resource"), b"") -def test_invalid_qop_rejected( +async def test_invalid_qop_rejected( digest_auth_mw: DigestAuthMiddleware, basic_challenge: DigestAuthChallenge ) -> None: """Test that invalid Quality of Protection values are rejected.""" @@ -217,7 +223,7 @@ def test_invalid_qop_rejected( # This should raise an error about unsupported QoP with pytest.raises(ClientError, match="Unsupported Quality of Protection"): - digest_auth_mw._encode("GET", URL("http://example.com"), "") + await digest_auth_mw._encode("GET", URL("http://example.com"), b"") def compute_expected_digest( @@ -264,14 +270,17 @@ def KD(secret: str, data: str) -> str: @pytest.mark.parametrize( ("body", 
"body_str"), [ - ("this is a body", "this is a body"), # String case (b"this is a body", "this is a body"), # Bytes case + ( + BytesIOPayload(io.BytesIO(b"this is a body")), + "this is a body", + ), # BytesIOPayload case ], ) -def test_digest_response_exact_match( +async def test_digest_response_exact_match( qop: str, algorithm: str, - body: Union[str, bytes], + body: Union[bytes, BytesIOPayload], body_str: str, mock_sha1_digest: mock.MagicMock, ) -> None: @@ -295,7 +304,7 @@ def test_digest_response_exact_match( auth._last_nonce_bytes = nonce.encode("utf-8") auth._nonce_count = nc - header = auth._encode(method, URL(f"http://host{uri}"), body) + header = await auth._encode(method, URL(f"http://host{uri}"), body) # Get expected digest expected = compute_expected_digest( @@ -402,7 +411,7 @@ def test_middleware_invalid_login() -> None: DigestAuthMiddleware("user:name", "pass") -def test_escaping_quotes_in_auth_header() -> None: +async def test_escaping_quotes_in_auth_header() -> None: """Test that double quotes are properly escaped in auth header.""" auth = DigestAuthMiddleware('user"with"quotes', "pass") auth._challenge = DigestAuthChallenge( @@ -413,7 +422,7 @@ def test_escaping_quotes_in_auth_header() -> None: opaque='opaque"with"quotes', ) - header = auth._encode("GET", URL("http://example.com/path"), "") + header = await auth._encode("GET", URL("http://example.com/path"), b"") # Check that quotes are escaped in the header assert 'username="user\\"with\\"quotes"' in header @@ -422,13 +431,15 @@ def test_escaping_quotes_in_auth_header() -> None: assert 'opaque="opaque\\"with\\"quotes"' in header -def test_template_based_header_construction( +async def test_template_based_header_construction( auth_mw_with_challenge: DigestAuthMiddleware, mock_sha1_digest: mock.MagicMock, mock_md5_digest: mock.MagicMock, ) -> None: """Test that the template-based header construction works correctly.""" - header = auth_mw_with_challenge._encode("GET", URL("http://example.com/test"), 
"") + header = await auth_mw_with_challenge._encode( + "GET", URL("http://example.com/test"), b"" + ) # Split the header into scheme and parameters scheme, params_str = header.split(" ", 1) diff --git a/tests/test_client_request.py b/tests/test_client_request.py index 70b30dd14f2..b1807b96d82 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -4,9 +4,10 @@ import pathlib import sys import urllib.parse +import warnings from collections.abc import Callable, Iterable from http.cookies import BaseCookie, Morsel, SimpleCookie -from typing import Any, Optional, Protocol, Union +from typing import Any, AsyncIterator, Optional, Protocol, Union from unittest import mock import pytest @@ -750,7 +751,7 @@ async def test_post_data(loop: asyncio.AbstractEventLoop, conn: mock.Mock) -> No ) resp = await req.send(conn) assert "/" == req.url.path - assert b"life=42" == req.body._value + assert b"life=42" == req.body._value # type: ignore[union-attr] assert "application/x-www-form-urlencoded" == req.headers["CONTENT-TYPE"] await req.close() resp.close() @@ -787,7 +788,7 @@ async def test_get_with_data(loop) -> None: meth, URL("http://python.org/"), data={"life": "42"}, loop=loop ) assert "/" == req.url.path - assert b"life=42" == req.body._value + assert b"life=42" == req.body._value # type: ignore[union-attr] await req.close() @@ -893,6 +894,7 @@ async def test_chunked_explicit(loop, conn) -> None: req = ClientRequest("post", URL("http://python.org/"), chunked=True, loop=loop) with mock.patch("aiohttp.client_reqrep.StreamWriter") as m_writer: m_writer.return_value.write_headers = mock.AsyncMock() + m_writer.return_value.write_eof = mock.AsyncMock() resp = await req.send(conn) assert "chunked" == req.headers["TRANSFER-ENCODING"] @@ -949,7 +951,65 @@ async def test_precompressed_data_stays_intact(loop) -> None: await req.close() -async def test_file_upload_not_chunked_seek(loop) -> None: +async def test_body_with_size_sets_content_length( + loop: 
asyncio.AbstractEventLoop, +) -> None: + """Test that when body has a size and no Content-Length header is set, it gets added.""" + # Create a BytesPayload which has a size property + data = b"test data" + + # Create request with data that will create a BytesPayload + req = ClientRequest( + "post", + URL("http://python.org/"), + data=data, + loop=loop, + ) + + # Verify Content-Length was set from body.size + assert req.headers["CONTENT-LENGTH"] == str(len(data)) + assert req.body is not None + assert req._body is not None # When _body is set, body returns it + assert req._body.size == len(data) + await req.close() + + +async def test_body_payload_with_size_no_content_length( + loop: asyncio.AbstractEventLoop, +) -> None: + """Test that when a body payload with size is set directly, Content-Length is added.""" + # Create a payload with a known size + data = b"payload data" + bytes_payload = payload.BytesPayload(data) + + # Create request with no data initially + req = ClientRequest( + "post", + URL("http://python.org/"), + loop=loop, + ) + + # Set body directly (bypassing update_body_from_data to avoid it setting Content-Length) + req._body = bytes_payload + + # Ensure conditions for the code path we want to test + assert req._body is not None + assert hdrs.CONTENT_LENGTH not in req.headers + assert req._body.size is not None + assert not req.chunked + + # Now trigger update_transfer_encoding which should set Content-Length + req.update_transfer_encoding() + + # Verify Content-Length was set from body.size + assert req.headers["CONTENT-LENGTH"] == str(len(data)) + assert req.body is bytes_payload + assert req._body is bytes_payload # Access _body which is the Payload + assert req._body.size == len(data) + await req.close() + + +async def test_file_upload_not_chunked_seek(loop: asyncio.AbstractEventLoop) -> None: file_path = pathlib.Path(__file__).parent / "aiohttp.png" with file_path.open("rb") as f: f.seek(100) @@ -1201,6 +1261,7 @@ def read(self, decode=False): 
async def test_oserror_on_write_bytes(loop, conn) -> None: req = ClientRequest("POST", URL("http://python.org/"), loop=loop) + req.body = b"test data" writer = WriterMock() writer.write.side_effect = OSError @@ -1584,7 +1645,17 @@ async def test_write_bytes_with_iterable_content_length_limit( """Test that write_bytes respects content_length limit for iterable data.""" # Test with iterable data req = ClientRequest("post", URL("http://python.org/"), loop=loop) - req.body = data + + # Convert list to async generator if needed + if isinstance(data, list): + + async def gen() -> AsyncIterator[bytes]: + for chunk in data: + yield chunk + + req.body = gen() # type: ignore[assignment] # https://github.com/python/mypy/issues/12892 + else: + req.body = data writer = StreamWriter(protocol=conn.protocol, loop=loop) # Use content_length=7 to truncate at the middle of Part2 @@ -1599,7 +1670,13 @@ async def test_write_bytes_empty_iterable_with_content_length( ) -> None: """Test that write_bytes handles empty iterable body with content_length.""" req = ClientRequest("post", URL("http://python.org/"), loop=loop) - req.body = [] # Empty iterable + + # Create an empty async generator + async def gen() -> AsyncIterator[bytes]: + return + yield # pragma: no cover # This makes it a generator but never executes + + req.body = gen() # type: ignore[assignment] # https://github.com/python/mypy/issues/12892 writer = StreamWriter(protocol=conn.protocol, loop=loop) # Use content_length=10 with empty body @@ -1608,3 +1685,392 @@ async def test_write_bytes_empty_iterable_with_content_length( # Verify nothing was written assert len(buf) == 0 await req.close() + + +async def test_warn_if_unclosed_payload_via_body_setter( + make_request: _RequestMaker, +) -> None: + """Test that _warn_if_unclosed_payload is called when setting body with unclosed payload.""" + req = make_request("POST", "http://python.org/") + + # First set a payload that needs manual closing (autoclose=False) + file_payload = 
payload.BufferedReaderPayload( + io.BufferedReader(io.BytesIO(b"test data")), # type: ignore[arg-type] + encoding="utf-8", + ) + req.body = file_payload + + # Setting body again should trigger the warning for the previous payload + with pytest.warns( + ResourceWarning, + match="The previous request body contains unclosed resources", + ): + req.body = b"new data" + + await req.close() + + +async def test_no_warn_for_autoclose_payload_via_body_setter( + make_request: _RequestMaker, +) -> None: + """Test that no warning is issued for payloads with autoclose=True.""" + req = make_request("POST", "http://python.org/") + + # First set BytesIOPayload which has autoclose=True + bytes_payload = payload.BytesIOPayload(io.BytesIO(b"test data")) + req.body = bytes_payload + + # Setting body again should not trigger warning since previous payload has autoclose=True + with warnings.catch_warnings(record=True) as warning_list: + warnings.simplefilter("always") + req.body = b"new data" + + # Filter out any non-ResourceWarning warnings + resource_warnings = [ + w for w in warning_list if issubclass(w.category, ResourceWarning) + ] + assert len(resource_warnings) == 0 + + await req.close() + + +async def test_no_warn_for_consumed_payload_via_body_setter( + make_request: _RequestMaker, +) -> None: + """Test that no warning is issued for already consumed payloads.""" + req = make_request("POST", "http://python.org/") + + # Create a payload that needs manual closing + file_payload = payload.BufferedReaderPayload( + io.BufferedReader(io.BytesIO(b"test data")), # type: ignore[arg-type] + encoding="utf-8", + ) + req.body = file_payload + + # Properly close the payload to mark it as consumed + await file_payload.close() + + # Setting body again should not trigger warning since previous payload is consumed + with warnings.catch_warnings(record=True) as warning_list: + warnings.simplefilter("always") + req.body = b"new data" + + # Filter out any non-ResourceWarning warnings + 
resource_warnings = [ + w for w in warning_list if issubclass(w.category, ResourceWarning) + ] + assert len(resource_warnings) == 0 + + await req.close() + + +async def test_warn_if_unclosed_payload_via_update_body_from_data( + make_request: _RequestMaker, +) -> None: + """Test that _warn_if_unclosed_payload is called via update_body_from_data.""" + req = make_request("POST", "http://python.org/") + + # First set a payload that needs manual closing + file_payload = payload.BufferedReaderPayload( + io.BufferedReader(io.BytesIO(b"initial data")), # type: ignore[arg-type] + encoding="utf-8", + ) + req.update_body_from_data(file_payload) + + # Create FormData for second update + form = aiohttp.FormData() + form.add_field("test", "value") + + # update_body_from_data should trigger the warning for the previous payload + with pytest.warns( + ResourceWarning, + match="The previous request body contains unclosed resources", + ): + req.update_body_from_data(form) + + await req.close() + + +async def test_warn_via_update_with_file_payload( + make_request: _RequestMaker, +) -> None: + """Test warning via update_body_from_data with file-like object.""" + req = make_request("POST", "http://python.org/") + + # First create a file-like object that results in BufferedReaderPayload + buffered1 = io.BufferedReader(io.BytesIO(b"file content 1")) # type: ignore[arg-type] + req.update_body_from_data(buffered1) + + # Second update should warn about the first payload + buffered2 = io.BufferedReader(io.BytesIO(b"file content 2")) # type: ignore[arg-type] + + with pytest.warns( + ResourceWarning, + match="The previous request body contains unclosed resources", + ): + req.update_body_from_data(buffered2) + + await req.close() + + +async def test_no_warn_for_simple_data_via_update_body_from_data( + make_request: _RequestMaker, +) -> None: + """Test that no warning is issued for simple data types.""" + req = make_request("POST", "http://python.org/") + + # Simple bytes data should not trigger 
warning + with warnings.catch_warnings(record=True) as warning_list: + warnings.simplefilter("always") + req.update_body_from_data(b"simple data") + + # Filter out any non-ResourceWarning warnings + resource_warnings = [ + w for w in warning_list if issubclass(w.category, ResourceWarning) + ] + assert len(resource_warnings) == 0 + + await req.close() + + +async def test_update_body_closes_previous_payload( + make_request: _RequestMaker, +) -> None: + """Test that update_body properly closes the previous payload.""" + req = make_request("POST", "http://python.org/") + + # Create a mock payload that tracks if it was closed + mock_payload = mock.Mock(spec=payload.Payload) + mock_payload.close = mock.AsyncMock() + + # Set initial payload + req._body = mock_payload + + # Update body with new data + await req.update_body(b"new body data") + + # Verify the previous payload was closed + mock_payload.close.assert_called_once() + + # Verify new body is set (it's a BytesPayload now) + assert isinstance(req.body, payload.BytesPayload) + + await req.close() + + +async def test_body_setter_closes_previous_payload( + make_request: _RequestMaker, +) -> None: + """Test that body setter properly closes the previous payload.""" + req = make_request("POST", "http://python.org/") + + # Create a mock payload that tracks if it was closed + # We need to use create_autospec to ensure all methods are available + mock_payload = mock.create_autospec(payload.Payload, instance=True) + + # Set initial payload + req._body = mock_payload + + # Update body with new data using setter + req.body = b"new body data" + + # Verify the previous payload was closed using _close + mock_payload._close.assert_called_once() + + # Verify new body is set (it's a BytesPayload now) + assert isinstance(req.body, payload.BytesPayload) + + await req.close() + + +async def test_update_body_with_different_types( + make_request: _RequestMaker, +) -> None: + """Test update_body with various data types.""" + req = 
make_request("POST", "http://python.org/") + + # Test with bytes + await req.update_body(b"bytes data") + assert isinstance(req.body, payload.BytesPayload) + + # Test with string + await req.update_body("string data") + assert isinstance(req.body, payload.BytesPayload) + + # Test with None (clears body) + await req.update_body(None) + assert req.body == b"" # type: ignore[comparison-overlap] # empty body is represented as b"" + + await req.close() + + +async def test_update_body_with_chunked_encoding( + make_request: _RequestMaker, +) -> None: + """Test that update_body properly handles chunked transfer encoding.""" + # Create request with chunked=True + req = make_request("POST", "http://python.org/", chunked=True) + + # Verify Transfer-Encoding header is set + assert req.headers["Transfer-Encoding"] == "chunked" + assert "Content-Length" not in req.headers + + # Update body - should maintain chunked encoding + await req.update_body(b"chunked data") + assert req.headers["Transfer-Encoding"] == "chunked" + assert "Content-Length" not in req.headers + assert isinstance(req.body, payload.BytesPayload) + + # Update with different body - chunked should remain + await req.update_body(b"different chunked data") + assert req.headers["Transfer-Encoding"] == "chunked" + assert "Content-Length" not in req.headers + + # Clear body - chunked header should remain + await req.update_body(None) + assert req.headers["Transfer-Encoding"] == "chunked" + assert "Content-Length" not in req.headers + + await req.close() + + +async def test_update_body_get_method_with_none_body( + make_request: _RequestMaker, +) -> None: + """Test that update_body with GET method and None body doesn't call update_transfer_encoding.""" + # Create GET request + req = make_request("GET", "http://python.org/") + + # GET requests shouldn't have Transfer-Encoding or Content-Length initially + assert "Transfer-Encoding" not in req.headers + assert "Content-Length" not in req.headers + + # Update body to None - 
should not trigger update_transfer_encoding + # This covers the branch where body is None AND method is in GET_METHODS + await req.update_body(None) + + # Headers should remain unchanged + assert "Transfer-Encoding" not in req.headers + assert "Content-Length" not in req.headers + + await req.close() + + +async def test_update_body_updates_content_length( + make_request: _RequestMaker, +) -> None: + """Test that update_body properly updates Content-Length header when body size changes.""" + req = make_request("POST", "http://python.org/") + + # Set initial body with known size + await req.update_body(b"initial data") + initial_content_length = req.headers.get("Content-Length") + assert initial_content_length == "12" # len(b"initial data") = 12 + + # Update body with different size + await req.update_body(b"much longer data than before") + new_content_length = req.headers.get("Content-Length") + assert new_content_length == "28" # len(b"much longer data than before") = 28 + + # Update body with shorter data + await req.update_body(b"short") + assert req.headers.get("Content-Length") == "5" # len(b"short") = 5 + + # Clear body + await req.update_body(None) + # For None body, Content-Length should not be set + assert "Content-Length" not in req.headers + + await req.close() + + +async def test_warn_stacklevel_points_to_user_code( + make_request: _RequestMaker, +) -> None: + """Test that the warning stacklevel correctly points to user code.""" + req = make_request("POST", "http://python.org/") + + # First set a payload that needs manual closing (autoclose=False) + file_payload = payload.BufferedReaderPayload( + io.BufferedReader(io.BytesIO(b"test data")), # type: ignore[arg-type] + encoding="utf-8", + ) + req.body = file_payload + + # Capture warnings with their details + with warnings.catch_warnings(record=True) as warning_list: + warnings.simplefilter("always", ResourceWarning) + # This line should be reported as the warning source + req.body = b"new data" # LINE TO 
BE REPORTED + + # Find the ResourceWarning + resource_warnings = [ + w for w in warning_list if issubclass(w.category, ResourceWarning) + ] + assert len(resource_warnings) == 1 + + warning = resource_warnings[0] + # The warning should point to the line where we set req.body, not inside the library + # Call chain: user code -> body setter -> _warn_if_unclosed_payload + # stacklevel=3 is used in body setter to skip the setter and _warn_if_unclosed_payload + assert warning.filename == __file__ + # The line number should be the line with "req.body = b'new data'" + # We can't hardcode the line number, but we can verify it's not pointing + # to client_reqrep.py (the library code) + assert "client_reqrep.py" not in warning.filename + + await req.close() + + +async def test_warn_stacklevel_update_body_from_data( + make_request: _RequestMaker, +) -> None: + """Test that warning stacklevel is correct when called from update_body_from_data.""" + req = make_request("POST", "http://python.org/") + + # First set a payload that needs manual closing (autoclose=False) + file_payload = payload.BufferedReaderPayload( + io.BufferedReader(io.BytesIO(b"test data")), # type: ignore[arg-type] + encoding="utf-8", + ) + req.update_body_from_data(file_payload) + + # Capture warnings with their details + with warnings.catch_warnings(record=True) as warning_list: + warnings.simplefilter("always", ResourceWarning) + # This line should be reported as the warning source + req.update_body_from_data(b"new data") # LINE TO BE REPORTED + + # Find the ResourceWarning + resource_warnings = [ + w for w in warning_list if issubclass(w.category, ResourceWarning) + ] + assert len(resource_warnings) == 1 + + warning = resource_warnings[0] + # For update_body_from_data, stacklevel=3 points to this test file + # Call chain: user code -> update_body_from_data -> _warn_if_unclosed_payload + assert warning.filename == __file__ + assert "client_reqrep.py" not in warning.filename + + await req.close() + + +async 
def test_expect100_with_body_becomes_none() -> None: + """Test that write_bytes handles body becoming None after expect100 handling.""" + # Create a mock writer and connection + mock_writer = mock.AsyncMock() + mock_conn = mock.Mock() + + # Create a request + req = ClientRequest( + "POST", URL("http://test.example.com/"), loop=asyncio.get_event_loop() + ) + req._body = mock.Mock() # Start with a body + + # Now set body to None to simulate a race condition + # where req._body is set to None after expect100 handling + req._body = None + + await req.write_bytes(mock_writer, mock_conn, None) + await req.close() diff --git a/tests/test_client_session.py b/tests/test_client_session.py index 0656a9ed023..e31144abd0b 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -422,7 +422,9 @@ async def test_reraise_os_error(create_session) -> None: err = OSError(1, "permission error") req = mock.Mock() req_factory = mock.Mock(return_value=req) - req.send = mock.Mock(side_effect=err) + req.send = mock.AsyncMock(side_effect=err) + req._body = mock.Mock() + req._body.close = mock.AsyncMock() session = await create_session(request_class=req_factory) async def create_connection(req, traces, timeout): @@ -446,7 +448,9 @@ class UnexpectedException(BaseException): err = UnexpectedException("permission error") req = mock.Mock() req_factory = mock.Mock(return_value=req) - req.send = mock.Mock(side_effect=err) + req.send = mock.AsyncMock(side_effect=err) + req._body = mock.Mock() + req._body.close = mock.AsyncMock() session = await create_session(request_class=req_factory) connections = [] @@ -496,6 +500,7 @@ async def test_ws_connect_allowed_protocols( resp.start = mock.AsyncMock() req = mock.create_autospec(aiohttp.ClientRequest, spec_set=True) + req._body = None # No body for WebSocket upgrade requests req_factory = mock.Mock(return_value=req) req.send = mock.AsyncMock(return_value=resp) # BaseConnector allows all high level protocols by default @@ -553,6 
+558,7 @@ async def test_ws_connect_unix_socket_allowed_protocols( resp.start = mock.AsyncMock() req = mock.create_autospec(aiohttp.ClientRequest, spec_set=True) + req._body = None # No body for WebSocket upgrade requests req_factory = mock.Mock(return_value=req) req.send = mock.AsyncMock(return_value=resp) # UnixConnector allows all high level protocols by default and unix sockets diff --git a/tests/test_formdata.py b/tests/test_formdata.py index 4b7c94ac4cd..5fe8f92b097 100644 --- a/tests/test_formdata.py +++ b/tests/test_formdata.py @@ -4,6 +4,8 @@ import pytest from aiohttp import FormData, web +from aiohttp.http_writer import StreamWriter +from aiohttp.pytest_plugin import AiohttpClient @pytest.fixture @@ -105,8 +107,8 @@ async def test_formdata_field_name_is_not_quoted(buf, writer) -> None: assert b'name="email 1"' in buf -async def test_mark_formdata_as_processed(aiohttp_client) -> None: - async def handler(request): +async def test_formdata_is_reusable(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: return web.Response() app = web.Application() @@ -117,10 +119,170 @@ async def handler(request): data = FormData() data.add_field("test", "test_value", content_type="application/json") - resp = await client.post("/", data=data) - assert len(data._writer._parts) == 1 + # First request + resp1 = await client.post("/", data=data) + assert resp1.status == 200 + resp1.release() - with pytest.raises(RuntimeError): - await client.post("/", data=data) + # Second request - should work without RuntimeError + resp2 = await client.post("/", data=data) + assert resp2.status == 200 + resp2.release() - resp.release() + # Third request to ensure continued reusability + resp3 = await client.post("/", data=data) + assert resp3.status == 200 + resp3.release() + + +async def test_formdata_reusability_multipart( + writer: StreamWriter, buf: bytearray +) -> None: + form = FormData() + form.add_field("name", "value") + 
form.add_field("file", b"content", filename="test.txt", content_type="text/plain") + + # First call - should generate multipart payload + payload1 = form() + assert form.is_multipart + buf.clear() + await payload1.write(writer) + result1 = bytes(buf) + + # Verify first result contains expected content + assert b"name" in result1 + assert b"value" in result1 + assert b"test.txt" in result1 + assert b"content" in result1 + assert b"text/plain" in result1 + + # Second call - should generate identical multipart payload + payload2 = form() + buf.clear() + await payload2.write(writer) + result2 = bytes(buf) + + # Results should be identical (same boundary and content) + assert result1 == result2 + + # Third call to ensure continued reusability + payload3 = form() + buf.clear() + await payload3.write(writer) + result3 = bytes(buf) + + assert result1 == result3 + + +async def test_formdata_reusability_urlencoded( + writer: StreamWriter, buf: bytearray +) -> None: + form = FormData() + form.add_field("key1", "value1") + form.add_field("key2", "value2") + + # First call - should generate urlencoded payload + payload1 = form() + assert not form.is_multipart + buf.clear() + await payload1.write(writer) + result1 = bytes(buf) + + # Verify first result contains expected content + assert b"key1=value1" in result1 + assert b"key2=value2" in result1 + + # Second call - should generate identical urlencoded payload + payload2 = form() + buf.clear() + await payload2.write(writer) + result2 = bytes(buf) + + # Results should be identical + assert result1 == result2 + + # Third call to ensure continued reusability + payload3 = form() + buf.clear() + await payload3.write(writer) + result3 = bytes(buf) + + assert result1 == result3 + + +async def test_formdata_reusability_after_adding_fields( + writer: StreamWriter, buf: bytearray +) -> None: + form = FormData() + form.add_field("field1", "value1") + + # First call + payload1 = form() + buf.clear() + await payload1.write(writer) + result1 
= bytes(buf) + + # Add more fields after first call + form.add_field("field2", "value2") + + # Second call should include new field + payload2 = form() + buf.clear() + await payload2.write(writer) + result2 = bytes(buf) + + # Results should be different + assert result1 != result2 + assert b"field1=value1" in result2 + assert b"field2=value2" in result2 + assert b"field2=value2" not in result1 + + # Third call should be same as second + payload3 = form() + buf.clear() + await payload3.write(writer) + result3 = bytes(buf) + + assert result2 == result3 + + +async def test_formdata_reusability_with_io_fields( + writer: StreamWriter, buf: bytearray +) -> None: + form = FormData() + + # Create BytesIO and StringIO objects + bytes_io = io.BytesIO(b"bytes content") + string_io = io.StringIO("string content") + + form.add_field( + "bytes_field", + bytes_io, + filename="bytes.bin", + content_type="application/octet-stream", + ) + form.add_field( + "string_field", string_io, filename="text.txt", content_type="text/plain" + ) + + # First call + payload1 = form() + buf.clear() + await payload1.write(writer) + result1 = bytes(buf) + + assert b"bytes content" in result1 + assert b"string content" in result1 + + # Reset IO objects for reuse + bytes_io.seek(0) + string_io.seek(0) + + # Second call - should work with reset IO objects + payload2 = form() + buf.clear() + await payload2.write(writer) + result2 = bytes(buf) + + # Should produce identical results + assert result1 == result2 diff --git a/tests/test_multipart.py b/tests/test_multipart.py index c76d523ca86..75b73a78070 100644 --- a/tests/test_multipart.py +++ b/tests/test_multipart.py @@ -6,6 +6,7 @@ from unittest import mock import pytest +from multidict import CIMultiDict, CIMultiDictProxy import aiohttp from aiohttp import payload @@ -17,7 +18,11 @@ CONTENT_TYPE, ) from aiohttp.helpers import parse_mimetype -from aiohttp.multipart import MultipartResponseWrapper +from aiohttp.multipart import ( + BodyPartReader, + 
BodyPartReaderPayload, + MultipartResponseWrapper, +) from aiohttp.streams import StreamReader BOUNDARY = b"--:" @@ -43,7 +48,23 @@ async def write(chunk): @pytest.fixture -def writer(): +def buf2() -> bytearray: + return bytearray() + + +@pytest.fixture +def stream2(buf2: bytearray) -> mock.Mock: + writer = mock.Mock() + + async def write(chunk: bytes) -> None: + buf2.extend(chunk) + + writer.write.side_effect = write + return writer + + +@pytest.fixture +def writer() -> aiohttp.MultipartWriter: return aiohttp.MultipartWriter(boundary=":") @@ -1550,3 +1571,179 @@ async def test_async_for_bodypart() -> None: part = aiohttp.BodyPartReader(boundary=b"--:", headers={}, content=stream) async for data in part: assert data == b"foobarbaz" + + +async def test_multipart_writer_reusability( + buf: bytearray, + stream: mock.Mock, + buf2: bytearray, + stream2: mock.Mock, + writer: aiohttp.MultipartWriter, +) -> None: + """Test that MultipartWriter can be written multiple times.""" + # Add some parts + writer.append("text content") + writer.append(b"binary content", {"Content-Type": "application/octet-stream"}) + writer.append_json({"key": "value"}) + + # Test as_bytes multiple times + bytes1 = await writer.as_bytes() + bytes2 = await writer.as_bytes() + bytes3 = await writer.as_bytes() + + # All as_bytes calls should return identical data + assert bytes1 == bytes2 == bytes3 + + # Verify content is there + assert b"text content" in bytes1 + assert b"binary content" in bytes1 + assert b'"key": "value"' in bytes1 + + # First write + buf.clear() + await writer.write(stream) + result1 = bytes(buf) + + # Second write - should produce identical output + buf2.clear() + await writer.write(stream2) + result2 = bytes(buf2) + + # Results should be identical + assert result1 == result2 + + # Third write to ensure continued reusability + buf.clear() + await writer.write(stream) + result3 = bytes(buf) + + assert result1 == result3 + + # as_bytes should still work after writes + bytes4 = 
await writer.as_bytes() + assert bytes1 == bytes4 + + +async def test_multipart_writer_reusability_with_io_payloads( + buf: bytearray, + stream: mock.Mock, + buf2: bytearray, + stream2: mock.Mock, + writer: aiohttp.MultipartWriter, +) -> None: + """Test that MultipartWriter with IO payloads can be reused.""" + # Create IO objects + bytes_io = io.BytesIO(b"bytes io content") + string_io = io.StringIO("string io content") + + # Add IO payloads + writer.append(bytes_io, {"Content-Type": "application/octet-stream"}) + writer.append(string_io, {"Content-Type": "text/plain"}) + + # Test as_bytes multiple times + bytes1 = await writer.as_bytes() + bytes2 = await writer.as_bytes() + + # All as_bytes calls should return identical data + assert bytes1 == bytes2 + assert b"bytes io content" in bytes1 + assert b"string io content" in bytes1 + + # First write + buf.clear() + await writer.write(stream) + result1 = bytes(buf) + + assert b"bytes io content" in result1 + assert b"string io content" in result1 + + # Reset IO objects for reuse + bytes_io.seek(0) + string_io.seek(0) + + # Second write + buf2.clear() + await writer.write(stream2) + result2 = bytes(buf2) + + # Should produce identical results + assert result1 == result2 + + # Test as_bytes after writes (IO objects should auto-reset) + bytes3 = await writer.as_bytes() + assert bytes1 == bytes3 + + +async def test_body_part_reader_payload_as_bytes() -> None: + """Test that BodyPartReaderPayload.as_bytes raises TypeError.""" + # Create a mock BodyPartReader + headers = CIMultiDictProxy(CIMultiDict({CONTENT_TYPE: "text/plain"})) + protocol = mock.Mock(_reading_paused=False) + stream = StreamReader(protocol, 2**16, loop=asyncio.get_event_loop()) + body_part = BodyPartReader(BOUNDARY, headers, stream) + + # Create the payload + payload = BodyPartReaderPayload(body_part) + + # Test that as_bytes raises TypeError + with pytest.raises(TypeError, match="Unable to read body part as bytes"): + await payload.as_bytes() + + # Test 
that decode also raises TypeError + with pytest.raises(TypeError, match="Unable to decode"): + payload.decode() + + +async def test_multipart_writer_close_with_exceptions() -> None: + """Test that MultipartWriter.close() continues closing all parts even if one raises.""" + writer = aiohttp.MultipartWriter() + + # Create mock payloads + # First part will raise during close + part1 = mock.Mock() + part1.autoclose = False + part1.consumed = False + part1.close = mock.AsyncMock(side_effect=RuntimeError("Part 1 close failed")) + + # Second part should still get closed + part2 = mock.Mock() + part2.autoclose = False + part2.consumed = False + part2.close = mock.AsyncMock() + + # Third part with autoclose=True should not be closed + part3 = mock.Mock() + part3.autoclose = True + part3.consumed = False + part3.close = mock.AsyncMock() + + # Fourth part already consumed should not be closed + part4 = mock.Mock() + part4.autoclose = False + part4.consumed = True + part4.close = mock.AsyncMock() + + # Add parts to writer's internal list + writer._parts = [ + (part1, "", ""), + (part2, "", ""), + (part3, "", ""), + (part4, "", ""), + ] + + # Close the writer - should not raise despite part1 failing + await writer.close() + + # Verify close was called on appropriate parts + part1.close.assert_called_once() + part2.close.assert_called_once() # Should still be called despite part1 failing + part3.close.assert_not_called() # autoclose=True + part4.close.assert_not_called() # consumed=True + + # Verify writer is marked as consumed + assert writer._consumed is True + + # Calling close again should do nothing + await writer.close() + assert part1.close.call_count == 1 + assert part2.close.call_count == 1 diff --git a/tests/test_payload.py b/tests/test_payload.py index af0230776e5..b810a68f8b7 100644 --- a/tests/test_payload.py +++ b/tests/test_payload.py @@ -1,9 +1,12 @@ import array +import asyncio import io +import json import unittest.mock from collections.abc import AsyncIterator 
from io import StringIO -from typing import Optional, Union +from pathlib import Path +from typing import Optional, TextIO, Union import pytest from multidict import CIMultiDict @@ -12,6 +15,35 @@ from aiohttp.abc import AbstractStreamWriter +class BufferWriter(AbstractStreamWriter): + """Test writer that captures written bytes in a buffer.""" + + def __init__(self) -> None: + self.buffer = bytearray() + + async def write( + self, chunk: Union[bytes, bytearray, "memoryview[int]", "memoryview[bytes]"] + ) -> None: + self.buffer.extend(bytes(chunk)) + + async def write_eof(self, chunk: bytes = b"") -> None: + """No-op for test writer.""" + + async def drain(self) -> None: + """No-op for test writer.""" + + def enable_compression( + self, encoding: str = "deflate", strategy: Optional[int] = None + ) -> None: + """Compression not implemented for test writer.""" + + def enable_chunking(self) -> None: + """Chunking not implemented for test writer.""" + + async def write_headers(self, status_line: str, headers: CIMultiDict[str]) -> None: + """Headers not captured for payload tests.""" + + @pytest.fixture(autouse=True) def cleanup( cleanup_payload_pending_file_closes: None, @@ -417,6 +449,43 @@ async def test_textio_payload_with_encoding() -> None: assert writer.get_written_bytes() == b"hello wo" +async def test_textio_payload_as_bytes() -> None: + """Test TextIOPayload.as_bytes method with different encodings.""" + # Test with UTF-8 encoding + data = io.StringIO("Hello 世界") + p = payload.TextIOPayload(data, encoding="utf-8") + + # Test as_bytes() method + result = await p.as_bytes() + assert result == "Hello 世界".encode() + + # Test that position is restored for multiple reads + result2 = await p.as_bytes() + assert result2 == "Hello 世界".encode() + + # Test with different encoding parameter (should use instance encoding) + result3 = await p.as_bytes(encoding="latin-1") + assert result3 == "Hello 世界".encode() # Should still use utf-8 + + # Test with different encoding in 
payload + data2 = io.StringIO("Hello World") + p2 = payload.TextIOPayload(data2, encoding="latin-1") + result4 = await p2.as_bytes() + assert result4 == b"Hello World" # latin-1 encoding + + # Test with no explicit encoding (defaults to utf-8) + data3 = io.StringIO("Test データ") + p3 = payload.TextIOPayload(data3) + result5 = await p3.as_bytes() + assert result5 == "Test データ".encode() + + # Test with encoding errors parameter + data4 = io.StringIO("Test") + p4 = payload.TextIOPayload(data4, encoding="ascii") + result6 = await p4.as_bytes(errors="strict") + assert result6 == b"Test" + + async def test_bytesio_payload_backwards_compatibility() -> None: """Test BytesIOPayload.write() backwards compatibility delegates to write_with_length().""" data = io.BytesIO(b"test data") @@ -455,3 +524,607 @@ async def gen() -> AsyncIterator[bytes]: # Should return early without writing anything await p.write_with_length(writer, 10) assert writer.get_written_bytes() == b"" + + +async def test_async_iterable_payload_caching() -> None: + """Test AsyncIterablePayload caching behavior.""" + + async def gen() -> AsyncIterator[bytes]: + yield b"Hello" + yield b" " + yield b"World" + + p = payload.AsyncIterablePayload(gen()) + + # First call to as_bytes should consume iterator and cache + result1 = await p.as_bytes() + assert result1 == b"Hello World" + assert p._iter is None # Iterator exhausted + assert p._cached_chunks == [b"Hello", b" ", b"World"] # Chunks cached + assert p._consumed is False # Not marked as consumed to allow reuse + + # Second call should use cache + result2 = await p.as_bytes() + assert result2 == b"Hello World" + assert p._cached_chunks == [b"Hello", b" ", b"World"] # Still cached + + # decode should work with cached chunks + decoded = p.decode() + assert decoded == "Hello World" + + # write_with_length should use cached chunks + writer = MockStreamWriter() + await p.write_with_length(writer, None) + assert writer.get_written_bytes() == b"Hello World" + + # 
write_with_length with limit should respect it + writer2 = MockStreamWriter() + await p.write_with_length(writer2, 5) + assert writer2.get_written_bytes() == b"Hello" + + +async def test_async_iterable_payload_decode_without_cache() -> None: + """Test AsyncIterablePayload decode raises error without cache.""" + + async def gen() -> AsyncIterator[bytes]: + yield b"test" + + p = payload.AsyncIterablePayload(gen()) + + # decode should raise without cache + with pytest.raises(TypeError) as excinfo: + p.decode() + assert "Unable to decode - content not cached" in str(excinfo.value) + + # After as_bytes, decode should work + await p.as_bytes() + assert p.decode() == "test" + + +async def test_async_iterable_payload_write_then_cache() -> None: + """Test AsyncIterablePayload behavior when written before caching.""" + + async def gen() -> AsyncIterator[bytes]: + yield b"Hello" + yield b"World" + + p = payload.AsyncIterablePayload(gen()) + + # First write without caching (streaming) + writer1 = MockStreamWriter() + await p.write_with_length(writer1, None) + assert writer1.get_written_bytes() == b"HelloWorld" + assert p._iter is None # Iterator exhausted + assert p._cached_chunks is None # No cache created + assert p._consumed is True # Marked as consumed + + # Subsequent operations should handle exhausted iterator + result = await p.as_bytes() + assert result == b"" # Empty since iterator exhausted without cache + + # Write should also be empty + writer2 = MockStreamWriter() + await p.write_with_length(writer2, None) + assert writer2.get_written_bytes() == b"" + + +async def test_bytes_payload_reusability() -> None: + """Test that BytesPayload can be written and read multiple times.""" + data = b"test payload data" + p = payload.BytesPayload(data) + + # First write_with_length + writer1 = MockStreamWriter() + await p.write_with_length(writer1, None) + assert writer1.get_written_bytes() == data + + # Second write_with_length (simulating redirect) + writer2 = 
MockStreamWriter() + await p.write_with_length(writer2, None) + assert writer2.get_written_bytes() == data + + # Write with partial length + writer3 = MockStreamWriter() + await p.write_with_length(writer3, 5) + assert writer3.get_written_bytes() == b"test " + + # Test as_bytes multiple times + bytes1 = await p.as_bytes() + bytes2 = await p.as_bytes() + bytes3 = await p.as_bytes() + assert bytes1 == bytes2 == bytes3 == data + + +async def test_string_payload_reusability() -> None: + """Test that StringPayload can be written and read multiple times.""" + text = "test string data" + expected_bytes = text.encode("utf-8") + p = payload.StringPayload(text) + + # First write_with_length + writer1 = MockStreamWriter() + await p.write_with_length(writer1, None) + assert writer1.get_written_bytes() == expected_bytes + + # Second write_with_length (simulating redirect) + writer2 = MockStreamWriter() + await p.write_with_length(writer2, None) + assert writer2.get_written_bytes() == expected_bytes + + # Write with partial length + writer3 = MockStreamWriter() + await p.write_with_length(writer3, 5) + assert writer3.get_written_bytes() == b"test " + + # Test as_bytes multiple times + bytes1 = await p.as_bytes() + bytes2 = await p.as_bytes() + bytes3 = await p.as_bytes() + assert bytes1 == bytes2 == bytes3 == expected_bytes + + +async def test_bytes_io_payload_reusability() -> None: + """Test that BytesIOPayload can be written and read multiple times.""" + data = b"test bytesio payload" + bytes_io = io.BytesIO(data) + p = payload.BytesIOPayload(bytes_io) + + # First write_with_length + writer1 = MockStreamWriter() + await p.write_with_length(writer1, None) + assert writer1.get_written_bytes() == data + + # Second write_with_length (simulating redirect) + writer2 = MockStreamWriter() + await p.write_with_length(writer2, None) + assert writer2.get_written_bytes() == data + + # Write with partial length + writer3 = MockStreamWriter() + await p.write_with_length(writer3, 5) + assert 
writer3.get_written_bytes() == b"test " + + # Test as_bytes multiple times + bytes1 = await p.as_bytes() + bytes2 = await p.as_bytes() + bytes3 = await p.as_bytes() + assert bytes1 == bytes2 == bytes3 == data + + +async def test_string_io_payload_reusability() -> None: + """Test that StringIOPayload can be written and read multiple times.""" + text = "test stringio payload" + expected_bytes = text.encode("utf-8") + string_io = io.StringIO(text) + p = payload.StringIOPayload(string_io) + + # Note: StringIOPayload reads all content in __init__ and becomes a StringPayload + # So it should be fully reusable + + # First write_with_length + writer1 = MockStreamWriter() + await p.write_with_length(writer1, None) + assert writer1.get_written_bytes() == expected_bytes + + # Second write_with_length (simulating redirect) + writer2 = MockStreamWriter() + await p.write_with_length(writer2, None) + assert writer2.get_written_bytes() == expected_bytes + + # Write with partial length + writer3 = MockStreamWriter() + await p.write_with_length(writer3, 5) + assert writer3.get_written_bytes() == b"test " + + # Test as_bytes multiple times + bytes1 = await p.as_bytes() + bytes2 = await p.as_bytes() + bytes3 = await p.as_bytes() + assert bytes1 == bytes2 == bytes3 == expected_bytes + + +async def test_buffered_reader_payload_reusability() -> None: + """Test that BufferedReaderPayload can be written and read multiple times.""" + data = b"test buffered reader payload" + buffer = io.BufferedReader(io.BytesIO(data)) # type: ignore[arg-type] + p = payload.BufferedReaderPayload(buffer) + + # First write_with_length + writer1 = MockStreamWriter() + await p.write_with_length(writer1, None) + assert writer1.get_written_bytes() == data + + # Second write_with_length (simulating redirect) + writer2 = MockStreamWriter() + await p.write_with_length(writer2, None) + assert writer2.get_written_bytes() == data + + # Write with partial length + writer3 = MockStreamWriter() + await 
p.write_with_length(writer3, 5) + assert writer3.get_written_bytes() == b"test " + + # Test as_bytes multiple times + bytes1 = await p.as_bytes() + bytes2 = await p.as_bytes() + bytes3 = await p.as_bytes() + assert bytes1 == bytes2 == bytes3 == data + + +async def test_async_iterable_payload_reusability_with_cache() -> None: + """Test that AsyncIterablePayload can be reused when cached via as_bytes.""" + + async def gen() -> AsyncIterator[bytes]: + yield b"async " + yield b"iterable " + yield b"payload" + + expected_data = b"async iterable payload" + p = payload.AsyncIterablePayload(gen()) + + # First call to as_bytes should cache the data + bytes1 = await p.as_bytes() + assert bytes1 == expected_data + assert p._cached_chunks is not None + assert p._iter is None # Iterator exhausted + + # Subsequent as_bytes calls should use cache + bytes2 = await p.as_bytes() + bytes3 = await p.as_bytes() + assert bytes1 == bytes2 == bytes3 == expected_data + + # Now writes should also use the cached data + writer1 = MockStreamWriter() + await p.write_with_length(writer1, None) + assert writer1.get_written_bytes() == expected_data + + # Second write should also work + writer2 = MockStreamWriter() + await p.write_with_length(writer2, None) + assert writer2.get_written_bytes() == expected_data + + # Write with partial length + writer3 = MockStreamWriter() + await p.write_with_length(writer3, 5) + assert writer3.get_written_bytes() == b"async" + + +async def test_async_iterable_payload_no_reuse_without_cache() -> None: + """Test that AsyncIterablePayload cannot be reused without caching.""" + + async def gen() -> AsyncIterator[bytes]: + yield b"test " + yield b"data" + + p = payload.AsyncIterablePayload(gen()) + + # First write exhausts the iterator + writer1 = MockStreamWriter() + await p.write_with_length(writer1, None) + assert writer1.get_written_bytes() == b"test data" + assert p._iter is None # Iterator exhausted + assert p._consumed is True + + # Second write should produce 
empty result + writer2 = MockStreamWriter() + await p.write_with_length(writer2, None) + assert writer2.get_written_bytes() == b"" + + +async def test_bytes_io_payload_close_does_not_close_io() -> None: + """Test that BytesIOPayload close() does not close the underlying BytesIO.""" + bytes_io = io.BytesIO(b"data") + bytes_io_payload = payload.BytesIOPayload(bytes_io) + + # Close the payload + await bytes_io_payload.close() + + # BytesIO should NOT be closed + assert not bytes_io.closed + + # Can still write after close + writer = MockStreamWriter() + await bytes_io_payload.write_with_length(writer, None) + assert writer.get_written_bytes() == b"data" + + +async def test_custom_payload_backwards_compat_as_bytes() -> None: + """Test backwards compatibility for custom Payload that only implements decode().""" + + class LegacyPayload(payload.Payload): + """A custom payload that only implements decode() like old code might do.""" + + def __init__(self, data: str) -> None: + super().__init__(data, headers=CIMultiDict()) + self._data = data + + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + """Custom decode implementation.""" + return self._data + + async def write(self, writer: AbstractStreamWriter) -> None: + """Write implementation which is a no-op for this test.""" + + # Create instance with test data + p = LegacyPayload("Hello, World!") + + # Test that as_bytes() works even though it's not explicitly implemented + # The base class should call decode() and encode the result + result = await p.as_bytes() + assert result == b"Hello, World!" + + # Test with different text + p2 = LegacyPayload("Test with special chars: café") + result_utf8 = await p2.as_bytes(encoding="utf-8") + assert result_utf8 == "Test with special chars: café".encode() + + # Test that decode() still works as expected + assert p.decode() == "Hello, World!" 
+ assert p2.decode() == "Test with special chars: café" + + +async def test_custom_payload_with_encoding_backwards_compat() -> None: + """Test custom Payload with encoding set uses instance encoding for as_bytes().""" + + class EncodedPayload(payload.Payload): + """A custom payload with specific encoding.""" + + def __init__(self, data: str, encoding: str) -> None: + super().__init__(data, headers=CIMultiDict(), encoding=encoding) + self._data = data + + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + """Custom decode implementation.""" + return self._data + + async def write(self, writer: AbstractStreamWriter) -> None: + """Write implementation is a no-op.""" + + # Create instance with specific encoding + p = EncodedPayload("Test data", encoding="latin-1") + + # as_bytes() should use the instance encoding (latin-1) not the default utf-8 + result = await p.as_bytes() + assert result == b"Test data" # ASCII chars are same in latin-1 + + # Test with non-ASCII that differs between encodings + p2 = EncodedPayload("café", encoding="latin-1") + result_latin1 = await p2.as_bytes() + assert result_latin1 == "café".encode("latin-1") + assert result_latin1 != "café".encode() # Should be different bytes + + +async def test_iobase_payload_close_idempotent() -> None: + """Test that IOBasePayload.close() is idempotent and covers the _consumed check.""" + file_like = io.BytesIO(b"test data") + p = payload.IOBasePayload(file_like) + + # First close should set _consumed to True + await p.close() + assert p._consumed is True + + # Second close should be a no-op due to _consumed check (line 621) + await p.close() + assert p._consumed is True + + +def test_iobase_payload_decode() -> None: + """Test IOBasePayload.decode() returns correct string.""" + # Test with UTF-8 encoded text + text = "Hello, 世界! 
🌍" + file_like = io.BytesIO(text.encode("utf-8")) + p = payload.IOBasePayload(file_like) + + # decode() should return the original string + assert p.decode() == text + + # Test with different encoding + latin1_text = "café" + file_like2 = io.BytesIO(latin1_text.encode("latin-1")) + p2 = payload.IOBasePayload(file_like2) + assert p2.decode("latin-1") == latin1_text + + # Test that file position is restored + file_like3 = io.BytesIO(b"test data") + file_like3.read(4) # Move position forward + p3 = payload.IOBasePayload(file_like3) + # decode() should read from the stored start position (4) + assert p3.decode() == " data" + + +def test_bytes_payload_size() -> None: + """Test BytesPayload.size property returns correct byte length.""" + # Test with bytes + bp = payload.BytesPayload(b"Hello World") + assert bp.size == 11 + + # Test with empty bytes + bp_empty = payload.BytesPayload(b"") + assert bp_empty.size == 0 + + # Test with bytearray + ba = bytearray(b"Hello World") + bp_array = payload.BytesPayload(ba) + assert bp_array.size == 11 + + +def test_string_payload_size() -> None: + """Test StringPayload.size property with different encodings.""" + # Test ASCII string with default UTF-8 encoding + sp = payload.StringPayload("Hello World") + assert sp.size == 11 + + # Test Unicode string with default UTF-8 encoding + unicode_str = "Hello 世界" + sp_unicode = payload.StringPayload(unicode_str) + assert sp_unicode.size == len(unicode_str.encode("utf-8")) + + # Test with UTF-16 encoding + sp_utf16 = payload.StringPayload("Hello World", encoding="utf-16") + assert sp_utf16.size == len("Hello World".encode("utf-16")) + + # Test with latin-1 encoding + sp_latin1 = payload.StringPayload("café", encoding="latin-1") + assert sp_latin1.size == len("café".encode("latin-1")) + + +def test_string_io_payload_size() -> None: + """Test StringIOPayload.size property.""" + # Test normal string + sio = StringIO("Hello World") + siop = payload.StringIOPayload(sio) + assert siop.size == 11 + + 
# Test Unicode string + sio_unicode = StringIO("Hello 世界") + siop_unicode = payload.StringIOPayload(sio_unicode) + assert siop_unicode.size == len("Hello 世界".encode()) + + # Test with custom encoding + sio_custom = StringIO("Hello") + siop_custom = payload.StringIOPayload(sio_custom, encoding="utf-16") + assert siop_custom.size == len("Hello".encode("utf-16")) + + # Test with emoji to ensure correct byte count + sio_emoji = StringIO("Hello 👋🌍") + siop_emoji = payload.StringIOPayload(sio_emoji) + assert siop_emoji.size == len("Hello 👋🌍".encode()) + # Verify it's not the string length + assert siop_emoji.size != len("Hello 👋🌍") + + +def test_all_string_payloads_size_is_bytes() -> None: + """Test that all string-like payload classes report size in bytes, not string length.""" + # Test string with multibyte characters + test_str = "Hello 👋 世界 🌍" # Contains emoji and Chinese characters + + # StringPayload + sp = payload.StringPayload(test_str) + assert sp.size == len(test_str.encode("utf-8")) + assert sp.size != len(test_str) # Ensure it's not string length + + # StringIOPayload + sio = StringIO(test_str) + siop = payload.StringIOPayload(sio) + assert siop.size == len(test_str.encode("utf-8")) + assert siop.size != len(test_str) + + # Test with different encoding + sp_utf16 = payload.StringPayload(test_str, encoding="utf-16") + assert sp_utf16.size == len(test_str.encode("utf-16")) + assert sp_utf16.size != sp.size # Different encoding = different size + + # JsonPayload (which extends BytesPayload) + json_data = {"message": test_str} + jp = payload.JsonPayload(json_data) + # JSON escapes Unicode, so we need to check the actual encoded size + json_str = json.dumps(json_data) + assert jp.size == len(json_str.encode("utf-8")) + + # Test JsonPayload with ensure_ascii=False to get actual UTF-8 encoding + jp_utf8 = payload.JsonPayload( + json_data, dumps=lambda x: json.dumps(x, ensure_ascii=False) + ) + json_str_utf8 = json.dumps(json_data, ensure_ascii=False) + assert 
jp_utf8.size == len(json_str_utf8.encode("utf-8")) + assert jp_utf8.size != len( + json_str_utf8 + ) # Now it's different due to multibyte chars + + +def test_bytes_io_payload_size() -> None: + """Test BytesIOPayload.size property.""" + # Test normal bytes + bio = io.BytesIO(b"Hello World") + biop = payload.BytesIOPayload(bio) + assert biop.size == 11 + + # Test empty BytesIO + bio_empty = io.BytesIO(b"") + biop_empty = payload.BytesIOPayload(bio_empty) + assert biop_empty.size == 0 + + # Test with position not at start + bio_pos = io.BytesIO(b"Hello World") + bio_pos.seek(5) + biop_pos = payload.BytesIOPayload(bio_pos) + assert biop_pos.size == 6 # Size should be from position to end + + +def test_json_payload_size() -> None: + """Test JsonPayload.size property.""" + # Test simple dict + data = {"hello": "world"} + jp = payload.JsonPayload(data) + expected_json = json.dumps(data) # Use actual json.dumps output + assert jp.size == len(expected_json.encode("utf-8")) + + # Test with Unicode + data_unicode = {"message": "Hello 世界"} + jp_unicode = payload.JsonPayload(data_unicode) + expected_unicode = json.dumps(data_unicode) + assert jp_unicode.size == len(expected_unicode.encode("utf-8")) + + # Test with custom encoding + data_custom = {"test": "data"} + jp_custom = payload.JsonPayload(data_custom, encoding="utf-16") + expected_custom = json.dumps(data_custom) + assert jp_custom.size == len(expected_custom.encode("utf-16")) + + +async def test_text_io_payload_size_matches_file_encoding(tmp_path: Path) -> None: + """Test TextIOPayload.size when file encoding matches payload encoding.""" + # Create UTF-8 file + utf8_file = tmp_path / "test_utf8.txt" + content = "Hello 世界" + + # Write file in executor + loop = asyncio.get_running_loop() + await loop.run_in_executor(None, utf8_file.write_text, content, "utf-8") + + # Open file in executor + def open_file() -> TextIO: + return open(utf8_file, encoding="utf-8") + + f = await loop.run_in_executor(None, open_file) + try: + 
tiop = payload.TextIOPayload(f) + # Size should match the actual UTF-8 encoded size + assert tiop.size == len(content.encode("utf-8")) + finally: + await loop.run_in_executor(None, f.close) + + +async def test_text_io_payload_size_utf16(tmp_path: Path) -> None: + """Test TextIOPayload.size reports correct size with utf-16.""" + # Create UTF-16 file + utf16_file = tmp_path / "test_utf16.txt" + content = "Hello World" + + loop = asyncio.get_running_loop() + # Write file in executor + await loop.run_in_executor(None, utf16_file.write_text, content, "utf-16") + + # Get file size in executor + utf16_file_size = await loop.run_in_executor( + None, lambda: utf16_file.stat().st_size + ) + + # Open file in executor + def open_file() -> TextIO: + return open(utf16_file, encoding="utf-16") + + f = await loop.run_in_executor(None, open_file) + try: + tiop = payload.TextIOPayload(f, encoding="utf-16") + # Payload reports file size on disk (UTF-16) + assert tiop.size == utf16_file_size + + # Write to a buffer to see what actually gets sent + writer = BufferWriter() + await tiop.write(writer) + + # Check that the actual written bytes match file size + assert len(writer.buffer) == utf16_file_size + finally: + await loop.run_in_executor(None, f.close) From 6d13ccca04fbc8962fb399a5549e86464593e9fc Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 26 May 2025 08:16:03 -0500 Subject: [PATCH 1452/1511] [PR #11011/8658faad backport][3.12] Correct type of ClientRequest.body (#11023) Co-authored-by: Sam Bull <git@sambull.org> Co-authored-by: J. 
Nick Koston <nick@koston.org> --- aiohttp/client_middleware_digest_auth.py | 10 ++++++---- aiohttp/client_reqrep.py | 3 ++- docs/client_reference.rst | 7 ++----- tests/test_client_middleware_digest_auth.py | 6 +++--- tests/test_client_request.py | 16 ++++++++-------- 5 files changed, 21 insertions(+), 21 deletions(-) diff --git a/aiohttp/client_middleware_digest_auth.py b/aiohttp/client_middleware_digest_auth.py index 9a8ffc18313..b2daf76e6bb 100644 --- a/aiohttp/client_middleware_digest_auth.py +++ b/aiohttp/client_middleware_digest_auth.py @@ -193,7 +193,9 @@ def __init__( self._nonce_count = 0 self._challenge: DigestAuthChallenge = {} - async def _encode(self, method: str, url: URL, body: Union[bytes, Payload]) -> str: + async def _encode( + self, method: str, url: URL, body: Union[Payload, Literal[b""]] + ) -> str: """ Build digest authorization header for the current challenge. @@ -274,10 +276,10 @@ def KD(s: bytes, d: bytes) -> bytes: A1 = b":".join((self._login_bytes, realm_bytes, self._password_bytes)) A2 = f"{method.upper()}:{path}".encode() if qop == "auth-int": - if isinstance(body, bytes): # will always be empty bytes unless Payload - entity_bytes = body - else: + if isinstance(body, Payload): # will always be empty bytes unless Payload entity_bytes = await body.as_bytes() # Get bytes from Payload + else: + entity_bytes = body entity_hash = H(entity_bytes) A2 = b":".join((A2, entity_hash)) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 2322a1d7472..614751a17bd 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -17,6 +17,7 @@ Dict, Iterable, List, + Literal, Mapping, NamedTuple, Optional, @@ -459,7 +460,7 @@ def port(self) -> Optional[int]: return self.url.port @property - def body(self) -> Union[bytes, payload.Payload]: + def body(self) -> Union[payload.Payload, Literal[b""]]: """Request body.""" # empty body is represented as bytes for backwards compatibility return self._body or b"" diff --git 
a/docs/client_reference.rst b/docs/client_reference.rst index d3c2226aee0..40fd7cdb276 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -1882,12 +1882,9 @@ ClientRequest For more information about using middleware, see :ref:`aiohttp-client-middleware`. .. attribute:: body - :type: Payload | FormData + :type: Payload | Literal[b""] - The request body payload. This can be: - - - A :class:`Payload` object for raw data (default is empty bytes ``b""``) - - A :class:`FormData` object for form submissions + The request body payload (defaults to ``b""`` if no body passed). .. danger:: diff --git a/tests/test_client_middleware_digest_auth.py b/tests/test_client_middleware_digest_auth.py index 6da6850bafc..b649e0b601f 100644 --- a/tests/test_client_middleware_digest_auth.py +++ b/tests/test_client_middleware_digest_auth.py @@ -2,7 +2,7 @@ import io from hashlib import md5, sha1 -from typing import Generator, Union +from typing import Generator, Literal, Union from unittest import mock import pytest @@ -270,7 +270,7 @@ def KD(secret: str, data: str) -> str: @pytest.mark.parametrize( ("body", "body_str"), [ - (b"this is a body", "this is a body"), # Bytes case + (b"", ""), # Bytes case ( BytesIOPayload(io.BytesIO(b"this is a body")), "this is a body", @@ -280,7 +280,7 @@ def KD(secret: str, data: str) -> str: async def test_digest_response_exact_match( qop: str, algorithm: str, - body: Union[bytes, BytesIOPayload], + body: Union[Literal[b""], BytesIOPayload], body_str: str, mock_sha1_digest: mock.MagicMock, ) -> None: diff --git a/tests/test_client_request.py b/tests/test_client_request.py index b1807b96d82..f880bb0859f 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -1261,7 +1261,7 @@ def read(self, decode=False): async def test_oserror_on_write_bytes(loop, conn) -> None: req = ClientRequest("POST", URL("http://python.org/"), loop=loop) - req.body = b"test data" + req.body = b"test data" # type: ignore[assignment] # 
https://github.com/python/mypy/issues/12892 writer = WriterMock() writer.write.side_effect = OSError @@ -1618,7 +1618,7 @@ async def test_write_bytes_with_content_length_limit( data = b"Hello World" req = ClientRequest("post", URL("http://python.org/"), loop=loop) - req.body = data + req.body = data # type: ignore[assignment] # https://github.com/python/mypy/issues/12892 writer = StreamWriter(protocol=conn.protocol, loop=loop) # Use content_length=5 to truncate data @@ -1655,7 +1655,7 @@ async def gen() -> AsyncIterator[bytes]: req.body = gen() # type: ignore[assignment] # https://github.com/python/mypy/issues/12892 else: - req.body = data + req.body = data # type: ignore[assignment] # https://github.com/python/mypy/issues/12892 writer = StreamWriter(protocol=conn.protocol, loop=loop) # Use content_length=7 to truncate at the middle of Part2 @@ -1705,7 +1705,7 @@ async def test_warn_if_unclosed_payload_via_body_setter( ResourceWarning, match="The previous request body contains unclosed resources", ): - req.body = b"new data" + req.body = b"new data" # type: ignore[assignment] # https://github.com/python/mypy/issues/12892 await req.close() @@ -1723,7 +1723,7 @@ async def test_no_warn_for_autoclose_payload_via_body_setter( # Setting body again should not trigger warning since previous payload has autoclose=True with warnings.catch_warnings(record=True) as warning_list: warnings.simplefilter("always") - req.body = b"new data" + req.body = b"new data" # type: ignore[assignment] # https://github.com/python/mypy/issues/12892 # Filter out any non-ResourceWarning warnings resource_warnings = [ @@ -1753,7 +1753,7 @@ async def test_no_warn_for_consumed_payload_via_body_setter( # Setting body again should not trigger warning since previous payload is consumed with warnings.catch_warnings(record=True) as warning_list: warnings.simplefilter("always") - req.body = b"new data" + req.body = b"new data" # type: ignore[assignment] # https://github.com/python/mypy/issues/12892 # 
Filter out any non-ResourceWarning warnings resource_warnings = [ @@ -1872,7 +1872,7 @@ async def test_body_setter_closes_previous_payload( req._body = mock_payload # Update body with new data using setter - req.body = b"new body data" + req.body = b"new body data" # type: ignore[assignment] # https://github.com/python/mypy/issues/12892 # Verify the previous payload was closed using _close mock_payload._close.assert_called_once() @@ -2001,7 +2001,7 @@ async def test_warn_stacklevel_points_to_user_code( with warnings.catch_warnings(record=True) as warning_list: warnings.simplefilter("always", ResourceWarning) # This line should be reported as the warning source - req.body = b"new data" # LINE TO BE REPORTED + req.body = b"new data" # type: ignore[assignment] # https://github.com/python/mypy/issues/12892 # LINE TO BE REPORTED # Find the ResourceWarning resource_warnings = [ From f45de4222f5724e88d06190d69dd46ae74c5b50a Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 26 May 2025 13:49:19 +0000 Subject: [PATCH 1453/1511] [PR #11013/3f7a2e98 backport][3.12] Readjust baseline for ClientRequest benchmarks (#11015) Co-authored-by: Sam Bull <git@sambull.org> Co-authored-by: J. 
Nick Koston <nick@koston.org> --- tests/test_benchmarks_client_request.py | 73 +++++++++++++++++++++---- 1 file changed, 62 insertions(+), 11 deletions(-) diff --git a/tests/test_benchmarks_client_request.py b/tests/test_benchmarks_client_request.py index 65667995185..34ae3629f9e 100644 --- a/tests/test_benchmarks_client_request.py +++ b/tests/test_benchmarks_client_request.py @@ -1,27 +1,38 @@ """codspeed benchmarks for client requests.""" import asyncio -from http.cookies import Morsel +from http.cookies import BaseCookie from typing import Union +from multidict import CIMultiDict from pytest_codspeed import BenchmarkFixture from yarl import URL -from aiohttp.client_reqrep import ClientRequest +from aiohttp.client_reqrep import ClientRequest, ClientResponse +from aiohttp.cookiejar import CookieJar +from aiohttp.helpers import TimerNoop from aiohttp.http_writer import HttpVersion11 +from aiohttp.tracing import Trace def test_client_request_update_cookies( loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture ) -> None: - req = ClientRequest("get", URL("http://python.org"), loop=loop) - morsel: "Morsel[str]" = Morsel() - morsel.set(key="string", val="Another string", coded_val="really") - morsel_cookie = {"str": morsel} + url = URL("http://python.org") + + async def setup(): + cookie_jar = CookieJar() + cookie_jar.update_cookies({"string": "Another string"}) + cookies = cookie_jar.filter_cookies(url) + assert cookies["string"].value == "Another string" + req = ClientRequest("get", url, loop=loop) + return req, cookies + + req, cookies = loop.run_until_complete(setup()) @benchmark def _run() -> None: - req.update_cookies(cookies=morsel_cookie) + req.update_cookies(cookies=cookies) def test_create_client_request_with_cookies( @@ -29,15 +40,39 @@ def test_create_client_request_with_cookies( ) -> None: url = URL("http://python.org") + async def setup(): + cookie_jar = CookieJar() + cookie_jar.update_cookies({"cookie": "value"}) + cookies = 
cookie_jar.filter_cookies(url) + assert cookies["cookie"].value == "value" + return cookies + + cookies = loop.run_until_complete(setup()) + timer = TimerNoop() + traces: list[Trace] = [] + headers = CIMultiDict[str]() + @benchmark def _run() -> None: ClientRequest( method="get", url=url, loop=loop, - headers=None, + params=None, + skip_auto_headers=None, + response_class=ClientResponse, + proxy=None, + proxy_auth=None, + proxy_headers=None, + timer=timer, + session=None, + ssl=True, + traces=traces, + trust_env=False, + server_hostname=None, + headers=headers, data=None, - cookies={"cookie": "value"}, + cookies=cookies, auth=None, version=HttpVersion11, compress=False, @@ -50,6 +85,10 @@ def test_create_client_request_with_headers( loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture ) -> None: url = URL("http://python.org") + timer = TimerNoop() + traces: list[Trace] = [] + headers = CIMultiDict({"header": "value", "another": "header"}) + cookies = BaseCookie[str]() @benchmark def _run() -> None: @@ -57,9 +96,21 @@ def _run() -> None: method="get", url=url, loop=loop, - headers={"header": "value", "another": "header"}, + params=None, + skip_auto_headers=None, + response_class=ClientResponse, + proxy=None, + proxy_auth=None, + proxy_headers=None, + timer=timer, + session=None, + ssl=True, + traces=traces, + trust_env=False, + server_hostname=None, + headers=headers, data=None, - cookies=None, + cookies=cookies, auth=None, version=HttpVersion11, compress=False, From c6f67b03363534541d5834bb161ee25161334cd9 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Mon, 26 May 2025 09:00:15 -0500 Subject: [PATCH 1454/1511] [PR #11020/852297c backport][3.12] Cleanup some type ignores in the client request tests (#11026) --- tests/test_client_request.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/test_client_request.py b/tests/test_client_request.py index f880bb0859f..e8381a3ef77 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -751,7 +751,8 @@ async def test_post_data(loop: asyncio.AbstractEventLoop, conn: mock.Mock) -> No ) resp = await req.send(conn) assert "/" == req.url.path - assert b"life=42" == req.body._value # type: ignore[union-attr] + assert isinstance(req.body, payload.Payload) + assert b"life=42" == req.body._value assert "application/x-www-form-urlencoded" == req.headers["CONTENT-TYPE"] await req.close() resp.close() @@ -788,7 +789,8 @@ async def test_get_with_data(loop) -> None: meth, URL("http://python.org/"), data={"life": "42"}, loop=loop ) assert "/" == req.url.path - assert b"life=42" == req.body._value # type: ignore[union-attr] + assert isinstance(req.body, payload.Payload) + assert b"life=42" == req.body._value await req.close() From 824d387805118ce4a5749c3de52addc1b2a1e998 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Mon, 26 May 2025 15:22:07 +0100 Subject: [PATCH 1455/1511] Move ClientResponse to top of file (#11029) --- aiohttp/client_reqrep.py | 2066 +++++++++++++++++++------------------- 1 file changed, 1032 insertions(+), 1034 deletions(-) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 614751a17bd..41acec87712 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -94,6 +94,7 @@ from .tracing import Trace +_CONNECTION_CLOSED_EXCEPTION = ClientConnectionError("Connection closed") _CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]") json_re = re.compile(r"^application/(?:[\w.+-]+?\+)?json") @@ -272,1138 +273,1222 @@ def 
_warn_if_unclosed_payload(payload: payload.Payload, stacklevel: int = 2) -> ) -class ClientRequest: - GET_METHODS = { - hdrs.METH_GET, - hdrs.METH_HEAD, - hdrs.METH_OPTIONS, - hdrs.METH_TRACE, - } - POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT} - ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE}) - - DEFAULT_HEADERS = { - hdrs.ACCEPT: "*/*", - hdrs.ACCEPT_ENCODING: _gen_default_accept_encoding(), - } - - # Type of body depends on PAYLOAD_REGISTRY, which is dynamic. - _body: Union[None, payload.Payload] = None - auth = None - response = None +class ClientResponse(HeadersMixin): - __writer: Optional["asyncio.Task[None]"] = None # async task for streaming data + # Some of these attributes are None when created, + # but will be set by the start() method. + # As the end user will likely never see the None values, we cheat the types below. + # from the Status-Line of the response + version: Optional[HttpVersion] = None # HTTP-Version + status: int = None # type: ignore[assignment] # Status-Code + reason: Optional[str] = None # Reason-Phrase - # These class defaults help create_autospec() work correctly. - # If autospec is improved in future, maybe these can be removed. - url = URL() - method = "GET" + content: StreamReader = None # type: ignore[assignment] # Payload stream + _body: Optional[bytes] = None + _headers: CIMultiDictProxy[str] = None # type: ignore[assignment] + _history: Tuple["ClientResponse", ...] 
= () + _raw_headers: RawHeaders = None # type: ignore[assignment] - _continue = None # waiter future for '100 Continue' response + _connection: Optional["Connection"] = None # current connection + _cookies: Optional[SimpleCookie] = None + _continue: Optional["asyncio.Future[bool]"] = None + _source_traceback: Optional[traceback.StackSummary] = None + _session: Optional["ClientSession"] = None + # set up by ClientRequest after ClientResponse object creation + # post-init stage allows to not change ctor signature + _closed = True # to allow __del__ for non-initialized properly response + _released = False + _in_context = False - _skip_auto_headers: Optional["CIMultiDict[None]"] = None + _resolve_charset: Callable[["ClientResponse", bytes], str] = lambda *_: "utf-8" - # N.B. - # Adding __del__ method with self._writer closing doesn't make sense - # because _writer is instance method, thus it keeps a reference to self. - # Until writer has finished finalizer will not be called. + __writer: Optional["asyncio.Task[None]"] = None def __init__( self, method: str, url: URL, *, - params: Query = None, - headers: Optional[LooseHeaders] = None, - skip_auto_headers: Optional[Iterable[str]] = None, - data: Any = None, - cookies: Optional[LooseCookies] = None, - auth: Optional[BasicAuth] = None, - version: http.HttpVersion = http.HttpVersion11, - compress: Union[str, bool, None] = None, - chunked: Optional[bool] = None, - expect100: bool = False, - loop: Optional[asyncio.AbstractEventLoop] = None, - response_class: Optional[Type["ClientResponse"]] = None, - proxy: Optional[URL] = None, - proxy_auth: Optional[BasicAuth] = None, - timer: Optional[BaseTimerContext] = None, - session: Optional["ClientSession"] = None, - ssl: Union[SSLContext, bool, Fingerprint] = True, - proxy_headers: Optional[LooseHeaders] = None, - traces: Optional[List["Trace"]] = None, - trust_env: bool = False, - server_hostname: Optional[str] = None, - ): - if loop is None: - loop = asyncio.get_event_loop() - 
if match := _CONTAINS_CONTROL_CHAR_RE.search(method): - raise ValueError( - f"Method cannot contain non-token characters {method!r} " - f"(found at least {match.group()!r})" - ) + writer: "Optional[asyncio.Task[None]]", + continue100: Optional["asyncio.Future[bool]"], + timer: BaseTimerContext, + request_info: RequestInfo, + traces: List["Trace"], + loop: asyncio.AbstractEventLoop, + session: "ClientSession", + ) -> None: # URL forbids subclasses, so a simple type check is enough. - assert type(url) is URL, url - if proxy is not None: - assert type(proxy) is URL, proxy - # FIXME: session is None in tests only, need to fix tests - # assert session is not None - if TYPE_CHECKING: - assert session is not None - self._session = session - if params: - url = url.extend_query(params) - self.original_url = url - self.url = url.with_fragment(None) if url.raw_fragment else url - self.method = method.upper() - self.chunked = chunked - self.compress = compress - self.loop = loop - self.length = None - if response_class is None: - real_response_class = ClientResponse - else: - real_response_class = response_class - self.response_class: Type[ClientResponse] = real_response_class - self._timer = timer if timer is not None else TimerNoop() - self._ssl = ssl if ssl is not None else True - self.server_hostname = server_hostname + assert type(url) is URL + + self.method = method + self._real_url = url + self._url = url.with_fragment(None) if url.raw_fragment else url + if writer is not None: + self._writer = writer + if continue100 is not None: + self._continue = continue100 + self._request_info = request_info + self._timer = timer if timer is not None else TimerNoop() + self._cache: Dict[str, Any] = {} + self._traces = traces + self._loop = loop + # Save reference to _resolve_charset, so that get_encoding() will still + # work after the response has finished reading the body. + # TODO: Fix session=None in tests (see ClientRequest.__init__). 
+ if session is not None: + # store a reference to session #1985 + self._session = session + self._resolve_charset = session._resolve_charset if loop.get_debug(): self._source_traceback = traceback.extract_stack(sys._getframe(1)) - self.update_version(version) - self.update_host(url) - self.update_headers(headers) - self.update_auto_headers(skip_auto_headers) - self.update_cookies(cookies) - self.update_content_encoding(data) - self.update_auth(auth, trust_env) - self.update_proxy(proxy, proxy_auth, proxy_headers) - - self.update_body_from_data(data) - if data is not None or self.method not in self.GET_METHODS: - self.update_transfer_encoding() - self.update_expect_continue(expect100) - self._traces = [] if traces is None else traces - def __reset_writer(self, _: object = None) -> None: self.__writer = None - def _get_content_length(self) -> Optional[int]: - """Extract and validate Content-Length header value. - - Returns parsed Content-Length value or None if not set. - Raises ValueError if header exists but cannot be parsed as an integer. - """ - if hdrs.CONTENT_LENGTH not in self.headers: - return None - - content_length_hdr = self.headers[hdrs.CONTENT_LENGTH] - try: - return int(content_length_hdr) - except ValueError: - raise ValueError( - f"Invalid Content-Length header: {content_length_hdr}" - ) from None - - @property - def skip_auto_headers(self) -> CIMultiDict[None]: - return self._skip_auto_headers or CIMultiDict() - @property def _writer(self) -> Optional["asyncio.Task[None]"]: + """The writer task for streaming data. + + _writer is only provided for backwards compatibility + for subclasses that may need to access it. 
+ """ return self.__writer @_writer.setter - def _writer(self, writer: "asyncio.Task[None]") -> None: + def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None: + """Set the writer task for streaming data.""" if self.__writer is not None: self.__writer.remove_done_callback(self.__reset_writer) self.__writer = writer - writer.add_done_callback(self.__reset_writer) - - def is_ssl(self) -> bool: - return self.url.scheme in _SSL_SCHEMES - - @property - def ssl(self) -> Union["SSLContext", bool, Fingerprint]: - return self._ssl - - @property - def connection_key(self) -> ConnectionKey: - if proxy_headers := self.proxy_headers: - h: Optional[int] = hash(tuple(proxy_headers.items())) + if writer is None: + return + if writer.done(): + # The writer is already done, so we can clear it immediately. + self.__writer = None else: - h = None - url = self.url - return tuple.__new__( - ConnectionKey, - ( - url.raw_host or "", - url.port, - url.scheme in _SSL_SCHEMES, - self._ssl, - self.proxy, - self.proxy_auth, - h, - ), - ) + writer.add_done_callback(self.__reset_writer) @property - def host(self) -> str: - ret = self.url.raw_host - assert ret is not None - return ret + def cookies(self) -> SimpleCookie: + if self._cookies is None: + self._cookies = SimpleCookie() + return self._cookies - @property - def port(self) -> Optional[int]: - return self.url.port + @cookies.setter + def cookies(self, cookies: SimpleCookie) -> None: + self._cookies = cookies - @property - def body(self) -> Union[payload.Payload, Literal[b""]]: - """Request body.""" - # empty body is represented as bytes for backwards compatibility - return self._body or b"" + @reify + def url(self) -> URL: + return self._url - @body.setter - def body(self, value: Any) -> None: - """Set request body with warning for non-autoclose payloads. 
+ @reify + def url_obj(self) -> URL: + warnings.warn("Deprecated, use .url #1654", DeprecationWarning, stacklevel=2) + return self._url - WARNING: This setter must be called from within an event loop and is not - thread-safe. Setting body outside of an event loop may raise RuntimeError - when closing file-based payloads. + @reify + def real_url(self) -> URL: + return self._real_url - DEPRECATED: Direct assignment to body is deprecated and will be removed - in a future version. Use await update_body() instead for proper resource - management. - """ - # Close existing payload if present - if self._body is not None: - # Warn if the payload needs manual closing - # stacklevel=3: user code -> body setter -> _warn_if_unclosed_payload - _warn_if_unclosed_payload(self._body, stacklevel=3) - # NOTE: In the future, when we remove sync close support, - # this setter will need to be removed and only the async - # update_body() method will be available. For now, we call - # _close() for backwards compatibility. - self._body._close() - self._update_body(value) + @reify + def host(self) -> str: + assert self._url.host is not None + return self._url.host - @property - def request_info(self) -> RequestInfo: - headers: CIMultiDictProxy[str] = CIMultiDictProxy(self.headers) - # These are created on every request, so we use a NamedTuple - # for performance reasons. We don't use the RequestInfo.__new__ - # method because it has a different signature which is provided - # for backwards compatibility only. - return tuple.__new__( - RequestInfo, (self.url, self.method, headers, self.original_url) - ) + @reify + def headers(self) -> "CIMultiDictProxy[str]": + return self._headers - @property - def session(self) -> "ClientSession": - """Return the ClientSession instance. 
+ @reify + def raw_headers(self) -> RawHeaders: + return self._raw_headers - This property provides access to the ClientSession that initiated - this request, allowing middleware to make additional requests - using the same session. - """ - return self._session + @reify + def request_info(self) -> RequestInfo: + return self._request_info - def update_host(self, url: URL) -> None: - """Update destination host, port and connection type (ssl).""" - # get host/port - if not url.raw_host: - raise InvalidURL(url) + @reify + def content_disposition(self) -> Optional[ContentDisposition]: + raw = self._headers.get(hdrs.CONTENT_DISPOSITION) + if raw is None: + return None + disposition_type, params_dct = multipart.parse_content_disposition(raw) + params = MappingProxyType(params_dct) + filename = multipart.content_disposition_filename(params) + return ContentDisposition(disposition_type, params, filename) - # basic auth info - if url.raw_user or url.raw_password: - self.auth = helpers.BasicAuth(url.user or "", url.password or "") + def __del__(self, _warnings: Any = warnings) -> None: + if self._closed: + return - def update_version(self, version: Union[http.HttpVersion, str]) -> None: - """Convert request version to two elements tuple. 
+ if self._connection is not None: + self._connection.release() + self._cleanup_writer() - parser HTTP version '1.1' => (1, 1) - """ - if isinstance(version, str): - v = [part.strip() for part in version.split(".", 1)] - try: - version = http.HttpVersion(int(v[0]), int(v[1])) - except ValueError: - raise ValueError( - f"Can not parse http version number: {version}" - ) from None - self.version = version + if self._loop.get_debug(): + kwargs = {"source": self} + _warnings.warn(f"Unclosed response {self!r}", ResourceWarning, **kwargs) + context = {"client_response": self, "message": "Unclosed response"} + if self._source_traceback: + context["source_traceback"] = self._source_traceback + self._loop.call_exception_handler(context) - def update_headers(self, headers: Optional[LooseHeaders]) -> None: - """Update request headers.""" - self.headers: CIMultiDict[str] = CIMultiDict() + def __repr__(self) -> str: + out = io.StringIO() + ascii_encodable_url = str(self.url) + if self.reason: + ascii_encodable_reason = self.reason.encode( + "ascii", "backslashreplace" + ).decode("ascii") + else: + ascii_encodable_reason = "None" + print( + "<ClientResponse({}) [{} {}]>".format( + ascii_encodable_url, self.status, ascii_encodable_reason + ), + file=out, + ) + print(self.headers, file=out) + return out.getvalue() - # Build the host header - host = self.url.host_port_subcomponent + @property + def connection(self) -> Optional["Connection"]: + return self._connection - # host_port_subcomponent is None when the URL is a relative URL. - # but we know we do not have a relative URL here. 
- assert host is not None - self.headers[hdrs.HOST] = host + @reify + def history(self) -> Tuple["ClientResponse", ...]: + """A sequence of of responses, if redirects occurred.""" + return self._history - if not headers: - return + @reify + def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]": + links_str = ", ".join(self.headers.getall("link", [])) - if isinstance(headers, (dict, MultiDictProxy, MultiDict)): - headers = headers.items() + if not links_str: + return MultiDictProxy(MultiDict()) - for key, value in headers: # type: ignore[misc] - # A special case for Host header - if key in hdrs.HOST_ALL: - self.headers[key] = value - else: - self.headers.add(key, value) + links: MultiDict[MultiDictProxy[Union[str, URL]]] = MultiDict() - def update_auto_headers(self, skip_auto_headers: Optional[Iterable[str]]) -> None: - if skip_auto_headers is not None: - self._skip_auto_headers = CIMultiDict( - (hdr, None) for hdr in sorted(skip_auto_headers) - ) - used_headers = self.headers.copy() - used_headers.extend(self._skip_auto_headers) # type: ignore[arg-type] - else: - # Fast path when there are no headers to skip - # which is the most common case. 
- used_headers = self.headers + for val in re.split(r",(?=\s*<)", links_str): + match = re.match(r"\s*<(.*)>(.*)", val) + if match is None: # pragma: no cover + # the check exists to suppress mypy error + continue + url, params_str = match.groups() + params = params_str.split(";")[1:] - for hdr, val in self.DEFAULT_HEADERS.items(): - if hdr not in used_headers: - self.headers[hdr] = val + link: MultiDict[Union[str, URL]] = MultiDict() - if hdrs.USER_AGENT not in used_headers: - self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE + for param in params: + match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M) + if match is None: # pragma: no cover + # the check exists to suppress mypy error + continue + key, _, value, _ = match.groups() - def update_cookies(self, cookies: Optional[LooseCookies]) -> None: - """Update request cookies header.""" - if not cookies: - return + link.add(key, value) - c = SimpleCookie() - if hdrs.COOKIE in self.headers: - c.load(self.headers.get(hdrs.COOKIE, "")) - del self.headers[hdrs.COOKIE] + key = link.get("rel", url) - if isinstance(cookies, Mapping): - iter_cookies = cookies.items() - else: - iter_cookies = cookies # type: ignore[assignment] - for name, value in iter_cookies: - if isinstance(value, Morsel): - # Preserve coded_value - mrsl_val = value.get(value.key, Morsel()) - mrsl_val.set(value.key, value.value, value.coded_value) - c[name] = mrsl_val - else: - c[name] = value # type: ignore[assignment] + link.add("url", self.url.join(URL(url))) - self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip() + links.add(str(key), MultiDictProxy(link)) - def update_content_encoding(self, data: Any) -> None: - """Set request content encoding.""" - if not data: - # Don't compress an empty body. 
- self.compress = None - return + return MultiDictProxy(links) - if self.headers.get(hdrs.CONTENT_ENCODING): - if self.compress: - raise ValueError( - "compress can not be set if Content-Encoding header is set" - ) - elif self.compress: - if not isinstance(self.compress, str): - self.compress = "deflate" - self.headers[hdrs.CONTENT_ENCODING] = self.compress - self.chunked = True # enable chunked, no need to deal with length + async def start(self, connection: "Connection") -> "ClientResponse": + """Start response processing.""" + self._closed = False + self._protocol = connection.protocol + self._connection = connection - def update_transfer_encoding(self) -> None: - """Analyze transfer-encoding header.""" - te = self.headers.get(hdrs.TRANSFER_ENCODING, "").lower() + with self._timer: + while True: + # read response + try: + protocol = self._protocol + message, payload = await protocol.read() # type: ignore[union-attr] + except http.HttpProcessingError as exc: + raise ClientResponseError( + self.request_info, + self.history, + status=exc.code, + message=exc.message, + headers=exc.headers, + ) from exc - if "chunked" in te: - if self.chunked: - raise ValueError( - "chunked can not be set " - 'if "Transfer-Encoding: chunked" header is set' - ) + if message.code < 100 or message.code > 199 or message.code == 101: + break - elif self.chunked: - if hdrs.CONTENT_LENGTH in self.headers: - raise ValueError( - "chunked can not be set if Content-Length header is set" - ) + if self._continue is not None: + set_result(self._continue, True) + self._continue = None - self.headers[hdrs.TRANSFER_ENCODING] = "chunked" - elif ( - self._body is not None - and hdrs.CONTENT_LENGTH not in self.headers - and (size := self._body.size) is not None - ): - self.headers[hdrs.CONTENT_LENGTH] = str(size) + # payload eof handler + payload.on_eof(self._response_eof) - def update_auth(self, auth: Optional[BasicAuth], trust_env: bool = False) -> None: - """Set basic auth.""" - if auth is None: - 
auth = self.auth - if auth is None and trust_env and self.url.host is not None: - netrc_obj = netrc_from_env() - with contextlib.suppress(LookupError): - auth = basicauth_from_netrc(netrc_obj, self.url.host) - if auth is None: - return + # response status + self.version = message.version + self.status = message.code + self.reason = message.reason - if not isinstance(auth, helpers.BasicAuth): - raise TypeError("BasicAuth() tuple is required instead") + # headers + self._headers = message.headers # type is CIMultiDictProxy + self._raw_headers = message.raw_headers # type is Tuple[bytes, bytes] - self.headers[hdrs.AUTHORIZATION] = auth.encode() + # payload + self.content = payload - def update_body_from_data(self, body: Any, _stacklevel: int = 3) -> None: - """Update request body from data.""" - if self._body is not None: - _warn_if_unclosed_payload(self._body, stacklevel=_stacklevel) + # cookies + if cookie_hdrs := self.headers.getall(hdrs.SET_COOKIE, ()): + cookies = SimpleCookie() + for hdr in cookie_hdrs: + try: + cookies.load(hdr) + except CookieError as exc: + client_logger.warning("Can not load response cookies: %s", exc) + self._cookies = cookies + return self - if body is None: - self._body = None + def _response_eof(self) -> None: + if self._closed: return - # FormData - maybe_payload = body() if isinstance(body, FormData) else body + # protocol could be None because connection could be detached + protocol = self._connection and self._connection.protocol + if protocol is not None and protocol.upgraded: + return - try: - body_payload = payload.PAYLOAD_REGISTRY.get(maybe_payload, disposition=None) - except payload.LookupError: - body_payload = FormData(maybe_payload)() # type: ignore[arg-type] + self._closed = True + self._cleanup_writer() + self._release_connection() - self._body = body_payload - # enable chunked encoding if needed - if not self.chunked and hdrs.CONTENT_LENGTH not in self.headers: - if (size := body_payload.size) is not None: - 
self.headers[hdrs.CONTENT_LENGTH] = str(size) - else: - self.chunked = True + @property + def closed(self) -> bool: + return self._closed - # copy payload headers - assert body_payload.headers - headers = self.headers - skip_headers = self._skip_auto_headers - for key, value in body_payload.headers.items(): - if key in headers or (skip_headers is not None and key in skip_headers): - continue - headers[key] = value + def close(self) -> None: + if not self._released: + self._notify_content() - def _update_body(self, body: Any) -> None: - """Update request body after its already been set.""" - # Remove existing Content-Length header since body is changing - if hdrs.CONTENT_LENGTH in self.headers: - del self.headers[hdrs.CONTENT_LENGTH] + self._closed = True + if self._loop is None or self._loop.is_closed(): + return - # Remove existing Transfer-Encoding header to avoid conflicts - if self.chunked and hdrs.TRANSFER_ENCODING in self.headers: - del self.headers[hdrs.TRANSFER_ENCODING] + self._cleanup_writer() + if self._connection is not None: + self._connection.close() + self._connection = None - # Now update the body using the existing method - # Called from _update_body, add 1 to stacklevel from caller - self.update_body_from_data(body, _stacklevel=4) + def release(self) -> Any: + if not self._released: + self._notify_content() - # Update transfer encoding headers if needed (same logic as __init__) - if body is not None or self.method not in self.GET_METHODS: - self.update_transfer_encoding() + self._closed = True - async def update_body(self, body: Any) -> None: + self._cleanup_writer() + self._release_connection() + return noop() + + @property + def ok(self) -> bool: + """Returns ``True`` if ``status`` is less than ``400``, ``False`` if not. + + This is **not** a check for ``200 OK`` but a check that the response + status is under 400. """ - Update request body and close previous payload if needed. 
+ return 400 > self.status - This method safely updates the request body by first closing any existing - payload to prevent resource leaks, then setting the new body. + def raise_for_status(self) -> None: + if not self.ok: + # reason should always be not None for a started response + assert self.reason is not None - IMPORTANT: Always use this method instead of setting request.body directly. - Direct assignment to request.body will leak resources if the previous body - contains file handles, streams, or other resources that need cleanup. + # If we're in a context we can rely on __aexit__() to release as the + # exception propagates. + if not self._in_context: + self.release() - Args: - body: The new body content. Can be: - - bytes/bytearray: Raw binary data - - str: Text data (will be encoded using charset from Content-Type) - - FormData: Form data that will be encoded as multipart/form-data - - Payload: A pre-configured payload object - - AsyncIterable: An async iterable of bytes chunks - - File-like object: Will be read and sent as binary data - - None: Clears the body + raise ClientResponseError( + self.request_info, + self.history, + status=self.status, + message=self.reason, + headers=self.headers, + ) - Usage: - # CORRECT: Use update_body - await request.update_body(b"new request data") + def _release_connection(self) -> None: + if self._connection is not None: + if self.__writer is None: + self._connection.release() + self._connection = None + else: + self.__writer.add_done_callback(lambda f: self._release_connection()) - # WRONG: Don't set body directly - # request.body = b"new request data" # This will leak resources! 
+ async def _wait_released(self) -> None: + if self.__writer is not None: + try: + await self.__writer + except asyncio.CancelledError: + if ( + sys.version_info >= (3, 11) + and (task := asyncio.current_task()) + and task.cancelling() + ): + raise + self._release_connection() - # Update with form data - form_data = FormData() - form_data.add_field('field', 'value') - await request.update_body(form_data) + def _cleanup_writer(self) -> None: + if self.__writer is not None: + self.__writer.cancel() + self._session = None - # Clear body - await request.update_body(None) + def _notify_content(self) -> None: + content = self.content + if content and content.exception() is None: + set_exception(content, _CONNECTION_CLOSED_EXCEPTION) + self._released = True - Note: - This method is async because it may need to close file handles or - other resources associated with the previous payload. Always await - this method to ensure proper cleanup. + async def wait_for_close(self) -> None: + if self.__writer is not None: + try: + await self.__writer + except asyncio.CancelledError: + if ( + sys.version_info >= (3, 11) + and (task := asyncio.current_task()) + and task.cancelling() + ): + raise + self.release() - Warning: - Setting request.body directly is highly discouraged and can lead to: - - Resource leaks (unclosed file handles, streams) - - Memory leaks (unreleased buffers) - - Unexpected behavior with streaming payloads + async def read(self) -> bytes: + """Read response payload.""" + if self._body is None: + try: + self._body = await self.content.read() + for trace in self._traces: + await trace.send_response_chunk_received( + self.method, self.url, self._body + ) + except BaseException: + self.close() + raise + elif self._released: # Response explicitly released + raise ClientConnectionError("Connection closed") - It is not recommended to change the payload type in middleware. 
If the - body was already set (e.g., as bytes), it's best to keep the same type - rather than converting it (e.g., to str) as this may result in unexpected - behavior. + protocol = self._connection and self._connection.protocol + if protocol is None or not protocol.upgraded: + await self._wait_released() # Underlying connection released + return self._body - See Also: - - update_body_from_data: Synchronous body update without cleanup - - body property: Direct body access (STRONGLY DISCOURAGED) + def get_encoding(self) -> str: + ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower() + mimetype = helpers.parse_mimetype(ctype) - """ - # Close existing payload if it exists and needs closing - if self._body is not None: - await self._body.close() - self._update_body(body) + encoding = mimetype.parameters.get("charset") + if encoding: + with contextlib.suppress(LookupError, ValueError): + return codecs.lookup(encoding).name - def update_expect_continue(self, expect: bool = False) -> None: - if expect: - self.headers[hdrs.EXPECT] = "100-continue" - elif ( - hdrs.EXPECT in self.headers - and self.headers[hdrs.EXPECT].lower() == "100-continue" + if mimetype.type == "application" and ( + mimetype.subtype == "json" or mimetype.subtype == "rdap" ): - expect = True + # RFC 7159 states that the default encoding is UTF-8. 
+ # RFC 7483 defines application/rdap+json + return "utf-8" - if expect: - self._continue = self.loop.create_future() + if self._body is None: + raise RuntimeError( + "Cannot compute fallback encoding of a not yet read body" + ) - def update_proxy( - self, - proxy: Optional[URL], - proxy_auth: Optional[BasicAuth], - proxy_headers: Optional[LooseHeaders], - ) -> None: - self.proxy = proxy - if proxy is None: - self.proxy_auth = None - self.proxy_headers = None - return + return self._resolve_charset(self, self._body) - if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth): - raise ValueError("proxy_auth must be None or BasicAuth() tuple") - self.proxy_auth = proxy_auth + async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str: + """Read response payload and decode.""" + if self._body is None: + await self.read() - if proxy_headers is not None and not isinstance( - proxy_headers, (MultiDict, MultiDictProxy) - ): - proxy_headers = CIMultiDict(proxy_headers) - self.proxy_headers = proxy_headers + if encoding is None: + encoding = self.get_encoding() - async def write_bytes( + return self._body.decode(encoding, errors=errors) # type: ignore[union-attr] + + async def json( self, - writer: AbstractStreamWriter, - conn: "Connection", - content_length: Optional[int], + *, + encoding: Optional[str] = None, + loads: JSONDecoder = DEFAULT_JSON_DECODER, + content_type: Optional[str] = "application/json", + ) -> Any: + """Read and decodes JSON response.""" + if self._body is None: + await self.read() + + if content_type: + ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower() + if not _is_expected_content_type(ctype, content_type): + raise ContentTypeError( + self.request_info, + self.history, + status=self.status, + message=( + "Attempt to decode JSON with unexpected mimetype: %s" % ctype + ), + headers=self.headers, + ) + + stripped = self._body.strip() # type: ignore[union-attr] + if not stripped: + return None + + if encoding is None: 
+ encoding = self.get_encoding() + + return loads(stripped.decode(encoding)) + + async def __aenter__(self) -> "ClientResponse": + self._in_context = True + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], ) -> None: - """ - Write the request body to the connection stream. + self._in_context = False + # similar to _RequestContextManager, we do not need to check + # for exceptions, response object can close connection + # if state is broken + self.release() + await self.wait_for_close() - This method handles writing different types of request bodies: - 1. Payload objects (using their specialized write_with_length method) - 2. Bytes/bytearray objects - 3. Iterable body content - Args: - writer: The stream writer to write the body to - conn: The connection being used for this request - content_length: Optional maximum number of bytes to write from the body - (None means write the entire body) +class ClientRequest: + GET_METHODS = { + hdrs.METH_GET, + hdrs.METH_HEAD, + hdrs.METH_OPTIONS, + hdrs.METH_TRACE, + } + POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT} + ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE}) - The method properly handles: - - Waiting for 100-Continue responses if required - - Content length constraints for chunked encoding - - Error handling for network issues, cancellation, and other exceptions - - Signaling EOF and timeout management + DEFAULT_HEADERS = { + hdrs.ACCEPT: "*/*", + hdrs.ACCEPT_ENCODING: _gen_default_accept_encoding(), + } - Raises: - ClientOSError: When there's an OS-level error writing the body - ClientConnectionError: When there's a general connection error - asyncio.CancelledError: When the operation is cancelled + # Type of body depends on PAYLOAD_REGISTRY, which is dynamic. 
+ _body: Union[None, payload.Payload] = None + auth = None + response = None - """ - # 100 response - if self._continue is not None: - # Force headers to be sent before waiting for 100-continue - writer.send_headers() - await writer.drain() - await self._continue + __writer: Optional["asyncio.Task[None]"] = None # async task for streaming data - protocol = conn.protocol - assert protocol is not None - try: - # This should be a rare case but the - # self._body can be set to None while - # the task is being started or we wait above - # for the 100-continue response. - # The more likely case is we have an empty - # payload, but 100-continue is still expected. - if self._body is not None: - await self._body.write_with_length(writer, content_length) - except OSError as underlying_exc: - reraised_exc = underlying_exc + # These class defaults help create_autospec() work correctly. + # If autospec is improved in future, maybe these can be removed. + url = URL() + method = "GET" - # Distinguish between timeout and other OS errors for better error reporting - exc_is_not_timeout = underlying_exc.errno is not None or not isinstance( - underlying_exc, asyncio.TimeoutError - ) - if exc_is_not_timeout: - reraised_exc = ClientOSError( - underlying_exc.errno, - f"Can not write request body for {self.url !s}", - ) + _continue = None # waiter future for '100 Continue' response - set_exception(protocol, reraised_exc, underlying_exc) - except asyncio.CancelledError: - # Body hasn't been fully sent, so connection can't be reused - conn.close() - raise - except Exception as underlying_exc: - set_exception( - protocol, - ClientConnectionError( - "Failed to send bytes into the underlying connection " - f"{conn !s}: {underlying_exc!r}", - ), - underlying_exc, - ) - else: - # Successfully wrote the body, signal EOF and start response timeout - await writer.write_eof() - protocol.start_timeout() + _skip_auto_headers: Optional["CIMultiDict[None]"] = None - async def send(self, conn: 
"Connection") -> "ClientResponse": - # Specify request target: - # - CONNECT request must send authority form URI - # - not CONNECT proxy must send absolute form URI - # - most common is origin form URI - if self.method == hdrs.METH_CONNECT: - connect_host = self.url.host_subcomponent - assert connect_host is not None - path = f"{connect_host}:{self.url.port}" - elif self.proxy and not self.is_ssl(): - path = str(self.url) + # N.B. + # Adding __del__ method with self._writer closing doesn't make sense + # because _writer is instance method, thus it keeps a reference to self. + # Until writer has finished finalizer will not be called. + + def __init__( + self, + method: str, + url: URL, + *, + params: Query = None, + headers: Optional[LooseHeaders] = None, + skip_auto_headers: Optional[Iterable[str]] = None, + data: Any = None, + cookies: Optional[LooseCookies] = None, + auth: Optional[BasicAuth] = None, + version: http.HttpVersion = http.HttpVersion11, + compress: Union[str, bool, None] = None, + chunked: Optional[bool] = None, + expect100: bool = False, + loop: Optional[asyncio.AbstractEventLoop] = None, + response_class: Optional[Type["ClientResponse"]] = None, + proxy: Optional[URL] = None, + proxy_auth: Optional[BasicAuth] = None, + timer: Optional[BaseTimerContext] = None, + session: Optional["ClientSession"] = None, + ssl: Union[SSLContext, bool, Fingerprint] = True, + proxy_headers: Optional[LooseHeaders] = None, + traces: Optional[List["Trace"]] = None, + trust_env: bool = False, + server_hostname: Optional[str] = None, + ): + if loop is None: + loop = asyncio.get_event_loop() + if match := _CONTAINS_CONTROL_CHAR_RE.search(method): + raise ValueError( + f"Method cannot contain non-token characters {method!r} " + f"(found at least {match.group()!r})" + ) + # URL forbids subclasses, so a simple type check is enough. 
+ assert type(url) is URL, url + if proxy is not None: + assert type(proxy) is URL, proxy + # FIXME: session is None in tests only, need to fix tests + # assert session is not None + if TYPE_CHECKING: + assert session is not None + self._session = session + if params: + url = url.extend_query(params) + self.original_url = url + self.url = url.with_fragment(None) if url.raw_fragment else url + self.method = method.upper() + self.chunked = chunked + self.compress = compress + self.loop = loop + self.length = None + if response_class is None: + real_response_class = ClientResponse else: - path = self.url.raw_path_qs + real_response_class = response_class + self.response_class: Type[ClientResponse] = real_response_class + self._timer = timer if timer is not None else TimerNoop() + self._ssl = ssl if ssl is not None else True + self.server_hostname = server_hostname - protocol = conn.protocol - assert protocol is not None - writer = StreamWriter( - protocol, - self.loop, - on_chunk_sent=( - functools.partial(self._on_chunk_request_sent, self.method, self.url) - if self._traces - else None - ), - on_headers_sent=( - functools.partial(self._on_headers_request_sent, self.method, self.url) - if self._traces - else None - ), - ) + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) - if self.compress: - writer.enable_compression(self.compress) # type: ignore[arg-type] + self.update_version(version) + self.update_host(url) + self.update_headers(headers) + self.update_auto_headers(skip_auto_headers) + self.update_cookies(cookies) + self.update_content_encoding(data) + self.update_auth(auth, trust_env) + self.update_proxy(proxy, proxy_auth, proxy_headers) - if self.chunked is not None: - writer.enable_chunking() + self.update_body_from_data(data) + if data is not None or self.method not in self.GET_METHODS: + self.update_transfer_encoding() + self.update_expect_continue(expect100) + self._traces = [] if traces is None else traces - # set 
default content-type - if ( - self.method in self.POST_METHODS - and ( - self._skip_auto_headers is None - or hdrs.CONTENT_TYPE not in self._skip_auto_headers - ) - and hdrs.CONTENT_TYPE not in self.headers - ): - self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream" + def __reset_writer(self, _: object = None) -> None: + self.__writer = None - v = self.version - if hdrs.CONNECTION not in self.headers: - if conn._connector.force_close: - if v == HttpVersion11: - self.headers[hdrs.CONNECTION] = "close" - elif v == HttpVersion10: - self.headers[hdrs.CONNECTION] = "keep-alive" + def _get_content_length(self) -> Optional[int]: + """Extract and validate Content-Length header value. - # status + headers - status_line = f"{self.method} {path} HTTP/{v.major}.{v.minor}" + Returns parsed Content-Length value or None if not set. + Raises ValueError if header exists but cannot be parsed as an integer. + """ + if hdrs.CONTENT_LENGTH not in self.headers: + return None - # Buffer headers for potential coalescing with body - await writer.write_headers(status_line, self.headers) + content_length_hdr = self.headers[hdrs.CONTENT_LENGTH] + try: + return int(content_length_hdr) + except ValueError: + raise ValueError( + f"Invalid Content-Length header: {content_length_hdr}" + ) from None - task: Optional["asyncio.Task[None]"] - if self._body or self._continue is not None or protocol.writing_paused: - coro = self.write_bytes(writer, conn, self._get_content_length()) - if sys.version_info >= (3, 12): - # Optimization for Python 3.12, try to write - # bytes immediately to avoid having to schedule - # the task on the event loop. 
- task = asyncio.Task(coro, loop=self.loop, eager_start=True) - else: - task = self.loop.create_task(coro) - if task.done(): - task = None - else: - self._writer = task - else: - # We have nothing to write because - # - there is no body - # - the protocol does not have writing paused - # - we are not waiting for a 100-continue response - protocol.start_timeout() - writer.set_eof() - task = None - response_class = self.response_class - assert response_class is not None - self.response = response_class( - self.method, - self.original_url, - writer=task, - continue100=self._continue, - timer=self._timer, - request_info=self.request_info, - traces=self._traces, - loop=self.loop, - session=self._session, - ) - return self.response + @property + def skip_auto_headers(self) -> CIMultiDict[None]: + return self._skip_auto_headers or CIMultiDict() - async def close(self) -> None: - if self.__writer is not None: - try: - await self.__writer - except asyncio.CancelledError: - if ( - sys.version_info >= (3, 11) - and (task := asyncio.current_task()) - and task.cancelling() - ): - raise + @property + def _writer(self) -> Optional["asyncio.Task[None]"]: + return self.__writer - def terminate(self) -> None: + @_writer.setter + def _writer(self, writer: "asyncio.Task[None]") -> None: if self.__writer is not None: - if not self.loop.is_closed(): - self.__writer.cancel() self.__writer.remove_done_callback(self.__reset_writer) - self.__writer = None - - async def _on_chunk_request_sent(self, method: str, url: URL, chunk: bytes) -> None: - for trace in self._traces: - await trace.send_request_chunk_sent(method, url, chunk) - - async def _on_headers_request_sent( - self, method: str, url: URL, headers: "CIMultiDict[str]" - ) -> None: - for trace in self._traces: - await trace.send_request_headers(method, url, headers) + self.__writer = writer + writer.add_done_callback(self.__reset_writer) + def is_ssl(self) -> bool: + return self.url.scheme in _SSL_SCHEMES -_CONNECTION_CLOSED_EXCEPTION 
= ClientConnectionError("Connection closed") + @property + def ssl(self) -> Union["SSLContext", bool, Fingerprint]: + return self._ssl + @property + def connection_key(self) -> ConnectionKey: + if proxy_headers := self.proxy_headers: + h: Optional[int] = hash(tuple(proxy_headers.items())) + else: + h = None + url = self.url + return tuple.__new__( + ConnectionKey, + ( + url.raw_host or "", + url.port, + url.scheme in _SSL_SCHEMES, + self._ssl, + self.proxy, + self.proxy_auth, + h, + ), + ) -class ClientResponse(HeadersMixin): + @property + def host(self) -> str: + ret = self.url.raw_host + assert ret is not None + return ret - # Some of these attributes are None when created, - # but will be set by the start() method. - # As the end user will likely never see the None values, we cheat the types below. - # from the Status-Line of the response - version: Optional[HttpVersion] = None # HTTP-Version - status: int = None # type: ignore[assignment] # Status-Code - reason: Optional[str] = None # Reason-Phrase + @property + def port(self) -> Optional[int]: + return self.url.port - content: StreamReader = None # type: ignore[assignment] # Payload stream - _body: Optional[bytes] = None - _headers: CIMultiDictProxy[str] = None # type: ignore[assignment] - _history: Tuple["ClientResponse", ...] 
= () - _raw_headers: RawHeaders = None # type: ignore[assignment] + @property + def body(self) -> Union[payload.Payload, Literal[b""]]: + """Request body.""" + # empty body is represented as bytes for backwards compatibility + return self._body or b"" - _connection: Optional["Connection"] = None # current connection - _cookies: Optional[SimpleCookie] = None - _continue: Optional["asyncio.Future[bool]"] = None - _source_traceback: Optional[traceback.StackSummary] = None - _session: Optional["ClientSession"] = None - # set up by ClientRequest after ClientResponse object creation - # post-init stage allows to not change ctor signature - _closed = True # to allow __del__ for non-initialized properly response - _released = False - _in_context = False + @body.setter + def body(self, value: Any) -> None: + """Set request body with warning for non-autoclose payloads. - _resolve_charset: Callable[["ClientResponse", bytes], str] = lambda *_: "utf-8" + WARNING: This setter must be called from within an event loop and is not + thread-safe. Setting body outside of an event loop may raise RuntimeError + when closing file-based payloads. - __writer: Optional["asyncio.Task[None]"] = None + DEPRECATED: Direct assignment to body is deprecated and will be removed + in a future version. Use await update_body() instead for proper resource + management. + """ + # Close existing payload if present + if self._body is not None: + # Warn if the payload needs manual closing + # stacklevel=3: user code -> body setter -> _warn_if_unclosed_payload + _warn_if_unclosed_payload(self._body, stacklevel=3) + # NOTE: In the future, when we remove sync close support, + # this setter will need to be removed and only the async + # update_body() method will be available. For now, we call + # _close() for backwards compatibility. 
+ self._body._close() + self._update_body(value) - def __init__( - self, - method: str, - url: URL, - *, - writer: "Optional[asyncio.Task[None]]", - continue100: Optional["asyncio.Future[bool]"], - timer: BaseTimerContext, - request_info: RequestInfo, - traces: List["Trace"], - loop: asyncio.AbstractEventLoop, - session: "ClientSession", - ) -> None: - # URL forbids subclasses, so a simple type check is enough. - assert type(url) is URL + @property + def request_info(self) -> RequestInfo: + headers: CIMultiDictProxy[str] = CIMultiDictProxy(self.headers) + # These are created on every request, so we use a NamedTuple + # for performance reasons. We don't use the RequestInfo.__new__ + # method because it has a different signature which is provided + # for backwards compatibility only. + return tuple.__new__( + RequestInfo, (self.url, self.method, headers, self.original_url) + ) - self.method = method + @property + def session(self) -> "ClientSession": + """Return the ClientSession instance. - self._real_url = url - self._url = url.with_fragment(None) if url.raw_fragment else url - if writer is not None: - self._writer = writer - if continue100 is not None: - self._continue = continue100 - self._request_info = request_info - self._timer = timer if timer is not None else TimerNoop() - self._cache: Dict[str, Any] = {} - self._traces = traces - self._loop = loop - # Save reference to _resolve_charset, so that get_encoding() will still - # work after the response has finished reading the body. - # TODO: Fix session=None in tests (see ClientRequest.__init__). - if session is not None: - # store a reference to session #1985 - self._session = session - self._resolve_charset = session._resolve_charset - if loop.get_debug(): - self._source_traceback = traceback.extract_stack(sys._getframe(1)) + This property provides access to the ClientSession that initiated + this request, allowing middleware to make additional requests + using the same session. 
+ """ + return self._session - def __reset_writer(self, _: object = None) -> None: - self.__writer = None + def update_host(self, url: URL) -> None: + """Update destination host, port and connection type (ssl).""" + # get host/port + if not url.raw_host: + raise InvalidURL(url) - @property - def _writer(self) -> Optional["asyncio.Task[None]"]: - """The writer task for streaming data. + # basic auth info + if url.raw_user or url.raw_password: + self.auth = helpers.BasicAuth(url.user or "", url.password or "") - _writer is only provided for backwards compatibility - for subclasses that may need to access it. + def update_version(self, version: Union[http.HttpVersion, str]) -> None: + """Convert request version to two elements tuple. + + parser HTTP version '1.1' => (1, 1) """ - return self.__writer + if isinstance(version, str): + v = [part.strip() for part in version.split(".", 1)] + try: + version = http.HttpVersion(int(v[0]), int(v[1])) + except ValueError: + raise ValueError( + f"Can not parse http version number: {version}" + ) from None + self.version = version - @_writer.setter - def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None: - """Set the writer task for streaming data.""" - if self.__writer is not None: - self.__writer.remove_done_callback(self.__reset_writer) - self.__writer = writer - if writer is None: + def update_headers(self, headers: Optional[LooseHeaders]) -> None: + """Update request headers.""" + self.headers: CIMultiDict[str] = CIMultiDict() + + # Build the host header + host = self.url.host_port_subcomponent + + # host_port_subcomponent is None when the URL is a relative URL. + # but we know we do not have a relative URL here. + assert host is not None + self.headers[hdrs.HOST] = host + + if not headers: return - if writer.done(): - # The writer is already done, so we can clear it immediately. 
- self.__writer = None + + if isinstance(headers, (dict, MultiDictProxy, MultiDict)): + headers = headers.items() + + for key, value in headers: # type: ignore[misc] + # A special case for Host header + if key in hdrs.HOST_ALL: + self.headers[key] = value + else: + self.headers.add(key, value) + + def update_auto_headers(self, skip_auto_headers: Optional[Iterable[str]]) -> None: + if skip_auto_headers is not None: + self._skip_auto_headers = CIMultiDict( + (hdr, None) for hdr in sorted(skip_auto_headers) + ) + used_headers = self.headers.copy() + used_headers.extend(self._skip_auto_headers) # type: ignore[arg-type] else: - writer.add_done_callback(self.__reset_writer) + # Fast path when there are no headers to skip + # which is the most common case. + used_headers = self.headers - @property - def cookies(self) -> SimpleCookie: - if self._cookies is None: - self._cookies = SimpleCookie() - return self._cookies + for hdr, val in self.DEFAULT_HEADERS.items(): + if hdr not in used_headers: + self.headers[hdr] = val - @cookies.setter - def cookies(self, cookies: SimpleCookie) -> None: - self._cookies = cookies + if hdrs.USER_AGENT not in used_headers: + self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE - @reify - def url(self) -> URL: - return self._url + def update_cookies(self, cookies: Optional[LooseCookies]) -> None: + """Update request cookies header.""" + if not cookies: + return - @reify - def url_obj(self) -> URL: - warnings.warn("Deprecated, use .url #1654", DeprecationWarning, stacklevel=2) - return self._url + c = SimpleCookie() + if hdrs.COOKIE in self.headers: + c.load(self.headers.get(hdrs.COOKIE, "")) + del self.headers[hdrs.COOKIE] - @reify - def real_url(self) -> URL: - return self._real_url + if isinstance(cookies, Mapping): + iter_cookies = cookies.items() + else: + iter_cookies = cookies # type: ignore[assignment] + for name, value in iter_cookies: + if isinstance(value, Morsel): + # Preserve coded_value + mrsl_val = value.get(value.key, Morsel()) + 
mrsl_val.set(value.key, value.value, value.coded_value) + c[name] = mrsl_val + else: + c[name] = value # type: ignore[assignment] - @reify - def host(self) -> str: - assert self._url.host is not None - return self._url.host + self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip() - @reify - def headers(self) -> "CIMultiDictProxy[str]": - return self._headers + def update_content_encoding(self, data: Any) -> None: + """Set request content encoding.""" + if not data: + # Don't compress an empty body. + self.compress = None + return - @reify - def raw_headers(self) -> RawHeaders: - return self._raw_headers + if self.headers.get(hdrs.CONTENT_ENCODING): + if self.compress: + raise ValueError( + "compress can not be set if Content-Encoding header is set" + ) + elif self.compress: + if not isinstance(self.compress, str): + self.compress = "deflate" + self.headers[hdrs.CONTENT_ENCODING] = self.compress + self.chunked = True # enable chunked, no need to deal with length - @reify - def request_info(self) -> RequestInfo: - return self._request_info + def update_transfer_encoding(self) -> None: + """Analyze transfer-encoding header.""" + te = self.headers.get(hdrs.TRANSFER_ENCODING, "").lower() - @reify - def content_disposition(self) -> Optional[ContentDisposition]: - raw = self._headers.get(hdrs.CONTENT_DISPOSITION) - if raw is None: - return None - disposition_type, params_dct = multipart.parse_content_disposition(raw) - params = MappingProxyType(params_dct) - filename = multipart.content_disposition_filename(params) - return ContentDisposition(disposition_type, params, filename) + if "chunked" in te: + if self.chunked: + raise ValueError( + "chunked can not be set " + 'if "Transfer-Encoding: chunked" header is set' + ) - def __del__(self, _warnings: Any = warnings) -> None: - if self._closed: + elif self.chunked: + if hdrs.CONTENT_LENGTH in self.headers: + raise ValueError( + "chunked can not be set if Content-Length header is set" + ) + + 
self.headers[hdrs.TRANSFER_ENCODING] = "chunked" + elif ( + self._body is not None + and hdrs.CONTENT_LENGTH not in self.headers + and (size := self._body.size) is not None + ): + self.headers[hdrs.CONTENT_LENGTH] = str(size) + + def update_auth(self, auth: Optional[BasicAuth], trust_env: bool = False) -> None: + """Set basic auth.""" + if auth is None: + auth = self.auth + if auth is None and trust_env and self.url.host is not None: + netrc_obj = netrc_from_env() + with contextlib.suppress(LookupError): + auth = basicauth_from_netrc(netrc_obj, self.url.host) + if auth is None: return - if self._connection is not None: - self._connection.release() - self._cleanup_writer() + if not isinstance(auth, helpers.BasicAuth): + raise TypeError("BasicAuth() tuple is required instead") - if self._loop.get_debug(): - kwargs = {"source": self} - _warnings.warn(f"Unclosed response {self!r}", ResourceWarning, **kwargs) - context = {"client_response": self, "message": "Unclosed response"} - if self._source_traceback: - context["source_traceback"] = self._source_traceback - self._loop.call_exception_handler(context) + self.headers[hdrs.AUTHORIZATION] = auth.encode() - def __repr__(self) -> str: - out = io.StringIO() - ascii_encodable_url = str(self.url) - if self.reason: - ascii_encodable_reason = self.reason.encode( - "ascii", "backslashreplace" - ).decode("ascii") - else: - ascii_encodable_reason = "None" - print( - "<ClientResponse({}) [{} {}]>".format( - ascii_encodable_url, self.status, ascii_encodable_reason - ), - file=out, - ) - print(self.headers, file=out) - return out.getvalue() + def update_body_from_data(self, body: Any, _stacklevel: int = 3) -> None: + """Update request body from data.""" + if self._body is not None: + _warn_if_unclosed_payload(self._body, stacklevel=_stacklevel) - @property - def connection(self) -> Optional["Connection"]: - return self._connection + if body is None: + self._body = None + return - @reify - def history(self) -> Tuple["ClientResponse", 
...]: - """A sequence of of responses, if redirects occurred.""" - return self._history + # FormData + maybe_payload = body() if isinstance(body, FormData) else body - @reify - def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]": - links_str = ", ".join(self.headers.getall("link", [])) + try: + body_payload = payload.PAYLOAD_REGISTRY.get(maybe_payload, disposition=None) + except payload.LookupError: + body_payload = FormData(maybe_payload)() # type: ignore[arg-type] - if not links_str: - return MultiDictProxy(MultiDict()) + self._body = body_payload + # enable chunked encoding if needed + if not self.chunked and hdrs.CONTENT_LENGTH not in self.headers: + if (size := body_payload.size) is not None: + self.headers[hdrs.CONTENT_LENGTH] = str(size) + else: + self.chunked = True + + # copy payload headers + assert body_payload.headers + headers = self.headers + skip_headers = self._skip_auto_headers + for key, value in body_payload.headers.items(): + if key in headers or (skip_headers is not None and key in skip_headers): + continue + headers[key] = value + + def _update_body(self, body: Any) -> None: + """Update request body after its already been set.""" + # Remove existing Content-Length header since body is changing + if hdrs.CONTENT_LENGTH in self.headers: + del self.headers[hdrs.CONTENT_LENGTH] - links: MultiDict[MultiDictProxy[Union[str, URL]]] = MultiDict() + # Remove existing Transfer-Encoding header to avoid conflicts + if self.chunked and hdrs.TRANSFER_ENCODING in self.headers: + del self.headers[hdrs.TRANSFER_ENCODING] - for val in re.split(r",(?=\s*<)", links_str): - match = re.match(r"\s*<(.*)>(.*)", val) - if match is None: # pragma: no cover - # the check exists to suppress mypy error - continue - url, params_str = match.groups() - params = params_str.split(";")[1:] + # Now update the body using the existing method + # Called from _update_body, add 1 to stacklevel from caller + self.update_body_from_data(body, _stacklevel=4) - link: 
MultiDict[Union[str, URL]] = MultiDict() + # Update transfer encoding headers if needed (same logic as __init__) + if body is not None or self.method not in self.GET_METHODS: + self.update_transfer_encoding() - for param in params: - match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M) - if match is None: # pragma: no cover - # the check exists to suppress mypy error - continue - key, _, value, _ = match.groups() + async def update_body(self, body: Any) -> None: + """ + Update request body and close previous payload if needed. - link.add(key, value) + This method safely updates the request body by first closing any existing + payload to prevent resource leaks, then setting the new body. - key = link.get("rel", url) + IMPORTANT: Always use this method instead of setting request.body directly. + Direct assignment to request.body will leak resources if the previous body + contains file handles, streams, or other resources that need cleanup. - link.add("url", self.url.join(URL(url))) + Args: + body: The new body content. Can be: + - bytes/bytearray: Raw binary data + - str: Text data (will be encoded using charset from Content-Type) + - FormData: Form data that will be encoded as multipart/form-data + - Payload: A pre-configured payload object + - AsyncIterable: An async iterable of bytes chunks + - File-like object: Will be read and sent as binary data + - None: Clears the body - links.add(str(key), MultiDictProxy(link)) + Usage: + # CORRECT: Use update_body + await request.update_body(b"new request data") - return MultiDictProxy(links) + # WRONG: Don't set body directly + # request.body = b"new request data" # This will leak resources! 
- async def start(self, connection: "Connection") -> "ClientResponse": - """Start response processing.""" - self._closed = False - self._protocol = connection.protocol - self._connection = connection + # Update with form data + form_data = FormData() + form_data.add_field('field', 'value') + await request.update_body(form_data) - with self._timer: - while True: - # read response - try: - protocol = self._protocol - message, payload = await protocol.read() # type: ignore[union-attr] - except http.HttpProcessingError as exc: - raise ClientResponseError( - self.request_info, - self.history, - status=exc.code, - message=exc.message, - headers=exc.headers, - ) from exc + # Clear body + await request.update_body(None) - if message.code < 100 or message.code > 199 or message.code == 101: - break + Note: + This method is async because it may need to close file handles or + other resources associated with the previous payload. Always await + this method to ensure proper cleanup. - if self._continue is not None: - set_result(self._continue, True) - self._continue = None + Warning: + Setting request.body directly is highly discouraged and can lead to: + - Resource leaks (unclosed file handles, streams) + - Memory leaks (unreleased buffers) + - Unexpected behavior with streaming payloads - # payload eof handler - payload.on_eof(self._response_eof) + It is not recommended to change the payload type in middleware. If the + body was already set (e.g., as bytes), it's best to keep the same type + rather than converting it (e.g., to str) as this may result in unexpected + behavior. 
- # response status - self.version = message.version - self.status = message.code - self.reason = message.reason + See Also: + - update_body_from_data: Synchronous body update without cleanup + - body property: Direct body access (STRONGLY DISCOURAGED) - # headers - self._headers = message.headers # type is CIMultiDictProxy - self._raw_headers = message.raw_headers # type is Tuple[bytes, bytes] + """ + # Close existing payload if it exists and needs closing + if self._body is not None: + await self._body.close() + self._update_body(body) - # payload - self.content = payload + def update_expect_continue(self, expect: bool = False) -> None: + if expect: + self.headers[hdrs.EXPECT] = "100-continue" + elif ( + hdrs.EXPECT in self.headers + and self.headers[hdrs.EXPECT].lower() == "100-continue" + ): + expect = True - # cookies - if cookie_hdrs := self.headers.getall(hdrs.SET_COOKIE, ()): - cookies = SimpleCookie() - for hdr in cookie_hdrs: - try: - cookies.load(hdr) - except CookieError as exc: - client_logger.warning("Can not load response cookies: %s", exc) - self._cookies = cookies - return self + if expect: + self._continue = self.loop.create_future() - def _response_eof(self) -> None: - if self._closed: + def update_proxy( + self, + proxy: Optional[URL], + proxy_auth: Optional[BasicAuth], + proxy_headers: Optional[LooseHeaders], + ) -> None: + self.proxy = proxy + if proxy is None: + self.proxy_auth = None + self.proxy_headers = None return - # protocol could be None because connection could be detached - protocol = self._connection and self._connection.protocol - if protocol is not None and protocol.upgraded: - return + if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth): + raise ValueError("proxy_auth must be None or BasicAuth() tuple") + self.proxy_auth = proxy_auth - self._closed = True - self._cleanup_writer() - self._release_connection() + if proxy_headers is not None and not isinstance( + proxy_headers, (MultiDict, MultiDictProxy) + ): + 
proxy_headers = CIMultiDict(proxy_headers) + self.proxy_headers = proxy_headers - @property - def closed(self) -> bool: - return self._closed + async def write_bytes( + self, + writer: AbstractStreamWriter, + conn: "Connection", + content_length: Optional[int], + ) -> None: + """ + Write the request body to the connection stream. - def close(self) -> None: - if not self._released: - self._notify_content() + This method handles writing different types of request bodies: + 1. Payload objects (using their specialized write_with_length method) + 2. Bytes/bytearray objects + 3. Iterable body content - self._closed = True - if self._loop is None or self._loop.is_closed(): - return + Args: + writer: The stream writer to write the body to + conn: The connection being used for this request + content_length: Optional maximum number of bytes to write from the body + (None means write the entire body) - self._cleanup_writer() - if self._connection is not None: - self._connection.close() - self._connection = None + The method properly handles: + - Waiting for 100-Continue responses if required + - Content length constraints for chunked encoding + - Error handling for network issues, cancellation, and other exceptions + - Signaling EOF and timeout management - def release(self) -> Any: - if not self._released: - self._notify_content() + Raises: + ClientOSError: When there's an OS-level error writing the body + ClientConnectionError: When there's a general connection error + asyncio.CancelledError: When the operation is cancelled - self._closed = True + """ + # 100 response + if self._continue is not None: + # Force headers to be sent before waiting for 100-continue + writer.send_headers() + await writer.drain() + await self._continue - self._cleanup_writer() - self._release_connection() - return noop() + protocol = conn.protocol + assert protocol is not None + try: + # This should be a rare case but the + # self._body can be set to None while + # the task is being started or we 
wait above + # for the 100-continue response. + # The more likely case is we have an empty + # payload, but 100-continue is still expected. + if self._body is not None: + await self._body.write_with_length(writer, content_length) + except OSError as underlying_exc: + reraised_exc = underlying_exc + + # Distinguish between timeout and other OS errors for better error reporting + exc_is_not_timeout = underlying_exc.errno is not None or not isinstance( + underlying_exc, asyncio.TimeoutError + ) + if exc_is_not_timeout: + reraised_exc = ClientOSError( + underlying_exc.errno, + f"Can not write request body for {self.url !s}", + ) + + set_exception(protocol, reraised_exc, underlying_exc) + except asyncio.CancelledError: + # Body hasn't been fully sent, so connection can't be reused + conn.close() + raise + except Exception as underlying_exc: + set_exception( + protocol, + ClientConnectionError( + "Failed to send bytes into the underlying connection " + f"{conn !s}: {underlying_exc!r}", + ), + underlying_exc, + ) + else: + # Successfully wrote the body, signal EOF and start response timeout + await writer.write_eof() + protocol.start_timeout() - @property - def ok(self) -> bool: - """Returns ``True`` if ``status`` is less than ``400``, ``False`` if not. + async def send(self, conn: "Connection") -> "ClientResponse": + # Specify request target: + # - CONNECT request must send authority form URI + # - not CONNECT proxy must send absolute form URI + # - most common is origin form URI + if self.method == hdrs.METH_CONNECT: + connect_host = self.url.host_subcomponent + assert connect_host is not None + path = f"{connect_host}:{self.url.port}" + elif self.proxy and not self.is_ssl(): + path = str(self.url) + else: + path = self.url.raw_path_qs - This is **not** a check for ``200 OK`` but a check that the response - status is under 400. 
- """ - return 400 > self.status + protocol = conn.protocol + assert protocol is not None + writer = StreamWriter( + protocol, + self.loop, + on_chunk_sent=( + functools.partial(self._on_chunk_request_sent, self.method, self.url) + if self._traces + else None + ), + on_headers_sent=( + functools.partial(self._on_headers_request_sent, self.method, self.url) + if self._traces + else None + ), + ) - def raise_for_status(self) -> None: - if not self.ok: - # reason should always be not None for a started response - assert self.reason is not None + if self.compress: + writer.enable_compression(self.compress) # type: ignore[arg-type] - # If we're in a context we can rely on __aexit__() to release as the - # exception propagates. - if not self._in_context: - self.release() + if self.chunked is not None: + writer.enable_chunking() - raise ClientResponseError( - self.request_info, - self.history, - status=self.status, - message=self.reason, - headers=self.headers, + # set default content-type + if ( + self.method in self.POST_METHODS + and ( + self._skip_auto_headers is None + or hdrs.CONTENT_TYPE not in self._skip_auto_headers ) + and hdrs.CONTENT_TYPE not in self.headers + ): + self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream" - def _release_connection(self) -> None: - if self._connection is not None: - if self.__writer is None: - self._connection.release() - self._connection = None - else: - self.__writer.add_done_callback(lambda f: self._release_connection()) + v = self.version + if hdrs.CONNECTION not in self.headers: + if conn._connector.force_close: + if v == HttpVersion11: + self.headers[hdrs.CONNECTION] = "close" + elif v == HttpVersion10: + self.headers[hdrs.CONNECTION] = "keep-alive" - async def _wait_released(self) -> None: - if self.__writer is not None: - try: - await self.__writer - except asyncio.CancelledError: - if ( - sys.version_info >= (3, 11) - and (task := asyncio.current_task()) - and task.cancelling() - ): - raise - 
self._release_connection() + # status + headers + status_line = f"{self.method} {path} HTTP/{v.major}.{v.minor}" - def _cleanup_writer(self) -> None: - if self.__writer is not None: - self.__writer.cancel() - self._session = None + # Buffer headers for potential coalescing with body + await writer.write_headers(status_line, self.headers) - def _notify_content(self) -> None: - content = self.content - if content and content.exception() is None: - set_exception(content, _CONNECTION_CLOSED_EXCEPTION) - self._released = True + task: Optional["asyncio.Task[None]"] + if self._body or self._continue is not None or protocol.writing_paused: + coro = self.write_bytes(writer, conn, self._get_content_length()) + if sys.version_info >= (3, 12): + # Optimization for Python 3.12, try to write + # bytes immediately to avoid having to schedule + # the task on the event loop. + task = asyncio.Task(coro, loop=self.loop, eager_start=True) + else: + task = self.loop.create_task(coro) + if task.done(): + task = None + else: + self._writer = task + else: + # We have nothing to write because + # - there is no body + # - the protocol does not have writing paused + # - we are not waiting for a 100-continue response + protocol.start_timeout() + writer.set_eof() + task = None + response_class = self.response_class + assert response_class is not None + self.response = response_class( + self.method, + self.original_url, + writer=task, + continue100=self._continue, + timer=self._timer, + request_info=self.request_info, + traces=self._traces, + loop=self.loop, + session=self._session, + ) + return self.response - async def wait_for_close(self) -> None: + async def close(self) -> None: if self.__writer is not None: try: await self.__writer @@ -1414,107 +1499,20 @@ async def wait_for_close(self) -> None: and task.cancelling() ): raise - self.release() - - async def read(self) -> bytes: - """Read response payload.""" - if self._body is None: - try: - self._body = await self.content.read() - for 
trace in self._traces: - await trace.send_response_chunk_received( - self.method, self.url, self._body - ) - except BaseException: - self.close() - raise - elif self._released: # Response explicitly released - raise ClientConnectionError("Connection closed") - - protocol = self._connection and self._connection.protocol - if protocol is None or not protocol.upgraded: - await self._wait_released() # Underlying connection released - return self._body - - def get_encoding(self) -> str: - ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower() - mimetype = helpers.parse_mimetype(ctype) - - encoding = mimetype.parameters.get("charset") - if encoding: - with contextlib.suppress(LookupError, ValueError): - return codecs.lookup(encoding).name - - if mimetype.type == "application" and ( - mimetype.subtype == "json" or mimetype.subtype == "rdap" - ): - # RFC 7159 states that the default encoding is UTF-8. - # RFC 7483 defines application/rdap+json - return "utf-8" - - if self._body is None: - raise RuntimeError( - "Cannot compute fallback encoding of a not yet read body" - ) - - return self._resolve_charset(self, self._body) - - async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str: - """Read response payload and decode.""" - if self._body is None: - await self.read() - if encoding is None: - encoding = self.get_encoding() - - return self._body.decode(encoding, errors=errors) # type: ignore[union-attr] - - async def json( - self, - *, - encoding: Optional[str] = None, - loads: JSONDecoder = DEFAULT_JSON_DECODER, - content_type: Optional[str] = "application/json", - ) -> Any: - """Read and decodes JSON response.""" - if self._body is None: - await self.read() - - if content_type: - ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower() - if not _is_expected_content_type(ctype, content_type): - raise ContentTypeError( - self.request_info, - self.history, - status=self.status, - message=( - "Attempt to decode JSON with unexpected mimetype: %s" % 
ctype - ), - headers=self.headers, - ) - - stripped = self._body.strip() # type: ignore[union-attr] - if not stripped: - return None - - if encoding is None: - encoding = self.get_encoding() - - return loads(stripped.decode(encoding)) + def terminate(self) -> None: + if self.__writer is not None: + if not self.loop.is_closed(): + self.__writer.cancel() + self.__writer.remove_done_callback(self.__reset_writer) + self.__writer = None - async def __aenter__(self) -> "ClientResponse": - self._in_context = True - return self + async def _on_chunk_request_sent(self, method: str, url: URL, chunk: bytes) -> None: + for trace in self._traces: + await trace.send_request_chunk_sent(method, url, chunk) - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], + async def _on_headers_request_sent( + self, method: str, url: URL, headers: "CIMultiDict[str]" ) -> None: - self._in_context = False - # similar to _RequestContextManager, we do not need to check - # for exceptions, response object can close connection - # if state is broken - self.release() - await self.wait_for_close() + for trace in self._traces: + await trace.send_request_headers(method, url, headers) From b192479238e63cdba013a5fa4d59b56944ec5cd7 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 26 May 2025 09:47:16 -0500 Subject: [PATCH 1456/1511] Release 3.12.1rc0 (#11030) --- CHANGES.rst | 20 ++++++++++++++++++++ aiohttp/__init__.py | 2 +- 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index ddbebd82369..a2703bb0a8c 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,26 @@ .. towncrier release notes start +3.12.1rc0 (2025-05-26) +====================== + +Features +-------- + +- Added support for reusable request bodies to enable retries, redirects, and digest authentication -- by :user:`bdraco` and :user:`GLGDLY`. 
+ + Most payloads can now be safely reused multiple times, fixing long-standing issues where POST requests with form data or file uploads would fail on redirects with errors like "Form data has been processed already" or "I/O operation on closed file". This also enables digest authentication to work with request bodies and allows retry mechanisms to resend requests without consuming the payload. Note that payloads derived from async iterables may still not be reusable in some cases. + + + *Related issues and pull requests on GitHub:* + :issue:`5530`, :issue:`5577`, :issue:`9201`, :issue:`11017`. + + + + +---- + + 3.12.0 (2025-05-24) =================== diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 4bc6a3a2b22..e61fb80e8c8 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.0.dev0" +__version__ = "3.12.1rc0" from typing import TYPE_CHECKING, Tuple From 152e4160fde8985e770398da5271396d7a831198 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 26 May 2025 10:46:02 -0500 Subject: [PATCH 1457/1511] Release 3.12.1 (#11031) --- CHANGES.rst | 1412 +------------------------------------ CHANGES/11017.feature.rst | 3 - CHANGES/5530.feature.rst | 1 - CHANGES/5577.feature.rst | 1 - CHANGES/9201.feature.rst | 1 - aiohttp/__init__.py | 2 +- 6 files changed, 3 insertions(+), 1417 deletions(-) delete mode 100644 CHANGES/11017.feature.rst delete mode 120000 CHANGES/5530.feature.rst delete mode 120000 CHANGES/5577.feature.rst delete mode 120000 CHANGES/9201.feature.rst diff --git a/CHANGES.rst b/CHANGES.rst index a2703bb0a8c..82e7ad49de8 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,8 +10,8 @@ .. 
towncrier release notes start -3.12.1rc0 (2025-05-26) -====================== +3.12.1 (2025-05-26) +=================== Features -------- @@ -303,1414 +303,6 @@ Miscellaneous internal changes ----- - - -3.12.0rc1 (2025-05-24) -====================== - -Bug fixes ---------- - -- Fixed :py:attr:`~aiohttp.web.WebSocketResponse.prepared` property to correctly reflect the prepared state, especially during timeout scenarios -- by :user:`bdraco` - - - *Related issues and pull requests on GitHub:* - :issue:`6009`, :issue:`10988`. - - - -- Response is now always True, instead of using MutableMapping behaviour (False when map is empty) - - - *Related issues and pull requests on GitHub:* - :issue:`10119`. - - - -- Fixed connection reuse for file-like data payloads by ensuring buffer - truncation respects content-length boundaries and preventing premature - connection closure race -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10325`, :issue:`10915`, :issue:`10941`, :issue:`10943`. - - - -- Fixed pytest plugin to not use deprecated :py:mod:`asyncio` policy APIs. - - - *Related issues and pull requests on GitHub:* - :issue:`10851`. - - - -- Fixed :py:class:`~aiohttp.resolver.AsyncResolver` not using the ``loop`` argument in versions 3.x where it should still be supported -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10951`. - - - - -Features --------- - -- Added a comprehensive HTTP Digest Authentication client middleware (DigestAuthMiddleware) - that implements RFC 7616. The middleware supports all standard hash algorithms - (MD5, SHA, SHA-256, SHA-512) with session variants, handles both 'auth' and - 'auth-int' quality of protection options, and automatically manages the - authentication flow by intercepting 401 responses and retrying with proper - credentials -- by :user:`feus4177`, :user:`TimMenninger`, and :user:`bdraco`. 
- - - *Related issues and pull requests on GitHub:* - :issue:`2213`, :issue:`10725`. - - - -- Added client middleware support -- by :user:`bdraco` and :user:`Dreamsorcerer`. - - This change allows users to add middleware to the client session and requests, enabling features like - authentication, logging, and request/response modification without modifying the core - request logic. Additionally, the ``session`` attribute was added to ``ClientRequest``, - allowing middleware to access the session for making additional requests. - - - *Related issues and pull requests on GitHub:* - :issue:`9732`, :issue:`10902`, :issue:`10945`, :issue:`10952`, :issue:`10959`, :issue:`10968`. - - - -- Allow user setting zlib compression backend -- by :user:`TimMenninger` - - This change allows the user to call :func:`aiohttp.set_zlib_backend()` with the - zlib compression module of their choice. Default behavior continues to use - the builtin ``zlib`` library. - - - *Related issues and pull requests on GitHub:* - :issue:`9798`. - - - -- Added support for overriding the base URL with an absolute one in client sessions - -- by :user:`vivodi`. - - - *Related issues and pull requests on GitHub:* - :issue:`10074`. - - - -- Added ``host`` parameter to ``aiohttp_server`` fixture -- by :user:`christianwbrock`. - - - *Related issues and pull requests on GitHub:* - :issue:`10120`. - - - -- Detect blocking calls in coroutines using BlockBuster -- by :user:`cbornet`. - - - *Related issues and pull requests on GitHub:* - :issue:`10433`. - - - -- Added ``socket_factory`` to :py:class:`aiohttp.TCPConnector` to allow specifying custom socket options - -- by :user:`TimMenninger`. - - - *Related issues and pull requests on GitHub:* - :issue:`10474`, :issue:`10520`, :issue:`10961`, :issue:`10962`. - - - -- Started building armv7l manylinux wheels -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10797`. 
- - - -- Implemented shared DNS resolver management to fix excessive resolver object creation - when using multiple client sessions. The new ``_DNSResolverManager`` singleton ensures - only one ``DNSResolver`` object is created for default configurations, significantly - reducing resource usage and improving performance for applications using multiple - client sessions simultaneously -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10847`, :issue:`10923`, :issue:`10946`. - - - -- Upgraded to LLHTTP 9.3.0 -- by :user:`Dreamsorcerer`. - - - *Related issues and pull requests on GitHub:* - :issue:`10972`. - - - -- Optimized small HTTP requests/responses by coalescing headers and body into a single TCP packet -- by :user:`bdraco`. - - This change enhances network efficiency by reducing the number of packets sent for small HTTP payloads, improving latency and reducing overhead. Most importantly, this fixes compatibility with memory-constrained IoT devices that can only perform a single read operation and expect HTTP requests in one packet. The optimization uses zero-copy ``writelines`` when coalescing data and works with both regular and chunked transfer encoding. - - When ``aiohttp`` uses client middleware to communicate with an ``aiohttp`` server, connection reuse is more likely to occur since complete responses arrive in a single packet for small payloads. - - This aligns ``aiohttp`` with other popular HTTP clients that already coalesce small requests. - - - *Related issues and pull requests on GitHub:* - :issue:`10991`. - - - - -Improved documentation ----------------------- - -- Improved documentation for middleware by adding warnings and examples about - request body stream consumption. The documentation now clearly explains that - request body streams can only be read once and provides best practices for - sharing parsed request data between middleware and handlers -- by :user:`bdraco`. 
- - - *Related issues and pull requests on GitHub:* - :issue:`2914`. - - - - -Packaging updates and notes for downstreams -------------------------------------------- - -- Removed non SPDX-license description from ``setup.cfg`` -- by :user:`devanshu-ziphq`. - - - *Related issues and pull requests on GitHub:* - :issue:`10662`. - - - -- Added support for building against system ``llhttp`` library -- by :user:`mgorny`. - - This change adds support for :envvar:`AIOHTTP_USE_SYSTEM_DEPS` environment variable that - can be used to build aiohttp against the system install of the ``llhttp`` library rather - than the vendored one. - - - *Related issues and pull requests on GitHub:* - :issue:`10759`. - - - -- ``aiodns`` is now installed on Windows with speedups extra -- by :user:`bdraco`. - - As of ``aiodns`` 3.3.0, ``SelectorEventLoop`` is no longer required when using ``pycares`` 4.7.0 or later. - - - *Related issues and pull requests on GitHub:* - :issue:`10823`. - - - -- Fixed compatibility issue with Cython 3.1.1 -- by :user:`bdraco` - - - *Related issues and pull requests on GitHub:* - :issue:`10877`. - - - - -Contributor-facing changes --------------------------- - -- Sped up tests by disabling ``blockbuster`` fixture for ``test_static_file_huge`` and ``test_static_file_huge_cancel`` tests -- by :user:`dikos1337`. - - - *Related issues and pull requests on GitHub:* - :issue:`9705`, :issue:`10761`. - - - -- Updated tests to avoid using deprecated :py:mod:`asyncio` policy APIs and - make it compatible with Python 3.14. - - - *Related issues and pull requests on GitHub:* - :issue:`10851`. - - - -- Added Winloop to test suite to support in the future -- by :user:`Vizonex`. - - - *Related issues and pull requests on GitHub:* - :issue:`10922`. - - - - -Miscellaneous internal changes ------------------------------- - -- Added support for the ``partitioned`` attribute in the ``set_cookie`` method. - - - *Related issues and pull requests on GitHub:* - :issue:`9870`. 
- - - -- Setting :attr:`aiohttp.web.StreamResponse.last_modified` to an unsupported type will now raise :exc:`TypeError` instead of silently failing -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10146`. - - - - ----- - - -3.12.0rc0 (2025-05-23) -====================== - -Bug fixes ---------- - -- Fixed :py:attr:`~aiohttp.web.WebSocketResponse.prepared` property to correctly reflect the prepared state, especially during timeout scenarios -- by :user:`bdraco` - - - *Related issues and pull requests on GitHub:* - :issue:`6009`, :issue:`10988`. - - - -- Response is now always True, instead of using MutableMapping behaviour (False when map is empty) - - - *Related issues and pull requests on GitHub:* - :issue:`10119`. - - - -- Fixed connection reuse for file-like data payloads by ensuring buffer - truncation respects content-length boundaries and preventing premature - connection closure race -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10325`, :issue:`10915`, :issue:`10941`, :issue:`10943`. - - - -- Fixed pytest plugin to not use deprecated :py:mod:`asyncio` policy APIs. - - - *Related issues and pull requests on GitHub:* - :issue:`10851`. - - - -- Fixed :py:class:`~aiohttp.resolver.AsyncResolver` not using the ``loop`` argument in versions 3.x where it should still be supported -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10951`. - - - - -Features --------- - -- Added a comprehensive HTTP Digest Authentication client middleware (DigestAuthMiddleware) - that implements RFC 7616. The middleware supports all standard hash algorithms - (MD5, SHA, SHA-256, SHA-512) with session variants, handles both 'auth' and - 'auth-int' quality of protection options, and automatically manages the - authentication flow by intercepting 401 responses and retrying with proper - credentials -- by :user:`feus4177`, :user:`TimMenninger`, and :user:`bdraco`. 
- - - *Related issues and pull requests on GitHub:* - :issue:`2213`, :issue:`10725`. - - - -- Added client middleware support -- by :user:`bdraco` and :user:`Dreamsorcerer`. - - This change allows users to add middleware to the client session and requests, enabling features like - authentication, logging, and request/response modification without modifying the core - request logic. Additionally, the ``session`` attribute was added to ``ClientRequest``, - allowing middleware to access the session for making additional requests. - - - *Related issues and pull requests on GitHub:* - :issue:`9732`, :issue:`10902`, :issue:`10945`, :issue:`10952`, :issue:`10959`, :issue:`10968`. - - - -- Allow user setting zlib compression backend -- by :user:`TimMenninger` - - This change allows the user to call :func:`aiohttp.set_zlib_backend()` with the - zlib compression module of their choice. Default behavior continues to use - the builtin ``zlib`` library. - - - *Related issues and pull requests on GitHub:* - :issue:`9798`. - - - -- Added support for overriding the base URL with an absolute one in client sessions - -- by :user:`vivodi`. - - - *Related issues and pull requests on GitHub:* - :issue:`10074`. - - - -- Added ``host`` parameter to ``aiohttp_server`` fixture -- by :user:`christianwbrock`. - - - *Related issues and pull requests on GitHub:* - :issue:`10120`. - - - -- Detect blocking calls in coroutines using BlockBuster -- by :user:`cbornet`. - - - *Related issues and pull requests on GitHub:* - :issue:`10433`. - - - -- Added ``socket_factory`` to :py:class:`aiohttp.TCPConnector` to allow specifying custom socket options - -- by :user:`TimMenninger`. - - - *Related issues and pull requests on GitHub:* - :issue:`10474`, :issue:`10520`, :issue:`10961`, :issue:`10962`. - - - -- Started building armv7l manylinux wheels -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10797`. 
- - - -- Implemented shared DNS resolver management to fix excessive resolver object creation - when using multiple client sessions. The new ``_DNSResolverManager`` singleton ensures - only one ``DNSResolver`` object is created for default configurations, significantly - reducing resource usage and improving performance for applications using multiple - client sessions simultaneously -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10847`, :issue:`10923`, :issue:`10946`. - - - -- Upgraded to LLHTTP 9.3.0 -- by :user:`Dreamsorcerer`. - - - *Related issues and pull requests on GitHub:* - :issue:`10972`. - - - - -Improved documentation ----------------------- - -- Improved documentation for middleware by adding warnings and examples about - request body stream consumption. The documentation now clearly explains that - request body streams can only be read once and provides best practices for - sharing parsed request data between middleware and handlers -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`2914`. - - - - -Packaging updates and notes for downstreams -------------------------------------------- - -- Removed non SPDX-license description from ``setup.cfg`` -- by :user:`devanshu-ziphq`. - - - *Related issues and pull requests on GitHub:* - :issue:`10662`. - - - -- Added support for building against system ``llhttp`` library -- by :user:`mgorny`. - - This change adds support for :envvar:`AIOHTTP_USE_SYSTEM_DEPS` environment variable that - can be used to build aiohttp against the system install of the ``llhttp`` library rather - than the vendored one. - - - *Related issues and pull requests on GitHub:* - :issue:`10759`. - - - -- ``aiodns`` is now installed on Windows with speedups extra -- by :user:`bdraco`. - - As of ``aiodns`` 3.3.0, ``SelectorEventLoop`` is no longer required when using ``pycares`` 4.7.0 or later. - - - *Related issues and pull requests on GitHub:* - :issue:`10823`. 
- - - -- Fixed compatibility issue with Cython 3.1.1 -- by :user:`bdraco` - - - *Related issues and pull requests on GitHub:* - :issue:`10877`. - - - - -Contributor-facing changes --------------------------- - -- Sped up tests by disabling ``blockbuster`` fixture for ``test_static_file_huge`` and ``test_static_file_huge_cancel`` tests -- by :user:`dikos1337`. - - - *Related issues and pull requests on GitHub:* - :issue:`9705`, :issue:`10761`. - - - -- Updated tests to avoid using deprecated :py:mod:`asyncio` policy APIs and - make it compatible with Python 3.14. - - - *Related issues and pull requests on GitHub:* - :issue:`10851`. - - - -- Added Winloop to test suite to support in the future -- by :user:`Vizonex`. - - - *Related issues and pull requests on GitHub:* - :issue:`10922`. - - - - -Miscellaneous internal changes ------------------------------- - -- Added support for the ``partitioned`` attribute in the ``set_cookie`` method. - - - *Related issues and pull requests on GitHub:* - :issue:`9870`. - - - -- Setting :attr:`aiohttp.web.StreamResponse.last_modified` to an unsupported type will now raise :exc:`TypeError` instead of silently failing -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10146`. - - - - ----- - - -3.12.0b3 (2025-05-22) -===================== - -Bug fixes ---------- - -- Response is now always True, instead of using MutableMapping behaviour (False when map is empty) - - - *Related issues and pull requests on GitHub:* - :issue:`10119`. - - - -- Fixed connection reuse for file-like data payloads by ensuring buffer - truncation respects content-length boundaries and preventing premature - connection closure race -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10325`, :issue:`10915`, :issue:`10941`, :issue:`10943`. - - - -- Fixed pytest plugin to not use deprecated :py:mod:`asyncio` policy APIs. - - - *Related issues and pull requests on GitHub:* - :issue:`10851`. 
- - - -- Fixed :py:class:`~aiohttp.resolver.AsyncResolver` not using the ``loop`` argument in versions 3.x where it should still be supported -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10951`. - - - - -Features --------- - -- Added a comprehensive HTTP Digest Authentication client middleware (DigestAuthMiddleware) - that implements RFC 7616. The middleware supports all standard hash algorithms - (MD5, SHA, SHA-256, SHA-512) with session variants, handles both 'auth' and - 'auth-int' quality of protection options, and automatically manages the - authentication flow by intercepting 401 responses and retrying with proper - credentials -- by :user:`feus4177`, :user:`TimMenninger`, and :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`2213`, :issue:`10725`. - - - -- Added client middleware support -- by :user:`bdraco` and :user:`Dreamsorcerer`. - - This change allows users to add middleware to the client session and requests, enabling features like - authentication, logging, and request/response modification without modifying the core - request logic. Additionally, the ``session`` attribute was added to ``ClientRequest``, - allowing middleware to access the session for making additional requests. - - - *Related issues and pull requests on GitHub:* - :issue:`9732`, :issue:`10902`, :issue:`10952`. - - - -- Allow user setting zlib compression backend -- by :user:`TimMenninger` - - This change allows the user to call :func:`aiohttp.set_zlib_backend()` with the - zlib compression module of their choice. Default behavior continues to use - the builtin ``zlib`` library. - - - *Related issues and pull requests on GitHub:* - :issue:`9798`. - - - -- Added support for overriding the base URL with an absolute one in client sessions - -- by :user:`vivodi`. - - - *Related issues and pull requests on GitHub:* - :issue:`10074`. 
- - - -- Added ``host`` parameter to ``aiohttp_server`` fixture -- by :user:`christianwbrock`. - - - *Related issues and pull requests on GitHub:* - :issue:`10120`. - - - -- Detect blocking calls in coroutines using BlockBuster -- by :user:`cbornet`. - - - *Related issues and pull requests on GitHub:* - :issue:`10433`. - - - -- Added ``socket_factory`` to :py:class:`aiohttp.TCPConnector` to allow specifying custom socket options - -- by :user:`TimMenninger`. - - - *Related issues and pull requests on GitHub:* - :issue:`10474`, :issue:`10520`. - - - -- Started building armv7l manylinux wheels -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10797`. - - - -- Implemented shared DNS resolver management to fix excessive resolver object creation - when using multiple client sessions. The new ``_DNSResolverManager`` singleton ensures - only one ``DNSResolver`` object is created for default configurations, significantly - reducing resource usage and improving performance for applications using multiple - client sessions simultaneously -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10847`, :issue:`10923`, :issue:`10946`. - - - - -Packaging updates and notes for downstreams -------------------------------------------- - -- Removed non SPDX-license description from ``setup.cfg`` -- by :user:`devanshu-ziphq`. - - - *Related issues and pull requests on GitHub:* - :issue:`10662`. - - - -- Added support for building against system ``llhttp`` library -- by :user:`mgorny`. - - This change adds support for :envvar:`AIOHTTP_USE_SYSTEM_DEPS` environment variable that - can be used to build aiohttp against the system install of the ``llhttp`` library rather - than the vendored one. - - - *Related issues and pull requests on GitHub:* - :issue:`10759`. - - - -- ``aiodns`` is now installed on Windows with speedups extra -- by :user:`bdraco`. 
- - As of ``aiodns`` 3.3.0, ``SelectorEventLoop`` is no longer required when using ``pycares`` 4.7.0 or later. - - - *Related issues and pull requests on GitHub:* - :issue:`10823`. - - - -- Fixed compatibility issue with Cython 3.1.1 -- by :user:`bdraco` - - - *Related issues and pull requests on GitHub:* - :issue:`10877`. - - - - -Contributor-facing changes --------------------------- - -- Sped up tests by disabling ``blockbuster`` fixture for ``test_static_file_huge`` and ``test_static_file_huge_cancel`` tests -- by :user:`dikos1337`. - - - *Related issues and pull requests on GitHub:* - :issue:`9705`, :issue:`10761`. - - - -- Updated tests to avoid using deprecated :py:mod:`asyncio` policy APIs and - make it compatible with Python 3.14. - - - *Related issues and pull requests on GitHub:* - :issue:`10851`. - - - -- Added Winloop to test suite to support in the future -- by :user:`Vizonex`. - - - *Related issues and pull requests on GitHub:* - :issue:`10922`. - - - - -Miscellaneous internal changes ------------------------------- - -- Added support for the ``partitioned`` attribute in the ``set_cookie`` method. - - - *Related issues and pull requests on GitHub:* - :issue:`9870`. - - - -- Setting :attr:`aiohttp.web.StreamResponse.last_modified` to an unsupported type will now raise :exc:`TypeError` instead of silently failing -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10146`. - - - - ----- - - -3.12.0b2 (2025-05-22) -===================== - -Bug fixes ---------- - -- Response is now always True, instead of using MutableMapping behaviour (False when map is empty) - - - *Related issues and pull requests on GitHub:* - :issue:`10119`. - - - -- Fixed connection reuse for file-like data payloads by ensuring buffer - truncation respects content-length boundaries and preventing premature - connection closure race -- by :user:`bdraco`. 
- - - *Related issues and pull requests on GitHub:* - :issue:`10325`, :issue:`10915`, :issue:`10941`, :issue:`10943`. - - - -- Fixed pytest plugin to not use deprecated :py:mod:`asyncio` policy APIs. - - - *Related issues and pull requests on GitHub:* - :issue:`10851`. - - - - -Features --------- - -- Added a comprehensive HTTP Digest Authentication client middleware (DigestAuthMiddleware) - that implements RFC 7616. The middleware supports all standard hash algorithms - (MD5, SHA, SHA-256, SHA-512) with session variants, handles both 'auth' and - 'auth-int' quality of protection options, and automatically manages the - authentication flow by intercepting 401 responses and retrying with proper - credentials -- by :user:`feus4177`, :user:`TimMenninger`, and :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`2213`, :issue:`10725`. - - - -- Added client middleware support -- by :user:`bdraco` and :user:`Dreamsorcerer`. - - This change allows users to add middleware to the client session and requests, enabling features like - authentication, logging, and request/response modification without modifying the core - request logic. Additionally, the ``session`` attribute was added to ``ClientRequest``, - allowing middleware to access the session for making additional requests. - - - *Related issues and pull requests on GitHub:* - :issue:`9732`, :issue:`10902`. - - - -- Allow user setting zlib compression backend -- by :user:`TimMenninger` - - This change allows the user to call :func:`aiohttp.set_zlib_backend()` with the - zlib compression module of their choice. Default behavior continues to use - the builtin ``zlib`` library. - - - *Related issues and pull requests on GitHub:* - :issue:`9798`. - - - -- Added support for overriding the base URL with an absolute one in client sessions - -- by :user:`vivodi`. - - - *Related issues and pull requests on GitHub:* - :issue:`10074`. 
- - - -- Added ``host`` parameter to ``aiohttp_server`` fixture -- by :user:`christianwbrock`. - - - *Related issues and pull requests on GitHub:* - :issue:`10120`. - - - -- Detect blocking calls in coroutines using BlockBuster -- by :user:`cbornet`. - - - *Related issues and pull requests on GitHub:* - :issue:`10433`. - - - -- Added ``socket_factory`` to :py:class:`aiohttp.TCPConnector` to allow specifying custom socket options - -- by :user:`TimMenninger`. - - - *Related issues and pull requests on GitHub:* - :issue:`10474`, :issue:`10520`. - - - -- Started building armv7l manylinux wheels -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10797`. - - - -- Implemented shared DNS resolver management to fix excessive resolver object creation - when using multiple client sessions. The new ``_DNSResolverManager`` singleton ensures - only one ``DNSResolver`` object is created for default configurations, significantly - reducing resource usage and improving performance for applications using multiple - client sessions simultaneously -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10847`, :issue:`10923`, :issue:`10946`. - - - - -Packaging updates and notes for downstreams -------------------------------------------- - -- Removed non SPDX-license description from ``setup.cfg`` -- by :user:`devanshu-ziphq`. - - - *Related issues and pull requests on GitHub:* - :issue:`10662`. - - - -- Added support for building against system ``llhttp`` library -- by :user:`mgorny`. - - This change adds support for :envvar:`AIOHTTP_USE_SYSTEM_DEPS` environment variable that - can be used to build aiohttp against the system install of the ``llhttp`` library rather - than the vendored one. - - - *Related issues and pull requests on GitHub:* - :issue:`10759`. - - - -- ``aiodns`` is now installed on Windows with speedups extra -- by :user:`bdraco`. 
- - As of ``aiodns`` 3.3.0, ``SelectorEventLoop`` is no longer required when using ``pycares`` 4.7.0 or later. - - - *Related issues and pull requests on GitHub:* - :issue:`10823`. - - - -- Fixed compatibility issue with Cython 3.1.1 -- by :user:`bdraco` - - - *Related issues and pull requests on GitHub:* - :issue:`10877`. - - - - -Contributor-facing changes --------------------------- - -- Sped up tests by disabling ``blockbuster`` fixture for ``test_static_file_huge`` and ``test_static_file_huge_cancel`` tests -- by :user:`dikos1337`. - - - *Related issues and pull requests on GitHub:* - :issue:`9705`, :issue:`10761`. - - - -- Updated tests to avoid using deprecated :py:mod:`asyncio` policy APIs and - make it compatible with Python 3.14. - - - *Related issues and pull requests on GitHub:* - :issue:`10851`. - - - -- Added Winloop to test suite to support in the future -- by :user:`Vizonex`. - - - *Related issues and pull requests on GitHub:* - :issue:`10922`. - - - - -Miscellaneous internal changes ------------------------------- - -- Added support for the ``partitioned`` attribute in the ``set_cookie`` method. - - - *Related issues and pull requests on GitHub:* - :issue:`9870`. - - - -- Setting :attr:`aiohttp.web.StreamResponse.last_modified` to an unsupported type will now raise :exc:`TypeError` instead of silently failing -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10146`. - - - - ----- - - -3.12.0b1 (2025-05-22) -===================== - -Bug fixes ---------- - -- Response is now always True, instead of using MutableMapping behaviour (False when map is empty) - - - *Related issues and pull requests on GitHub:* - :issue:`10119`. - - - -- Fixed connection reuse for file-like data payloads by ensuring buffer - truncation respects content-length boundaries and preventing premature - connection closure race -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10325`, :issue:`10915`. 
- - - -- Fixed pytest plugin to not use deprecated :py:mod:`asyncio` policy APIs. - - - *Related issues and pull requests on GitHub:* - :issue:`10851`. - - - - -Features --------- - -- Added a comprehensive HTTP Digest Authentication client middleware (DigestAuthMiddleware) - that implements RFC 7616. The middleware supports all standard hash algorithms - (MD5, SHA, SHA-256, SHA-512) with session variants, handles both 'auth' and - 'auth-int' quality of protection options, and automatically manages the - authentication flow by intercepting 401 responses and retrying with proper - credentials -- by :user:`feus4177`, :user:`TimMenninger`, and :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`2213`, :issue:`10725`. - - - -- Added client middleware support -- by :user:`bdraco` and :user:`Dreamsorcerer`. - - This change allows users to add middleware to the client session and requests, enabling features like - authentication, logging, and request/response modification without modifying the core - request logic. Additionally, the ``session`` attribute was added to ``ClientRequest``, - allowing middleware to access the session for making additional requests. - - - *Related issues and pull requests on GitHub:* - :issue:`9732`, :issue:`10902`. - - - -- Allow user setting zlib compression backend -- by :user:`TimMenninger` - - This change allows the user to call :func:`aiohttp.set_zlib_backend()` with the - zlib compression module of their choice. Default behavior continues to use - the builtin ``zlib`` library. - - - *Related issues and pull requests on GitHub:* - :issue:`9798`. - - - -- Added support for overriding the base URL with an absolute one in client sessions - -- by :user:`vivodi`. - - - *Related issues and pull requests on GitHub:* - :issue:`10074`. - - - -- Added ``host`` parameter to ``aiohttp_server`` fixture -- by :user:`christianwbrock`. - - - *Related issues and pull requests on GitHub:* - :issue:`10120`. 
- - - -- Detect blocking calls in coroutines using BlockBuster -- by :user:`cbornet`. - - - *Related issues and pull requests on GitHub:* - :issue:`10433`. - - - -- Added ``socket_factory`` to :py:class:`aiohttp.TCPConnector` to allow specifying custom socket options - -- by :user:`TimMenninger`. - - - *Related issues and pull requests on GitHub:* - :issue:`10474`, :issue:`10520`. - - - -- Started building armv7l manylinux wheels -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10797`. - - - -- Implemented shared DNS resolver management to fix excessive resolver object creation - when using multiple client sessions. The new ``_DNSResolverManager`` singleton ensures - only one ``DNSResolver`` object is created for default configurations, significantly - reducing resource usage and improving performance for applications using multiple - client sessions simultaneously -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10847`, :issue:`10923`. - - - - -Packaging updates and notes for downstreams -------------------------------------------- - -- Removed non SPDX-license description from ``setup.cfg`` -- by :user:`devanshu-ziphq`. - - - *Related issues and pull requests on GitHub:* - :issue:`10662`. - - - -- Added support for building against system ``llhttp`` library -- by :user:`mgorny`. - - This change adds support for :envvar:`AIOHTTP_USE_SYSTEM_DEPS` environment variable that - can be used to build aiohttp against the system install of the ``llhttp`` library rather - than the vendored one. - - - *Related issues and pull requests on GitHub:* - :issue:`10759`. - - - -- ``aiodns`` is now installed on Windows with speedups extra -- by :user:`bdraco`. - - As of ``aiodns`` 3.3.0, ``SelectorEventLoop`` is no longer required when using ``pycares`` 4.7.0 or later. - - - *Related issues and pull requests on GitHub:* - :issue:`10823`. 
- - - -- Fixed compatibility issue with Cython 3.1.1 -- by :user:`bdraco` - - - *Related issues and pull requests on GitHub:* - :issue:`10877`. - - - - -Contributor-facing changes --------------------------- - -- Sped up tests by disabling ``blockbuster`` fixture for ``test_static_file_huge`` and ``test_static_file_huge_cancel`` tests -- by :user:`dikos1337`. - - - *Related issues and pull requests on GitHub:* - :issue:`9705`, :issue:`10761`. - - - -- Updated tests to avoid using deprecated :py:mod:`asyncio` policy APIs and - make it compatible with Python 3.14. - - - *Related issues and pull requests on GitHub:* - :issue:`10851`. - - - -- Added Winloop to test suite to support in the future -- by :user:`Vizonex`. - - - *Related issues and pull requests on GitHub:* - :issue:`10922`. - - - - -Miscellaneous internal changes ------------------------------- - -- Added support for the ``partitioned`` attribute in the ``set_cookie`` method. - - - *Related issues and pull requests on GitHub:* - :issue:`9870`. - - - -- Setting :attr:`aiohttp.web.StreamResponse.last_modified` to an unsupported type will now raise :exc:`TypeError` instead of silently failing -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10146`. - - - - ----- - - -3.12.0b0 (2025-05-20) -===================== - -Bug fixes ---------- - -- Response is now always True, instead of using MutableMapping behaviour (False when map is empty) - - - *Related issues and pull requests on GitHub:* - :issue:`10119`. - - - -- Fixed pytest plugin to not use deprecated :py:mod:`asyncio` policy APIs. - - - *Related issues and pull requests on GitHub:* - :issue:`10851`. - - - - -Features --------- - -- Added a comprehensive HTTP Digest Authentication client middleware (DigestAuthMiddleware) - that implements RFC 7616. 
The middleware supports all standard hash algorithms - (MD5, SHA, SHA-256, SHA-512) with session variants, handles both 'auth' and - 'auth-int' quality of protection options, and automatically manages the - authentication flow by intercepting 401 responses and retrying with proper - credentials -- by :user:`feus4177`, :user:`TimMenninger`, and :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`2213`, :issue:`10725`. - - - -- Added client middleware support -- by :user:`bdraco` and :user:`Dreamsorcerer`. - - This change allows users to add middleware to the client session and requests, enabling features like - authentication, logging, and request/response modification without modifying the core - request logic. Additionally, the ``session`` attribute was added to ``ClientRequest``, - allowing middleware to access the session for making additional requests. - - - *Related issues and pull requests on GitHub:* - :issue:`9732`, :issue:`10902`. - - - -- Allow user setting zlib compression backend -- by :user:`TimMenninger` - - This change allows the user to call :func:`aiohttp.set_zlib_backend()` with the - zlib compression module of their choice. Default behavior continues to use - the builtin ``zlib`` library. - - - *Related issues and pull requests on GitHub:* - :issue:`9798`. - - - -- Added support for overriding the base URL with an absolute one in client sessions - -- by :user:`vivodi`. - - - *Related issues and pull requests on GitHub:* - :issue:`10074`. - - - -- Added ``host`` parameter to ``aiohttp_server`` fixture -- by :user:`christianwbrock`. - - - *Related issues and pull requests on GitHub:* - :issue:`10120`. - - - -- Detect blocking calls in coroutines using BlockBuster -- by :user:`cbornet`. - - - *Related issues and pull requests on GitHub:* - :issue:`10433`. - - - -- Added ``socket_factory`` to :py:class:`aiohttp.TCPConnector` to allow specifying custom socket options - -- by :user:`TimMenninger`. 
- - - *Related issues and pull requests on GitHub:* - :issue:`10474`, :issue:`10520`. - - - -- Started building armv7l manylinux wheels -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10797`. - - - -- Implemented shared DNS resolver management to fix excessive resolver object creation - when using multiple client sessions. The new ``_DNSResolverManager`` singleton ensures - only one ``DNSResolver`` object is created for default configurations, significantly - reducing resource usage and improving performance for applications using multiple - client sessions simultaneously -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10847`. - - - - -Packaging updates and notes for downstreams -------------------------------------------- - -- Removed non SPDX-license description from ``setup.cfg`` -- by :user:`devanshu-ziphq`. - - - *Related issues and pull requests on GitHub:* - :issue:`10662`. - - - -- ``aiodns`` is now installed on Windows with speedups extra -- by :user:`bdraco`. - - As of ``aiodns`` 3.3.0, ``SelectorEventLoop`` is no longer required when using ``pycares`` 4.7.0 or later. - - - *Related issues and pull requests on GitHub:* - :issue:`10823`. - - - -- Fixed compatibility issue with Cython 3.1.1 -- by :user:`bdraco` - - - *Related issues and pull requests on GitHub:* - :issue:`10877`. - - - - -Contributor-facing changes --------------------------- - -- Sped up tests by disabling ``blockbuster`` fixture for ``test_static_file_huge`` and ``test_static_file_huge_cancel`` tests -- by :user:`dikos1337`. - - - *Related issues and pull requests on GitHub:* - :issue:`9705`, :issue:`10761`. - - - -- Updated tests to avoid using deprecated :py:mod:`asyncio` policy APIs and - make it compatible with Python 3.14. - - - *Related issues and pull requests on GitHub:* - :issue:`10851`. 
- - - - -Miscellaneous internal changes ------------------------------- - -- Added support for the ``partitioned`` attribute in the ``set_cookie`` method. - - - *Related issues and pull requests on GitHub:* - :issue:`9870`. - - - -- Setting :attr:`aiohttp.web.StreamResponse.last_modified` to an unsupported type will now raise :exc:`TypeError` instead of silently failing -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10146`. - - - - ---- diff --git a/CHANGES/11017.feature.rst b/CHANGES/11017.feature.rst deleted file mode 100644 index 361c56e3fe8..00000000000 --- a/CHANGES/11017.feature.rst +++ /dev/null @@ -1,3 +0,0 @@ -Added support for reusable request bodies to enable retries, redirects, and digest authentication -- by :user:`bdraco` and :user:`GLGDLY`. - -Most payloads can now be safely reused multiple times, fixing long-standing issues where POST requests with form data or file uploads would fail on redirects with errors like "Form data has been processed already" or "I/O operation on closed file". This also enables digest authentication to work with request bodies and allows retry mechanisms to resend requests without consuming the payload. Note that payloads derived from async iterables may still not be reusable in some cases. 
diff --git a/CHANGES/5530.feature.rst b/CHANGES/5530.feature.rst deleted file mode 120000 index 63bf4429e55..00000000000 --- a/CHANGES/5530.feature.rst +++ /dev/null @@ -1 +0,0 @@ -11017.feature.rst \ No newline at end of file diff --git a/CHANGES/5577.feature.rst b/CHANGES/5577.feature.rst deleted file mode 120000 index 63bf4429e55..00000000000 --- a/CHANGES/5577.feature.rst +++ /dev/null @@ -1 +0,0 @@ -11017.feature.rst \ No newline at end of file diff --git a/CHANGES/9201.feature.rst b/CHANGES/9201.feature.rst deleted file mode 120000 index 63bf4429e55..00000000000 --- a/CHANGES/9201.feature.rst +++ /dev/null @@ -1 +0,0 @@ -11017.feature.rst \ No newline at end of file diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index e61fb80e8c8..5c88b0724ce 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.1rc0" +__version__ = "3.12.1" from typing import TYPE_CHECKING, Tuple From 857229c7a53adc56bea3b2aec32c3246ae6a3c26 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 26 May 2025 16:10:25 -0500 Subject: [PATCH 1458/1511] Increment version to 3.12.1.dev0 (#11034) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 5c88b0724ce..3664a77f4b1 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.1" +__version__ = "3.12.1.dev0" from typing import TYPE_CHECKING, Tuple From 6c3daa74dd060efa001e357e43d69e78f667da9e Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 26 May 2025 23:01:34 +0000 Subject: [PATCH 1459/1511] [PR #11035/3915d7a6 backport][3.12] Fix Content-Length header regression for requests with None body (#11037) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/11035.bugfix.rst | 3 + aiohttp/client_reqrep.py | 13 +-- tests/test_client_request.py | 194 +++++++++++++++++++++++++++++++---- 3 files changed, 182 insertions(+), 28 deletions(-) create mode 100644 CHANGES/11035.bugfix.rst diff --git a/CHANGES/11035.bugfix.rst b/CHANGES/11035.bugfix.rst new file mode 100644 index 00000000000..2b74708f746 --- /dev/null +++ b/CHANGES/11035.bugfix.rst @@ -0,0 +1,3 @@ +Fixed ``Content-Length`` header not being set to ``0`` for non-GET requests with ``None`` body -- by :user:`bdraco`. + +Non-GET requests (``POST``, ``PUT``, ``PATCH``, ``DELETE``) with ``None`` as the body now correctly set the ``Content-Length`` header to ``0``, matching the behavior of requests with empty bytes (``b""``). This regression was introduced in aiohttp 3.12.1. diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 41acec87712..a04c86b1c53 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -1136,12 +1136,6 @@ def update_transfer_encoding(self) -> None: ) self.headers[hdrs.TRANSFER_ENCODING] = "chunked" - elif ( - self._body is not None - and hdrs.CONTENT_LENGTH not in self.headers - and (size := self._body.size) is not None - ): - self.headers[hdrs.CONTENT_LENGTH] = str(size) def update_auth(self, auth: Optional[BasicAuth], trust_env: bool = False) -> None: """Set basic auth.""" @@ -1166,6 +1160,13 @@ def update_body_from_data(self, body: Any, _stacklevel: int = 3) -> None: if body is None: self._body = None + # Set Content-Length to 0 when body is None for methods that expect a body + if ( + self.method not in self.GET_METHODS + and not self.chunked + and hdrs.CONTENT_LENGTH not in self.headers + ): + self.headers[hdrs.CONTENT_LENGTH] = "0" return # FormData diff --git a/tests/test_client_request.py b/tests/test_client_request.py index e8381a3ef77..7274420d246 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -205,7 +205,7 @@ def 
test_host_port_nondefault_wss(make_request) -> None: def test_host_port_none_port(make_request) -> None: req = make_request("get", "unix://localhost/path") - assert req.headers["Host"] == "localhost" + assert req.headers[hdrs.HOST] == "localhost" def test_host_port_err(make_request) -> None: @@ -220,17 +220,17 @@ def test_hostname_err(make_request) -> None: def test_host_header_host_first(make_request) -> None: req = make_request("get", "http://python.org/") - assert list(req.headers)[0] == "Host" + assert list(req.headers)[0] == hdrs.HOST def test_host_header_host_without_port(make_request) -> None: req = make_request("get", "http://python.org/") - assert req.headers["HOST"] == "python.org" + assert req.headers[hdrs.HOST] == "python.org" def test_host_header_host_with_default_port(make_request) -> None: req = make_request("get", "http://python.org:80/") - assert req.headers["HOST"] == "python.org" + assert req.headers[hdrs.HOST] == "python.org" def test_host_header_host_with_nondefault_port(make_request) -> None: @@ -348,12 +348,12 @@ def test_skip_default_useragent_header(make_request) -> None: def test_headers(make_request) -> None: req = make_request( - "post", "http://python.org/", headers={"Content-Type": "text/plain"} + "post", "http://python.org/", headers={hdrs.CONTENT_TYPE: "text/plain"} ) - assert "CONTENT-TYPE" in req.headers - assert req.headers["CONTENT-TYPE"] == "text/plain" - assert req.headers["ACCEPT-ENCODING"] == "gzip, deflate, br" + assert hdrs.CONTENT_TYPE in req.headers + assert req.headers[hdrs.CONTENT_TYPE] == "text/plain" + assert req.headers[hdrs.ACCEPT_ENCODING] == "gzip, deflate, br" def test_headers_list(make_request) -> None: @@ -979,7 +979,7 @@ async def test_body_with_size_sets_content_length( async def test_body_payload_with_size_no_content_length( loop: asyncio.AbstractEventLoop, ) -> None: - """Test that when a body payload with size is set directly, Content-Length is added.""" + """Test that when a body payload is set via 
update_body, Content-Length is added.""" # Create a payload with a known size data = b"payload data" bytes_payload = payload.BytesPayload(data) @@ -991,23 +991,28 @@ async def test_body_payload_with_size_no_content_length( loop=loop, ) - # Set body directly (bypassing update_body_from_data to avoid it setting Content-Length) - req._body = bytes_payload - - # Ensure conditions for the code path we want to test - assert req._body is not None - assert hdrs.CONTENT_LENGTH not in req.headers - assert req._body.size is not None - assert not req.chunked + # Initially no body should be set + assert req._body is None + # POST method with None body should have Content-Length: 0 + assert req.headers[hdrs.CONTENT_LENGTH] == "0" - # Now trigger update_transfer_encoding which should set Content-Length - req.update_transfer_encoding() + # Update body using the public method + await req.update_body(bytes_payload) # Verify Content-Length was set from body.size - assert req.headers["CONTENT-LENGTH"] == str(len(data)) + assert req.headers[hdrs.CONTENT_LENGTH] == str(len(data)) assert req.body is bytes_payload assert req._body is bytes_payload # Access _body which is the Payload + assert req._body is not None # type: ignore[unreachable] assert req._body.size == len(data) + + # Set body back to None + await req.update_body(None) + + # Verify Content-Length is back to 0 for POST with None body + assert req.headers[hdrs.CONTENT_LENGTH] == "0" + assert req._body is None + await req.close() @@ -1980,8 +1985,8 @@ async def test_update_body_updates_content_length( # Clear body await req.update_body(None) - # For None body, Content-Length should not be set - assert "Content-Length" not in req.headers + # For None body with POST method, Content-Length should be set to 0 + assert req.headers[hdrs.CONTENT_LENGTH] == "0" await req.close() @@ -2075,4 +2080,149 @@ async def test_expect100_with_body_becomes_none() -> None: req._body = None await req.write_bytes(mock_writer, mock_conn, None) + + 
+@pytest.mark.parametrize( + ("method", "data", "expected_content_length"), + [ + # GET methods should not have Content-Length with None body + ("GET", None, None), + ("HEAD", None, None), + ("OPTIONS", None, None), + ("TRACE", None, None), + # POST methods should have Content-Length: 0 with None body + ("POST", None, "0"), + ("PUT", None, "0"), + ("PATCH", None, "0"), + ("DELETE", None, "0"), + # Empty bytes should always set Content-Length: 0 + ("GET", b"", "0"), + ("HEAD", b"", "0"), + ("POST", b"", "0"), + ("PUT", b"", "0"), + # Non-empty bytes should set appropriate Content-Length + ("GET", b"test", "4"), + ("POST", b"test", "4"), + ("PUT", b"hello world", "11"), + ("PATCH", b"data", "4"), + ("DELETE", b"x", "1"), + ], +) +def test_content_length_for_methods( + method: str, + data: Optional[bytes], + expected_content_length: Optional[str], + loop: asyncio.AbstractEventLoop, +) -> None: + """Test that Content-Length header is set correctly for all HTTP methods.""" + req = ClientRequest(method, URL("http://python.org/"), data=data, loop=loop) + + actual_content_length = req.headers.get(hdrs.CONTENT_LENGTH) + assert actual_content_length == expected_content_length + + +@pytest.mark.parametrize("method", ["GET", "HEAD", "OPTIONS", "TRACE"]) +def test_get_methods_classification(method: str) -> None: + """Test that GET-like methods are correctly classified.""" + assert method in ClientRequest.GET_METHODS + + +@pytest.mark.parametrize("method", ["POST", "PUT", "PATCH", "DELETE"]) +def test_non_get_methods_classification(method: str) -> None: + """Test that POST-like methods are not in GET_METHODS.""" + assert method not in ClientRequest.GET_METHODS + + +async def test_content_length_with_string_data(loop: asyncio.AbstractEventLoop) -> None: + """Test Content-Length when data is a string.""" + data = "Hello, World!" 
+ req = ClientRequest("POST", URL("http://python.org/"), data=data, loop=loop) + # String should be encoded to bytes, default encoding is utf-8 + assert req.headers[hdrs.CONTENT_LENGTH] == str(len(data.encode("utf-8"))) + await req.close() + + +async def test_content_length_with_async_iterable( + loop: asyncio.AbstractEventLoop, +) -> None: + """Test that async iterables use chunked encoding, not Content-Length.""" + + async def data_gen() -> AsyncIterator[bytes]: + yield b"chunk1" # pragma: no cover + + req = ClientRequest("POST", URL("http://python.org/"), data=data_gen(), loop=loop) + assert hdrs.CONTENT_LENGTH not in req.headers + assert req.chunked + assert req.headers[hdrs.TRANSFER_ENCODING] == "chunked" + await req.close() + + +async def test_content_length_not_overridden(loop: asyncio.AbstractEventLoop) -> None: + """Test that explicitly set Content-Length is not overridden.""" + req = ClientRequest( + "POST", + URL("http://python.org/"), + data=b"test", + headers={hdrs.CONTENT_LENGTH: "100"}, + loop=loop, + ) + # Should keep the explicitly set value + assert req.headers[hdrs.CONTENT_LENGTH] == "100" + await req.close() + + +async def test_content_length_with_formdata(loop: asyncio.AbstractEventLoop) -> None: + """Test Content-Length with FormData.""" + form = aiohttp.FormData() + form.add_field("field", "value") + + req = ClientRequest("POST", URL("http://python.org/"), data=form, loop=loop) + # FormData with known size should set Content-Length + assert hdrs.CONTENT_LENGTH in req.headers + await req.close() + + +async def test_no_content_length_with_chunked(loop: asyncio.AbstractEventLoop) -> None: + """Test that chunked encoding prevents Content-Length header.""" + req = ClientRequest( + "POST", + URL("http://python.org/"), + data=b"test", + chunked=True, + loop=loop, + ) + assert hdrs.CONTENT_LENGTH not in req.headers + assert req.headers[hdrs.TRANSFER_ENCODING] == "chunked" + await req.close() + + +@pytest.mark.parametrize("method", ["POST", "PUT", 
"PATCH", "DELETE"]) +async def test_update_body_none_sets_content_length_zero( + method: str, loop: asyncio.AbstractEventLoop +) -> None: + """Test that updating body to None sets Content-Length: 0 for POST-like methods.""" + # Create request with initial body + req = ClientRequest(method, URL("http://python.org/"), data=b"initial", loop=loop) + assert req.headers[hdrs.CONTENT_LENGTH] == "7" + + # Update body to None + await req.update_body(None) + assert req.headers[hdrs.CONTENT_LENGTH] == "0" + assert req._body is None + await req.close() + + +@pytest.mark.parametrize("method", ["GET", "HEAD", "OPTIONS", "TRACE"]) +async def test_update_body_none_no_content_length_for_get_methods( + method: str, loop: asyncio.AbstractEventLoop +) -> None: + """Test that updating body to None doesn't set Content-Length for GET-like methods.""" + # Create request with initial body + req = ClientRequest(method, URL("http://python.org/"), data=b"initial", loop=loop) + assert req.headers[hdrs.CONTENT_LENGTH] == "7" + + # Update body to None + await req.update_body(None) + assert hdrs.CONTENT_LENGTH not in req.headers + assert req._body is None await req.close() From ecb7086730646b0a4293b32654a1d17b3e8e3dd0 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 26 May 2025 18:21:11 -0500 Subject: [PATCH 1460/1511] Release 3.12.2 (#11039) --- CHANGES.rst | 20 ++++++++++++++++++++ CHANGES/11035.bugfix.rst | 3 --- aiohttp/__init__.py | 2 +- 3 files changed, 21 insertions(+), 4 deletions(-) delete mode 100644 CHANGES/11035.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index 82e7ad49de8..b0fdbe7ed5c 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,26 @@ .. towncrier release notes start +3.12.2 (2025-05-26) +=================== + +Bug fixes +--------- + +- Fixed ``Content-Length`` header not being set to ``0`` for non-GET requests with ``None`` body -- by :user:`bdraco`. 
+ + Non-GET requests (``POST``, ``PUT``, ``PATCH``, ``DELETE``) with ``None`` as the body now correctly set the ``Content-Length`` header to ``0``, matching the behavior of requests with empty bytes (``b""``). This regression was introduced in aiohttp 3.12.1. + + + *Related issues and pull requests on GitHub:* + :issue:`11035`. + + + + +---- + + 3.12.1 (2025-05-26) =================== diff --git a/CHANGES/11035.bugfix.rst b/CHANGES/11035.bugfix.rst deleted file mode 100644 index 2b74708f746..00000000000 --- a/CHANGES/11035.bugfix.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fixed ``Content-Length`` header not being set to ``0`` for non-GET requests with ``None`` body -- by :user:`bdraco`. - -Non-GET requests (``POST``, ``PUT``, ``PATCH``, ``DELETE``) with ``None`` as the body now correctly set the ``Content-Length`` header to ``0``, matching the behavior of requests with empty bytes (``b""``). This regression was introduced in aiohttp 3.12.1. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 3664a77f4b1..cd30c676465 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.1.dev0" +__version__ = "3.12.2" from typing import TYPE_CHECKING, Tuple From 73e6dfdc9c6430a8e19284626cafb82f44772c50 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Mon, 26 May 2025 21:06:13 -0500 Subject: [PATCH 1461/1511] Increment version to 3.12.3.dev0 (#11042) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index cd30c676465..4d3c8b0f2c7 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.2" +__version__ = "3.12.3.dev0" from typing import TYPE_CHECKING, Tuple From 3a8825b82ae4bf45c552eae6a03f3e407f639c6a Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 28 May 2025 14:01:51 -0500 Subject: [PATCH 1462/1511] [PR #11055/abcb2cc4 backport][3.12] Fix failing lint jobs due to caching (#11058) Co-authored-by: J. Nick Koston <nick@koston.org> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index daa701c2aa9..69b777e0624 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -49,7 +49,7 @@ jobs: - name: Cache PyPI uses: actions/cache@v4.2.3 with: - key: pip-lint-${{ hashFiles('requirements/*.txt') }} + key: pip-lint-${{ hashFiles('requirements/*.txt') }}-v2 path: ~/.cache/pip restore-keys: | pip-lint- From 09396d030361b02c6c2145836b07cb2590ce9beb Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 28 May 2025 20:15:30 +0000 Subject: [PATCH 1463/1511] [PR #11060/59259572 backport][3.12] Fix failing linter CI (#11062) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- .github/workflows/ci-cd.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 69b777e0624..83f5fd3ee03 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -49,7 +49,7 @@ jobs: - name: Cache PyPI uses: actions/cache@v4.2.3 with: - key: pip-lint-${{ hashFiles('requirements/*.txt') }}-v2 + key: pip-lint-${{ hashFiles('requirements/*.txt') }}-v3 path: ~/.cache/pip restore-keys: | pip-lint- @@ -69,6 +69,7 @@ jobs: make mypy - name: Install libenchant run: | + sudo apt-get update sudo apt install libenchant-2-dev - name: Install spell checker run: | From 872cab623e818d2a19d92591d9f0cc76ac16d608 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 28 May 2025 20:47:34 +0000 Subject: [PATCH 1464/1511] [PR #11056/7f691674 backport][3.12] Prevent blockbuster False Positives from coverage.py Locking (#11065) Co-authored-by: J. Nick Koston <nick@koston.org> --- tests/conftest.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/tests/conftest.py b/tests/conftest.py index 69469b3c793..54e0d3f21a7 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -80,6 +80,14 @@ def blockbuster(request: pytest.FixtureRequest) -> Iterator[None]: bb.functions[func].can_block_in( "aiohttp/web_urldispatcher.py", "add_static" ) + # Note: coverage.py uses locking internally which can cause false positives + # in blockbuster when it instruments code. This is particularly problematic + # on Windows where it can lead to flaky test failures. + # Additionally, we're not particularly worried about threading.Lock.acquire happening + # by accident in this codebase as we primarily use asyncio.Lock for + # synchronization in async code. 
+ # Allow lock.acquire calls to prevent these false positives + bb.functions["threading.Lock.acquire"].deactivate() yield From 696ae5275ae463fb89fc6cca3194f0b81d3513f8 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 28 May 2025 21:13:32 +0000 Subject: [PATCH 1465/1511] [PR #11054/e2eb1959 backport][3.12] Fix CookieJar memory leak in filter_cookies() (#11068) Co-authored-by: J. Nick Koston <nick@koston.org> Fixes #11052 memory leak issue --- CHANGES/11052.bugfix.rst | 2 ++ CHANGES/11054.bugfix.rst | 1 + aiohttp/cookiejar.py | 2 ++ tests/test_cookiejar.py | 54 ++++++++++++++++++++++++++++++++++++++++ 4 files changed, 59 insertions(+) create mode 100644 CHANGES/11052.bugfix.rst create mode 120000 CHANGES/11054.bugfix.rst diff --git a/CHANGES/11052.bugfix.rst b/CHANGES/11052.bugfix.rst new file mode 100644 index 00000000000..73e4ea216c8 --- /dev/null +++ b/CHANGES/11052.bugfix.rst @@ -0,0 +1,2 @@ +Fixed memory leak in :py:meth:`~aiohttp.CookieJar.filter_cookies` that caused unbounded memory growth +when making requests to different URL paths -- by :user:`bdraco` and :user:`Cycloctane`. 
diff --git a/CHANGES/11054.bugfix.rst b/CHANGES/11054.bugfix.rst new file mode 120000 index 00000000000..2d6e2428f3e --- /dev/null +++ b/CHANGES/11054.bugfix.rst @@ -0,0 +1 @@ +11052.bugfix.rst \ No newline at end of file diff --git a/aiohttp/cookiejar.py b/aiohttp/cookiejar.py index f6b9a921767..696ffddc315 100644 --- a/aiohttp/cookiejar.py +++ b/aiohttp/cookiejar.py @@ -353,6 +353,8 @@ def filter_cookies(self, request_url: URL = URL()) -> "BaseCookie[str]": path_len = len(request_url.path) # Point 2: https://www.rfc-editor.org/rfc/rfc6265.html#section-5.4 for p in pairs: + if p not in self._cookies: + continue for name, cookie in self._cookies[p].items(): domain = cookie["domain"] diff --git a/tests/test_cookiejar.py b/tests/test_cookiejar.py index 4c37e962597..26efaa30d04 100644 --- a/tests/test_cookiejar.py +++ b/tests/test_cookiejar.py @@ -1127,3 +1127,57 @@ async def test_treat_as_secure_origin() -> None: assert len(jar) == 1 filtered_cookies = jar.filter_cookies(request_url=endpoint) assert len(filtered_cookies) == 1 + + +async def test_filter_cookies_does_not_leak_memory() -> None: + """Test that filter_cookies doesn't create empty cookie entries. 
+ + Regression test for https://github.com/aio-libs/aiohttp/issues/11052 + """ + jar = CookieJar() + + # Set a cookie with Path=/ + jar.update_cookies({"test_cookie": "value; Path=/"}, URL("http://example.com/")) + + # Check initial state + assert len(jar) == 1 + initial_storage_size = len(jar._cookies) + initial_morsel_cache_size = len(jar._morsel_cache) + + # Make multiple requests with different paths + paths = [ + "/", + "/api", + "/api/v1", + "/api/v1/users", + "/api/v1/users/123", + "/static/css/style.css", + "/images/logo.png", + ] + + for path in paths: + url = URL(f"http://example.com{path}") + filtered = jar.filter_cookies(url) + # Should still get the cookie + assert len(filtered) == 1 + assert "test_cookie" in filtered + + # Storage size should not grow significantly + # Only the shared cookie entry ('', '') may be added + final_storage_size = len(jar._cookies) + assert final_storage_size <= initial_storage_size + 1 + + # Verify _morsel_cache doesn't leak either + # It should only have entries for domains/paths where cookies exist + final_morsel_cache_size = len(jar._morsel_cache) + assert final_morsel_cache_size <= initial_morsel_cache_size + 1 + + # Verify no empty entries were created for domain-path combinations + for key, cookies in jar._cookies.items(): + if key != ("", ""): # Skip the shared cookie entry + assert len(cookies) > 0, f"Empty cookie entry found for {key}" + + # Verify _morsel_cache entries correspond to actual cookies + for key, morsels in jar._morsel_cache.items(): + assert key in jar._cookies, f"Orphaned morsel cache entry for {key}" + assert len(morsels) > 0, f"Empty morsel cache entry found for {key}" From 3df3ab5a2fb2ba3bdc016a8515e04663e8903755 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Wed, 28 May 2025 16:45:41 -0500 Subject: [PATCH 1466/1511] [PR #11064/876102c backport][3.12] Remove update of libenchant from linter workflow (#11071) --- .github/workflows/ci-cd.yml | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 83f5fd3ee03..1d44ddda982 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -49,7 +49,7 @@ jobs: - name: Cache PyPI uses: actions/cache@v4.2.3 with: - key: pip-lint-${{ hashFiles('requirements/*.txt') }}-v3 + key: pip-lint-${{ hashFiles('requirements/*.txt') }}-v4 path: ~/.cache/pip restore-keys: | pip-lint- @@ -67,10 +67,6 @@ jobs: - name: Run linters run: | make mypy - - name: Install libenchant - run: | - sudo apt-get update - sudo apt install libenchant-2-dev - name: Install spell checker run: | pip install -r requirements/doc-spelling.in -c requirements/doc-spelling.txt From 2002b9dd09436bb232e1df651ede5bb92e5f04c8 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Wed, 28 May 2025 17:20:09 -0500 Subject: [PATCH 1467/1511] Release 3.12.3 (#11073) --- CHANGES.rst | 19 +++++++++++++++++++ CHANGES/11052.bugfix.rst | 2 -- CHANGES/11054.bugfix.rst | 1 - aiohttp/__init__.py | 2 +- 4 files changed, 20 insertions(+), 4 deletions(-) delete mode 100644 CHANGES/11052.bugfix.rst delete mode 120000 CHANGES/11054.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index b0fdbe7ed5c..418475ce772 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,25 @@ .. towncrier release notes start +3.12.3 (2025-05-28) +=================== + +Bug fixes +--------- + +- Fixed memory leak in :py:meth:`~aiohttp.CookieJar.filter_cookies` that caused unbounded memory growth + when making requests to different URL paths -- by :user:`bdraco` and :user:`Cycloctane`. + + + *Related issues and pull requests on GitHub:* + :issue:`11052`, :issue:`11054`. 
+ + + + +---- + + 3.12.2 (2025-05-26) =================== diff --git a/CHANGES/11052.bugfix.rst b/CHANGES/11052.bugfix.rst deleted file mode 100644 index 73e4ea216c8..00000000000 --- a/CHANGES/11052.bugfix.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fixed memory leak in :py:meth:`~aiohttp.CookieJar.filter_cookies` that caused unbounded memory growth -when making requests to different URL paths -- by :user:`bdraco` and :user:`Cycloctane`. diff --git a/CHANGES/11054.bugfix.rst b/CHANGES/11054.bugfix.rst deleted file mode 120000 index 2d6e2428f3e..00000000000 --- a/CHANGES/11054.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -11052.bugfix.rst \ No newline at end of file diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 4d3c8b0f2c7..31c39176b03 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.3.dev0" +__version__ = "3.12.3" from typing import TYPE_CHECKING, Tuple From 0abffd5f0e2e7607a505562b4592f9a00511c6c8 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 29 May 2025 00:24:39 +0000 Subject: [PATCH 1468/1511] [PR #11074/e550c78a backport][3.12] Fix connector not waiting for connections to close (#11077) Co-authored-by: J. 
Nick Koston <nick@koston.org> fixes #1925 fixes #3736 --- CHANGES/11074.bugfix.rst | 1 + CHANGES/1925.bugfix.rst | 1 + aiohttp/client_proto.py | 15 ++++++++ aiohttp/connector.py | 60 ++++++++++++++++++++++++------ tests/test_client_request.py | 7 +++- tests/test_client_session.py | 9 ++++- tests/test_connector.py | 71 +++++++++++++++++++++++++----------- 7 files changed, 130 insertions(+), 34 deletions(-) create mode 100644 CHANGES/11074.bugfix.rst create mode 120000 CHANGES/1925.bugfix.rst diff --git a/CHANGES/11074.bugfix.rst b/CHANGES/11074.bugfix.rst new file mode 100644 index 00000000000..120f8efd914 --- /dev/null +++ b/CHANGES/11074.bugfix.rst @@ -0,0 +1 @@ +Fixed connector not waiting for connections to close before returning from :meth:`~aiohttp.BaseConnector.close` (partial backport of :pr:`3733`) -- by :user:`atemate` and :user:`bdraco`. diff --git a/CHANGES/1925.bugfix.rst b/CHANGES/1925.bugfix.rst new file mode 120000 index 00000000000..eb158f4b0f9 --- /dev/null +++ b/CHANGES/1925.bugfix.rst @@ -0,0 +1 @@ +11074.bugfix.rst \ No newline at end of file diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index 2d64b3f3644..6a0318e553a 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -4,6 +4,7 @@ from .base_protocol import BaseProtocol from .client_exceptions import ( + ClientConnectionError, ClientOSError, ClientPayloadError, ServerDisconnectedError, @@ -14,6 +15,7 @@ EMPTY_BODY_STATUS_CODES, BaseTimerContext, set_exception, + set_result, ) from .http import HttpResponseParser, RawResponseMessage from .http_exceptions import HttpProcessingError @@ -43,6 +45,7 @@ def __init__(self, loop: asyncio.AbstractEventLoop) -> None: self._read_timeout_handle: Optional[asyncio.TimerHandle] = None self._timeout_ceil_threshold: Optional[float] = 5 + self.closed: asyncio.Future[None] = self._loop.create_future() @property def upgraded(self) -> bool: @@ -83,6 +86,18 @@ def connection_lost(self, exc: Optional[BaseException]) -> None: 
connection_closed_cleanly = original_connection_error is None + if connection_closed_cleanly: + set_result(self.closed, None) + else: + assert original_connection_error is not None + set_exception( + self.closed, + ClientConnectionError( + f"Connection lost: {original_connection_error !s}", + ), + original_connection_error, + ) + if self._payload_parser is not None: with suppress(Exception): # FIXME: log this somehow? self._payload_parser.feed_eof() diff --git a/aiohttp/connector.py b/aiohttp/connector.py index dd0d27a7054..926a62684f6 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -1,5 +1,6 @@ import asyncio import functools +import logging import random import socket import sys @@ -131,6 +132,14 @@ def __del__(self) -> None: ) +async def _wait_for_close(waiters: List[Awaitable[object]]) -> None: + """Wait for all waiters to finish closing.""" + results = await asyncio.gather(*waiters, return_exceptions=True) + for res in results: + if isinstance(res, Exception): + logging.error("Error while closing connector: %r", res) + + class Connection: _source_traceback = None @@ -222,10 +231,14 @@ def closed(self) -> bool: class _TransportPlaceholder: """placeholder for BaseConnector.connect function""" - __slots__ = () + __slots__ = ("closed",) + + def __init__(self, closed_future: asyncio.Future[Optional[Exception]]) -> None: + """Initialize a placeholder for a transport.""" + self.closed = closed_future def close(self) -> None: - """Close the placeholder transport.""" + """Close the placeholder.""" class BaseConnector: @@ -322,6 +335,10 @@ def __init__( self._cleanup_closed_disabled = not enable_cleanup_closed self._cleanup_closed_transports: List[Optional[asyncio.Transport]] = [] + self._placeholder_future: asyncio.Future[Optional[Exception]] = ( + loop.create_future() + ) + self._placeholder_future.set_result(None) self._cleanup_closed() def __del__(self, _warnings: Any = warnings) -> None: @@ -454,18 +471,30 @@ def _cleanup_closed(self) -> None: def 
close(self) -> Awaitable[None]: """Close all opened transports.""" - self._close() - return _DeprecationWaiter(noop()) + if not (waiters := self._close()): + # If there are no connections to close, we can return a noop + # awaitable to avoid scheduling a task on the event loop. + return _DeprecationWaiter(noop()) + coro = _wait_for_close(waiters) + if sys.version_info >= (3, 12): + # Optimization for Python 3.12, try to close connections + # immediately to avoid having to schedule the task on the event loop. + task = asyncio.Task(coro, loop=self._loop, eager_start=True) + else: + task = self._loop.create_task(coro) + return _DeprecationWaiter(task) + + def _close(self) -> List[Awaitable[object]]: + waiters: List[Awaitable[object]] = [] - def _close(self) -> None: if self._closed: - return + return waiters self._closed = True try: if self._loop.is_closed(): - return + return waiters # cancel cleanup task if self._cleanup_handle: @@ -476,16 +505,20 @@ def _close(self) -> None: self._cleanup_closed_handle.cancel() for data in self._conns.values(): - for proto, t0 in data: + for proto, _ in data: proto.close() + waiters.append(proto.closed) for proto in self._acquired: proto.close() + waiters.append(proto.closed) for transport in self._cleanup_closed_transports: if transport is not None: transport.abort() + return waiters + finally: self._conns.clear() self._acquired.clear() @@ -546,7 +579,9 @@ async def connect( if (conn := await self._get(key, traces)) is not None: return conn - placeholder = cast(ResponseHandler, _TransportPlaceholder()) + placeholder = cast( + ResponseHandler, _TransportPlaceholder(self._placeholder_future) + ) self._acquired.add(placeholder) if self._limit_per_host: self._acquired_per_host[key].add(placeholder) @@ -898,15 +933,18 @@ def __init__( self._resolve_host_tasks: Set["asyncio.Task[List[ResolveResult]]"] = set() self._socket_factory = socket_factory - def close(self) -> Awaitable[None]: + def _close(self) -> List[Awaitable[object]]: 
"""Close all ongoing DNS calls.""" for fut in chain.from_iterable(self._throttle_dns_futures.values()): fut.cancel() + waiters = super()._close() + for t in self._resolve_host_tasks: t.cancel() + waiters.append(t) - return super().close() + return waiters @property def family(self) -> int: diff --git a/tests/test_client_request.py b/tests/test_client_request.py index 7274420d246..b3eb55d921b 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -69,6 +69,8 @@ def protocol(loop, transport): protocol.transport = transport protocol._drain_helper.return_value = loop.create_future() protocol._drain_helper.return_value.set_result(None) + protocol.closed = loop.create_future() + protocol.closed.set_result(None) return protocol @@ -1404,7 +1406,10 @@ async def send(self, conn): async def create_connection(req, traces, timeout): assert isinstance(req, CustomRequest) - return mock.Mock() + proto = mock.Mock() + proto.closed = loop.create_future() + proto.closed.set_result(None) + return proto connector = BaseConnector(loop=loop) connector._create_connection = create_connection diff --git a/tests/test_client_session.py b/tests/test_client_session.py index e31144abd0b..56c7a5c0c13 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -33,6 +33,8 @@ async def make_conn(): conn = loop.run_until_complete(make_conn()) proto = mock.Mock() + proto.closed = loop.create_future() + proto.closed.set_result(None) conn._conns["a"] = deque([(proto, 123)]) yield conn loop.run_until_complete(conn.close()) @@ -429,7 +431,10 @@ async def test_reraise_os_error(create_session) -> None: async def create_connection(req, traces, timeout): # return self.transport, self.protocol - return mock.Mock() + proto = mock.Mock() + proto.closed = session._loop.create_future() + proto.closed.set_result(None) + return proto session._connector._create_connection = create_connection session._connector._release = mock.Mock() @@ -464,6 +469,8 @@ async def 
connect(req, traces, timeout): async def create_connection(req, traces, timeout): # return self.transport, self.protocol conn = mock.Mock() + conn.closed = session._loop.create_future() + conn.closed.set_result(None) return conn session._connector.connect = connect diff --git a/tests/test_connector.py b/tests/test_connector.py index 8128b47f02d..f17ded6d960 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -293,7 +293,7 @@ async def test_async_context_manager(loop) -> None: async def test_close(loop) -> None: - proto = mock.Mock() + proto = create_mocked_conn() conn = aiohttp.BaseConnector(loop=loop) assert not conn.closed @@ -305,6 +305,35 @@ async def test_close(loop) -> None: assert conn.closed +async def test_close_with_exception_during_closing( + loop: asyncio.AbstractEventLoop, caplog: pytest.LogCaptureFixture +) -> None: + """Test that exceptions during connection closing are logged.""" + proto = create_mocked_conn() + + # Make the closed future raise an exception when awaited + exc_future = loop.create_future() + exc_future.set_exception(RuntimeError("Connection close failed")) + proto.closed = exc_future + + conn = aiohttp.BaseConnector(loop=loop) + conn._conns[("host", 8080, False)] = deque([(proto, object())]) + + # Clear any existing log records + caplog.clear() + + # Close should complete even with the exception + await conn.close() + + # Check that the error was logged + assert len(caplog.records) == 1 + assert caplog.records[0].levelname == "ERROR" + assert "Error while closing connector" in caplog.records[0].message + assert "RuntimeError('Connection close failed')" in caplog.records[0].message + + assert conn.closed + + async def test_get(loop: asyncio.AbstractEventLoop, key: ConnectionKey) -> None: conn = aiohttp.BaseConnector() try: @@ -431,7 +460,7 @@ async def test_release(loop, key) -> None: conn = aiohttp.BaseConnector(loop=loop) conn._release_waiter = mock.Mock() - proto = mock.Mock(should_close=False) + proto = 
create_mocked_conn(should_close=False) conn._acquired.add(proto) conn._acquired_per_host[key].add(proto) @@ -469,7 +498,7 @@ async def test_release_ssl_transport( async def test_release_already_closed(loop) -> None: conn = aiohttp.BaseConnector(loop=loop) - proto = mock.Mock() + proto = create_mocked_conn() key = 1 conn._acquired.add(proto) await conn.close() @@ -569,7 +598,7 @@ async def test_release_waiter_no_available(loop, key, key2) -> None: async def test_release_close(loop, key) -> None: conn = aiohttp.BaseConnector(loop=loop) - proto = mock.Mock(should_close=True) + proto = create_mocked_conn(should_close=True) conn._acquired.add(proto) conn._release(key, proto) @@ -1504,7 +1533,7 @@ async def test_release_close_do_not_add_to_pool(loop, key) -> None: # see issue #473 conn = aiohttp.BaseConnector(loop=loop) - proto = mock.Mock(should_close=True) + proto = create_mocked_conn(should_close=True) conn._acquired.add(proto) conn._release(key, proto) @@ -1514,12 +1543,12 @@ async def test_release_close_do_not_add_to_pool(loop, key) -> None: async def test_release_close_do_not_delete_existing_connections(key) -> None: - proto1 = mock.Mock() + proto1 = create_mocked_conn() conn = aiohttp.BaseConnector() conn._conns[key] = deque([(proto1, 1)]) - proto = mock.Mock(should_close=True) + proto = create_mocked_conn(should_close=True) conn._acquired.add(proto) conn._release(key, proto) assert conn._conns[key] == deque([(proto1, 1)]) @@ -1529,7 +1558,7 @@ async def test_release_close_do_not_delete_existing_connections(key) -> None: async def test_release_not_started(loop) -> None: conn = aiohttp.BaseConnector(loop=loop) - proto = mock.Mock(should_close=False) + proto = create_mocked_conn(should_close=False) key = 1 conn._acquired.add(proto) conn._release(key, proto) @@ -1544,7 +1573,7 @@ async def test_release_not_started(loop) -> None: async def test_release_not_opened(loop, key) -> None: conn = aiohttp.BaseConnector(loop=loop) - proto = mock.Mock() + proto = 
create_mocked_conn() conn._acquired.add(proto) conn._release(key, proto) assert proto.close.called @@ -1553,7 +1582,7 @@ async def test_release_not_opened(loop, key) -> None: async def test_connect(loop, key) -> None: - proto = mock.Mock() + proto = create_mocked_conn() proto.is_connected.return_value = True req = ClientRequest("GET", URL("http://localhost:80"), loop=loop) @@ -1588,7 +1617,7 @@ async def test_connect_tracing(loop) -> None: trace_config.freeze() traces = [Trace(session, trace_config, trace_config.trace_config_ctx())] - proto = mock.Mock() + proto = create_mocked_conn() proto.is_connected.return_value = True req = ClientRequest("GET", URL("http://host:80"), loop=loop) @@ -2116,7 +2145,7 @@ async def test_ssl_context_once() -> None: async def test_close_twice(loop) -> None: - proto = mock.Mock() + proto = create_mocked_conn() conn = aiohttp.BaseConnector(loop=loop) conn._conns[1] = deque([(proto, object())]) @@ -2133,7 +2162,7 @@ async def test_close_twice(loop) -> None: async def test_close_cancels_cleanup_handle(loop) -> None: conn = aiohttp.BaseConnector(loop=loop) - conn._release(1, mock.Mock(should_close=False)) + conn._release(1, create_mocked_conn(should_close=False)) assert conn._cleanup_handle is not None await conn.close() assert conn._cleanup_handle is None @@ -2584,7 +2613,7 @@ async def test_connect_queued_operation_tracing(loop, key) -> None: trace_config.freeze() traces = [Trace(session, trace_config, trace_config.trace_config_ctx())] - proto = mock.Mock() + proto = create_mocked_conn() proto.is_connected.return_value = True req = ClientRequest( @@ -2628,7 +2657,7 @@ async def test_connect_reuseconn_tracing(loop, key) -> None: trace_config.freeze() traces = [Trace(session, trace_config, trace_config.trace_config_ctx())] - proto = mock.Mock() + proto = create_mocked_conn() proto.is_connected.return_value = True req = ClientRequest( @@ -2681,7 +2710,7 @@ async def f(): async def test_connect_with_no_limit_and_limit_per_host(loop, key) -> 
None: - proto = mock.Mock() + proto = create_mocked_conn() proto.is_connected.return_value = True req = ClientRequest("GET", URL("http://localhost1:80"), loop=loop) @@ -2746,7 +2775,7 @@ async def f(): async def test_connect_with_limit_cancelled(loop) -> None: - proto = mock.Mock() + proto = create_mocked_conn() proto.is_connected.return_value = True req = ClientRequest("GET", URL("http://host:80"), loop=loop) @@ -2794,7 +2823,7 @@ async def check_with_exc(err: Exception) -> None: async def test_connect_with_limit_concurrent(loop) -> None: - proto = mock.Mock() + proto = create_mocked_conn() proto.should_close = False proto.is_connected.return_value = True @@ -2816,7 +2845,7 @@ async def create_connection(req, traces, timeout): # Make a new transport mock each time because acquired # transports are stored in a set. Reusing the same object # messes with the count. - proto = mock.Mock(should_close=False) + proto = create_mocked_conn(should_close=False) proto.is_connected.return_value = True return proto @@ -2899,7 +2928,7 @@ async def test_connect_waiters_cleanup_key_error(loop) -> None: async def test_close_with_acquired_connection(loop) -> None: - proto = mock.Mock() + proto = create_mocked_conn() proto.is_connected.return_value = True req = ClientRequest("GET", URL("http://host:80"), loop=loop) @@ -3017,7 +3046,7 @@ async def test_cancelled_waiter(loop) -> None: conn = aiohttp.BaseConnector(limit=1, loop=loop) req = mock.Mock() req.connection_key = "key" - proto = mock.Mock() + proto = create_mocked_conn() async def create_connection(req, traces=None): await asyncio.sleep(1) From faa742b423cb5ae8fedb3424d0c35860052e06d1 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Wed, 28 May 2025 19:55:02 -0500 Subject: [PATCH 1469/1511] Release 3.12.4 (#11078) --- CHANGES.rst | 18 ++++++++++++++++++ CHANGES/11074.bugfix.rst | 1 - CHANGES/1925.bugfix.rst | 1 - aiohttp/__init__.py | 2 +- 4 files changed, 19 insertions(+), 3 deletions(-) delete mode 100644 CHANGES/11074.bugfix.rst delete mode 120000 CHANGES/1925.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index 418475ce772..8d3bcbac867 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,24 @@ .. towncrier release notes start +3.12.4 (2025-05-28) +=================== + +Bug fixes +--------- + +- Fixed connector not waiting for connections to close before returning from :meth:`~aiohttp.BaseConnector.close` (partial backport of :pr:`3733`) -- by :user:`atemate` and :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`1925`, :issue:`11074`. + + + + +---- + + 3.12.3 (2025-05-28) =================== diff --git a/CHANGES/11074.bugfix.rst b/CHANGES/11074.bugfix.rst deleted file mode 100644 index 120f8efd914..00000000000 --- a/CHANGES/11074.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed connector not waiting for connections to close before returning from :meth:`~aiohttp.BaseConnector.close` (partial backport of :pr:`3733`) -- by :user:`atemate` and :user:`bdraco`. diff --git a/CHANGES/1925.bugfix.rst b/CHANGES/1925.bugfix.rst deleted file mode 120000 index eb158f4b0f9..00000000000 --- a/CHANGES/1925.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -11074.bugfix.rst \ No newline at end of file diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 31c39176b03..56201805d30 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.3" +__version__ = "3.12.4" from typing import TYPE_CHECKING, Tuple From 6836bb0a997e7e82625ca6967214fce5687c750f Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Wed, 28 May 2025 20:58:57 -0500 Subject: [PATCH 1470/1511] Increment version to 3.12.5.dev0 (#11080) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 56201805d30..74da5e01c07 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.4" +__version__ = "3.12.5.dev0" from typing import TYPE_CHECKING, Tuple From 0cbdc67384306f8300fbaf5014b0decf14b8fe19 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 29 May 2025 06:42:55 +0000 Subject: [PATCH 1471/1511] [PR #11081/5da0231f backport][3.12] Revert cache key change for linter (#11083) Co-authored-by: J. Nick Koston <nick@koston.org> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 1d44ddda982..1cae0bd57fe 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -49,7 +49,7 @@ jobs: - name: Cache PyPI uses: actions/cache@v4.2.3 with: - key: pip-lint-${{ hashFiles('requirements/*.txt') }}-v4 + key: pip-lint-${{ hashFiles('requirements/*.txt') }} path: ~/.cache/pip restore-keys: | pip-lint- From 73cca7c1da9d36c6555fe0a6865a6ccd0002ce3d Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 29 May 2025 22:47:14 +0000 Subject: [PATCH 1472/1511] [PR #11085/51698fb1 backport][3.12] Replace expensive isinstance checks with faster alternatives (#11086) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/11085.misc.rst | 1 + aiohttp/client_reqrep.py | 4 ++-- aiohttp/cookiejar.py | 4 ++-- aiohttp/multipart.py | 5 +++-- aiohttp/payload.py | 3 ++- 5 files changed, 10 insertions(+), 7 deletions(-) create mode 100644 CHANGES/11085.misc.rst diff --git a/CHANGES/11085.misc.rst b/CHANGES/11085.misc.rst new file mode 100644 index 00000000000..67b1915cfcb --- /dev/null +++ b/CHANGES/11085.misc.rst @@ -0,0 +1 @@ +Improved performance of isinstance checks by using collections.abc types instead of typing module equivalents -- by :user:`bdraco`. diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index a04c86b1c53..75df6d0e115 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -7,6 +7,7 @@ import sys import traceback import warnings +from collections.abc import Mapping as ABCMapping from hashlib import md5, sha1, sha256 from http.cookies import CookieError, Morsel, SimpleCookie from types import MappingProxyType, TracebackType @@ -18,7 +19,6 @@ Iterable, List, Literal, - Mapping, NamedTuple, Optional, Tuple, @@ -1085,7 +1085,7 @@ def update_cookies(self, cookies: Optional[LooseCookies]) -> None: c.load(self.headers.get(hdrs.COOKIE, "")) del self.headers[hdrs.COOKIE] - if isinstance(cookies, Mapping): + if isinstance(cookies, ABCMapping): iter_cookies = cookies.items() else: iter_cookies = cookies # type: ignore[assignment] diff --git a/aiohttp/cookiejar.py b/aiohttp/cookiejar.py index 696ffddc315..ca32e4123b1 100644 --- a/aiohttp/cookiejar.py +++ b/aiohttp/cookiejar.py @@ -11,6 +11,7 @@ import time import warnings from collections import defaultdict +from collections.abc import Mapping as ABCMapping from http.cookies import BaseCookie, Morsel, SimpleCookie from typing import ( DefaultDict, @@ -18,7 +19,6 @@ Iterable, Iterator, List, - Mapping, Optional, Set, Tuple, @@ -236,7 +236,7 @@ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> No # Don't accept cookies from IPs return - 
if isinstance(cookies, Mapping): + if isinstance(cookies, ABCMapping): cookies = cookies.items() for name, cookie in cookies: diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index 231c67c7bb7..90a2a5c28e4 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -6,6 +6,7 @@ import uuid import warnings from collections import deque +from collections.abc import Mapping as ABCMapping, Sequence as ABCSequence from types import TracebackType from typing import ( TYPE_CHECKING, @@ -953,12 +954,12 @@ def append_form( headers: Optional[Mapping[str, str]] = None, ) -> Payload: """Helper to append form urlencoded part.""" - assert isinstance(obj, (Sequence, Mapping)) + assert isinstance(obj, (ABCSequence, ABCMapping)) if headers is None: headers = CIMultiDict() - if isinstance(obj, Mapping): + if isinstance(obj, ABCMapping): obj = list(obj.items()) data = urlencode(obj, doseq=True) diff --git a/aiohttp/payload.py b/aiohttp/payload.py index 4a2c7922337..2149b7a4c7a 100644 --- a/aiohttp/payload.py +++ b/aiohttp/payload.py @@ -7,6 +7,7 @@ import sys import warnings from abc import ABC, abstractmethod +from collections.abc import Iterable as ABCIterable from itertools import chain from typing import ( IO, @@ -137,7 +138,7 @@ def register( self._first.append((factory, type)) elif order is Order.normal: self._normal.append((factory, type)) - if isinstance(type, Iterable): + if isinstance(type, ABCIterable): for t in type: self._normal_lookup[t] = factory else: From c7e03efdced9224f1d60bf3992c0767a60a732cf Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 29 May 2025 23:25:09 +0000 Subject: [PATCH 1473/1511] [PR #11088/b1da65e1 backport][3.12] Remove ABC names for isinstance checks (#11089) --- CHANGES/11088.misc.rst | 1 + aiohttp/client_reqrep.py | 4 ++-- aiohttp/cookiejar.py | 4 ++-- aiohttp/multipart.py | 8 +++----- aiohttp/payload.py | 5 ++--- 5 files changed, 10 insertions(+), 12 deletions(-) create 
mode 120000 CHANGES/11088.misc.rst diff --git a/CHANGES/11088.misc.rst b/CHANGES/11088.misc.rst new file mode 120000 index 00000000000..c9ebf3c31e1 --- /dev/null +++ b/CHANGES/11088.misc.rst @@ -0,0 +1 @@ +11085.misc.rst \ No newline at end of file diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 75df6d0e115..e437ef67aff 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -7,7 +7,7 @@ import sys import traceback import warnings -from collections.abc import Mapping as ABCMapping +from collections.abc import Mapping from hashlib import md5, sha1, sha256 from http.cookies import CookieError, Morsel, SimpleCookie from types import MappingProxyType, TracebackType @@ -1085,7 +1085,7 @@ def update_cookies(self, cookies: Optional[LooseCookies]) -> None: c.load(self.headers.get(hdrs.COOKIE, "")) del self.headers[hdrs.COOKIE] - if isinstance(cookies, ABCMapping): + if isinstance(cookies, Mapping): iter_cookies = cookies.items() else: iter_cookies = cookies # type: ignore[assignment] diff --git a/aiohttp/cookiejar.py b/aiohttp/cookiejar.py index ca32e4123b1..a755a893409 100644 --- a/aiohttp/cookiejar.py +++ b/aiohttp/cookiejar.py @@ -11,7 +11,7 @@ import time import warnings from collections import defaultdict -from collections.abc import Mapping as ABCMapping +from collections.abc import Mapping from http.cookies import BaseCookie, Morsel, SimpleCookie from typing import ( DefaultDict, @@ -236,7 +236,7 @@ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> No # Don't accept cookies from IPs return - if isinstance(cookies, ABCMapping): + if isinstance(cookies, Mapping): cookies = cookies.items() for name, cookie in cookies: diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index 90a2a5c28e4..79f8481ee30 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -6,7 +6,7 @@ import uuid import warnings from collections import deque -from collections.abc import Mapping as ABCMapping, Sequence as 
ABCSequence +from collections.abc import Mapping, Sequence from types import TracebackType from typing import ( TYPE_CHECKING, @@ -15,9 +15,7 @@ Dict, Iterator, List, - Mapping, Optional, - Sequence, Tuple, Type, Union, @@ -954,12 +952,12 @@ def append_form( headers: Optional[Mapping[str, str]] = None, ) -> Payload: """Helper to append form urlencoded part.""" - assert isinstance(obj, (ABCSequence, ABCMapping)) + assert isinstance(obj, (Sequence, Mapping)) if headers is None: headers = CIMultiDict() - if isinstance(obj, ABCMapping): + if isinstance(obj, Mapping): obj = list(obj.items()) data = urlencode(obj, doseq=True) diff --git a/aiohttp/payload.py b/aiohttp/payload.py index 2149b7a4c7a..7180fd2b430 100644 --- a/aiohttp/payload.py +++ b/aiohttp/payload.py @@ -7,7 +7,7 @@ import sys import warnings from abc import ABC, abstractmethod -from collections.abc import Iterable as ABCIterable +from collections.abc import Iterable from itertools import chain from typing import ( IO, @@ -15,7 +15,6 @@ Any, Dict, Final, - Iterable, List, Optional, Set, @@ -138,7 +137,7 @@ def register( self._first.append((factory, type)) elif order is Order.normal: self._normal.append((factory, type)) - if isinstance(type, ABCIterable): + if isinstance(type, Iterable): for t in type: self._normal_lookup[t] = factory else: From 8efe84e9c08f238cb13fcc818fbfb85fbaacc5e8 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Fri, 30 May 2025 22:05:59 -0500 Subject: [PATCH 1474/1511] [PR #11094/50bb06b backport][3.12] Fix SSL shutdown timeout for streaming connections (#11095) --- CHANGES/11091.feature.rst | 1 + CHANGES/11094.feature.rst | 1 + aiohttp/client.py | 3 +- aiohttp/connector.py | 43 ++++++++++++--- docs/client_reference.rst | 25 ++++++++- tests/test_client_functional.py | 65 ++++++++++++++++++++++ tests/test_client_session.py | 30 +++++++++- tests/test_connector.py | 98 +++++++++++++++++++++++++++++++++ tests/test_proxy.py | 24 +++++--- 9 files changed, 272 insertions(+), 18 deletions(-) create mode 100644 CHANGES/11091.feature.rst create mode 120000 CHANGES/11094.feature.rst diff --git a/CHANGES/11091.feature.rst b/CHANGES/11091.feature.rst new file mode 100644 index 00000000000..a4db2ddced5 --- /dev/null +++ b/CHANGES/11091.feature.rst @@ -0,0 +1 @@ +Added ``ssl_shutdown_timeout`` parameter to :py:class:`~aiohttp.ClientSession` and :py:class:`~aiohttp.TCPConnector` to control the grace period for SSL shutdown handshake on TLS connections. This helps prevent "connection reset" errors on the server side while avoiding excessive delays during connector cleanup. Note: This parameter only takes effect on Python 3.11+ -- by :user:`bdraco`. 
diff --git a/CHANGES/11094.feature.rst b/CHANGES/11094.feature.rst new file mode 120000 index 00000000000..a21761406a1 --- /dev/null +++ b/CHANGES/11094.feature.rst @@ -0,0 +1 @@ +11091.feature.rst \ No newline at end of file diff --git a/aiohttp/client.py b/aiohttp/client.py index 3b2cd2796cc..6457248d5ea 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -303,6 +303,7 @@ def __init__( max_field_size: int = 8190, fallback_charset_resolver: _CharsetResolver = lambda r, b: "utf-8", middlewares: Sequence[ClientMiddlewareType] = (), + ssl_shutdown_timeout: Optional[float] = 0.1, ) -> None: # We initialise _connector to None immediately, as it's referenced in __del__() # and could cause issues if an exception occurs during initialisation. @@ -361,7 +362,7 @@ def __init__( ) if connector is None: - connector = TCPConnector(loop=loop) + connector = TCPConnector(ssl_shutdown_timeout=ssl_shutdown_timeout) if connector._loop is not loop: raise RuntimeError("Session and connector has to use same event loop") diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 926a62684f6..6fa75d31a98 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -879,6 +879,12 @@ class TCPConnector(BaseConnector): socket_factory - A SocketFactoryType function that, if supplied, will be used to create sockets given an AddrInfoType. + ssl_shutdown_timeout - Grace period for SSL shutdown handshake on TLS + connections. Default is 0.1 seconds. This usually + allows for a clean SSL shutdown by notifying the + remote peer of connection closure, while avoiding + excessive delays during connector cleanup. + Note: Only takes effect on Python 3.11+. 
""" allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"tcp"}) @@ -905,6 +911,7 @@ def __init__( happy_eyeballs_delay: Optional[float] = 0.25, interleave: Optional[int] = None, socket_factory: Optional[SocketFactoryType] = None, + ssl_shutdown_timeout: Optional[float] = 0.1, ): super().__init__( keepalive_timeout=keepalive_timeout, @@ -932,6 +939,7 @@ def __init__( self._interleave = interleave self._resolve_host_tasks: Set["asyncio.Task[List[ResolveResult]]"] = set() self._socket_factory = socket_factory + self._ssl_shutdown_timeout = ssl_shutdown_timeout def _close(self) -> List[Awaitable[object]]: """Close all ongoing DNS calls.""" @@ -1176,6 +1184,13 @@ async def _wrap_create_connection( loop=self._loop, socket_factory=self._socket_factory, ) + # Add ssl_shutdown_timeout for Python 3.11+ when SSL is used + if ( + kwargs.get("ssl") + and self._ssl_shutdown_timeout is not None + and sys.version_info >= (3, 11) + ): + kwargs["ssl_shutdown_timeout"] = self._ssl_shutdown_timeout return await self._loop.create_connection(*args, **kwargs, sock=sock) except cert_errors as exc: raise ClientConnectorCertificateError(req.connection_key, exc) from exc @@ -1314,13 +1329,27 @@ async def _start_tls_connection( timeout.sock_connect, ceil_threshold=timeout.ceil_threshold ): try: - tls_transport = await self._loop.start_tls( - underlying_transport, - tls_proto, - sslcontext, - server_hostname=req.server_hostname or req.host, - ssl_handshake_timeout=timeout.total, - ) + # ssl_shutdown_timeout is only available in Python 3.11+ + if ( + sys.version_info >= (3, 11) + and self._ssl_shutdown_timeout is not None + ): + tls_transport = await self._loop.start_tls( + underlying_transport, + tls_proto, + sslcontext, + server_hostname=req.server_hostname or req.host, + ssl_handshake_timeout=timeout.total, + ssl_shutdown_timeout=self._ssl_shutdown_timeout, + ) + else: + tls_transport = await self._loop.start_tls( + underlying_transport, + tls_proto, + sslcontext, + 
server_hostname=req.server_hostname or req.host, + ssl_handshake_timeout=timeout.total, + ) except BaseException: # We need to close the underlying transport since # `start_tls()` probably failed before it had a diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 40fd7cdb276..07839686039 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -57,7 +57,8 @@ The client session supports the context manager protocol for self closing. read_bufsize=2**16, \ max_line_size=8190, \ max_field_size=8190, \ - fallback_charset_resolver=lambda r, b: "utf-8") + fallback_charset_resolver=lambda r, b: "utf-8", \ + ssl_shutdown_timeout=0.1) The class for creating client sessions and making requests. @@ -256,6 +257,16 @@ The client session supports the context manager protocol for self closing. .. versionadded:: 3.8.6 + :param float ssl_shutdown_timeout: Grace period for SSL shutdown handshake on TLS + connections (``0.1`` seconds by default). This usually provides sufficient time + to notify the remote peer of connection closure, helping prevent broken + connections on the server side, while minimizing delays during connector + cleanup. This timeout is passed to the underlying :class:`TCPConnector` + when one is created automatically. Note: This parameter only takes effect + on Python 3.11+. + + .. versionadded:: 3.12.5 + .. attribute:: closed ``True`` if the session has been closed, ``False`` otherwise. @@ -1185,7 +1196,7 @@ is controlled by *force_close* constructor's parameter). force_close=False, limit=100, limit_per_host=0, \ enable_cleanup_closed=False, timeout_ceil_threshold=5, \ happy_eyeballs_delay=0.25, interleave=None, loop=None, \ - socket_factory=None) + socket_factory=None, ssl_shutdown_timeout=0.1) Connector for working with *HTTP* and *HTTPS* via *TCP* sockets. @@ -1312,6 +1323,16 @@ is controlled by *force_close* constructor's parameter). .. 
versionadded:: 3.12 + :param float ssl_shutdown_timeout: Grace period for SSL shutdown on TLS + connections (``0.1`` seconds by default). This parameter balances two + important considerations: usually providing sufficient time to notify + the remote server (which helps prevent "connection reset" errors), + while avoiding unnecessary delays during connector cleanup. + The default value provides a reasonable compromise for most use cases. + Note: This parameter only takes effect on Python 3.11+. + + .. versionadded:: 3.12.5 + .. attribute:: family *TCP* socket family e.g. :data:`socket.AF_INET` or diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index cb4edd3d1e1..1d91956c4a3 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -12,6 +12,7 @@ import tarfile import time import zipfile +from contextlib import suppress from typing import ( Any, AsyncIterator, @@ -685,6 +686,70 @@ async def handler(request): assert txt == "Test message" +@pytest.mark.skipif( + sys.version_info < (3, 11), reason="ssl_shutdown_timeout requires Python 3.11+" +) +async def test_ssl_client_shutdown_timeout( + aiohttp_server: AiohttpServer, + ssl_ctx: ssl.SSLContext, + aiohttp_client: AiohttpClient, + client_ssl_ctx: ssl.SSLContext, +) -> None: + # Test that ssl_shutdown_timeout is properly used during connection closure + + connector = aiohttp.TCPConnector(ssl=client_ssl_ctx, ssl_shutdown_timeout=0.1) + + async def streaming_handler(request: web.Request) -> NoReturn: + # Create a streaming response that continuously sends data + response = web.StreamResponse() + await response.prepare(request) + + # Keep sending data until connection is closed + while True: + await response.write(b"data chunk\n") + await asyncio.sleep(0.01) # Small delay between chunks + + assert False, "not reached" + + app = web.Application() + app.router.add_route("GET", "/stream", streaming_handler) + server = await aiohttp_server(app, ssl=ssl_ctx) + client = 
await aiohttp_client(server, connector=connector) + + # Verify the connector has the correct timeout + assert connector._ssl_shutdown_timeout == 0.1 + + # Start a streaming request to establish SSL connection with active data transfer + resp = await client.get("/stream") + assert resp.status == 200 + + # Create a background task that continuously reads data + async def read_loop() -> None: + while True: + # Read "data chunk\n" + await resp.content.read(11) + + read_task = asyncio.create_task(read_loop()) + await asyncio.sleep(0) # Yield control to ensure read_task starts + + # Record the time before closing + start_time = time.monotonic() + + # Now close the connector while the stream is still active + # This will test the ssl_shutdown_timeout during an active connection + await connector.close() + + # Verify the connection was closed within a reasonable time + # Should be close to ssl_shutdown_timeout (0.1s) but allow some margin + elapsed = time.monotonic() - start_time + assert elapsed < 0.3, f"Connection closure took too long: {elapsed}s" + + read_task.cancel() + with suppress(asyncio.CancelledError): + await read_task + assert read_task.done(), "Read task should be cancelled after connection closure" + + async def test_ssl_client_alpn( aiohttp_server: AiohttpServer, aiohttp_client: AiohttpClient, diff --git a/tests/test_client_session.py b/tests/test_client_session.py index 56c7a5c0c13..0fdfaee6761 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -310,7 +310,35 @@ async def test_create_connector(create_session, loop, mocker) -> None: assert connector.close.called -def test_connector_loop(loop) -> None: +async def test_ssl_shutdown_timeout_passed_to_connector() -> None: + # Test default value + async with ClientSession() as session: + assert isinstance(session.connector, TCPConnector) + assert session.connector._ssl_shutdown_timeout == 0.1 + + # Test custom value + async with ClientSession(ssl_shutdown_timeout=1.0) as session: + 
assert isinstance(session.connector, TCPConnector) + assert session.connector._ssl_shutdown_timeout == 1.0 + + # Test None value + async with ClientSession(ssl_shutdown_timeout=None) as session: + assert isinstance(session.connector, TCPConnector) + assert session.connector._ssl_shutdown_timeout is None + + # Test that it doesn't affect when custom connector is provided + custom_conn = TCPConnector(ssl_shutdown_timeout=2.0) + async with ClientSession( + connector=custom_conn, ssl_shutdown_timeout=1.0 + ) as session: + assert session.connector is not None + assert isinstance(session.connector, TCPConnector) + assert ( + session.connector._ssl_shutdown_timeout == 2.0 + ) # Should use connector's value + + +def test_connector_loop(loop: asyncio.AbstractEventLoop) -> None: with contextlib.ExitStack() as stack: another_loop = asyncio.new_event_loop() stack.enter_context(contextlib.closing(another_loop)) diff --git a/tests/test_connector.py b/tests/test_connector.py index f17ded6d960..3b2d28ea46c 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -2002,6 +2002,104 @@ async def test_tcp_connector_ctor() -> None: await conn.close() +async def test_tcp_connector_ssl_shutdown_timeout( + loop: asyncio.AbstractEventLoop, +) -> None: + # Test default value + conn = aiohttp.TCPConnector() + assert conn._ssl_shutdown_timeout == 0.1 + await conn.close() + + # Test custom value + conn = aiohttp.TCPConnector(ssl_shutdown_timeout=1.0) + assert conn._ssl_shutdown_timeout == 1.0 + await conn.close() + + # Test None value + conn = aiohttp.TCPConnector(ssl_shutdown_timeout=None) + assert conn._ssl_shutdown_timeout is None + await conn.close() + + +@pytest.mark.skipif( + sys.version_info < (3, 11), reason="ssl_shutdown_timeout requires Python 3.11+" +) +async def test_tcp_connector_ssl_shutdown_timeout_passed_to_create_connection( + loop: asyncio.AbstractEventLoop, start_connection: mock.AsyncMock +) -> None: + # Test that ssl_shutdown_timeout is passed to 
create_connection for SSL connections + conn = aiohttp.TCPConnector(ssl_shutdown_timeout=2.5) + + with mock.patch.object( + conn._loop, "create_connection", autospec=True, spec_set=True + ) as create_connection: + create_connection.return_value = mock.Mock(), mock.Mock() + + req = ClientRequest("GET", URL("https://example.com"), loop=loop) + + with closing(await conn.connect(req, [], ClientTimeout())): + assert create_connection.call_args.kwargs["ssl_shutdown_timeout"] == 2.5 + + await conn.close() + + # Test with None value + conn = aiohttp.TCPConnector(ssl_shutdown_timeout=None) + + with mock.patch.object( + conn._loop, "create_connection", autospec=True, spec_set=True + ) as create_connection: + create_connection.return_value = mock.Mock(), mock.Mock() + + req = ClientRequest("GET", URL("https://example.com"), loop=loop) + + with closing(await conn.connect(req, [], ClientTimeout())): + # When ssl_shutdown_timeout is None, it should not be in kwargs + assert "ssl_shutdown_timeout" not in create_connection.call_args.kwargs + + await conn.close() + + # Test that ssl_shutdown_timeout is NOT passed for non-SSL connections + conn = aiohttp.TCPConnector(ssl_shutdown_timeout=2.5) + + with mock.patch.object( + conn._loop, "create_connection", autospec=True, spec_set=True + ) as create_connection: + create_connection.return_value = mock.Mock(), mock.Mock() + + req = ClientRequest("GET", URL("http://example.com"), loop=loop) + + with closing(await conn.connect(req, [], ClientTimeout())): + # For non-SSL connections, ssl_shutdown_timeout should not be passed + assert "ssl_shutdown_timeout" not in create_connection.call_args.kwargs + + await conn.close() + + +@pytest.mark.skipif(sys.version_info >= (3, 11), reason="Test for Python < 3.11") +async def test_tcp_connector_ssl_shutdown_timeout_not_passed_pre_311( + loop: asyncio.AbstractEventLoop, start_connection: mock.AsyncMock +) -> None: + # Test that ssl_shutdown_timeout is NOT passed to create_connection on Python < 3.11 + 
conn = aiohttp.TCPConnector(ssl_shutdown_timeout=2.5) + + with mock.patch.object( + conn._loop, "create_connection", autospec=True, spec_set=True + ) as create_connection: + create_connection.return_value = mock.Mock(), mock.Mock() + + # Test with HTTPS + req = ClientRequest("GET", URL("https://example.com"), loop=loop) + with closing(await conn.connect(req, [], ClientTimeout())): + assert "ssl_shutdown_timeout" not in create_connection.call_args.kwargs + + # Test with HTTP + req = ClientRequest("GET", URL("http://example.com"), loop=loop) + with closing(await conn.connect(req, [], ClientTimeout())): + assert "ssl_shutdown_timeout" not in create_connection.call_args.kwargs + + await conn.close() + + async def test_tcp_connector_allowed_protocols(loop: asyncio.AbstractEventLoop) -> None: conn = aiohttp.TCPConnector() assert conn.allowed_protocol_schema_set == {"", "tcp", "http", "https", "ws", "wss"} diff --git a/tests/test_proxy.py b/tests/test_proxy.py index 0e73210f58b..f5ebf6adc4f 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -936,13 +936,23 @@ async def make_conn(): connector._create_connection(req, None, aiohttp.ClientTimeout()) ) - self.loop.start_tls.assert_called_with( - mock.ANY, - mock.ANY, - _SSL_CONTEXT_VERIFIED, - server_hostname="www.python.org", - ssl_handshake_timeout=mock.ANY, - ) + if sys.version_info >= (3, 11): + self.loop.start_tls.assert_called_with( + mock.ANY, + mock.ANY, + _SSL_CONTEXT_VERIFIED, + server_hostname="www.python.org", + ssl_handshake_timeout=mock.ANY, + ssl_shutdown_timeout=0.1, + ) + else: + self.loop.start_tls.assert_called_with( + mock.ANY, + mock.ANY, + _SSL_CONTEXT_VERIFIED, + server_hostname="www.python.org", + ssl_handshake_timeout=mock.ANY, + ) self.assertEqual(req.url.path, "/") self.assertEqual(proxy_req.method, "CONNECT") From d4e62efaccf9af60eb02f660454b2ee201cfb88d Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Fri, 30 May 2025 22:27:29 -0500 Subject: [PATCH 1475/1511] Release 3.12.5 (#11097) --- CHANGES.rst | 30 ++++++++++++++++++++++++++++++ CHANGES/11085.misc.rst | 1 - CHANGES/11088.misc.rst | 1 - CHANGES/11091.feature.rst | 1 - CHANGES/11094.feature.rst | 1 - aiohttp/__init__.py | 2 +- 6 files changed, 31 insertions(+), 5 deletions(-) delete mode 100644 CHANGES/11085.misc.rst delete mode 120000 CHANGES/11088.misc.rst delete mode 100644 CHANGES/11091.feature.rst delete mode 120000 CHANGES/11094.feature.rst diff --git a/CHANGES.rst b/CHANGES.rst index 8d3bcbac867..360750dd88f 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,36 @@ .. towncrier release notes start +3.12.5 (2025-05-30) +=================== + +Features +-------- + +- Added ``ssl_shutdown_timeout`` parameter to :py:class:`~aiohttp.ClientSession` and :py:class:`~aiohttp.TCPConnector` to control the grace period for SSL shutdown handshake on TLS connections. This helps prevent "connection reset" errors on the server side while avoiding excessive delays during connector cleanup. Note: This parameter only takes effect on Python 3.11+ -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`11091`, :issue:`11094`. + + + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of isinstance checks by using collections.abc types instead of typing module equivalents -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`11085`, :issue:`11088`. + + + + +---- + + 3.12.4 (2025-05-28) =================== diff --git a/CHANGES/11085.misc.rst b/CHANGES/11085.misc.rst deleted file mode 100644 index 67b1915cfcb..00000000000 --- a/CHANGES/11085.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performance of isinstance checks by using collections.abc types instead of typing module equivalents -- by :user:`bdraco`. 
diff --git a/CHANGES/11088.misc.rst b/CHANGES/11088.misc.rst deleted file mode 120000 index c9ebf3c31e1..00000000000 --- a/CHANGES/11088.misc.rst +++ /dev/null @@ -1 +0,0 @@ -11085.misc.rst \ No newline at end of file diff --git a/CHANGES/11091.feature.rst b/CHANGES/11091.feature.rst deleted file mode 100644 index a4db2ddced5..00000000000 --- a/CHANGES/11091.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Added ``ssl_shutdown_timeout`` parameter to :py:class:`~aiohttp.ClientSession` and :py:class:`~aiohttp.TCPConnector` to control the grace period for SSL shutdown handshake on TLS connections. This helps prevent "connection reset" errors on the server side while avoiding excessive delays during connector cleanup. Note: This parameter only takes effect on Python 3.11+ -- by :user:`bdraco`. diff --git a/CHANGES/11094.feature.rst b/CHANGES/11094.feature.rst deleted file mode 120000 index a21761406a1..00000000000 --- a/CHANGES/11094.feature.rst +++ /dev/null @@ -1 +0,0 @@ -11091.feature.rst \ No newline at end of file diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 74da5e01c07..fc946e05e9f 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.5.dev0" +__version__ = "3.12.5" from typing import TYPE_CHECKING, Tuple From 497df2e83921023e3b88f2adb99390f2ac0b55e5 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 31 May 2025 00:13:17 -0500 Subject: [PATCH 1476/1511] [PR #11100/947247fd backport][3.12] Fix spurious "Future exception was never retrieved" warnings for connection lost errors (#11101) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/11100.bugfix.rst | 3 +++ aiohttp/client_proto.py | 6 ++++++ tests/test_client_proto.py | 19 +++++++++++++++++++ 3 files changed, 28 insertions(+) create mode 100644 CHANGES/11100.bugfix.rst diff --git a/CHANGES/11100.bugfix.rst b/CHANGES/11100.bugfix.rst new file mode 100644 index 00000000000..a7c54059a14 --- /dev/null +++ b/CHANGES/11100.bugfix.rst @@ -0,0 +1,3 @@ +Fixed spurious "Future exception was never retrieved" warnings for connection lost errors when the connector is not closed -- by :user:`bdraco`. + +When connections are lost, the exception is now marked as retrieved since it is always propagated through other means, preventing unnecessary warnings in logs. diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index 6a0318e553a..2d8c2e578c4 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -97,6 +97,12 @@ def connection_lost(self, exc: Optional[BaseException]) -> None: ), original_connection_error, ) + # Mark the exception as retrieved to prevent + # "Future exception was never retrieved" warnings + # The exception is always passed on through + # other means, so this is safe + with suppress(Exception): + self.closed.exception() if self._payload_parser is not None: with suppress(Exception): # FIXME: log this somehow? 
diff --git a/tests/test_client_proto.py b/tests/test_client_proto.py index af1286dc310..c7fb79a5f44 100644 --- a/tests/test_client_proto.py +++ b/tests/test_client_proto.py @@ -247,3 +247,22 @@ async def test_connection_lost_sets_transport_to_none(loop, mocker) -> None: proto.connection_lost(OSError()) assert proto.transport is None + + +async def test_connection_lost_exception_is_marked_retrieved( + loop: asyncio.AbstractEventLoop, +) -> None: + """Test that connection_lost properly handles exceptions without warnings.""" + proto = ResponseHandler(loop=loop) + proto.connection_made(mock.Mock()) + + # Simulate an SSL shutdown timeout error + ssl_error = TimeoutError("SSL shutdown timed out") + proto.connection_lost(ssl_error) + + # Verify the exception was set on the closed future + assert proto.closed.done() + exc = proto.closed.exception() + assert exc is not None + assert "Connection lost: SSL shutdown timed out" in str(exc) + assert exc.__cause__ is ssl_error From 623690d815f7127f0beb170ce7d36b384cf4f55c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sat, 31 May 2025 00:16:20 -0500 Subject: [PATCH 1477/1511] Release 3.12.6 (#11103) --- CHANGES.rst | 20 ++++++++++++++++++++ CHANGES/11100.bugfix.rst | 3 --- aiohttp/__init__.py | 2 +- 3 files changed, 21 insertions(+), 4 deletions(-) delete mode 100644 CHANGES/11100.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index 360750dd88f..0e10454a3d1 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,26 @@ .. towncrier release notes start +3.12.6 (2025-05-31) +=================== + +Bug fixes +--------- + +- Fixed spurious "Future exception was never retrieved" warnings for connection lost errors when the connector is not closed -- by :user:`bdraco`. + + When connections are lost, the exception is now marked as retrieved since it is always propagated through other means, preventing unnecessary warnings in logs. + + + *Related issues and pull requests on GitHub:* + :issue:`11100`. 
+ + + + +---- + + 3.12.5 (2025-05-30) =================== diff --git a/CHANGES/11100.bugfix.rst b/CHANGES/11100.bugfix.rst deleted file mode 100644 index a7c54059a14..00000000000 --- a/CHANGES/11100.bugfix.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fixed spurious "Future exception was never retrieved" warnings for connection lost errors when the connector is not closed -- by :user:`bdraco`. - -When connections are lost, the exception is now marked as retrieved since it is always propagated through other means, preventing unnecessary warnings in logs. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index fc946e05e9f..6a0b167be83 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.5" +__version__ = "3.12.6" from typing import TYPE_CHECKING, Tuple From 1710f059dd6c63fcac1efbfd041b1e1cdda9f474 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Sat, 31 May 2025 01:00:46 -0500 Subject: [PATCH 1478/1511] Increment version to 3.12.7.dev0 (#11099) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 6a0b167be83..78f22b4051f 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.6" +__version__ = "3.12.7.dev0" from typing import TYPE_CHECKING, Tuple From b1aa238220d0bd73bd6a91f065961e67cc1fb0b8 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sun, 1 Jun 2025 09:48:49 -0500 Subject: [PATCH 1479/1511] [PR #11106/cfb9931 backport][3.12] Fix cookies with duplicate names being lost when updating cookie jar (#11108) --- CHANGES/11105.bugfix.rst | 10 ++ CHANGES/11106.bugfix.rst | 1 + CHANGES/4486.bugfix.rst | 1 + aiohttp/abc.py | 29 ++++- aiohttp/client.py | 7 +- aiohttp/client_reqrep.py | 29 +++-- docs/client_reference.rst | 13 ++ tests/test_client_functional.py | 146 +++++++++++++++++++++ tests/test_client_response.py | 144 ++++++++++++++++++++- tests/test_client_session.py | 63 ++++++++-- tests/test_cookiejar.py | 217 ++++++++++++++++++++++++++++++++ 11 files changed, 635 insertions(+), 25 deletions(-) create mode 100644 CHANGES/11105.bugfix.rst create mode 120000 CHANGES/11106.bugfix.rst create mode 120000 CHANGES/4486.bugfix.rst diff --git a/CHANGES/11105.bugfix.rst b/CHANGES/11105.bugfix.rst new file mode 100644 index 00000000000..33578aa7a95 --- /dev/null +++ b/CHANGES/11105.bugfix.rst @@ -0,0 +1,10 @@ +Fixed an issue where cookies with duplicate names but different domains or paths +were lost when updating the cookie jar. The :class:`~aiohttp.ClientSession` +cookie jar now correctly stores all cookies even if they have the same name but +different domain or path, following the :rfc:`6265#section-5.3` storage model -- by :user:`bdraco`. + +Note that :attr:`ClientResponse.cookies <aiohttp.ClientResponse.cookies>` returns +a :class:`~http.cookies.SimpleCookie` which uses the cookie name as a key, so +only the last cookie with each name is accessible via this interface. All cookies +can be accessed via :meth:`ClientResponse.headers.getall('Set-Cookie') +<multidict.MultiDictProxy.getall>` if needed. 
diff --git a/CHANGES/11106.bugfix.rst b/CHANGES/11106.bugfix.rst new file mode 120000 index 00000000000..3e5efb0f3f3 --- /dev/null +++ b/CHANGES/11106.bugfix.rst @@ -0,0 +1 @@ +11105.bugfix.rst \ No newline at end of file diff --git a/CHANGES/4486.bugfix.rst b/CHANGES/4486.bugfix.rst new file mode 120000 index 00000000000..3e5efb0f3f3 --- /dev/null +++ b/CHANGES/4486.bugfix.rst @@ -0,0 +1 @@ +11105.bugfix.rst \ No newline at end of file diff --git a/aiohttp/abc.py b/aiohttp/abc.py index c1bf5032d0d..353c18be266 100644 --- a/aiohttp/abc.py +++ b/aiohttp/abc.py @@ -3,7 +3,7 @@ import socket from abc import ABC, abstractmethod from collections.abc import Sized -from http.cookies import BaseCookie, Morsel +from http.cookies import BaseCookie, CookieError, Morsel, SimpleCookie from typing import ( TYPE_CHECKING, Any, @@ -14,6 +14,7 @@ Iterable, List, Optional, + Sequence, Tuple, TypedDict, Union, @@ -22,6 +23,7 @@ from multidict import CIMultiDict from yarl import URL +from .log import client_logger from .typedefs import LooseCookies if TYPE_CHECKING: @@ -192,6 +194,31 @@ def clear_domain(self, domain: str) -> None: def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None: """Update cookies.""" + def update_cookies_from_headers( + self, headers: Sequence[str], response_url: URL + ) -> None: + """ + Update cookies from raw Set-Cookie headers. + + Default implementation parses each header separately to preserve + cookies with same name but different domain/path. 
+ """ + # Default implementation for backward compatibility + cookies_to_update: List[Tuple[str, Morsel[str]]] = [] + for cookie_header in headers: + tmp_cookie = SimpleCookie() + try: + tmp_cookie.load(cookie_header) + # Collect all cookies as tuples (name, morsel) + for name, morsel in tmp_cookie.items(): + cookies_to_update.append((name, morsel)) + except CookieError as exc: + client_logger.warning("Can not load response cookies: %s", exc) + + # Update all cookies at once for efficiency + if cookies_to_update: + self.update_cookies(cookies_to_update, response_url) + @abstractmethod def filter_cookies(self, request_url: URL) -> "BaseCookie[str]": """Return the jar's cookies filtered by their attributes.""" diff --git a/aiohttp/client.py b/aiohttp/client.py index 6457248d5ea..576a965ba5d 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -779,8 +779,11 @@ async def _connect_and_send_request( raise raise ClientOSError(*exc.args) from exc - if cookies := resp._cookies: - self._cookie_jar.update_cookies(cookies, resp.url) + # Update cookies from raw headers to preserve duplicates + if resp._raw_cookie_headers: + self._cookie_jar.update_cookies_from_headers( + resp._raw_cookie_headers, resp.url + ) # redirects if resp.status in (301, 302, 303, 307, 308) and allow_redirects: diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index e437ef67aff..01835260cc5 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -291,6 +291,7 @@ class ClientResponse(HeadersMixin): _connection: Optional["Connection"] = None # current connection _cookies: Optional[SimpleCookie] = None + _raw_cookie_headers: Optional[Tuple[str, ...]] = None _continue: Optional["asyncio.Future[bool]"] = None _source_traceback: Optional[traceback.StackSummary] = None _session: Optional["ClientSession"] = None @@ -372,12 +373,29 @@ def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None: @property def cookies(self) -> SimpleCookie: if self._cookies is None: - 
self._cookies = SimpleCookie() + if self._raw_cookie_headers is not None: + # Parse cookies for response.cookies (SimpleCookie for backward compatibility) + cookies = SimpleCookie() + for hdr in self._raw_cookie_headers: + try: + cookies.load(hdr) + except CookieError as exc: + client_logger.warning("Can not load response cookies: %s", exc) + self._cookies = cookies + else: + self._cookies = SimpleCookie() return self._cookies @cookies.setter def cookies(self, cookies: SimpleCookie) -> None: self._cookies = cookies + # Generate raw cookie headers from the SimpleCookie + if cookies: + self._raw_cookie_headers = tuple( + morsel.OutputString() for morsel in cookies.values() + ) + else: + self._raw_cookie_headers = None @reify def url(self) -> URL: @@ -543,13 +561,8 @@ async def start(self, connection: "Connection") -> "ClientResponse": # cookies if cookie_hdrs := self.headers.getall(hdrs.SET_COOKIE, ()): - cookies = SimpleCookie() - for hdr in cookie_hdrs: - try: - cookies.load(hdr) - except CookieError as exc: - client_logger.warning("Can not load response cookies: %s", exc) - self._cookies = cookies + # Store raw cookie headers for CookieJar + self._raw_cookie_headers = tuple(cookie_hdrs) return self def _response_eof(self) -> None: diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 07839686039..8a721f514cd 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -1499,6 +1499,19 @@ Response object HTTP cookies of response (*Set-Cookie* HTTP header, :class:`~http.cookies.SimpleCookie`). + .. note:: + + Since :class:`~http.cookies.SimpleCookie` uses cookie name as the + key, cookies with the same name but different domains or paths will + be overwritten. Only the last cookie with a given name will be + accessible via this attribute. + + To access all cookies, including duplicates with the same name, + use :meth:`response.headers.getall('Set-Cookie') <multidict.MultiDictProxy.getall>`. 
+ + The session's cookie jar will correctly store all cookies, even if + they are not accessible via this attribute. + .. attribute:: headers A case-insensitive multidict proxy with HTTP headers of diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 1d91956c4a3..ca1a7dd1d6b 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -2712,6 +2712,7 @@ async def handler(request): async with client.get("/") as resp: assert 200 == resp.status cookie_names = {c.key for c in client.session.cookie_jar} + _ = resp.cookies assert cookie_names == {"c1", "c2"} m_log.warning.assert_called_with("Can not load response cookies: %s", mock.ANY) @@ -5111,3 +5112,148 @@ async def redirect_handler(request: web.Request) -> web.Response: assert ( payload.close_called ), "Payload.close() was not called when InvalidUrlRedirectClientError (invalid origin) was raised" + + +async def test_amazon_like_cookie_scenario(aiohttp_client: AiohttpClient) -> None: + """Test real-world cookie scenario similar to Amazon.""" + + class FakeResolver(AbstractResolver): + def __init__(self, port: int): + self._port = port + + async def resolve( + self, host: str, port: int = 0, family: int = 0 + ) -> List[ResolveResult]: + if host in ("amazon.it", "www.amazon.it"): + return [ + { + "hostname": host, + "host": "127.0.0.1", + "port": self._port, + "family": socket.AF_INET, + "proto": 0, + "flags": 0, + } + ] + assert False, f"Unexpected host: {host}" + + async def close(self) -> None: + """Close the resolver if needed.""" + + async def handler(request: web.Request) -> web.Response: + response = web.Response(text="Login successful") + + # Simulate Amazon-like cookies from the issue + cookies = [ + "session-id=146-7423990-7621939; Domain=.amazon.it; " + "Expires=Mon, 31-May-2027 10:00:00 GMT; Path=/; " + "Secure; HttpOnly", + "session-id=147-8529641-8642103; Domain=.www.amazon.it; " + "Expires=Mon, 31-May-2027 10:00:00 GMT; Path=/; HttpOnly", + 
"session-id-time=2082758401l; Domain=.amazon.it; " + "Expires=Mon, 31-May-2027 10:00:00 GMT; Path=/; Secure", + "session-id-time=2082758402l; Domain=.www.amazon.it; " + "Expires=Mon, 31-May-2027 10:00:00 GMT; Path=/", + "ubid-acbit=257-7531983-5395266; Domain=.amazon.it; " + "Expires=Mon, 31-May-2027 10:00:00 GMT; Path=/; Secure", + 'x-acbit="KdvJzu8W@Fx6Jj3EuNFLuP0N7OtkuCfs"; Version=1; ' + "Domain=.amazon.it; Path=/; Secure; HttpOnly", + "at-acbit=Atza|IwEBIM-gLr8; Domain=.amazon.it; " + "Expires=Mon, 31-May-2027 10:00:00 GMT; Path=/; " + "Secure; HttpOnly", + 'sess-at-acbit="4+6VzSJPHIFD/OqO264hFxIng8Y="; ' + "Domain=.amazon.it; Expires=Mon, 31-May-2027 10:00:00 GMT; " + "Path=/; Secure; HttpOnly", + "lc-acbit=it_IT; Domain=.amazon.it; " + "Expires=Mon, 31-May-2027 10:00:00 GMT; Path=/", + "i18n-prefs=EUR; Domain=.amazon.it; " + "Expires=Mon, 31-May-2027 10:00:00 GMT; Path=/", + "av-profile=null; Domain=.amazon.it; " + "Expires=Mon, 31-May-2027 10:00:00 GMT; Path=/; Secure", + 'user-pref-token="Am81ywsJ69xObBnuJ2FbilVH0mg="; ' + "Domain=.amazon.it; Path=/; Secure", + ] + + for cookie in cookies: + response.headers.add("Set-Cookie", cookie) + + return response + + app = web.Application() + app.router.add_get("/", handler) + + # Get the test server + server = await aiohttp_client(app) + port = server.port + + # Create a new client session with our fake resolver + resolver = FakeResolver(port) + + async with ( + aiohttp.TCPConnector(resolver=resolver, force_close=True) as connector, + aiohttp.ClientSession(connector=connector) as session, + ): + # Make request to www.amazon.it which will resolve to + # 127.0.0.1:port. 
This allows cookies for both .amazon.it + # and .www.amazon.it domains + resp = await session.get(f"http://www.amazon.it:{port}/") + + # Check headers + cookie_headers = resp.headers.getall("Set-Cookie") + assert ( + len(cookie_headers) == 12 + ), f"Expected 12 headers, got {len(cookie_headers)}" + + # Check parsed cookies - SimpleCookie only keeps the last + # cookie with each name. So we expect 10 unique cookie names + # (not 12) + expected_cookie_names = { + "session-id", # Will only have one + "session-id-time", # Will only have one + "ubid-acbit", + "x-acbit", + "at-acbit", + "sess-at-acbit", + "lc-acbit", + "i18n-prefs", + "av-profile", + "user-pref-token", + } + assert set(resp.cookies.keys()) == expected_cookie_names + assert ( + len(resp.cookies) == 10 + ), f"Expected 10 cookies in SimpleCookie, got {len(resp.cookies)}" + + # The important part: verify the session's cookie jar has + # all cookies. The cookie jar should have all 12 cookies, + # not just 10 + jar_cookies = list(session.cookie_jar) + assert ( + len(jar_cookies) == 12 + ), f"Expected 12 cookies in jar, got {len(jar_cookies)}" + + # Verify we have both session-id cookies with different domains + session_ids = [c for c in jar_cookies if c.key == "session-id"] + assert ( + len(session_ids) == 2 + ), f"Expected 2 session-id cookies, got {len(session_ids)}" + + # Verify the domains are different + session_id_domains = {c["domain"] for c in session_ids} + assert session_id_domains == { + "amazon.it", + "www.amazon.it", + }, f"Got domains: {session_id_domains}" + + # Verify we have both session-id-time cookies with different + # domains + session_id_times = [c for c in jar_cookies if c.key == "session-id-time"] + assert ( + len(session_id_times) == 2 + ), f"Expected 2 session-id-time cookies, got {len(session_id_times)}" + + # Now test that the raw headers were properly preserved + assert resp._raw_cookie_headers is not None + assert ( + len(resp._raw_cookie_headers) == 12 + ), "All raw headers 
should be preserved" diff --git a/tests/test_client_response.py b/tests/test_client_response.py index 4a8000962d1..2d70feaf06d 100644 --- a/tests/test_client_response.py +++ b/tests/test_client_response.py @@ -3,6 +3,7 @@ import asyncio import gc import sys +from http.cookies import SimpleCookie from typing import Callable from unittest import mock @@ -11,7 +12,7 @@ from yarl import URL import aiohttp -from aiohttp import ClientSession, http +from aiohttp import ClientSession, hdrs, http from aiohttp.client_reqrep import ClientResponse, RequestInfo from aiohttp.helpers import TimerNoop @@ -1333,3 +1334,144 @@ def test_response_not_closed_after_get_ok(mocker) -> None: assert not response.ok assert not response.closed assert spy.call_count == 0 + + +def test_response_duplicate_cookie_names( + loop: asyncio.AbstractEventLoop, session: ClientSession +) -> None: + """ + Test that response.cookies handles duplicate cookie names correctly. + + Note: This behavior (losing cookies with same name but different domains/paths) + is arguably undesirable, but we promise to return a SimpleCookie object, and + SimpleCookie uses cookie name as the key. This is documented behavior. 
+ + To access all cookies including duplicates, users should use: + - response.headers.getall('Set-Cookie') for raw headers + - The session's cookie jar correctly stores all cookies + """ + response = ClientResponse( + "get", + URL("http://example.com"), + request_info=mock.Mock(), + writer=WriterMock(), + continue100=None, + timer=TimerNoop(), + traces=[], + loop=loop, + session=session, + ) + + # Set headers with duplicate cookie names but different domains + headers = CIMultiDict( + [ + ( + "Set-Cookie", + "session-id=123-4567890; Domain=.example.com; Path=/; Secure", + ), + ("Set-Cookie", "session-id=098-7654321; Domain=.www.example.com; Path=/"), + ("Set-Cookie", "user-pref=dark; Domain=.example.com; Path=/"), + ("Set-Cookie", "user-pref=light; Domain=api.example.com; Path=/"), + ] + ) + response._headers = CIMultiDictProxy(headers) + # Set raw cookie headers as done in ClientResponse.start() + response._raw_cookie_headers = tuple(headers.getall("Set-Cookie", [])) + + # SimpleCookie only keeps the last cookie with each name + # This is expected behavior since SimpleCookie uses name as the key + assert len(response.cookies) == 2 # Only 'session-id' and 'user-pref' + assert response.cookies["session-id"].value == "098-7654321" # Last one wins + assert response.cookies["user-pref"].value == "light" # Last one wins + + +def test_response_raw_cookie_headers_preserved( + loop: asyncio.AbstractEventLoop, session: ClientSession +) -> None: + """Test that raw Set-Cookie headers are preserved in _raw_cookie_headers.""" + response = ClientResponse( + "get", + URL("http://example.com"), + request_info=mock.Mock(), + writer=WriterMock(), + continue100=None, + timer=TimerNoop(), + traces=[], + loop=loop, + session=session, + ) + + # Set headers with multiple cookies + cookie_headers = [ + "session-id=123; Domain=.example.com; Path=/; Secure", + "session-id=456; Domain=.www.example.com; Path=/", + "tracking=xyz; Domain=.example.com; Path=/; HttpOnly", + ] + + headers: 
CIMultiDict[str] = CIMultiDict() + for cookie_hdr in cookie_headers: + headers.add("Set-Cookie", cookie_hdr) + + response._headers = CIMultiDictProxy(headers) + + # Set raw cookie headers as done in ClientResponse.start() + response._raw_cookie_headers = tuple(response.headers.getall(hdrs.SET_COOKIE, [])) + + # Verify raw headers are preserved + assert response._raw_cookie_headers == tuple(cookie_headers) + assert len(response._raw_cookie_headers) == 3 + + # But SimpleCookie only has unique names + assert len(response.cookies) == 2 # 'session-id' and 'tracking' + + +def test_response_cookies_setter_updates_raw_headers( + loop: asyncio.AbstractEventLoop, session: ClientSession +) -> None: + """Test that setting cookies property updates _raw_cookie_headers.""" + response = ClientResponse( + "get", + URL("http://example.com"), + request_info=mock.Mock(), + writer=WriterMock(), + continue100=None, + timer=TimerNoop(), + traces=[], + loop=loop, + session=session, + ) + + # Create a SimpleCookie with some cookies + cookies = SimpleCookie() + cookies["session-id"] = "123456" + cookies["session-id"]["domain"] = ".example.com" + cookies["session-id"]["path"] = "/" + cookies["session-id"]["secure"] = True + + cookies["tracking"] = "xyz789" + cookies["tracking"]["domain"] = ".example.com" + cookies["tracking"]["httponly"] = True + + # Set the cookies property + response.cookies = cookies + + # Verify _raw_cookie_headers was updated + assert response._raw_cookie_headers is not None + assert len(response._raw_cookie_headers) == 2 + assert isinstance(response._raw_cookie_headers, tuple) + + # Check the raw headers contain the expected cookie strings + raw_headers = list(response._raw_cookie_headers) + assert any("session-id=123456" in h for h in raw_headers) + assert any("tracking=xyz789" in h for h in raw_headers) + assert any("Secure" in h for h in raw_headers) + assert any("HttpOnly" in h for h in raw_headers) + + # Verify cookies property returns the same object + assert 
response.cookies is cookies + + # Test setting empty cookies + empty_cookies = SimpleCookie() + response.cookies = empty_cookies + # Should not set _raw_cookie_headers for empty cookies + assert response._raw_cookie_headers is None diff --git a/tests/test_client_session.py b/tests/test_client_session.py index 0fdfaee6761..2702350f132 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -4,8 +4,8 @@ import io import json from collections import deque -from http.cookies import SimpleCookie -from typing import Any, Awaitable, Callable, List +from http.cookies import BaseCookie, SimpleCookie +from typing import Any, Awaitable, Callable, Iterator, List, Optional, cast from unittest import mock from uuid import uuid4 @@ -15,7 +15,7 @@ from yarl import URL import aiohttp -from aiohttp import CookieJar, client, hdrs, web +from aiohttp import CookieJar, abc, client, hdrs, web from aiohttp.client import ClientSession from aiohttp.client_proto import ResponseHandler from aiohttp.client_reqrep import ClientRequest @@ -639,8 +639,43 @@ async def create_connection( async def test_cookie_jar_usage(loop: Any, aiohttp_client: Any) -> None: req_url = None - jar = mock.Mock() - jar.filter_cookies.return_value = None + class MockCookieJar(abc.AbstractCookieJar): + def __init__(self) -> None: + self._update_cookies_mock = mock.Mock() + self._filter_cookies_mock = mock.Mock(return_value=BaseCookie()) + self._clear_mock = mock.Mock() + self._clear_domain_mock = mock.Mock() + self._items: List[Any] = [] + + @property + def quote_cookie(self) -> bool: + return True + + def clear(self, predicate: Optional[abc.ClearCookiePredicate] = None) -> None: + self._clear_mock(predicate) + + def clear_domain(self, domain: str) -> None: + self._clear_domain_mock(domain) + + def update_cookies(self, cookies: Any, response_url: URL = URL()) -> None: + self._update_cookies_mock(cookies, response_url) + + def filter_cookies(self, request_url: URL) -> BaseCookie[str]: + return 
cast(BaseCookie[str], self._filter_cookies_mock(request_url)) + + def __len__(self) -> int: + return len(self._items) + + def __iter__(self) -> Iterator[Any]: + return iter(self._items) + + jar = MockCookieJar() + + assert jar.quote_cookie is True + assert len(jar) == 0 + assert list(jar) == [] + jar.clear() + jar.clear_domain("example.com") async def handler(request): nonlocal req_url @@ -657,22 +692,24 @@ async def handler(request): ) # Updating the cookie jar with initial user defined cookies - jar.update_cookies.assert_called_with({"request": "req_value"}) + jar._update_cookies_mock.assert_called_with({"request": "req_value"}, URL()) - jar.update_cookies.reset_mock() + jar._update_cookies_mock.reset_mock() resp = await session.get("/") await resp.release() # Filtering the cookie jar before sending the request, # getting the request URL as only parameter - jar.filter_cookies.assert_called_with(URL(req_url)) + jar._filter_cookies_mock.assert_called_with(URL(req_url)) # Updating the cookie jar with the response cookies - assert jar.update_cookies.called - resp_cookies = jar.update_cookies.call_args[0][0] - assert isinstance(resp_cookies, SimpleCookie) - assert "response" in resp_cookies - assert resp_cookies["response"].value == "resp_value" + assert jar._update_cookies_mock.called + resp_cookies = jar._update_cookies_mock.call_args[0][0] + # Now update_cookies is called with a list of tuples + assert isinstance(resp_cookies, list) + assert len(resp_cookies) == 1 + assert resp_cookies[0][0] == "response" + assert resp_cookies[0][1].value == "resp_value" async def test_cookies_with_not_quoted_cookie_jar( diff --git a/tests/test_cookiejar.py b/tests/test_cookiejar.py index 26efaa30d04..e1b6e351e3d 100644 --- a/tests/test_cookiejar.py +++ b/tests/test_cookiejar.py @@ -2,11 +2,13 @@ import datetime import heapq import itertools +import logging import pathlib import pickle import unittest from http.cookies import BaseCookie, Morsel, SimpleCookie from operator import 
not_ +from typing import List, Set from unittest import mock import pytest @@ -1181,3 +1183,218 @@ async def test_filter_cookies_does_not_leak_memory() -> None: for key, morsels in jar._morsel_cache.items(): assert key in jar._cookies, f"Orphaned morsel cache entry for {key}" assert len(morsels) > 0, f"Empty morsel cache entry found for {key}" + + +async def test_update_cookies_from_headers() -> None: + """Test update_cookies_from_headers method.""" + jar: CookieJar = CookieJar() + url: URL = URL("http://example.com/path") + + # Test with simple cookies + headers = [ + "session-id=123456; Path=/", + "user-pref=dark-mode; Domain=.example.com", + "tracking=xyz789; Secure; HttpOnly", + ] + + jar.update_cookies_from_headers(headers, url) + + # Verify all cookies were added to the jar + assert len(jar) == 3 + + # Check cookies available for HTTP URL (secure cookie should be filtered out) + filtered_http: BaseCookie[str] = jar.filter_cookies(url) + assert len(filtered_http) == 2 + assert "session-id" in filtered_http + assert filtered_http["session-id"].value == "123456" + assert "user-pref" in filtered_http + assert filtered_http["user-pref"].value == "dark-mode" + assert "tracking" not in filtered_http # Secure cookie not available on HTTP + + # Check cookies available for HTTPS URL (all cookies should be available) + url_https: URL = URL("https://example.com/path") + filtered_https: BaseCookie[str] = jar.filter_cookies(url_https) + assert len(filtered_https) == 3 + assert "tracking" in filtered_https + assert filtered_https["tracking"].value == "xyz789" + + +async def test_update_cookies_from_headers_duplicate_names() -> None: + """Test that duplicate cookie names with different domains are preserved.""" + jar: CookieJar = CookieJar() + url: URL = URL("http://www.example.com/") + + # Headers with duplicate names but different domains + headers: List[str] = [ + "session-id=123456; Domain=.example.com; Path=/", + "session-id=789012; Domain=.www.example.com; Path=/", + 
"user-pref=light; Domain=.example.com", + "user-pref=dark; Domain=sub.example.com", + ] + + jar.update_cookies_from_headers(headers, url) + + # Should have 3 cookies (user-pref=dark for sub.example.com is rejected) + assert len(jar) == 3 + + # Verify we have both session-id cookies + all_cookies: List[Morsel[str]] = list(jar) + session_ids: List[Morsel[str]] = [c for c in all_cookies if c.key == "session-id"] + assert len(session_ids) == 2 + + # Check their domains are different + domains: Set[str] = {c["domain"] for c in session_ids} + assert domains == {"example.com", "www.example.com"} + + +async def test_update_cookies_from_headers_invalid_cookies( + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that invalid cookies are logged and skipped.""" + jar: CookieJar = CookieJar() + url: URL = URL("http://example.com/") + + # Mix of valid and invalid cookies + headers: List[str] = [ + "valid-cookie=value123", + "ISAWPLB{A7F52349-3531-4DA9-8776-F74BC6F4F1BB}=" + "{925EC0B8-CB17-4BEB-8A35-1033813B0523}; " + "HttpOnly; Path=/", # This cookie with curly braces causes CookieError + "another-valid=value456", + ] + + # Enable logging for the client logger + with caplog.at_level(logging.WARNING, logger="aiohttp.client"): + jar.update_cookies_from_headers(headers, url) + + # Check that we logged warnings for invalid cookies + assert "Can not load response cookies" in caplog.text + + # Valid cookies should still be added + assert len(jar) >= 2 # At least the two clearly valid cookies + filtered: BaseCookie[str] = jar.filter_cookies(url) + assert "valid-cookie" in filtered + assert "another-valid" in filtered + + +async def test_update_cookies_from_headers_empty_list() -> None: + """Test that empty header list is handled gracefully.""" + jar: CookieJar = CookieJar() + url: URL = URL("http://example.com/") + + # Should not raise any errors + jar.update_cookies_from_headers([], url) + + assert len(jar) == 0 + + +async def test_update_cookies_from_headers_with_attributes() 
-> None: + """Test cookies with various attributes are handled correctly.""" + jar: CookieJar = CookieJar() + url: URL = URL("https://secure.example.com/app/page") + + headers: List[str] = [ + "secure-cookie=value1; Secure; HttpOnly; SameSite=Strict", + "expiring-cookie=value2; Max-Age=3600; Path=/app", + "domain-cookie=value3; Domain=.example.com; Path=/", + "dated-cookie=value4; Expires=Wed, 09 Jun 2030 10:18:14 GMT", + ] + + jar.update_cookies_from_headers(headers, url) + + # All cookies should be stored + assert len(jar) == 4 + + # Verify secure cookie (should work on HTTPS subdomain) + # Note: cookies without explicit path get path from URL (/app) + filtered_https_root: BaseCookie[str] = jar.filter_cookies( + URL("https://secure.example.com/") + ) + assert len(filtered_https_root) == 1 # Only domain-cookie has Path=/ + assert "domain-cookie" in filtered_https_root + + # Check app path + filtered_https_app: BaseCookie[str] = jar.filter_cookies( + URL("https://secure.example.com/app/") + ) + assert len(filtered_https_app) == 4 # All cookies match + assert "secure-cookie" in filtered_https_app + assert "expiring-cookie" in filtered_https_app + assert "domain-cookie" in filtered_https_app + assert "dated-cookie" in filtered_https_app + + # Secure cookie should not be available on HTTP + filtered_http_app: BaseCookie[str] = jar.filter_cookies( + URL("http://secure.example.com/app/") + ) + assert "secure-cookie" not in filtered_http_app + assert "expiring-cookie" in filtered_http_app # Non-secure cookies still available + assert "domain-cookie" in filtered_http_app + assert "dated-cookie" in filtered_http_app + + +async def test_update_cookies_from_headers_preserves_existing() -> None: + """Test that update_cookies_from_headers preserves existing cookies.""" + jar: CookieJar = CookieJar() + url: URL = URL("http://example.com/") + + # Add some initial cookies + jar.update_cookies( + { + "existing1": "value1", + "existing2": "value2", + }, + url, + ) + + # Add more 
cookies via headers + headers: List[str] = [ + "new-cookie1=value3", + "new-cookie2=value4", + ] + + jar.update_cookies_from_headers(headers, url) + + # Should have all 4 cookies + assert len(jar) == 4 + filtered: BaseCookie[str] = jar.filter_cookies(url) + assert "existing1" in filtered + assert "existing2" in filtered + assert "new-cookie1" in filtered + assert "new-cookie2" in filtered + + +async def test_update_cookies_from_headers_overwrites_same_cookie() -> None: + """Test that cookies with same name/domain/path are overwritten.""" + jar: CookieJar = CookieJar() + url: URL = URL("http://example.com/") + + # Add initial cookie + jar.update_cookies({"session": "old-value"}, url) + + # Update with new value via headers + headers: List[str] = ["session=new-value"] + jar.update_cookies_from_headers(headers, url) + + # Should still have just 1 cookie with updated value + assert len(jar) == 1 + filtered: BaseCookie[str] = jar.filter_cookies(url) + assert filtered["session"].value == "new-value" + + +async def test_dummy_cookie_jar_update_cookies_from_headers() -> None: + """Test that DummyCookieJar ignores update_cookies_from_headers.""" + jar: DummyCookieJar = DummyCookieJar() + url: URL = URL("http://example.com/") + + headers: List[str] = [ + "cookie1=value1", + "cookie2=value2", + ] + + # Should not raise and should not store anything + jar.update_cookies_from_headers(headers, url) + + assert len(jar) == 0 + filtered: BaseCookie[str] = jar.filter_cookies(url) + assert len(filtered) == 0 From 06887a918ee88395082cb5ea53adccaf3e30aa00 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sun, 1 Jun 2025 12:05:10 -0500 Subject: [PATCH 1480/1511] [PR #11107/21d640d backport][3.12] Avoid creating closed futures that will never be awaited (#11110) --- CHANGES/11107.misc.rst | 1 + aiohttp/client_proto.py | 59 ++++++++++++++++++++++++++------------ aiohttp/connector.py | 6 ++-- tests/test_client_proto.py | 41 ++++++++++++++++++++++++-- tests/test_connector.py | 22 ++++++++++++++ 5 files changed, 106 insertions(+), 23 deletions(-) create mode 100644 CHANGES/11107.misc.rst diff --git a/CHANGES/11107.misc.rst b/CHANGES/11107.misc.rst new file mode 100644 index 00000000000..37ac4622bd9 --- /dev/null +++ b/CHANGES/11107.misc.rst @@ -0,0 +1 @@ +Avoided creating closed futures in ``ResponseHandler`` that will never be awaited -- by :user:`bdraco`. diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index 2d8c2e578c4..7d00b366a79 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -1,6 +1,6 @@ import asyncio from contextlib import suppress -from typing import Any, Optional, Tuple +from typing import Any, Optional, Tuple, Union from .base_protocol import BaseProtocol from .client_exceptions import ( @@ -45,7 +45,27 @@ def __init__(self, loop: asyncio.AbstractEventLoop) -> None: self._read_timeout_handle: Optional[asyncio.TimerHandle] = None self._timeout_ceil_threshold: Optional[float] = 5 - self.closed: asyncio.Future[None] = self._loop.create_future() + + self._closed: Union[None, asyncio.Future[None]] = None + self._connection_lost_called = False + + @property + def closed(self) -> Union[None, asyncio.Future[None]]: + """Future that is set when the connection is closed. + + This property returns a Future that will be completed when the connection + is closed. The Future is created lazily on first access to avoid creating + futures that will never be awaited. 
+ + Returns: + - A Future[None] if the connection is still open or was closed after + this property was accessed + - None if connection_lost() was already called before this property + was ever accessed (indicating no one is waiting for the closure) + """ + if self._closed is None and not self._connection_lost_called: + self._closed = self._loop.create_future() + return self._closed @property def upgraded(self) -> bool: @@ -79,6 +99,7 @@ def is_connected(self) -> bool: return self.transport is not None and not self.transport.is_closing() def connection_lost(self, exc: Optional[BaseException]) -> None: + self._connection_lost_called = True self._drop_timeout() original_connection_error = exc @@ -86,23 +107,23 @@ def connection_lost(self, exc: Optional[BaseException]) -> None: connection_closed_cleanly = original_connection_error is None - if connection_closed_cleanly: - set_result(self.closed, None) - else: - assert original_connection_error is not None - set_exception( - self.closed, - ClientConnectionError( - f"Connection lost: {original_connection_error !s}", - ), - original_connection_error, - ) - # Mark the exception as retrieved to prevent - # "Future exception was never retrieved" warnings - # The exception is always passed on through - # other means, so this is safe - with suppress(Exception): - self.closed.exception() + if self._closed is not None: + # If someone is waiting for the closed future, + # we should set it to None or an exception. If + # self._closed is None, it means that + # connection_lost() was called already + # or nobody is waiting for it. + if connection_closed_cleanly: + set_result(self._closed, None) + else: + assert original_connection_error is not None + set_exception( + self._closed, + ClientConnectionError( + f"Connection lost: {original_connection_error !s}", + ), + original_connection_error, + ) if self._payload_parser is not None: with suppress(Exception): # FIXME: log this somehow? 
diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 6fa75d31a98..11bd36c487e 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -507,11 +507,13 @@ def _close(self) -> List[Awaitable[object]]: for data in self._conns.values(): for proto, _ in data: proto.close() - waiters.append(proto.closed) + if closed := proto.closed: + waiters.append(closed) for proto in self._acquired: proto.close() - waiters.append(proto.closed) + if closed := proto.closed: + waiters.append(closed) for transport in self._cleanup_closed_transports: if transport is not None: diff --git a/tests/test_client_proto.py b/tests/test_client_proto.py index c7fb79a5f44..2a42996950f 100644 --- a/tests/test_client_proto.py +++ b/tests/test_client_proto.py @@ -256,13 +256,50 @@ async def test_connection_lost_exception_is_marked_retrieved( proto = ResponseHandler(loop=loop) proto.connection_made(mock.Mock()) + # Access closed property before connection_lost to ensure future is created + closed_future = proto.closed + assert closed_future is not None + # Simulate an SSL shutdown timeout error ssl_error = TimeoutError("SSL shutdown timed out") proto.connection_lost(ssl_error) # Verify the exception was set on the closed future - assert proto.closed.done() - exc = proto.closed.exception() + assert closed_future.done() + exc = closed_future.exception() assert exc is not None assert "Connection lost: SSL shutdown timed out" in str(exc) assert exc.__cause__ is ssl_error + + +async def test_closed_property_lazy_creation( + loop: asyncio.AbstractEventLoop, +) -> None: + """Test that closed future is created lazily.""" + proto = ResponseHandler(loop=loop) + + # Initially, the closed future should not be created + assert proto._closed is None + + # Accessing the property should create the future + closed_future = proto.closed + assert closed_future is not None + assert isinstance(closed_future, asyncio.Future) + assert not closed_future.done() + + # Subsequent access should return the same 
future + assert proto.closed is closed_future + + +async def test_closed_property_after_connection_lost( + loop: asyncio.AbstractEventLoop, +) -> None: + """Test that closed property returns None after connection_lost if never accessed.""" + proto = ResponseHandler(loop=loop) + proto.connection_made(mock.Mock()) + + # Don't access proto.closed before connection_lost + proto.connection_lost(None) + + # After connection_lost, closed should return None if it was never accessed + assert proto.closed is None diff --git a/tests/test_connector.py b/tests/test_connector.py index 3b2d28ea46c..6fad9e4ccff 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -331,6 +331,28 @@ async def test_close_with_exception_during_closing( assert "Error while closing connector" in caplog.records[0].message assert "RuntimeError('Connection close failed')" in caplog.records[0].message + +async def test_close_with_proto_closed_none(key: ConnectionKey) -> None: + """Test close when protocol.closed is None.""" + # Create protocols where closed property returns None + proto1 = mock.create_autospec(ResponseHandler, instance=True) + proto1.closed = None + proto1.close = mock.Mock() + + proto2 = mock.create_autospec(ResponseHandler, instance=True) + proto2.closed = None + proto2.close = mock.Mock() + + conn = aiohttp.BaseConnector() + conn._conns[key] = deque([(proto1, 0)]) + conn._acquired.add(proto2) + + # Close the connector - this should handle the case where proto.closed is None + await conn.close() + + # Verify close was called on both protocols + assert proto1.close.called + assert proto2.close.called assert conn.closed From a57ff76e933434b6142c0bcc6770b7d695fa109e Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Mon, 2 Jun 2025 03:48:43 -0500 Subject: [PATCH 1481/1511] [PR #11114/758738e backport][3.12] Downgrade connector close error to debug (#11115) --- CHANGES/11114.misc.rst | 1 + aiohttp/connector.py | 4 ++-- tests/test_connector.py | 4 +++- 3 files changed, 6 insertions(+), 3 deletions(-) create mode 100644 CHANGES/11114.misc.rst diff --git a/CHANGES/11114.misc.rst b/CHANGES/11114.misc.rst new file mode 100644 index 00000000000..2fcb1468c67 --- /dev/null +++ b/CHANGES/11114.misc.rst @@ -0,0 +1 @@ +Downgraded the logging level for connector close errors from ERROR to DEBUG, as these are expected behavior with TLS 1.3 connections -- by :user:`bdraco`. diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 11bd36c487e..62b418a4bed 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -1,6 +1,5 @@ import asyncio import functools -import logging import random import socket import sys @@ -60,6 +59,7 @@ set_exception, set_result, ) +from .log import client_logger from .resolver import DefaultResolver if sys.version_info >= (3, 12): @@ -137,7 +137,7 @@ async def _wait_for_close(waiters: List[Awaitable[object]]) -> None: results = await asyncio.gather(*waiters, return_exceptions=True) for res in results: if isinstance(res, Exception): - logging.error("Error while closing connector: %r", res) + client_logger.debug("Error while closing connector: %r", res) class Connection: diff --git a/tests/test_connector.py b/tests/test_connector.py index 6fad9e4ccff..54da8743ed7 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -2,6 +2,7 @@ import asyncio import gc import hashlib +import logging import platform import socket import ssl @@ -309,6 +310,7 @@ async def test_close_with_exception_during_closing( loop: asyncio.AbstractEventLoop, caplog: pytest.LogCaptureFixture ) -> None: """Test that exceptions during connection closing are logged.""" + caplog.set_level(logging.DEBUG) proto = create_mocked_conn() # Make 
the closed future raise an exception when awaited @@ -327,7 +329,7 @@ async def test_close_with_exception_during_closing( # Check that the error was logged assert len(caplog.records) == 1 - assert caplog.records[0].levelname == "ERROR" + assert caplog.records[0].levelname == "DEBUG" assert "Error while closing connector" in caplog.records[0].message assert "RuntimeError('Connection close failed')" in caplog.records[0].message From 741cb61dfa3e70af0609599c272ec80a2f9eed7d Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 2 Jun 2025 03:58:01 -0500 Subject: [PATCH 1482/1511] PR #11112/8edec63 backport][3.12] Fix cookie parsing issues (#11117) --- CHANGES/11112.bugfix.rst | 8 + CHANGES/2683.bugfix.rst | 1 + CHANGES/5397.bugfix.rst | 1 + CHANGES/7993.bugfix.rst | 1 + aiohttp/_cookie_helpers.py | 221 +++++++ aiohttp/abc.py | 26 +- aiohttp/client_reqrep.py | 21 +- aiohttp/cookiejar.py | 45 +- aiohttp/web_request.py | 9 +- docs/spelling_wordlist.txt | 1 + pyproject.toml | 2 +- tests/test_client_functional.py | 38 +- tests/test_client_request.py | 53 ++ tests/test_cookie_helpers.py | 1031 +++++++++++++++++++++++++++++++ tests/test_cookiejar.py | 225 ++++++- tests/test_web_request.py | 151 +++++ 16 files changed, 1769 insertions(+), 65 deletions(-) create mode 100644 CHANGES/11112.bugfix.rst create mode 120000 CHANGES/2683.bugfix.rst create mode 120000 CHANGES/5397.bugfix.rst create mode 120000 CHANGES/7993.bugfix.rst create mode 100644 aiohttp/_cookie_helpers.py create mode 100644 tests/test_cookie_helpers.py diff --git a/CHANGES/11112.bugfix.rst b/CHANGES/11112.bugfix.rst new file mode 100644 index 00000000000..6edea1c9b23 --- /dev/null +++ b/CHANGES/11112.bugfix.rst @@ -0,0 +1,8 @@ +Fixed cookie parsing to be more lenient when handling cookies with special characters +in names or values. Cookies with characters like ``{``, ``}``, and ``/`` in names are now +accepted instead of causing a :exc:`~http.cookies.CookieError` and 500 errors. 
Additionally, +cookies with mismatched quotes in values are now parsed correctly, and quoted cookie +values are now handled consistently whether or not they include special attributes +like ``Domain``. Also fixed :class:`~aiohttp.CookieJar` to ensure shared cookies (domain="", path="") +respect the ``quote_cookie`` parameter, making cookie quoting behavior consistent for +all cookies -- by :user:`bdraco`. diff --git a/CHANGES/2683.bugfix.rst b/CHANGES/2683.bugfix.rst new file mode 120000 index 00000000000..fac3861027d --- /dev/null +++ b/CHANGES/2683.bugfix.rst @@ -0,0 +1 @@ +11112.bugfix.rst \ No newline at end of file diff --git a/CHANGES/5397.bugfix.rst b/CHANGES/5397.bugfix.rst new file mode 120000 index 00000000000..fac3861027d --- /dev/null +++ b/CHANGES/5397.bugfix.rst @@ -0,0 +1 @@ +11112.bugfix.rst \ No newline at end of file diff --git a/CHANGES/7993.bugfix.rst b/CHANGES/7993.bugfix.rst new file mode 120000 index 00000000000..fac3861027d --- /dev/null +++ b/CHANGES/7993.bugfix.rst @@ -0,0 +1 @@ +11112.bugfix.rst \ No newline at end of file diff --git a/aiohttp/_cookie_helpers.py b/aiohttp/_cookie_helpers.py new file mode 100644 index 00000000000..8184cc9bdc1 --- /dev/null +++ b/aiohttp/_cookie_helpers.py @@ -0,0 +1,221 @@ +""" +Internal cookie handling helpers. + +This module contains internal utilities for cookie parsing and manipulation. +These are not part of the public API and may change without notice. +""" + +import re +import sys +from http.cookies import Morsel +from typing import List, Optional, Sequence, Tuple, cast + +from .log import internal_logger + +__all__ = ("parse_cookie_headers", "preserve_morsel_with_coded_value") + +# Cookie parsing constants +# Allow more characters in cookie names to handle real-world cookies +# that don't strictly follow RFC standards (fixes #2683) +# RFC 6265 defines cookie-name token as per RFC 2616 Section 2.2, +# but many servers send cookies with characters like {} [] () etc. 
+# This makes the cookie parser more tolerant of real-world cookies +# while still providing some validation to catch obviously malformed names. +_COOKIE_NAME_RE = re.compile(r"^[!#$%&\'()*+\-./0-9:<=>?@A-Z\[\]^_`a-z{|}~]+$") +_COOKIE_KNOWN_ATTRS = frozenset( # AKA Morsel._reserved + ( + "path", + "domain", + "max-age", + "expires", + "secure", + "httponly", + "samesite", + "partitioned", + "version", + "comment", + ) +) +_COOKIE_BOOL_ATTRS = frozenset( # AKA Morsel._flags + ("secure", "httponly", "partitioned") +) + +# SimpleCookie's pattern for parsing cookies with relaxed validation +# Based on http.cookies pattern but extended to allow more characters in cookie names +# to handle real-world cookies (fixes #2683) +_COOKIE_PATTERN = re.compile( + r""" + \s* # Optional whitespace at start of cookie + (?P<key> # Start of group 'key' + # aiohttp has extended to include [] for compatibility with real-world cookies + [\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=\[\]]+? # Any word of at least one letter + ) # End of group 'key' + ( # Optional group: there may not be a value. 
+ \s*=\s* # Equal Sign + (?P<val> # Start of group 'val' + "(?:[^\\"]|\\.)*" # Any double-quoted string (properly closed) + | # or + "[^";]* # Unmatched opening quote (differs from SimpleCookie - issue #7993) + | # or + # Special case for "expires" attr - RFC 822, RFC 850, RFC 1036, RFC 1123 + (\w{3,6}day|\w{3}),\s # Day of the week or abbreviated day (with comma) + [\w\d\s-]{9,11}\s[\d:]{8}\s # Date and time in specific format + (GMT|[+-]\d{4}) # Timezone: GMT or RFC 2822 offset like -0000, +0100 + # NOTE: RFC 2822 timezone support is an aiohttp extension + # for issue #4493 - SimpleCookie does NOT support this + | # or + # ANSI C asctime() format: "Wed Jun 9 10:18:14 2021" + # NOTE: This is an aiohttp extension for issue #4327 - SimpleCookie does NOT support this format + \w{3}\s+\w{3}\s+[\s\d]\d\s+\d{2}:\d{2}:\d{2}\s+\d{4} + | # or + [\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=\[\]]* # Any word or empty string + ) # End of group 'val' + )? # End of optional value group + \s* # Any number of spaces. + (\s+|;|$) # Ending either at space, semicolon, or EOS. + """, + re.VERBOSE | re.ASCII, +) + + +def preserve_morsel_with_coded_value(cookie: Morsel[str]) -> Morsel[str]: + """ + Preserve a Morsel's coded_value exactly as received from the server. + + This function ensures that cookie encoding is preserved exactly as sent by + the server, which is critical for compatibility with old servers that have + strict requirements about cookie formats. + + This addresses the issue described in https://github.com/aio-libs/aiohttp/pull/1453 + where Python's SimpleCookie would re-encode cookies, breaking authentication + with certain servers. + + Args: + cookie: A Morsel object from SimpleCookie + + Returns: + A Morsel object with preserved coded_value + + """ + mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel())) + # We use __setstate__ instead of the public set() API because it allows us to + # bypass validation and set already validated state. 
This is more stable than + # setting protected attributes directly and unlikely to change since it would + # break pickling. + mrsl_val.__setstate__( # type: ignore[attr-defined] + {"key": cookie.key, "value": cookie.value, "coded_value": cookie.coded_value} + ) + return mrsl_val + + +def _unquote(text: str) -> str: + """ + Unquote a cookie value. + + Vendored from http.cookies._unquote to ensure compatibility. + """ + # If there are no quotes, return as-is + if len(text) < 2 or text[0] != '"' or text[-1] != '"': + return text + # Remove quotes and handle escaped characters + text = text[1:-1] + # Replace escaped quotes and backslashes + text = text.replace('\\"', '"').replace("\\\\", "\\") + return text + + +def parse_cookie_headers(headers: Sequence[str]) -> List[Tuple[str, Morsel[str]]]: + """ + Parse cookie headers using a vendored version of SimpleCookie parsing. + + This implementation is based on SimpleCookie.__parse_string to ensure + compatibility with how SimpleCookie parses cookies, including handling + of malformed cookies with missing semicolons. + + This function is used for both Cookie and Set-Cookie headers in order to be + forgiving. Ideally we would have followed RFC 6265 Section 5.2 (for Cookie + headers) and RFC 6265 Section 4.2.1 (for Set-Cookie headers), but the + real world data makes it impossible since we need to be a bit more forgiving. + + NOTE: This implementation differs from SimpleCookie in handling unmatched quotes. + SimpleCookie will stop parsing when it encounters a cookie value with an unmatched + quote (e.g., 'cookie="value'), causing subsequent cookies to be silently dropped. + This implementation handles unmatched quotes more gracefully to prevent cookie loss. 
+ See https://github.com/aio-libs/aiohttp/issues/7993 + """ + parsed_cookies: List[Tuple[str, Morsel[str]]] = [] + + for header in headers: + if not header: + continue + + # Parse cookie string using SimpleCookie's algorithm + i = 0 + n = len(header) + current_morsel: Optional[Morsel[str]] = None + morsel_seen = False + + while 0 <= i < n: + # Start looking for a cookie + match = _COOKIE_PATTERN.match(header, i) + if not match: + # No more cookies + break + + key, value = match.group("key"), match.group("val") + i = match.end(0) + lower_key = key.lower() + + if key[0] == "$": + if not morsel_seen: + # We ignore attributes which pertain to the cookie + # mechanism as a whole, such as "$Version". + continue + # Process as attribute + if current_morsel is not None: + attr_lower_key = lower_key[1:] + if attr_lower_key in _COOKIE_KNOWN_ATTRS: + current_morsel[attr_lower_key] = value or "" + elif lower_key in _COOKIE_KNOWN_ATTRS: + if not morsel_seen: + # Invalid cookie string - attribute before cookie + break + if lower_key in _COOKIE_BOOL_ATTRS: + # Boolean attribute with any value should be True + if current_morsel is not None: + if lower_key == "partitioned" and sys.version_info < (3, 14): + dict.__setitem__(current_morsel, lower_key, True) + else: + current_morsel[lower_key] = True + elif value is None: + # Invalid cookie string - non-boolean attribute without value + break + elif current_morsel is not None: + # Regular attribute with value + current_morsel[lower_key] = _unquote(value) + elif value is not None: + # This is a cookie name=value pair + # Validate the name + if key in _COOKIE_KNOWN_ATTRS or not _COOKIE_NAME_RE.match(key): + internal_logger.warning( + "Can not load cookies: Illegal cookie name %r", key + ) + current_morsel = None + else: + # Create new morsel + current_morsel = Morsel() + # Preserve the original value as coded_value (with quotes if present) + # We use __setstate__ instead of the public set() API because it allows us to + # bypass 
validation and set already validated state. This is more stable than + # setting protected attributes directly and unlikely to change since it would + # break pickling. + current_morsel.__setstate__( # type: ignore[attr-defined] + {"key": key, "value": _unquote(value), "coded_value": value} + ) + parsed_cookies.append((key, current_morsel)) + morsel_seen = True + else: + # Invalid cookie string - no value for non-attribute + break + + return parsed_cookies diff --git a/aiohttp/abc.py b/aiohttp/abc.py index 353c18be266..ba371c61b01 100644 --- a/aiohttp/abc.py +++ b/aiohttp/abc.py @@ -3,7 +3,7 @@ import socket from abc import ABC, abstractmethod from collections.abc import Sized -from http.cookies import BaseCookie, CookieError, Morsel, SimpleCookie +from http.cookies import BaseCookie, Morsel from typing import ( TYPE_CHECKING, Any, @@ -23,7 +23,7 @@ from multidict import CIMultiDict from yarl import URL -from .log import client_logger +from ._cookie_helpers import parse_cookie_headers from .typedefs import LooseCookies if TYPE_CHECKING: @@ -197,26 +197,8 @@ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> No def update_cookies_from_headers( self, headers: Sequence[str], response_url: URL ) -> None: - """ - Update cookies from raw Set-Cookie headers. - - Default implementation parses each header separately to preserve - cookies with same name but different domain/path. 
- """ - # Default implementation for backward compatibility - cookies_to_update: List[Tuple[str, Morsel[str]]] = [] - for cookie_header in headers: - tmp_cookie = SimpleCookie() - try: - tmp_cookie.load(cookie_header) - # Collect all cookies as tuples (name, morsel) - for name, morsel in tmp_cookie.items(): - cookies_to_update.append((name, morsel)) - except CookieError as exc: - client_logger.warning("Can not load response cookies: %s", exc) - - # Update all cookies at once for efficiency - if cookies_to_update: + """Update cookies from raw Set-Cookie headers.""" + if headers and (cookies_to_update := parse_cookie_headers(headers)): self.update_cookies(cookies_to_update, response_url) @abstractmethod diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 01835260cc5..793864b95a5 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -9,7 +9,7 @@ import warnings from collections.abc import Mapping from hashlib import md5, sha1, sha256 -from http.cookies import CookieError, Morsel, SimpleCookie +from http.cookies import Morsel, SimpleCookie from types import MappingProxyType, TracebackType from typing import ( TYPE_CHECKING, @@ -31,6 +31,7 @@ from yarl import URL from . 
import hdrs, helpers, http, multipart, payload +from ._cookie_helpers import parse_cookie_headers, preserve_morsel_with_coded_value from .abc import AbstractStreamWriter from .client_exceptions import ( ClientConnectionError, @@ -62,7 +63,6 @@ HttpVersion11, StreamWriter, ) -from .log import client_logger from .streams import StreamReader from .typedefs import ( DEFAULT_JSON_DECODER, @@ -376,11 +376,9 @@ def cookies(self) -> SimpleCookie: if self._raw_cookie_headers is not None: # Parse cookies for response.cookies (SimpleCookie for backward compatibility) cookies = SimpleCookie() - for hdr in self._raw_cookie_headers: - try: - cookies.load(hdr) - except CookieError as exc: - client_logger.warning("Can not load response cookies: %s", exc) + # Use parse_cookie_headers for more lenient parsing that handles + # malformed cookies better than SimpleCookie.load + cookies.update(parse_cookie_headers(self._raw_cookie_headers)) self._cookies = cookies else: self._cookies = SimpleCookie() @@ -1095,7 +1093,8 @@ def update_cookies(self, cookies: Optional[LooseCookies]) -> None: c = SimpleCookie() if hdrs.COOKIE in self.headers: - c.load(self.headers.get(hdrs.COOKIE, "")) + # parse_cookie_headers already preserves coded values + c.update(parse_cookie_headers((self.headers.get(hdrs.COOKIE, ""),))) del self.headers[hdrs.COOKIE] if isinstance(cookies, Mapping): @@ -1104,10 +1103,8 @@ def update_cookies(self, cookies: Optional[LooseCookies]) -> None: iter_cookies = cookies # type: ignore[assignment] for name, value in iter_cookies: if isinstance(value, Morsel): - # Preserve coded_value - mrsl_val = value.get(value.key, Morsel()) - mrsl_val.set(value.key, value.value, value.coded_value) - c[name] = mrsl_val + # Use helper to preserve coded_value exactly as sent by server + c[name] = preserve_morsel_with_coded_value(value) else: c[name] = value # type: ignore[assignment] diff --git a/aiohttp/cookiejar.py b/aiohttp/cookiejar.py index a755a893409..193648d4309 100644 --- 
a/aiohttp/cookiejar.py +++ b/aiohttp/cookiejar.py @@ -23,11 +23,11 @@ Set, Tuple, Union, - cast, ) from yarl import URL +from ._cookie_helpers import preserve_morsel_with_coded_value from .abc import AbstractCookieJar, ClearCookiePredicate from .helpers import is_ip_address from .typedefs import LooseCookies, PathLike, StrOrURL @@ -45,6 +45,7 @@ # the expiration heap. This is a performance optimization to avoid cleaning up the # heap too often when there are only a few scheduled expirations. _MIN_SCHEDULED_COOKIE_EXPIRATION = 100 +_SIMPLE_COOKIE = SimpleCookie() class CookieJar(AbstractCookieJar): @@ -304,9 +305,10 @@ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> No def filter_cookies(self, request_url: URL = URL()) -> "BaseCookie[str]": """Returns this jar's cookies filtered by their attributes.""" - filtered: Union[SimpleCookie, "BaseCookie[str]"] = ( - SimpleCookie() if self._quote_cookie else BaseCookie() - ) + # We always use BaseCookie now since all + # cookies set on on filtered are fully constructed + # Morsels, not just names and values. + filtered: BaseCookie[str] = BaseCookie() if not self._cookies: # Skip do_expiration() if there are no cookies. 
return filtered @@ -332,8 +334,17 @@ def filter_cookies(self, request_url: URL = URL()) -> "BaseCookie[str]": is_not_secure = request_origin not in self._treat_as_secure_origin # Send shared cookie - for c in self._cookies[("", "")].values(): - filtered[c.key] = c.value + key = ("", "") + for c in self._cookies[key].values(): + # Check cache first + if c.key in self._morsel_cache[key]: + filtered[c.key] = self._morsel_cache[key][c.key] + continue + + # Build and cache the morsel + mrsl_val = self._build_morsel(c) + self._morsel_cache[key][c.key] = mrsl_val + filtered[c.key] = mrsl_val if is_ip_address(hostname): if not self._unsafe: @@ -373,15 +384,29 @@ def filter_cookies(self, request_url: URL = URL()) -> "BaseCookie[str]": filtered[name] = self._morsel_cache[p][name] continue - # It's critical we use the Morsel so the coded_value - # (based on cookie version) is preserved - mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel())) - mrsl_val.set(cookie.key, cookie.value, cookie.coded_value) + # Build and cache the morsel + mrsl_val = self._build_morsel(cookie) self._morsel_cache[p][name] = mrsl_val filtered[name] = mrsl_val return filtered + def _build_morsel(self, cookie: Morsel[str]) -> Morsel[str]: + """Build a morsel for sending, respecting quote_cookie setting.""" + if self._quote_cookie and cookie.coded_value and cookie.coded_value[0] == '"': + return preserve_morsel_with_coded_value(cookie) + morsel: Morsel[str] = Morsel() + if self._quote_cookie: + value, coded_value = _SIMPLE_COOKIE.value_encode(cookie.value) + else: + coded_value = value = cookie.value + # We use __setstate__ instead of the public set() API because it allows us to + # bypass validation and set already validated state. This is more stable than + # setting protected attributes directly and unlikely to change since it would + # break pickling. 
+ morsel.__setstate__({"key": cookie.key, "value": value, "coded_value": coded_value}) # type: ignore[attr-defined] + return morsel + @staticmethod def _is_domain_match(domain: str, hostname: str) -> bool: """Implements domain matching adhering to RFC 6265.""" diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index 6bf5a9dea74..0c5576823f1 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -7,7 +7,6 @@ import tempfile import types import warnings -from http.cookies import SimpleCookie from types import MappingProxyType from typing import ( TYPE_CHECKING, @@ -36,6 +35,7 @@ from yarl import URL from . import hdrs +from ._cookie_helpers import parse_cookie_headers from .abc import AbstractStreamWriter from .helpers import ( _SENTINEL, @@ -589,9 +589,10 @@ def cookies(self) -> Mapping[str, str]: A read-only dictionary-like object. """ - raw = self.headers.get(hdrs.COOKIE, "") - parsed = SimpleCookie(raw) - return MappingProxyType({key: val.value for key, val in parsed.items()}) + # Use parse_cookie_headers for more lenient parsing that accepts + # special characters in cookie names (fixes #2683) + parsed = parse_cookie_headers((self.headers.get(hdrs.COOKIE, ""),)) + return MappingProxyType({name: morsel.value for name, morsel in parsed}) @reify def http_range(self) -> slice: diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index 8b389cc11f6..b495a07cb6f 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -336,6 +336,7 @@ toplevel towncrier tp tuples +ue UI un unawaited diff --git a/pyproject.toml b/pyproject.toml index 3ef37b5978b..df8b8465348 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -82,7 +82,7 @@ skip = "pp*" [tool.codespell] skip = '.git,*.pdf,*.svg,Makefile,CONTRIBUTORS.txt,venvs,_build' -ignore-words-list = 'te' +ignore-words-list = 'te,ue' [tool.slotscheck] # TODO(3.13): Remove aiohttp.helpers once https://github.com/python/cpython/pull/106771 diff --git 
a/tests/test_client_functional.py b/tests/test_client_functional.py index ca1a7dd1d6b..5c18178b714 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -5,6 +5,7 @@ import http.cookies import io import json +import logging import pathlib import socket import ssl @@ -2691,15 +2692,16 @@ async def handler(request): assert 200 == resp.status -async def test_set_cookies(aiohttp_client) -> None: - async def handler(request): +async def test_set_cookies( + aiohttp_client: AiohttpClient, caplog: pytest.LogCaptureFixture +) -> None: + async def handler(request: web.Request) -> web.Response: ret = web.Response() ret.set_cookie("c1", "cookie1") ret.set_cookie("c2", "cookie2") ret.headers.add( "Set-Cookie", - "ISAWPLB{A7F52349-3531-4DA9-8776-F74BC6F4F1BB}=" - "{925EC0B8-CB17-4BEB-8A35-1033813B0523}; " + "invalid,cookie=value; " # Comma character is not allowed "HttpOnly; Path=/", ) return ret @@ -2708,14 +2710,38 @@ async def handler(request): app.router.add_get("/", handler) client = await aiohttp_client(app) - with mock.patch("aiohttp.client_reqrep.client_logger") as m_log: + with caplog.at_level(logging.WARNING): async with client.get("/") as resp: assert 200 == resp.status cookie_names = {c.key for c in client.session.cookie_jar} _ = resp.cookies assert cookie_names == {"c1", "c2"} - m_log.warning.assert_called_with("Can not load response cookies: %s", mock.ANY) + assert "Can not load cookies: Illegal cookie name 'invalid,cookie'" in caplog.text + + +async def test_set_cookies_with_curly_braces(aiohttp_client: AiohttpClient) -> None: + """Test that cookies with curly braces in names are now accepted (#2683).""" + + async def handler(request: web.Request) -> web.Response: + ret = web.Response() + ret.set_cookie("c1", "cookie1") + ret.headers.add( + "Set-Cookie", + "ISAWPLB{A7F52349-3531-4DA9-8776-F74BC6F4F1BB}=" + "{925EC0B8-CB17-4BEB-8A35-1033813B0523}; " + "HttpOnly; Path=/", + ) + return ret + + app = web.Application() + 
app.router.add_get("/", handler) + client = await aiohttp_client(app) + + async with client.get("/") as resp: + assert 200 == resp.status + cookie_names = {c.key for c in client.session.cookie_jar} + assert cookie_names == {"c1", "ISAWPLB{A7F52349-3531-4DA9-8776-F74BC6F4F1BB}"} async def test_set_cookies_expired(aiohttp_client) -> None: diff --git a/tests/test_client_request.py b/tests/test_client_request.py index b3eb55d921b..2af540599f8 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -612,6 +612,59 @@ def test_gen_netloc_no_port(make_request) -> None: ) +def test_cookie_coded_value_preserved(loop: asyncio.AbstractEventLoop) -> None: + """Verify the coded value of a cookie is preserved.""" + # https://github.com/aio-libs/aiohttp/pull/1453 + req = ClientRequest("get", URL("http://python.org"), loop=loop) + req.update_cookies(cookies=SimpleCookie('ip-cookie="second"; Domain=127.0.0.1;')) + assert req.headers["COOKIE"] == 'ip-cookie="second"' + + +def test_update_cookies_with_special_chars_in_existing_header( + loop: asyncio.AbstractEventLoop, +) -> None: + """Test that update_cookies handles existing cookies with special characters.""" + # Create request with a cookie that has special characters (real-world example) + req = ClientRequest( + "get", + URL("http://python.org"), + headers={"Cookie": "ISAWPLB{A7F52349-3531-4DA9-8776-F74BC6F4F1BB}=value1"}, + loop=loop, + ) + + # Update with another cookie + req.update_cookies(cookies={"normal_cookie": "value2"}) + + # Both cookies should be preserved in the exact order + assert ( + req.headers["COOKIE"] + == "ISAWPLB{A7F52349-3531-4DA9-8776-F74BC6F4F1BB}=value1; normal_cookie=value2" + ) + + +def test_update_cookies_with_quoted_existing_header( + loop: asyncio.AbstractEventLoop, +) -> None: + """Test that update_cookies handles existing cookies with quoted values.""" + # Create request with cookies that have quoted values + req = ClientRequest( + "get", + URL("http://python.org"), + 
headers={"Cookie": 'session="value;with;semicolon"; token=abc123'}, + loop=loop, + ) + + # Update with another cookie + req.update_cookies(cookies={"new_cookie": "new_value"}) + + # All cookies should be preserved with their original coded values + # The quoted value should be preserved as-is + assert ( + req.headers["COOKIE"] + == 'new_cookie=new_value; session="value;with;semicolon"; token=abc123' + ) + + async def test_connection_header( loop: asyncio.AbstractEventLoop, conn: mock.Mock ) -> None: diff --git a/tests/test_cookie_helpers.py b/tests/test_cookie_helpers.py new file mode 100644 index 00000000000..7a2ac7493ee --- /dev/null +++ b/tests/test_cookie_helpers.py @@ -0,0 +1,1031 @@ +"""Tests for internal cookie helper functions.""" + +from http.cookies import CookieError, Morsel, SimpleCookie + +import pytest + +from aiohttp import _cookie_helpers as helpers +from aiohttp._cookie_helpers import ( + parse_cookie_headers, + preserve_morsel_with_coded_value, +) + + +def test_known_attrs_is_superset_of_morsel_reserved() -> None: + """Test that _COOKIE_KNOWN_ATTRS contains all Morsel._reserved attributes.""" + # Get Morsel._reserved attributes (lowercase) + morsel_reserved = {attr.lower() for attr in Morsel._reserved} # type: ignore[attr-defined] + + # _COOKIE_KNOWN_ATTRS should be a superset of morsel_reserved + assert ( + helpers._COOKIE_KNOWN_ATTRS >= morsel_reserved + ), f"_COOKIE_KNOWN_ATTRS is missing: {morsel_reserved - helpers._COOKIE_KNOWN_ATTRS}" + + +def test_bool_attrs_is_superset_of_morsel_flags() -> None: + """Test that _COOKIE_BOOL_ATTRS contains all Morsel._flags attributes.""" + # Get Morsel._flags attributes (lowercase) + morsel_flags = {attr.lower() for attr in Morsel._flags} # type: ignore[attr-defined] + + # _COOKIE_BOOL_ATTRS should be a superset of morsel_flags + assert ( + helpers._COOKIE_BOOL_ATTRS >= morsel_flags + ), f"_COOKIE_BOOL_ATTRS is missing: {morsel_flags - helpers._COOKIE_BOOL_ATTRS}" + + +def 
test_preserve_morsel_with_coded_value() -> None: + """Test preserve_morsel_with_coded_value preserves coded_value exactly.""" + # Create a cookie with a coded_value different from value + cookie: Morsel[str] = Morsel() + cookie.set("test_cookie", "decoded value", "encoded%20value") + + # Preserve the coded_value + result = preserve_morsel_with_coded_value(cookie) + + # Check that all values are preserved + assert result.key == "test_cookie" + assert result.value == "decoded value" + assert result.coded_value == "encoded%20value" + + # Should be a different Morsel instance + assert result is not cookie + + +def test_preserve_morsel_with_coded_value_no_coded_value() -> None: + """Test preserve_morsel_with_coded_value when coded_value is same as value.""" + cookie: Morsel[str] = Morsel() + cookie.set("test_cookie", "simple_value", "simple_value") + + result = preserve_morsel_with_coded_value(cookie) + + assert result.key == "test_cookie" + assert result.value == "simple_value" + assert result.coded_value == "simple_value" + + +def test_parse_cookie_headers_simple() -> None: + """Test parse_cookie_headers with simple cookies.""" + headers = ["name=value", "session=abc123"] + + result = parse_cookie_headers(headers) + + assert len(result) == 2 + assert result[0][0] == "name" + assert result[0][1].key == "name" + assert result[0][1].value == "value" + assert result[1][0] == "session" + assert result[1][1].key == "session" + assert result[1][1].value == "abc123" + + +def test_parse_cookie_headers_with_attributes() -> None: + """Test parse_cookie_headers with cookie attributes.""" + headers = [ + "sessionid=value123; Path=/; HttpOnly; Secure", + "user=john; Domain=.example.com; Max-Age=3600", + ] + + result = parse_cookie_headers(headers) + + assert len(result) == 2 + + # First cookie + name1, morsel1 = result[0] + assert name1 == "sessionid" + assert morsel1.value == "value123" + assert morsel1["path"] == "/" + assert morsel1["httponly"] is True + assert morsel1["secure"] 
is True + + # Second cookie + name2, morsel2 = result[1] + assert name2 == "user" + assert morsel2.value == "john" + assert morsel2["domain"] == ".example.com" + assert morsel2["max-age"] == "3600" + + +def test_parse_cookie_headers_special_chars_in_names() -> None: + """Test parse_cookie_headers accepts special characters in names (#2683).""" + # These should be accepted with relaxed validation + headers = [ + "ISAWPLB{A7F52349-3531-4DA9-8776-F74BC6F4F1BB}=value1", + "cookie[index]=value2", + "cookie(param)=value3", + "cookie:name=value4", + "cookie@domain=value5", + ] + + result = parse_cookie_headers(headers) + + assert len(result) == 5 + expected_names = [ + "ISAWPLB{A7F52349-3531-4DA9-8776-F74BC6F4F1BB}", + "cookie[index]", + "cookie(param)", + "cookie:name", + "cookie@domain", + ] + + for i, (name, morsel) in enumerate(result): + assert name == expected_names[i] + assert morsel.key == expected_names[i] + assert morsel.value == f"value{i+1}" + + +def test_parse_cookie_headers_invalid_names() -> None: + """Test parse_cookie_headers rejects truly invalid cookie names.""" + # These should be rejected even with relaxed validation + headers = [ + "invalid\tcookie=value", # Tab character + "invalid\ncookie=value", # Newline + "invalid\rcookie=value", # Carriage return + "\x00badname=value", # Null character + "name with spaces=value", # Spaces in name + ] + + result = parse_cookie_headers(headers) + + # All should be skipped + assert len(result) == 0 + + +def test_parse_cookie_headers_empty_and_invalid() -> None: + """Test parse_cookie_headers handles empty and invalid formats.""" + headers = [ + "", # Empty header + " ", # Whitespace only + "=value", # No name + "name=", # Empty value (should be accepted) + "justname", # No value (should be skipped) + "path=/", # Reserved attribute as name (should be skipped) + "Domain=.com", # Reserved attribute as name (should be skipped) + ] + + result = parse_cookie_headers(headers) + + # Only "name=" should be accepted + 
assert len(result) == 1 + assert result[0][0] == "name" + assert result[0][1].value == "" + + +def test_parse_cookie_headers_quoted_values() -> None: + """Test parse_cookie_headers handles quoted values correctly.""" + headers = [ + 'name="quoted value"', + 'session="with;semicolon"', + 'data="with\\"escaped\\""', + ] + + result = parse_cookie_headers(headers) + + assert len(result) == 3 + assert result[0][1].value == "quoted value" + assert result[1][1].value == "with;semicolon" + assert result[2][1].value == 'with"escaped"' + + +@pytest.mark.parametrize( + "header", + [ + 'session="abc;xyz"; token=123', + 'data="value;with;multiple;semicolons"; next=cookie', + 'complex="a=b;c=d"; simple=value', + ], +) +def test_parse_cookie_headers_semicolon_in_quoted_values(header: str) -> None: + """ + Test that semicolons inside properly quoted values are handled correctly. + + Cookie values can contain semicolons when properly quoted. This test ensures + that our parser handles these cases correctly, matching SimpleCookie behavior. 
+ """ + # Test with SimpleCookie + sc = SimpleCookie() + sc.load(header) + + # Test with our parser + result = parse_cookie_headers([header]) + + # Should parse the same number of cookies + assert len(result) == len(sc) + + # Verify each cookie matches SimpleCookie + for (name, morsel), (sc_name, sc_morsel) in zip(result, sc.items()): + assert name == sc_name + assert morsel.value == sc_morsel.value + + +def test_parse_cookie_headers_multiple_cookies_same_header() -> None: + """Test parse_cookie_headers with multiple cookies in one header.""" + # Note: SimpleCookie includes the comma as part of the first cookie's value + headers = ["cookie1=value1, cookie2=value2"] + + result = parse_cookie_headers(headers) + + # Should parse as two separate cookies + assert len(result) == 2 + assert result[0][0] == "cookie1" + assert result[0][1].value == "value1," # Comma is included in the value + assert result[1][0] == "cookie2" + assert result[1][1].value == "value2" + + +@pytest.mark.parametrize( + "header", + [ + # Standard cookies + "session=abc123", + "user=john; Path=/", + "token=xyz; Secure; HttpOnly", + # Empty values + "empty=", + # Quoted values + 'quoted="value with spaces"', + # Multiple attributes + "complex=value; Domain=.example.com; Path=/app; Max-Age=3600", + ], +) +def test_parse_cookie_headers_compatibility_with_simple_cookie(header: str) -> None: + """Test parse_cookie_headers is bug-for-bug compatible with SimpleCookie.load.""" + # Parse with SimpleCookie + sc = SimpleCookie() + sc.load(header) + + # Parse with our function + result = parse_cookie_headers([header]) + + # Should have same number of cookies + assert len(result) == len(sc) + + # Compare each cookie + for name, morsel in result: + assert name in sc + sc_morsel = sc[name] + + # Compare values + assert morsel.value == sc_morsel.value + assert morsel.key == sc_morsel.key + + # Compare attributes (only those that SimpleCookie would set) + for attr in ["path", "domain", "max-age"]: + assert 
morsel.get(attr) == sc_morsel.get(attr) + + # Boolean attributes are handled differently + # SimpleCookie sets them to empty string when not present, True when present + for bool_attr in ["secure", "httponly"]: + # Only check if SimpleCookie has the attribute set to True + if sc_morsel.get(bool_attr) is True: + assert morsel.get(bool_attr) is True + + +def test_parse_cookie_headers_relaxed_validation_differences() -> None: + """Test where parse_cookie_headers differs from SimpleCookie (relaxed validation).""" + # Test cookies that SimpleCookie rejects with CookieError + rejected_by_simplecookie = [ + ("cookie{with}braces=value1", "cookie{with}braces", "value1"), + ("cookie(with)parens=value3", "cookie(with)parens", "value3"), + ("cookie@with@at=value5", "cookie@with@at", "value5"), + ] + + for header, expected_name, expected_value in rejected_by_simplecookie: + # SimpleCookie should reject these with CookieError + sc = SimpleCookie() + with pytest.raises(CookieError): + sc.load(header) + + # Our parser should accept them + result = parse_cookie_headers([header]) + assert len(result) == 1 # We accept + assert result[0][0] == expected_name + assert result[0][1].value == expected_value + + # Test cookies that SimpleCookie accepts (but we handle more consistently) + accepted_by_simplecookie = [ + ("cookie[with]brackets=value2", "cookie[with]brackets", "value2"), + ("cookie:with:colons=value4", "cookie:with:colons", "value4"), + ] + + for header, expected_name, expected_value in accepted_by_simplecookie: + # SimpleCookie accepts these + sc = SimpleCookie() + sc.load(header) + # May or may not parse correctly in SimpleCookie + + # Our parser should accept them consistently + result = parse_cookie_headers([header]) + assert len(result) == 1 + assert result[0][0] == expected_name + assert result[0][1].value == expected_value + + +def test_parse_cookie_headers_case_insensitive_attrs() -> None: + """Test that known attributes are handled case-insensitively.""" + headers = [ 
+ "cookie1=value1; PATH=/test; DOMAIN=example.com", + "cookie2=value2; Secure; HTTPONLY; max-AGE=60", + ] + + result = parse_cookie_headers(headers) + + assert len(result) == 2 + + # First cookie - attributes should be recognized despite case + assert result[0][1]["path"] == "/test" + assert result[0][1]["domain"] == "example.com" + + # Second cookie + assert result[1][1]["secure"] is True + assert result[1][1]["httponly"] is True + assert result[1][1]["max-age"] == "60" + + +def test_parse_cookie_headers_unknown_attrs_ignored() -> None: + """Test that unknown attributes are treated as new cookies (same as SimpleCookie).""" + headers = [ + "cookie=value; Path=/; unknownattr=ignored; HttpOnly", + ] + + result = parse_cookie_headers(headers) + + # SimpleCookie treats unknown attributes with values as new cookies + assert len(result) == 2 + + # First cookie + assert result[0][0] == "cookie" + assert result[0][1]["path"] == "/" + assert result[0][1]["httponly"] == "" # Not set on first cookie + + # Second cookie (the unknown attribute) + assert result[1][0] == "unknownattr" + assert result[1][1].value == "ignored" + assert result[1][1]["httponly"] is True # HttpOnly applies to this cookie + + +def test_parse_cookie_headers_complex_real_world() -> None: + """Test parse_cookie_headers with complex real-world examples.""" + headers = [ + # AWS ELB cookie + "AWSELB=ABCDEF1234567890ABCDEF1234567890ABCDEF1234567890; Path=/", + # Google Analytics + "_ga=GA1.2.1234567890.1234567890; Domain=.example.com; Path=/; Expires=Thu, 31-Dec-2025 23:59:59 GMT", + # Session with all attributes + "session_id=s%3AabcXYZ123.signature123; Path=/; Secure; HttpOnly; SameSite=Strict", + ] + + result = parse_cookie_headers(headers) + + assert len(result) == 3 + + # Check each cookie parsed correctly + assert result[0][0] == "AWSELB" + assert result[1][0] == "_ga" + assert result[2][0] == "session_id" + + # Session cookie should have all attributes + session_morsel = result[2][1] + assert 
session_morsel["secure"] is True + assert session_morsel["httponly"] is True + assert session_morsel.get("samesite") == "Strict" + + +def test_parse_cookie_headers_boolean_attrs() -> None: + """Test that boolean attributes (secure, httponly) work correctly.""" + # Test secure attribute variations + headers = [ + "cookie1=value1; Secure", + "cookie2=value2; Secure=", + "cookie3=value3; Secure=true", # Non-standard but might occur + ] + + result = parse_cookie_headers(headers) + assert len(result) == 3 + + # All should have secure=True + for name, morsel in result: + assert morsel.get("secure") is True, f"{name} should have secure=True" + + # Test httponly attribute variations + headers = [ + "cookie4=value4; HttpOnly", + "cookie5=value5; HttpOnly=", + ] + + result = parse_cookie_headers(headers) + assert len(result) == 2 + + # All should have httponly=True + for name, morsel in result: + assert morsel.get("httponly") is True, f"{name} should have httponly=True" + + +def test_parse_cookie_headers_boolean_attrs_with_partitioned() -> None: + """Test that boolean attributes including partitioned work correctly.""" + # Test secure attribute variations + secure_headers = [ + "cookie1=value1; Secure", + "cookie2=value2; Secure=", + "cookie3=value3; Secure=true", # Non-standard but might occur + ] + + result = parse_cookie_headers(secure_headers) + assert len(result) == 3 + for name, morsel in result: + assert morsel.get("secure") is True, f"{name} should have secure=True" + + # Test httponly attribute variations + httponly_headers = [ + "cookie4=value4; HttpOnly", + "cookie5=value5; HttpOnly=", + ] + + result = parse_cookie_headers(httponly_headers) + assert len(result) == 2 + for name, morsel in result: + assert morsel.get("httponly") is True, f"{name} should have httponly=True" + + # Test partitioned attribute variations + partitioned_headers = [ + "cookie6=value6; Partitioned", + "cookie7=value7; Partitioned=", + "cookie8=value8; Partitioned=yes", # Non-standard but 
might occur + ] + + result = parse_cookie_headers(partitioned_headers) + assert len(result) == 3 + for name, morsel in result: + assert morsel.get("partitioned") is True, f"{name} should have partitioned=True" + + +def test_parse_cookie_headers_encoded_values() -> None: + """Test that parse_cookie_headers preserves encoded values.""" + headers = [ + "encoded=hello%20world", + "url=https%3A%2F%2Fexample.com%2Fpath", + "special=%21%40%23%24%25%5E%26*%28%29", + ] + + result = parse_cookie_headers(headers) + + assert len(result) == 3 + # Values should be preserved as-is (not decoded) + assert result[0][1].value == "hello%20world" + assert result[1][1].value == "https%3A%2F%2Fexample.com%2Fpath" + assert result[2][1].value == "%21%40%23%24%25%5E%26*%28%29" + + +def test_parse_cookie_headers_partitioned() -> None: + """ + Test that parse_cookie_headers handles partitioned attribute correctly. + + This tests the fix for issue #10380 - partitioned cookies support. + The partitioned attribute is a boolean flag like secure and httponly. 
+ """ + headers = [ + "cookie1=value1; Partitioned", + "cookie2=value2; Partitioned=", + "cookie3=value3; Partitioned=true", # Non-standard but might occur + "cookie4=value4; Secure; Partitioned; HttpOnly", + "cookie5=value5; Domain=.example.com; Path=/; Partitioned", + ] + + result = parse_cookie_headers(headers) + + assert len(result) == 5 + + # All cookies should have partitioned=True + for i, (name, morsel) in enumerate(result): + assert ( + morsel.get("partitioned") is True + ), f"Cookie {i+1} should have partitioned=True" + assert name == f"cookie{i+1}" + assert morsel.value == f"value{i+1}" + + # Cookie 4 should also have secure and httponly + assert result[3][1].get("secure") is True + assert result[3][1].get("httponly") is True + + # Cookie 5 should also have domain and path + assert result[4][1].get("domain") == ".example.com" + assert result[4][1].get("path") == "/" + + +def test_parse_cookie_headers_partitioned_case_insensitive() -> None: + """Test that partitioned attribute is recognized case-insensitively.""" + headers = [ + "cookie1=value1; partitioned", # lowercase + "cookie2=value2; PARTITIONED", # uppercase + "cookie3=value3; Partitioned", # title case + "cookie4=value4; PaRtItIoNeD", # mixed case + ] + + result = parse_cookie_headers(headers) + + assert len(result) == 4 + + # All should be recognized as partitioned + for i, (_, morsel) in enumerate(result): + assert ( + morsel.get("partitioned") is True + ), f"Cookie {i+1} should have partitioned=True" + + +def test_parse_cookie_headers_partitioned_not_set() -> None: + """Test that cookies without partitioned attribute don't have it set.""" + headers = [ + "normal=value; Secure; HttpOnly", + "regular=cookie; Path=/", + ] + + result = parse_cookie_headers(headers) + + assert len(result) == 2 + + # Check that partitioned is not set (empty string is the default for flags in Morsel) + assert result[0][1].get("partitioned", "") == "" + assert result[1][1].get("partitioned", "") == "" + + +# Tests that 
don't require partitioned support in SimpleCookie +def test_parse_cookie_headers_partitioned_with_other_attrs_manual() -> None: + """ + Test parsing logic for partitioned cookies combined with all other attributes. + + This test verifies our parsing logic handles partitioned correctly as a boolean + attribute regardless of SimpleCookie support. + """ + # Test that our parser recognizes partitioned in _COOKIE_KNOWN_ATTRS and _COOKIE_BOOL_ATTRS + assert "partitioned" in helpers._COOKIE_KNOWN_ATTRS + assert "partitioned" in helpers._COOKIE_BOOL_ATTRS + + # Test a simple case that won't trigger SimpleCookie errors + headers = ["session=abc123; Secure; HttpOnly"] + result = parse_cookie_headers(headers) + + assert len(result) == 1 + assert result[0][0] == "session" + assert result[0][1]["secure"] is True + assert result[0][1]["httponly"] is True + + +def test_cookie_helpers_constants_include_partitioned() -> None: + """Test that cookie helper constants include partitioned attribute.""" + # Test our constants include partitioned + assert "partitioned" in helpers._COOKIE_KNOWN_ATTRS + assert "partitioned" in helpers._COOKIE_BOOL_ATTRS + + +@pytest.mark.parametrize( + "test_string", + [ + " Partitioned ", + " partitioned ", + " PARTITIONED ", + " Partitioned; ", + " Partitioned= ", + " Partitioned=true ", + ], +) +def test_cookie_pattern_matches_partitioned_attribute(test_string: str) -> None: + """Test that the cookie pattern regex matches various partitioned attribute formats.""" + pattern = helpers._COOKIE_PATTERN + match = pattern.match(test_string) + assert match is not None, f"Pattern should match '{test_string}'" + assert match.group("key").lower() == "partitioned" + + +def test_parse_cookie_headers_issue_7993_double_quotes() -> None: + """ + Test that cookies with unmatched opening quotes don't break parsing of subsequent cookies. 
+ + This reproduces issue #7993 where a cookie containing an unmatched opening double quote + causes subsequent cookies to be silently dropped. + NOTE: This only fixes the specific case where a value starts with a quote but doesn't + end with one (e.g., 'cookie="value'). Other malformed quote cases still behave like + SimpleCookie for compatibility. + """ + # Test case from the issue + headers = ['foo=bar; baz="qux; foo2=bar2'] + + result = parse_cookie_headers(headers) + + # Should parse all cookies correctly + assert len(result) == 3 + assert result[0][0] == "foo" + assert result[0][1].value == "bar" + assert result[1][0] == "baz" + assert result[1][1].value == '"qux' # Unmatched quote included + assert result[2][0] == "foo2" + assert result[2][1].value == "bar2" + + +def test_parse_cookie_headers_empty_headers() -> None: + """Test handling of empty headers in the sequence.""" + # Empty header should be skipped + result = parse_cookie_headers(["", "name=value"]) + assert len(result) == 1 + assert result[0][0] == "name" + assert result[0][1].value == "value" + + # Multiple empty headers + result = parse_cookie_headers(["", "", ""]) + assert result == [] + + # Empty headers mixed with valid cookies + result = parse_cookie_headers(["", "a=1", "", "b=2", ""]) + assert len(result) == 2 + assert result[0][0] == "a" + assert result[1][0] == "b" + + +def test_parse_cookie_headers_invalid_cookie_syntax() -> None: + """Test handling of invalid cookie syntax.""" + # No valid cookie pattern + result = parse_cookie_headers(["@#$%^&*()"]) + assert result == [] + + # Cookie name without value + result = parse_cookie_headers(["name"]) + assert result == [] + + # Multiple invalid patterns + result = parse_cookie_headers(["!!!!", "????", "name", "@@@"]) + assert result == [] + + +def test_parse_cookie_headers_illegal_cookie_names( + caplog: pytest.LogCaptureFixture, +) -> None: + """ + Test that illegal cookie names are rejected. 
+ + Note: When a known attribute name is used as a cookie name at the start, + parsing stops early (before any warning can be logged). Warnings are only + logged when illegal names appear after a valid cookie. + """ + # Cookie name that is a known attribute (illegal) - parsing stops early + result = parse_cookie_headers(["path=value; domain=test"]) + assert result == [] + + # Cookie name that doesn't match the pattern + result = parse_cookie_headers(["=value"]) + assert result == [] + + # Valid cookie after illegal one - parsing stops at illegal + result = parse_cookie_headers(["domain=bad; good=value"]) + assert result == [] + + # Illegal cookie name that appears after a valid cookie triggers warning + result = parse_cookie_headers(["good=value; Path=/; invalid,cookie=value;"]) + assert len(result) == 1 + assert result[0][0] == "good" + assert "Illegal cookie name 'invalid,cookie'" in caplog.text + + +def test_parse_cookie_headers_attributes_before_cookie() -> None: + """Test that attributes before any cookie are invalid.""" + # Path attribute before cookie + result = parse_cookie_headers(["Path=/; name=value"]) + assert result == [] + + # Domain attribute before cookie + result = parse_cookie_headers(["Domain=.example.com; name=value"]) + assert result == [] + + # Multiple attributes before cookie + result = parse_cookie_headers(["Path=/; Domain=.example.com; Secure; name=value"]) + assert result == [] + + +def test_parse_cookie_headers_attributes_without_values() -> None: + """Test handling of attributes with missing values.""" + # Boolean attribute without value (valid) + result = parse_cookie_headers(["name=value; Secure"]) + assert len(result) == 1 + assert result[0][1]["secure"] is True + + # Non-boolean attribute without value (invalid, stops parsing) + result = parse_cookie_headers(["name=value; Path"]) + assert len(result) == 1 + # Path without value stops further attribute parsing + + # Multiple cookies, invalid attribute in middle + result = 
parse_cookie_headers(["name=value; Path; Secure"]) + assert len(result) == 1 + # Secure is not parsed because Path without value stops parsing + + +def test_parse_cookie_headers_dollar_prefixed_names() -> None: + """Test handling of cookie names starting with $.""" + # $Version without preceding cookie (ignored) + result = parse_cookie_headers(["$Version=1; name=value"]) + assert len(result) == 1 + assert result[0][0] == "name" + + # Multiple $ prefixed without cookie (all ignored) + result = parse_cookie_headers(["$Version=1; $Path=/; $Domain=.com; name=value"]) + assert len(result) == 1 + assert result[0][0] == "name" + + # $ prefix at start is ignored, cookie follows + result = parse_cookie_headers(["$Unknown=123; valid=cookie"]) + assert len(result) == 1 + assert result[0][0] == "valid" + + +def test_parse_cookie_headers_dollar_attributes() -> None: + """Test handling of $ prefixed attributes after cookies.""" + # Test multiple $ attributes with cookie (case-insensitive like SimpleCookie) + result = parse_cookie_headers(["name=value; $Path=/test; $Domain=.example.com"]) + assert len(result) == 1 + assert result[0][0] == "name" + assert result[0][1]["path"] == "/test" + assert result[0][1]["domain"] == ".example.com" + + # Test unknown $ attribute (should be ignored) + result = parse_cookie_headers(["name=value; $Unknown=test"]) + assert len(result) == 1 + assert result[0][0] == "name" + # $Unknown should not be set + + # Test $ attribute with empty value + result = parse_cookie_headers(["name=value; $Path="]) + assert len(result) == 1 + assert result[0][1]["path"] == "" + + # Test case sensitivity compatibility with SimpleCookie + result = parse_cookie_headers(["test=value; $path=/lower; $PATH=/upper"]) + assert len(result) == 1 + # Last one wins, and it's case-insensitive + assert result[0][1]["path"] == "/upper" + + +def test_parse_cookie_headers_attributes_after_illegal_cookie() -> None: + """ + Test that attributes after an illegal cookie name are handled 
correctly. + + This covers the branches where current_morsel is None because an illegal + cookie name was encountered. + """ + # Illegal cookie followed by $ attribute + result = parse_cookie_headers(["good=value; invalid,cookie=bad; $Path=/test"]) + assert len(result) == 1 + assert result[0][0] == "good" + # $Path should be ignored since current_morsel is None after illegal cookie + + # Illegal cookie followed by boolean attribute + result = parse_cookie_headers(["good=value; invalid,cookie=bad; HttpOnly"]) + assert len(result) == 1 + assert result[0][0] == "good" + # HttpOnly should be ignored since current_morsel is None + + # Illegal cookie followed by regular attribute with value + result = parse_cookie_headers(["good=value; invalid,cookie=bad; Max-Age=3600"]) + assert len(result) == 1 + assert result[0][0] == "good" + # Max-Age should be ignored since current_morsel is None + + # Multiple attributes after illegal cookie + result = parse_cookie_headers( + ["good=value; invalid,cookie=bad; $Path=/; HttpOnly; Max-Age=60; Domain=.com"] + ) + assert len(result) == 1 + assert result[0][0] == "good" + # All attributes should be ignored after illegal cookie + + +def test_parse_cookie_headers_unmatched_quotes_compatibility() -> None: + """ + Test that most unmatched quote scenarios behave like SimpleCookie. + + For compatibility, we only handle the specific case of unmatched opening quotes + (e.g., 'cookie="value'). Other cases behave the same as SimpleCookie. 
+ """ + # Cases that SimpleCookie and our parser both fail to parse completely + incompatible_cases = [ + 'cookie1=val"ue; cookie2=value2', # codespell:ignore + 'cookie1=value"; cookie2=value2', + 'cookie1=va"l"ue"; cookie2=value2', # codespell:ignore + 'cookie1=value1; cookie2=val"ue; cookie3=value3', # codespell:ignore + ] + + for header in incompatible_cases: + # Test SimpleCookie behavior + sc = SimpleCookie() + sc.load(header) + sc_cookies = list(sc.items()) + + # Test our parser behavior + result = parse_cookie_headers([header]) + + # Both should parse the same cookies (partial parsing) + assert len(result) == len(sc_cookies), ( + f"Header: {header}\n" + f"SimpleCookie parsed: {len(sc_cookies)} cookies\n" + f"Our parser parsed: {len(result)} cookies" + ) + + # The case we specifically fix (unmatched opening quote) + fixed_case = 'cookie1=value1; cookie2="unmatched; cookie3=value3' + + # SimpleCookie fails to parse cookie3 + sc = SimpleCookie() + sc.load(fixed_case) + assert len(sc) == 1 # Only cookie1 + + # Our parser handles it better + result = parse_cookie_headers([fixed_case]) + assert len(result) == 3 # All three cookies + assert result[0][0] == "cookie1" + assert result[0][1].value == "value1" + assert result[1][0] == "cookie2" + assert result[1][1].value == '"unmatched' + assert result[2][0] == "cookie3" + assert result[2][1].value == "value3" + + +def test_parse_cookie_headers_expires_attribute() -> None: + """Test parse_cookie_headers handles expires attribute with date formats.""" + headers = [ + "session=abc; Expires=Wed, 09 Jun 2021 10:18:14 GMT", + "user=xyz; expires=Wednesday, 09-Jun-21 10:18:14 GMT", + "token=123; EXPIRES=Wed, 09 Jun 2021 10:18:14 GMT", + ] + + result = parse_cookie_headers(headers) + + assert len(result) == 3 + for _, morsel in result: + assert "expires" in morsel + assert "GMT" in morsel["expires"] + + +def test_parse_cookie_headers_edge_cases() -> None: + """Test various edge cases.""" + # Very long cookie values + 
long_value = "x" * 4096 + result = parse_cookie_headers([f"name={long_value}"]) + assert len(result) == 1 + assert result[0][1].value == long_value + + +def test_parse_cookie_headers_various_date_formats_issue_4327() -> None: + """ + Test that parse_cookie_headers handles various date formats per RFC 6265. + + This tests the fix for issue #4327 - support for RFC 822, RFC 850, + and ANSI C asctime() date formats in cookie expiration. + """ + # Test various date formats + headers = [ + # RFC 822 format (preferred format) + "cookie1=value1; Expires=Wed, 09 Jun 2021 10:18:14 GMT", + # RFC 850 format (obsolete but still used) + "cookie2=value2; Expires=Wednesday, 09-Jun-21 10:18:14 GMT", + # RFC 822 with dashes + "cookie3=value3; Expires=Wed, 09-Jun-2021 10:18:14 GMT", + # ANSI C asctime() format (aiohttp extension - not supported by SimpleCookie) + "cookie4=value4; Expires=Wed Jun 9 10:18:14 2021", + # Various other formats seen in the wild + "cookie5=value5; Expires=Thu, 01 Jan 2030 00:00:00 GMT", + "cookie6=value6; Expires=Mon, 31-Dec-99 23:59:59 GMT", + "cookie7=value7; Expires=Tue, 01-Jan-30 00:00:00 GMT", + ] + + result = parse_cookie_headers(headers) + + # All cookies should be parsed + assert len(result) == 7 + + # Check each cookie was parsed with its expires attribute + expected_cookies = [ + ("cookie1", "value1", "Wed, 09 Jun 2021 10:18:14 GMT"), + ("cookie2", "value2", "Wednesday, 09-Jun-21 10:18:14 GMT"), + ("cookie3", "value3", "Wed, 09-Jun-2021 10:18:14 GMT"), + ("cookie4", "value4", "Wed Jun 9 10:18:14 2021"), + ("cookie5", "value5", "Thu, 01 Jan 2030 00:00:00 GMT"), + ("cookie6", "value6", "Mon, 31-Dec-99 23:59:59 GMT"), + ("cookie7", "value7", "Tue, 01-Jan-30 00:00:00 GMT"), + ] + + for (name, morsel), (exp_name, exp_value, exp_expires) in zip( + result, expected_cookies + ): + assert name == exp_name + assert morsel.value == exp_value + assert morsel.get("expires") == exp_expires + + +def test_parse_cookie_headers_ansi_c_asctime_format() -> None: + 
""" + Test parsing of ANSI C asctime() format. + + This tests support for ANSI C asctime() format (e.g., "Wed Jun 9 10:18:14 2021"). + NOTE: This is an aiohttp extension - SimpleCookie does NOT support this format. + """ + headers = ["cookie1=value1; Expires=Wed Jun 9 10:18:14 2021"] + + result = parse_cookie_headers(headers) + + # Should parse correctly with the expires attribute preserved + assert len(result) == 1 + assert result[0][0] == "cookie1" + assert result[0][1].value == "value1" + assert result[0][1]["expires"] == "Wed Jun 9 10:18:14 2021" + + +def test_parse_cookie_headers_rfc2822_timezone_issue_4493() -> None: + """ + Test that parse_cookie_headers handles RFC 2822 timezone formats. + + This tests the fix for issue #4493 - support for RFC 2822-compliant dates + with timezone offsets like -0000, +0100, etc. + NOTE: This is an aiohttp extension - SimpleCookie does NOT support this format. + """ + headers = [ + # RFC 2822 with -0000 timezone (common in some APIs) + "hello=world; expires=Wed, 15 Jan 2020 09:45:07 -0000", + # RFC 2822 with positive offset + "session=abc123; expires=Thu, 01 Feb 2024 14:30:00 +0100", + # RFC 2822 with negative offset + "token=xyz789; expires=Fri, 02 Mar 2025 08:15:30 -0500", + # Standard GMT for comparison + "classic=cookie; expires=Sat, 03 Apr 2026 12:00:00 GMT", + ] + + result = parse_cookie_headers(headers) + + # All cookies should be parsed + assert len(result) == 4 + + # Check each cookie was parsed with its expires attribute + assert result[0][0] == "hello" + assert result[0][1].value == "world" + assert result[0][1]["expires"] == "Wed, 15 Jan 2020 09:45:07 -0000" + + assert result[1][0] == "session" + assert result[1][1].value == "abc123" + assert result[1][1]["expires"] == "Thu, 01 Feb 2024 14:30:00 +0100" + + assert result[2][0] == "token" + assert result[2][1].value == "xyz789" + assert result[2][1]["expires"] == "Fri, 02 Mar 2025 08:15:30 -0500" + + assert result[3][0] == "classic" + assert result[3][1].value == 
"cookie" + assert result[3][1]["expires"] == "Sat, 03 Apr 2026 12:00:00 GMT" + + +def test_parse_cookie_headers_rfc2822_with_attributes() -> None: + """Test that RFC 2822 dates work correctly with other cookie attributes.""" + headers = [ + "session=abc123; expires=Wed, 15 Jan 2020 09:45:07 -0000; Path=/; HttpOnly; Secure", + "token=xyz789; expires=Thu, 01 Feb 2024 14:30:00 +0100; Domain=.example.com; SameSite=Strict", + ] + + result = parse_cookie_headers(headers) + + assert len(result) == 2 + + # First cookie + assert result[0][0] == "session" + assert result[0][1].value == "abc123" + assert result[0][1]["expires"] == "Wed, 15 Jan 2020 09:45:07 -0000" + assert result[0][1]["path"] == "/" + assert result[0][1]["httponly"] is True + assert result[0][1]["secure"] is True + + # Second cookie + assert result[1][0] == "token" + assert result[1][1].value == "xyz789" + assert result[1][1]["expires"] == "Thu, 01 Feb 2024 14:30:00 +0100" + assert result[1][1]["domain"] == ".example.com" + assert result[1][1]["samesite"] == "Strict" + + +def test_parse_cookie_headers_date_formats_with_attributes() -> None: + """Test that date formats work correctly with other cookie attributes.""" + headers = [ + "session=abc123; Expires=Wed, 09 Jun 2030 10:18:14 GMT; Path=/; HttpOnly; Secure", + "token=xyz789; Expires=Wednesday, 09-Jun-30 10:18:14 GMT; Domain=.example.com; SameSite=Strict", + ] + + result = parse_cookie_headers(headers) + + assert len(result) == 2 + + # First cookie + assert result[0][0] == "session" + assert result[0][1].value == "abc123" + assert result[0][1]["expires"] == "Wed, 09 Jun 2030 10:18:14 GMT" + assert result[0][1]["path"] == "/" + assert result[0][1]["httponly"] is True + assert result[0][1]["secure"] is True + + # Second cookie + assert result[1][0] == "token" + assert result[1][1].value == "xyz789" + assert result[1][1]["expires"] == "Wednesday, 09-Jun-30 10:18:14 GMT" + assert result[1][1]["domain"] == ".example.com" + assert result[1][1]["samesite"] == 
"Strict" diff --git a/tests/test_cookiejar.py b/tests/test_cookiejar.py index e1b6e351e3d..15557085b4e 100644 --- a/tests/test_cookiejar.py +++ b/tests/test_cookiejar.py @@ -371,6 +371,8 @@ async def test_domain_filter_ip_cookie_receive(cookies_to_receive) -> None: ("custom-cookie=value/one;", 'Cookie: custom-cookie="value/one"', True), ("custom-cookie=value1;", "Cookie: custom-cookie=value1", True), ("custom-cookie=value/one;", "Cookie: custom-cookie=value/one", False), + ('foo="quoted_value"', 'Cookie: foo="quoted_value"', True), + ('foo="quoted_value"; domain=127.0.0.1', 'Cookie: foo="quoted_value"', True), ], ids=( "IP domain preserved", @@ -378,6 +380,8 @@ async def test_domain_filter_ip_cookie_receive(cookies_to_receive) -> None: "quoted cookie with special char", "quoted cookie w/o special char", "unquoted cookie with special char", + "pre-quoted cookie", + "pre-quoted cookie with domain", ), ) async def test_quotes_correctly_based_on_input( @@ -1225,7 +1229,7 @@ async def test_update_cookies_from_headers_duplicate_names() -> None: url: URL = URL("http://www.example.com/") # Headers with duplicate names but different domains - headers: List[str] = [ + headers = [ "session-id=123456; Domain=.example.com; Path=/", "session-id=789012; Domain=.www.example.com; Path=/", "user-pref=light; Domain=.example.com", @@ -1255,11 +1259,10 @@ async def test_update_cookies_from_headers_invalid_cookies( url: URL = URL("http://example.com/") # Mix of valid and invalid cookies - headers: List[str] = [ + headers = [ "valid-cookie=value123", - "ISAWPLB{A7F52349-3531-4DA9-8776-F74BC6F4F1BB}=" - "{925EC0B8-CB17-4BEB-8A35-1033813B0523}; " - "HttpOnly; Path=/", # This cookie with curly braces causes CookieError + "invalid,cookie=value; " # Comma character is not allowed + "HttpOnly; Path=/", "another-valid=value456", ] @@ -1268,7 +1271,7 @@ async def test_update_cookies_from_headers_invalid_cookies( jar.update_cookies_from_headers(headers, url) # Check that we logged warnings for 
invalid cookies - assert "Can not load response cookies" in caplog.text + assert "Can not load cookies" in caplog.text # Valid cookies should still be added assert len(jar) >= 2 # At least the two clearly valid cookies @@ -1277,6 +1280,52 @@ async def test_update_cookies_from_headers_invalid_cookies( assert "another-valid" in filtered +async def test_update_cookies_from_headers_with_curly_braces() -> None: + """Test that cookies with curly braces in names are now accepted (#2683).""" + jar: CookieJar = CookieJar() + url: URL = URL("http://example.com/") + + # Cookie names with curly braces should now be accepted + headers = [ + "ISAWPLB{A7F52349-3531-4DA9-8776-F74BC6F4F1BB}=" + "{925EC0B8-CB17-4BEB-8A35-1033813B0523}; " + "HttpOnly; Path=/", + "regular-cookie=value123", + ] + + jar.update_cookies_from_headers(headers, url) + + # Both cookies should be added + assert len(jar) == 2 + filtered: BaseCookie[str] = jar.filter_cookies(url) + assert "ISAWPLB{A7F52349-3531-4DA9-8776-F74BC6F4F1BB}" in filtered + assert "regular-cookie" in filtered + + +async def test_update_cookies_from_headers_with_special_chars() -> None: + """Test that cookies with various special characters are accepted.""" + jar: CookieJar = CookieJar() + url: URL = URL("http://example.com/") + + # Various special characters that should now be accepted + headers = [ + "cookie_with_parens=(value)=test123", + "cookie-with-brackets[index]=value456", + "cookie@with@at=value789", + "cookie:with:colons=value000", + ] + + jar.update_cookies_from_headers(headers, url) + + # All cookies should be added + assert len(jar) == 4 + filtered: BaseCookie[str] = jar.filter_cookies(url) + assert "cookie_with_parens" in filtered + assert "cookie-with-brackets[index]" in filtered + assert "cookie@with@at" in filtered + assert "cookie:with:colons" in filtered + + async def test_update_cookies_from_headers_empty_list() -> None: """Test that empty header list is handled gracefully.""" jar: CookieJar = CookieJar() @@ -1293,7 
+1342,7 @@ async def test_update_cookies_from_headers_with_attributes() -> None: jar: CookieJar = CookieJar() url: URL = URL("https://secure.example.com/app/page") - headers: List[str] = [ + headers = [ "secure-cookie=value1; Secure; HttpOnly; SameSite=Strict", "expiring-cookie=value2; Max-Age=3600; Path=/app", "domain-cookie=value3; Domain=.example.com; Path=/", @@ -1348,7 +1397,7 @@ async def test_update_cookies_from_headers_preserves_existing() -> None: ) # Add more cookies via headers - headers: List[str] = [ + headers = [ "new-cookie1=value3", "new-cookie2=value4", ] @@ -1373,7 +1422,7 @@ async def test_update_cookies_from_headers_overwrites_same_cookie() -> None: jar.update_cookies({"session": "old-value"}, url) # Update with new value via headers - headers: List[str] = ["session=new-value"] + headers = ["session=new-value"] jar.update_cookies_from_headers(headers, url) # Should still have just 1 cookie with updated value @@ -1387,7 +1436,7 @@ async def test_dummy_cookie_jar_update_cookies_from_headers() -> None: jar: DummyCookieJar = DummyCookieJar() url: URL = URL("http://example.com/") - headers: List[str] = [ + headers = [ "cookie1=value1", "cookie2=value2", ] @@ -1398,3 +1447,159 @@ async def test_dummy_cookie_jar_update_cookies_from_headers() -> None: assert len(jar) == 0 filtered: BaseCookie[str] = jar.filter_cookies(url) assert len(filtered) == 0 + + +async def test_shared_cookie_cache_population() -> None: + """Test that shared cookies are cached correctly.""" + jar = CookieJar(unsafe=True) + + # Create a shared cookie (no domain/path restrictions) + sc = SimpleCookie() + sc["shared"] = "value" + sc["shared"]["path"] = "/" # Will be stripped to "" + + # Update with empty URL to avoid domain being set + jar.update_cookies(sc, URL()) + + # Verify cookie is stored at shared key + assert ("", "") in jar._cookies + assert "shared" in jar._cookies[("", "")] + + # Filter cookies to populate cache + filtered = jar.filter_cookies(URL("http://example.com/")) + 
assert "shared" in filtered + assert filtered["shared"].value == "value" + + # Verify cache was populated + assert ("", "") in jar._morsel_cache + assert "shared" in jar._morsel_cache[("", "")] + + # Verify the cached morsel is the same one returned + cached_morsel = jar._morsel_cache[("", "")]["shared"] + assert cached_morsel is filtered["shared"] + + +async def test_shared_cookie_cache_clearing_on_update() -> None: + """Test that shared cookie cache is cleared when cookie is updated.""" + jar = CookieJar(unsafe=True) + + # Create initial shared cookie + sc = SimpleCookie() + sc["shared"] = "value1" + sc["shared"]["path"] = "/" + jar.update_cookies(sc, URL()) + + # Filter to populate cache + filtered1 = jar.filter_cookies(URL("http://example.com/")) + assert filtered1["shared"].value == "value1" + assert "shared" in jar._morsel_cache[("", "")] + + # Update the cookie with new value + sc2 = SimpleCookie() + sc2["shared"] = "value2" + sc2["shared"]["path"] = "/" + jar.update_cookies(sc2, URL()) + + # Verify cache was cleared + assert "shared" not in jar._morsel_cache[("", "")] + + # Filter again to verify new value + filtered2 = jar.filter_cookies(URL("http://example.com/")) + assert filtered2["shared"].value == "value2" + + # Verify cache was repopulated with new value + assert "shared" in jar._morsel_cache[("", "")] + + +async def test_shared_cookie_cache_clearing_on_delete() -> None: + """Test that shared cookie cache is cleared when cookies are deleted.""" + jar = CookieJar(unsafe=True) + + # Create multiple shared cookies + sc = SimpleCookie() + sc["shared1"] = "value1" + sc["shared1"]["path"] = "/" + sc["shared2"] = "value2" + sc["shared2"]["path"] = "/" + jar.update_cookies(sc, URL()) + + # Filter to populate cache + jar.filter_cookies(URL("http://example.com/")) + assert "shared1" in jar._morsel_cache[("", "")] + assert "shared2" in jar._morsel_cache[("", "")] + + # Delete one cookie using internal method + jar._delete_cookies([("", "", "shared1")]) + + # 
Verify cookie and its cache entry were removed + assert "shared1" not in jar._cookies[("", "")] + assert "shared1" not in jar._morsel_cache[("", "")] + + # Verify other cookie remains + assert "shared2" in jar._cookies[("", "")] + assert "shared2" in jar._morsel_cache[("", "")] + + +async def test_shared_cookie_cache_clearing_on_clear() -> None: + """Test that shared cookie cache is cleared when jar is cleared.""" + jar = CookieJar(unsafe=True) + + # Create shared and domain-specific cookies + # Shared cookie + sc1 = SimpleCookie() + sc1["shared"] = "shared_value" + sc1["shared"]["path"] = "/" + jar.update_cookies(sc1, URL()) + + # Domain-specific cookie + sc2 = SimpleCookie() + sc2["domain_cookie"] = "domain_value" + jar.update_cookies(sc2, URL("http://example.com/")) + + # Filter to populate caches + jar.filter_cookies(URL("http://example.com/")) + + # Verify caches are populated + assert ("", "") in jar._morsel_cache + assert "shared" in jar._morsel_cache[("", "")] + assert ("example.com", "") in jar._morsel_cache + assert "domain_cookie" in jar._morsel_cache[("example.com", "")] + + # Clear all cookies + jar.clear() + + # Verify all caches are cleared + assert len(jar._morsel_cache) == 0 + assert len(jar._cookies) == 0 + + # Verify filtering returns no cookies + filtered = jar.filter_cookies(URL("http://example.com/")) + assert len(filtered) == 0 + + +async def test_shared_cookie_with_multiple_domains() -> None: + """Test that shared cookies work across different domains.""" + jar = CookieJar(unsafe=True) + + # Create a truly shared cookie + sc = SimpleCookie() + sc["universal"] = "everywhere" + sc["universal"]["path"] = "/" + jar.update_cookies(sc, URL()) + + # Test filtering for different domains + domains = [ + "http://example.com/", + "http://test.org/", + "http://localhost/", + "http://192.168.1.1/", # IP address (requires unsafe=True) + ] + + for domain_url in domains: + filtered = jar.filter_cookies(URL(domain_url)) + assert "universal" in filtered + 
assert filtered["universal"].value == "everywhere" + + # Verify cache is reused efficiently + assert ("", "") in jar._morsel_cache + assert "universal" in jar._morsel_cache[("", "")] diff --git a/tests/test_web_request.py b/tests/test_web_request.py index 6c9e3826d73..758b8b1f98a 100644 --- a/tests/test_web_request.py +++ b/tests/test_web_request.py @@ -303,6 +303,157 @@ def test_request_cookie__set_item() -> None: req.cookies["my"] = "value" +def test_request_cookies_with_special_characters() -> None: + """Test that cookies with special characters in names are accepted. + + This tests the fix for issue #2683 where cookies with special characters + like {, }, / in their names would cause a 500 error. The fix makes the + cookie parser more tolerant to handle real-world cookies. + """ + # Test cookie names with curly braces (e.g., ISAWPLB{DB45DF86-F806-407C-932C-D52A60E4019E}) + headers = CIMultiDict(COOKIE="{test}=value1; normal=value2") + req = make_mocked_request("GET", "/", headers=headers) + # Both cookies should be parsed successfully + assert req.cookies == {"{test}": "value1", "normal": "value2"} + + # Test cookie names with forward slash + headers = CIMultiDict(COOKIE="test/name=value1; valid=value2") + req = make_mocked_request("GET", "/", headers=headers) + assert req.cookies == {"test/name": "value1", "valid": "value2"} + + # Test cookie names with various special characters + headers = CIMultiDict( + COOKIE="test{foo}bar=value1; test/path=value2; normal_cookie=value3" + ) + req = make_mocked_request("GET", "/", headers=headers) + assert req.cookies == { + "test{foo}bar": "value1", + "test/path": "value2", + "normal_cookie": "value3", + } + + +def test_request_cookies_real_world_examples() -> None: + """Test handling of real-world cookie examples from issue #2683.""" + # Example from the issue: ISAWPLB{DB45DF86-F806-407C-932C-D52A60E4019E} + headers = CIMultiDict( + COOKIE="ISAWPLB{DB45DF86-F806-407C-932C-D52A60E4019E}=val1; normal_cookie=val2" + ) + req 
= make_mocked_request("GET", "/", headers=headers) + # All cookies should be parsed successfully + assert req.cookies == { + "ISAWPLB{DB45DF86-F806-407C-932C-D52A60E4019E}": "val1", + "normal_cookie": "val2", + } + + # Multiple cookies with special characters + headers = CIMultiDict( + COOKIE="{cookie1}=val1; cookie/2=val2; cookie[3]=val3; cookie(4)=val4" + ) + req = make_mocked_request("GET", "/", headers=headers) + assert req.cookies == { + "{cookie1}": "val1", + "cookie/2": "val2", + "cookie[3]": "val3", + "cookie(4)": "val4", + } + + +def test_request_cookies_edge_cases() -> None: + """Test edge cases for cookie parsing.""" + # Empty cookie value + headers = CIMultiDict(COOKIE="test=; normal=value") + req = make_mocked_request("GET", "/", headers=headers) + assert req.cookies == {"test": "", "normal": "value"} + + # Cookie with quoted value + headers = CIMultiDict(COOKIE='test="quoted value"; normal=unquoted') + req = make_mocked_request("GET", "/", headers=headers) + assert req.cookies == {"test": "quoted value", "normal": "unquoted"} + + +def test_request_cookies_no_500_error() -> None: + """Test that cookies with special characters don't cause 500 errors. + + This specifically tests that issue #2683 is fixed - previously cookies + with characters like { } would cause CookieError and 500 responses. + """ + # This cookie format previously caused 500 errors + headers = CIMultiDict(COOKIE="ISAWPLB{DB45DF86-F806-407C-932C-D52A60E4019E}=test") + + # Should not raise any exception when accessing cookies + req = make_mocked_request("GET", "/", headers=headers) + cookies = req.cookies # This used to raise CookieError + + # Verify the cookie was parsed successfully + assert "ISAWPLB{DB45DF86-F806-407C-932C-D52A60E4019E}" in cookies + assert cookies["ISAWPLB{DB45DF86-F806-407C-932C-D52A60E4019E}"] == "test" + + +def test_request_cookies_quoted_values() -> None: + """Test that quoted cookie values are handled consistently. 
+ + This tests the fix for issue #5397 where quoted cookie values were + handled inconsistently based on whether domain attributes were present. + The new parser should always unquote cookie values consistently. + """ + # Test simple quoted cookie value + headers = CIMultiDict(COOKIE='sess="quoted_value"') + req = make_mocked_request("GET", "/", headers=headers) + # Quotes should be removed consistently + assert req.cookies == {"sess": "quoted_value"} + + # Test quoted cookie with semicolon in value + headers = CIMultiDict(COOKIE='data="value;with;semicolons"') + req = make_mocked_request("GET", "/", headers=headers) + assert req.cookies == {"data": "value;with;semicolons"} + + # Test mixed quoted and unquoted cookies + headers = CIMultiDict( + COOKIE='quoted="value1"; unquoted=value2; also_quoted="value3"' + ) + req = make_mocked_request("GET", "/", headers=headers) + assert req.cookies == { + "quoted": "value1", + "unquoted": "value2", + "also_quoted": "value3", + } + + # Test escaped quotes in cookie value + headers = CIMultiDict(COOKIE=r'escaped="value with \" quote"') + req = make_mocked_request("GET", "/", headers=headers) + assert req.cookies == {"escaped": 'value with " quote'} + + # Test empty quoted value + headers = CIMultiDict(COOKIE='empty=""') + req = make_mocked_request("GET", "/", headers=headers) + assert req.cookies == {"empty": ""} + + +def test_request_cookies_with_attributes() -> None: + """Test that cookie attributes don't affect value parsing. + + Related to issue #5397 - ensures that the presence of domain or other + attributes doesn't change how cookie values are parsed. 
+ """ + # Cookie with domain attribute - quotes should still be removed + headers = CIMultiDict(COOKIE='sess="quoted_value"; Domain=.example.com') + req = make_mocked_request("GET", "/", headers=headers) + assert req.cookies == {"sess": "quoted_value"} + + # Cookie with multiple attributes + headers = CIMultiDict(COOKIE='token="abc123"; Path=/; Secure; HttpOnly') + req = make_mocked_request("GET", "/", headers=headers) + assert req.cookies == {"token": "abc123"} + + # Multiple cookies with different attributes + headers = CIMultiDict( + COOKIE='c1="v1"; Domain=.example.com; c2="v2"; Path=/api; c3=v3; Secure' + ) + req = make_mocked_request("GET", "/", headers=headers) + assert req.cookies == {"c1": "v1", "c2": "v2", "c3": "v3"} + + def test_match_info() -> None: req = make_mocked_request("GET", "/") assert req._match_info is req.match_info From 5facb3d805fa71381efcd452a1bca2ec7a4fd3fa Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 2 Jun 2025 10:28:41 +0100 Subject: [PATCH 1483/1511] Release 3.12.7rc0 (#11119) --- CHANGES.rst | 62 ++++++++++++++++++++++++++++++++++++++++ CHANGES/11105.bugfix.rst | 10 ------- CHANGES/11106.bugfix.rst | 1 - CHANGES/11107.misc.rst | 1 - CHANGES/11112.bugfix.rst | 8 ------ CHANGES/11114.misc.rst | 1 - CHANGES/2683.bugfix.rst | 1 - CHANGES/4486.bugfix.rst | 1 - CHANGES/5397.bugfix.rst | 1 - CHANGES/7993.bugfix.rst | 1 - aiohttp/__init__.py | 2 +- 11 files changed, 63 insertions(+), 26 deletions(-) delete mode 100644 CHANGES/11105.bugfix.rst delete mode 120000 CHANGES/11106.bugfix.rst delete mode 100644 CHANGES/11107.misc.rst delete mode 100644 CHANGES/11112.bugfix.rst delete mode 100644 CHANGES/11114.misc.rst delete mode 120000 CHANGES/2683.bugfix.rst delete mode 120000 CHANGES/4486.bugfix.rst delete mode 120000 CHANGES/5397.bugfix.rst delete mode 120000 CHANGES/7993.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index 0e10454a3d1..b08deb3942c 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,68 @@ 
.. towncrier release notes start +3.12.7rc0 (2025-06-02) +====================== + +Bug fixes +--------- + +- Fixed cookie parsing to be more lenient when handling cookies with special characters + in names or values. Cookies with characters like ``{``, ``}``, and ``/`` in names are now + accepted instead of causing a :exc:`~http.cookies.CookieError` and 500 errors. Additionally, + cookies with mismatched quotes in values are now parsed correctly, and quoted cookie + values are now handled consistently whether or not they include special attributes + like ``Domain``. Also fixed :class:`~aiohttp.CookieJar` to ensure shared cookies (domain="", path="") + respect the ``quote_cookie`` parameter, making cookie quoting behavior consistent for + all cookies -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`2683`, :issue:`5397`, :issue:`7993`, :issue:`11112`. + + + +- Fixed an issue where cookies with duplicate names but different domains or paths + were lost when updating the cookie jar. The :class:`~aiohttp.ClientSession` + cookie jar now correctly stores all cookies even if they have the same name but + different domain or path, following the :rfc:`6265#section-5.3` storage model -- by :user:`bdraco`. + + Note that :attr:`ClientResponse.cookies <aiohttp.ClientResponse.cookies>` returns + a :class:`~http.cookies.SimpleCookie` which uses the cookie name as a key, so + only the last cookie with each name is accessible via this interface. All cookies + can be accessed via :meth:`ClientResponse.headers.getall('Set-Cookie') + <multidict.MultiDictProxy.getall>` if needed. + + + *Related issues and pull requests on GitHub:* + :issue:`4486`, :issue:`11105`, :issue:`11106`. + + + + +Miscellaneous internal changes +------------------------------ + +- Avoided creating closed futures in ``ResponseHandler`` that will never be awaited -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`11107`. 
+ + + +- Downgraded the logging level for connector close errors from ERROR to DEBUG, as these are expected behavior with TLS 1.3 connections -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`11114`. + + + + +---- + + 3.12.6 (2025-05-31) =================== diff --git a/CHANGES/11105.bugfix.rst b/CHANGES/11105.bugfix.rst deleted file mode 100644 index 33578aa7a95..00000000000 --- a/CHANGES/11105.bugfix.rst +++ /dev/null @@ -1,10 +0,0 @@ -Fixed an issue where cookies with duplicate names but different domains or paths -were lost when updating the cookie jar. The :class:`~aiohttp.ClientSession` -cookie jar now correctly stores all cookies even if they have the same name but -different domain or path, following the :rfc:`6265#section-5.3` storage model -- by :user:`bdraco`. - -Note that :attr:`ClientResponse.cookies <aiohttp.ClientResponse.cookies>` returns -a :class:`~http.cookies.SimpleCookie` which uses the cookie name as a key, so -only the last cookie with each name is accessible via this interface. All cookies -can be accessed via :meth:`ClientResponse.headers.getall('Set-Cookie') -<multidict.MultiDictProxy.getall>` if needed. diff --git a/CHANGES/11106.bugfix.rst b/CHANGES/11106.bugfix.rst deleted file mode 120000 index 3e5efb0f3f3..00000000000 --- a/CHANGES/11106.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -11105.bugfix.rst \ No newline at end of file diff --git a/CHANGES/11107.misc.rst b/CHANGES/11107.misc.rst deleted file mode 100644 index 37ac4622bd9..00000000000 --- a/CHANGES/11107.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Avoided creating closed futures in ``ResponseHandler`` that will never be awaited -- by :user:`bdraco`. diff --git a/CHANGES/11112.bugfix.rst b/CHANGES/11112.bugfix.rst deleted file mode 100644 index 6edea1c9b23..00000000000 --- a/CHANGES/11112.bugfix.rst +++ /dev/null @@ -1,8 +0,0 @@ -Fixed cookie parsing to be more lenient when handling cookies with special characters -in names or values. 
Cookies with characters like ``{``, ``}``, and ``/`` in names are now -accepted instead of causing a :exc:`~http.cookies.CookieError` and 500 errors. Additionally, -cookies with mismatched quotes in values are now parsed correctly, and quoted cookie -values are now handled consistently whether or not they include special attributes -like ``Domain``. Also fixed :class:`~aiohttp.CookieJar` to ensure shared cookies (domain="", path="") -respect the ``quote_cookie`` parameter, making cookie quoting behavior consistent for -all cookies -- by :user:`bdraco`. diff --git a/CHANGES/11114.misc.rst b/CHANGES/11114.misc.rst deleted file mode 100644 index 2fcb1468c67..00000000000 --- a/CHANGES/11114.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Downgraded the logging level for connector close errors from ERROR to DEBUG, as these are expected behavior with TLS 1.3 connections -- by :user:`bdraco`. diff --git a/CHANGES/2683.bugfix.rst b/CHANGES/2683.bugfix.rst deleted file mode 120000 index fac3861027d..00000000000 --- a/CHANGES/2683.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -11112.bugfix.rst \ No newline at end of file diff --git a/CHANGES/4486.bugfix.rst b/CHANGES/4486.bugfix.rst deleted file mode 120000 index 3e5efb0f3f3..00000000000 --- a/CHANGES/4486.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -11105.bugfix.rst \ No newline at end of file diff --git a/CHANGES/5397.bugfix.rst b/CHANGES/5397.bugfix.rst deleted file mode 120000 index fac3861027d..00000000000 --- a/CHANGES/5397.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -11112.bugfix.rst \ No newline at end of file diff --git a/CHANGES/7993.bugfix.rst b/CHANGES/7993.bugfix.rst deleted file mode 120000 index fac3861027d..00000000000 --- a/CHANGES/7993.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -11112.bugfix.rst \ No newline at end of file diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 78f22b4051f..b1e029241d7 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.7.dev0" +__version__ = "3.12.7rc0" 
from typing import TYPE_CHECKING, Tuple From 80bb38fae175ff3fc7b47b94b5150b069cfbf6b6 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 2 Jun 2025 16:45:44 +0100 Subject: [PATCH 1484/1511] Release 3.12.7 (#11120) --- CHANGES.rst | 4 ++-- aiohttp/__init__.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index b08deb3942c..867cd041a55 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,8 +10,8 @@ .. towncrier release notes start -3.12.7rc0 (2025-06-02) -====================== +3.12.7 (2025-06-02) +=================== Bug fixes --------- diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index b1e029241d7..d4ba8ccb488 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.7rc0" +__version__ = "3.12.7" from typing import TYPE_CHECKING, Tuple From 1b9a3c638c219d42a6c0a4f903bf6fdd5ea8c9c1 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 2 Jun 2025 22:42:22 +0100 Subject: [PATCH 1485/1511] Increment version to 3.12.8.dev0 (#11122) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index d4ba8ccb488..28981917a0e 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.7" +__version__ = "3.12.8.dev0" from typing import TYPE_CHECKING, Tuple From 278fc1ea083e856722c424806653abf567ed6c73 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 4 Jun 2025 09:51:53 +0100 Subject: [PATCH 1486/1511] [PR #11129/c0449bb5 backport][3.12] Add preemptive authentication support to DigestAuthMiddleware (#11131) Co-authored-by: J. 
Nick Koston <nick@koston.org> Fixes #11128 --- CHANGES/11128.feature.rst | 9 + CHANGES/11129.feature.rst | 1 + aiohttp/client_middleware_digest_auth.py | 62 ++- docs/client_reference.rst | 34 +- tests/test_client_middleware_digest_auth.py | 421 ++++++++++++++++++++ 5 files changed, 523 insertions(+), 4 deletions(-) create mode 100644 CHANGES/11128.feature.rst create mode 120000 CHANGES/11129.feature.rst diff --git a/CHANGES/11128.feature.rst b/CHANGES/11128.feature.rst new file mode 100644 index 00000000000..0f99d2b8a11 --- /dev/null +++ b/CHANGES/11128.feature.rst @@ -0,0 +1,9 @@ +Added preemptive digest authentication to :class:`~aiohttp.DigestAuthMiddleware` -- by :user:`bdraco`. + +The middleware now reuses authentication credentials for subsequent requests to the same +protection space, improving efficiency by avoiding extra authentication round trips. +This behavior matches how web browsers handle digest authentication and follows +:rfc:`7616#section-3.6`. + +Preemptive authentication is enabled by default but can be disabled by passing +``preemptive=False`` to the middleware constructor. diff --git a/CHANGES/11129.feature.rst b/CHANGES/11129.feature.rst new file mode 120000 index 00000000000..692d28ba9ce --- /dev/null +++ b/CHANGES/11129.feature.rst @@ -0,0 +1 @@ +11128.feature.rst \ No newline at end of file diff --git a/aiohttp/client_middleware_digest_auth.py b/aiohttp/client_middleware_digest_auth.py index b2daf76e6bb..35f462f180b 100644 --- a/aiohttp/client_middleware_digest_auth.py +++ b/aiohttp/client_middleware_digest_auth.py @@ -38,6 +38,8 @@ class DigestAuthChallenge(TypedDict, total=False): qop: str algorithm: str opaque: str + domain: str + stale: str DigestFunctions: Dict[str, Callable[[bytes], "hashlib._Hash"]] = { @@ -81,13 +83,17 @@ class DigestAuthChallenge(TypedDict, total=False): # RFC 7616: Challenge parameters to extract CHALLENGE_FIELDS: Final[ - Tuple[Literal["realm", "nonce", "qop", "algorithm", "opaque"], ...] 
+ Tuple[ + Literal["realm", "nonce", "qop", "algorithm", "opaque", "domain", "stale"], ... + ] ] = ( "realm", "nonce", "qop", "algorithm", "opaque", + "domain", + "stale", ) # Supported digest authentication algorithms @@ -159,6 +165,7 @@ class DigestAuthMiddleware: - Supports 'auth' and 'auth-int' quality of protection modes - Properly handles quoted strings and parameter parsing - Includes replay attack protection with client nonce count tracking + - Supports preemptive authentication per RFC 7616 Section 3.6 Standards compliance: - RFC 7616: HTTP Digest Access Authentication (primary reference) @@ -175,6 +182,7 @@ def __init__( self, login: str, password: str, + preemptive: bool = True, ) -> None: if login is None: raise ValueError("None is not allowed as login value") @@ -192,6 +200,9 @@ def __init__( self._last_nonce_bytes = b"" self._nonce_count = 0 self._challenge: DigestAuthChallenge = {} + self._preemptive: bool = preemptive + # Set of URLs defining the protection space + self._protection_space: List[str] = [] async def _encode( self, method: str, url: URL, body: Union[Payload, Literal[b""]] @@ -354,6 +365,26 @@ def KD(s: bytes, d: bytes) -> bytes: return f"Digest {', '.join(pairs)}" + def _in_protection_space(self, url: URL) -> bool: + """ + Check if the given URL is within the current protection space. + + According to RFC 7616, a URI is in the protection space if any URI + in the protection space is a prefix of it (after both have been made absolute). 
+ """ + request_str = str(url) + for space_str in self._protection_space: + # Check if request starts with space URL + if not request_str.startswith(space_str): + continue + # Exact match or space ends with / (proper directory prefix) + if len(request_str) == len(space_str) or space_str[-1] == "/": + return True + # Check next char is / to ensure proper path boundary + if request_str[len(space_str)] == "/": + return True + return False + def _authenticate(self, response: ClientResponse) -> bool: """ Takes the given response and tries digest-auth, if needed. @@ -391,6 +422,25 @@ def _authenticate(self, response: ClientResponse) -> bool: if value := header_pairs.get(field): self._challenge[field] = value + # Update protection space based on domain parameter or default to origin + origin = response.url.origin() + + if domain := self._challenge.get("domain"): + # Parse space-separated list of URIs + self._protection_space = [] + for uri in domain.split(): + # Remove quotes if present + uri = uri.strip('"') + if uri.startswith("/"): + # Path-absolute, relative to origin + self._protection_space.append(str(origin.join(URL(uri)))) + else: + # Absolute URI + self._protection_space.append(str(URL(uri))) + else: + # No domain specified, protection space is entire origin + self._protection_space = [str(origin)] + # Return True only if we found at least one challenge parameter return bool(self._challenge) @@ -400,8 +450,14 @@ async def __call__( """Run the digest auth middleware.""" response = None for retry_count in range(2): - # Apply authorization header if we have a challenge (on second attempt) - if retry_count > 0: + # Apply authorization header if: + # 1. This is a retry after 401 (retry_count > 0), OR + # 2. 
Preemptive auth is enabled AND we have a challenge AND the URL is in protection space + if retry_count > 0 or ( + self._preemptive + and self._challenge + and self._in_protection_space(request.url) + ): request.headers[hdrs.AUTHORIZATION] = await self._encode( request.method, request.url, request.body ) diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 8a721f514cd..1644c57054b 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -2300,12 +2300,13 @@ Utilities :return: encoded authentication data, :class:`str`. -.. class:: DigestAuthMiddleware(login, password) +.. class:: DigestAuthMiddleware(login, password, *, preemptive=True) HTTP digest authentication client middleware. :param str login: login :param str password: password + :param bool preemptive: Enable preemptive authentication (default: ``True``) This middleware supports HTTP digest authentication with both `auth` and `auth-int` quality of protection (qop) modes, and a variety of hashing algorithms. @@ -2315,6 +2316,31 @@ Utilities - Parsing 401 Unauthorized responses with `WWW-Authenticate: Digest` headers - Generating appropriate `Authorization: Digest` headers on retry - Maintaining nonce counts and challenge data per request + - When ``preemptive=True``, reusing authentication credentials for subsequent + requests to the same protection space (following RFC 7616 Section 3.6) + + **Preemptive Authentication** + + By default (``preemptive=True``), the middleware remembers successful authentication + challenges and automatically includes the Authorization header in subsequent requests + to the same protection space. 
This behavior: + + - Improves server efficiency by avoiding extra round trips + - Matches how modern web browsers handle digest authentication + - Follows the recommendation in RFC 7616 Section 3.6 + + The server may still respond with a 401 status and ``stale=true`` if the nonce + has expired, in which case the middleware will automatically retry with the new nonce. + + To disable preemptive authentication and require a 401 challenge for every request, + set ``preemptive=False``:: + + # Default behavior - preemptive auth enabled + digest_auth_middleware = DigestAuthMiddleware(login="user", password="pass") + + # Disable preemptive auth - always wait for 401 challenge + digest_auth_middleware = DigestAuthMiddleware(login="user", password="pass", + preemptive=False) Usage:: @@ -2324,7 +2350,13 @@ Utilities # The middleware automatically handles the digest auth handshake assert resp.status == 200 + # Subsequent requests include auth header preemptively + async with session.get("http://protected.example.com/other") as resp: + assert resp.status == 200 # No 401 round trip needed + .. versionadded:: 3.12 + .. versionchanged:: 3.12.8 + Added ``preemptive`` parameter to enable/disable preemptive authentication. .. 
class:: CookieJar(*, unsafe=False, quote_cookie=True, treat_as_secure_origin = []) diff --git a/tests/test_client_middleware_digest_auth.py b/tests/test_client_middleware_digest_auth.py index b649e0b601f..16959aecdf4 100644 --- a/tests/test_client_middleware_digest_auth.py +++ b/tests/test_client_middleware_digest_auth.py @@ -778,6 +778,332 @@ async def handler(request: Request) -> Response: assert request_count == 2 +async def test_preemptive_auth_disabled( + aiohttp_server: AiohttpServer, +) -> None: + """Test that preemptive authentication can be disabled.""" + digest_auth_mw = DigestAuthMiddleware("user", "pass", preemptive=False) + request_count = 0 + auth_headers = [] + + async def handler(request: Request) -> Response: + nonlocal request_count + request_count += 1 + auth_headers.append(request.headers.get(hdrs.AUTHORIZATION)) + + if not request.headers.get(hdrs.AUTHORIZATION): + # Return 401 with digest challenge + challenge = 'Digest realm="test", nonce="abc123", qop="auth", algorithm=MD5' + return Response( + status=401, + headers={"WWW-Authenticate": challenge}, + text="Unauthorized", + ) + + return Response(text="OK") + + app = Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app) + + async with ClientSession(middlewares=(digest_auth_mw,)) as session: + # First request will get 401 and store challenge + async with session.get(server.make_url("/")) as resp: + assert resp.status == 200 + text = await resp.text() + assert text == "OK" + + # Second request should NOT send auth preemptively (preemptive=False) + async with session.get(server.make_url("/")) as resp: + assert resp.status == 200 + text = await resp.text() + assert text == "OK" + + # With preemptive disabled, each request needs 401 challenge first + assert request_count == 4 # 2 requests * 2 (401 + retry) + assert auth_headers[0] is None # First request has no auth + assert auth_headers[1] is not None # Second request has auth after 401 + assert auth_headers[2] is 
None # Third request has no auth (preemptive disabled) + assert auth_headers[3] is not None # Fourth request has auth after 401 + + +async def test_preemptive_auth_with_stale_nonce( + aiohttp_server: AiohttpServer, +) -> None: + """Test preemptive auth handles stale nonce responses correctly.""" + digest_auth_mw = DigestAuthMiddleware("user", "pass", preemptive=True) + request_count = 0 + current_nonce = 0 + + async def handler(request: Request) -> Response: + nonlocal request_count, current_nonce + request_count += 1 + + auth_header = request.headers.get(hdrs.AUTHORIZATION) + + if not auth_header: + # First request without auth + current_nonce = 1 + challenge = f'Digest realm="test", nonce="nonce{current_nonce}", qop="auth", algorithm=MD5' + return Response( + status=401, + headers={"WWW-Authenticate": challenge}, + text="Unauthorized", + ) + + # For the second set of requests, always consider the first nonce stale + if request_count == 3 and current_nonce == 1: + # Stale nonce - request new auth with stale=true + current_nonce = 2 + challenge = f'Digest realm="test", nonce="nonce{current_nonce}", qop="auth", algorithm=MD5, stale=true' + return Response( + status=401, + headers={"WWW-Authenticate": challenge}, + text="Unauthorized - Stale nonce", + ) + + return Response(text="OK") + + app = Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app) + + async with ClientSession(middlewares=(digest_auth_mw,)) as session: + # First request - will get 401, then retry with auth + async with session.get(server.make_url("/")) as resp: + assert resp.status == 200 + text = await resp.text() + assert text == "OK" + + # Second request - will use preemptive auth with nonce1, get 401 stale, retry with nonce2 + async with session.get(server.make_url("/")) as resp: + assert resp.status == 200 + text = await resp.text() + assert text == "OK" + + # Verify the expected flow: + # Request 1: no auth -> 401 + # Request 2: retry with auth -> 200 + # Request 
3: preemptive auth with old nonce -> 401 stale + # Request 4: retry with new nonce -> 200 + assert request_count == 4 + + +async def test_preemptive_auth_updates_nonce_count( + aiohttp_server: AiohttpServer, +) -> None: + """Test that preemptive auth properly increments nonce count.""" + digest_auth_mw = DigestAuthMiddleware("user", "pass", preemptive=True) + request_count = 0 + nonce_counts = [] + + async def handler(request: Request) -> Response: + nonlocal request_count + request_count += 1 + + auth_header = request.headers.get(hdrs.AUTHORIZATION) + + if not auth_header: + # First request without auth + challenge = 'Digest realm="test", nonce="abc123", qop="auth", algorithm=MD5' + return Response( + status=401, + headers={"WWW-Authenticate": challenge}, + text="Unauthorized", + ) + + # Extract nc (nonce count) from auth header + nc_match = auth_header.split("nc=")[1].split(",")[0].strip() + nonce_counts.append(nc_match) + + return Response(text="OK") + + app = Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app) + + async with ClientSession(middlewares=(digest_auth_mw,)) as session: + # Make multiple requests to see nonce count increment + for _ in range(3): + async with session.get(server.make_url("/")) as resp: + assert resp.status == 200 + await resp.text() + + # First request has no auth, then gets 401 and retries with nc=00000001 + # Second and third requests use preemptive auth with nc=00000002 and nc=00000003 + assert len(nonce_counts) == 3 + assert nonce_counts[0] == "00000001" + assert nonce_counts[1] == "00000002" + assert nonce_counts[2] == "00000003" + + +async def test_preemptive_auth_respects_protection_space( + aiohttp_server: AiohttpServer, +) -> None: + """Test that preemptive auth only applies to URLs within the protection space.""" + digest_auth_mw = DigestAuthMiddleware("user", "pass", preemptive=True) + request_count = 0 + auth_headers = [] + requested_paths = [] + + async def handler(request: Request) -> 
Response: + nonlocal request_count + request_count += 1 + auth_headers.append(request.headers.get(hdrs.AUTHORIZATION)) + requested_paths.append(request.path) + + if not request.headers.get(hdrs.AUTHORIZATION): + # Return 401 with digest challenge including domain parameter + challenge = 'Digest realm="test", nonce="abc123", qop="auth", algorithm=MD5, domain="/api /admin"' + return Response( + status=401, + headers={"WWW-Authenticate": challenge}, + text="Unauthorized", + ) + + return Response(text="OK") + + app = Application() + app.router.add_get("/api/endpoint", handler) + app.router.add_get("/admin/panel", handler) + app.router.add_get("/public/page", handler) + server = await aiohttp_server(app) + + async with ClientSession(middlewares=(digest_auth_mw,)) as session: + # First request to /api/endpoint - should get 401 and retry with auth + async with session.get(server.make_url("/api/endpoint")) as resp: + assert resp.status == 200 + + # Second request to /api/endpoint - should use preemptive auth (in protection space) + async with session.get(server.make_url("/api/endpoint")) as resp: + assert resp.status == 200 + + # Third request to /admin/panel - should use preemptive auth (in protection space) + async with session.get(server.make_url("/admin/panel")) as resp: + assert resp.status == 200 + + # Fourth request to /public/page - should NOT use preemptive auth (outside protection space) + async with session.get(server.make_url("/public/page")) as resp: + assert resp.status == 200 + + # Verify auth headers + assert auth_headers[0] is None # First request to /api/endpoint - no auth + assert auth_headers[1] is not None # Retry with auth + assert ( + auth_headers[2] is not None + ) # Second request to /api/endpoint - preemptive auth + assert auth_headers[3] is not None # Request to /admin/panel - preemptive auth + assert auth_headers[4] is None # First request to /public/page - no preemptive auth + assert auth_headers[5] is not None # Retry with auth + + # Verify 
paths + assert requested_paths == [ + "/api/endpoint", # Initial request + "/api/endpoint", # Retry with auth + "/api/endpoint", # Second request with preemptive auth + "/admin/panel", # Request with preemptive auth + "/public/page", # Initial request (no preemptive auth) + "/public/page", # Retry with auth + ] + + +async def test_preemptive_auth_with_absolute_domain_uris( + aiohttp_server: AiohttpServer, +) -> None: + """Test preemptive auth with absolute URIs in domain parameter.""" + digest_auth_mw = DigestAuthMiddleware("user", "pass", preemptive=True) + request_count = 0 + auth_headers = [] + + async def handler(request: Request) -> Response: + nonlocal request_count + request_count += 1 + auth_headers.append(request.headers.get(hdrs.AUTHORIZATION)) + + if not request.headers.get(hdrs.AUTHORIZATION): + # Return 401 with digest challenge including absolute URI in domain + server_url = str(request.url.with_path("/protected")) + challenge = f'Digest realm="test", nonce="abc123", qop="auth", algorithm=MD5, domain="{server_url}"' + return Response( + status=401, + headers={"WWW-Authenticate": challenge}, + text="Unauthorized", + ) + + return Response(text="OK") + + app = Application() + app.router.add_get("/protected/resource", handler) + app.router.add_get("/unprotected/resource", handler) + server = await aiohttp_server(app) + + async with ClientSession(middlewares=(digest_auth_mw,)) as session: + # First request to protected resource + async with session.get(server.make_url("/protected/resource")) as resp: + assert resp.status == 200 + + # Second request to protected resource - should use preemptive auth + async with session.get(server.make_url("/protected/resource")) as resp: + assert resp.status == 200 + + # Request to unprotected resource - should NOT use preemptive auth + async with session.get(server.make_url("/unprotected/resource")) as resp: + assert resp.status == 200 + + # Verify auth pattern + assert auth_headers[0] is None # First request - no auth + 
assert auth_headers[1] is not None # Retry with auth + assert auth_headers[2] is not None # Second request - preemptive auth + assert auth_headers[3] is None # Unprotected resource - no preemptive auth + assert auth_headers[4] is not None # Retry with auth + + +async def test_preemptive_auth_without_domain_uses_origin( + aiohttp_server: AiohttpServer, +) -> None: + """Test that preemptive auth without domain parameter applies to entire origin.""" + digest_auth_mw = DigestAuthMiddleware("user", "pass", preemptive=True) + request_count = 0 + auth_headers = [] + + async def handler(request: Request) -> Response: + nonlocal request_count + request_count += 1 + auth_headers.append(request.headers.get(hdrs.AUTHORIZATION)) + + if not request.headers.get(hdrs.AUTHORIZATION): + # Return 401 with digest challenge without domain parameter + challenge = 'Digest realm="test", nonce="abc123", qop="auth", algorithm=MD5' + return Response( + status=401, + headers={"WWW-Authenticate": challenge}, + text="Unauthorized", + ) + + return Response(text="OK") + + app = Application() + app.router.add_get("/path1", handler) + app.router.add_get("/path2", handler) + server = await aiohttp_server(app) + + async with ClientSession(middlewares=(digest_auth_mw,)) as session: + # First request + async with session.get(server.make_url("/path1")) as resp: + assert resp.status == 200 + + # Second request to different path - should still use preemptive auth + async with session.get(server.make_url("/path2")) as resp: + assert resp.status == 200 + + # Verify auth pattern + assert auth_headers[0] is None # First request - no auth + assert auth_headers[1] is not None # Retry with auth + assert ( + auth_headers[2] is not None + ) # Second request - preemptive auth (entire origin) + + @pytest.mark.parametrize( ("status", "headers", "expected"), [ @@ -810,3 +1136,98 @@ def test_authenticate_with_malformed_headers( result = digest_auth_mw._authenticate(response) assert result == expected + + 
+@pytest.mark.parametrize( + ("protection_space_url", "request_url", "expected"), + [ + # Exact match + ("http://example.com/app1", "http://example.com/app1", True), + # Path with trailing slash should match + ("http://example.com/app1", "http://example.com/app1/", True), + # Subpaths should match + ("http://example.com/app1", "http://example.com/app1/resource", True), + ("http://example.com/app1", "http://example.com/app1/sub/path", True), + # Should NOT match different paths that start with same prefix + ("http://example.com/app1", "http://example.com/app1xx", False), + ("http://example.com/app1", "http://example.com/app123", False), + # Protection space with trailing slash + ("http://example.com/app1/", "http://example.com/app1/", True), + ("http://example.com/app1/", "http://example.com/app1/resource", True), + ( + "http://example.com/app1/", + "http://example.com/app1", + False, + ), # No trailing slash + # Root protection space + ("http://example.com/", "http://example.com/", True), + ("http://example.com/", "http://example.com/anything", True), + ("http://example.com/", "http://example.com", False), # No trailing slash + # Different origins should not match + ("http://example.com/app1", "https://example.com/app1", False), + ("http://example.com/app1", "http://other.com/app1", False), + ("http://example.com:8080/app1", "http://example.com/app1", False), + ], + ids=[ + "exact_match", + "path_with_trailing_slash", + "subpath_match", + "deep_subpath_match", + "no_match_app1xx", + "no_match_app123", + "protection_with_slash_exact", + "protection_with_slash_subpath", + "protection_with_slash_no_match_without", + "root_protection_exact", + "root_protection_subpath", + "root_protection_no_match_without_slash", + "different_scheme", + "different_host", + "different_port", + ], +) +def test_in_protection_space( + digest_auth_mw: DigestAuthMiddleware, + protection_space_url: str, + request_url: str, + expected: bool, +) -> None: + """Test _in_protection_space method 
with various URL patterns.""" + digest_auth_mw._protection_space = [protection_space_url] + result = digest_auth_mw._in_protection_space(URL(request_url)) + assert result == expected + + +def test_in_protection_space_multiple_spaces( + digest_auth_mw: DigestAuthMiddleware, +) -> None: + """Test _in_protection_space with multiple protection spaces.""" + digest_auth_mw._protection_space = [ + "http://example.com/api", + "http://example.com/admin/", + "http://example.com/secure/area", + ] + + # Test various URLs + assert digest_auth_mw._in_protection_space(URL("http://example.com/api")) is True + assert digest_auth_mw._in_protection_space(URL("http://example.com/api/v1")) is True + assert ( + digest_auth_mw._in_protection_space(URL("http://example.com/admin/panel")) + is True + ) + assert ( + digest_auth_mw._in_protection_space( + URL("http://example.com/secure/area/resource") + ) + is True + ) + + # These should not match + assert digest_auth_mw._in_protection_space(URL("http://example.com/apiv2")) is False + assert ( + digest_auth_mw._in_protection_space(URL("http://example.com/admin")) is False + ) # No trailing slash + assert ( + digest_auth_mw._in_protection_space(URL("http://example.com/secure")) is False + ) + assert digest_auth_mw._in_protection_space(URL("http://example.com/other")) is False From 47bc2a4ba49b583b8e0ff5822d9fd0fd665c6399 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Wed, 4 Jun 2025 10:12:00 +0100 Subject: [PATCH 1487/1511] Release 3.12.8 (#11133) --- CHANGES.rst | 26 ++++++++++++++++++++++++++ CHANGES/11128.feature.rst | 9 --------- CHANGES/11129.feature.rst | 1 - aiohttp/__init__.py | 2 +- 4 files changed, 27 insertions(+), 11 deletions(-) delete mode 100644 CHANGES/11128.feature.rst delete mode 120000 CHANGES/11129.feature.rst diff --git a/CHANGES.rst b/CHANGES.rst index 867cd041a55..43e2173f8b8 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,32 @@ .. 
towncrier release notes start +3.12.8 (2025-06-04) +=================== + +Features +-------- + +- Added preemptive digest authentication to :class:`~aiohttp.DigestAuthMiddleware` -- by :user:`bdraco`. + + The middleware now reuses authentication credentials for subsequent requests to the same + protection space, improving efficiency by avoiding extra authentication round trips. + This behavior matches how web browsers handle digest authentication and follows + :rfc:`7616#section-3.6`. + + Preemptive authentication is enabled by default but can be disabled by passing + ``preemptive=False`` to the middleware constructor. + + + *Related issues and pull requests on GitHub:* + :issue:`11128`, :issue:`11129`. + + + + +---- + + 3.12.7 (2025-06-02) =================== diff --git a/CHANGES/11128.feature.rst b/CHANGES/11128.feature.rst deleted file mode 100644 index 0f99d2b8a11..00000000000 --- a/CHANGES/11128.feature.rst +++ /dev/null @@ -1,9 +0,0 @@ -Added preemptive digest authentication to :class:`~aiohttp.DigestAuthMiddleware` -- by :user:`bdraco`. - -The middleware now reuses authentication credentials for subsequent requests to the same -protection space, improving efficiency by avoiding extra authentication round trips. -This behavior matches how web browsers handle digest authentication and follows -:rfc:`7616#section-3.6`. - -Preemptive authentication is enabled by default but can be disabled by passing -``preemptive=False`` to the middleware constructor. 
diff --git a/CHANGES/11129.feature.rst b/CHANGES/11129.feature.rst deleted file mode 120000 index 692d28ba9ce..00000000000 --- a/CHANGES/11129.feature.rst +++ /dev/null @@ -1 +0,0 @@ -11128.feature.rst \ No newline at end of file diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 28981917a0e..92b7a7b076a 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.8.dev0" +__version__ = "3.12.8" from typing import TYPE_CHECKING, Tuple From 7ccc94df3dfb570539fb9deb50fe28f336f12f9b Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 4 Jun 2025 15:18:16 +0000 Subject: [PATCH 1488/1511] [PR #11139/3dafd4c7 backport][3.12] Fix IOBasePayload reading entire files into memory instead of chunking (#11141) Co-authored-by: J. Nick Koston <nick@koston.org> Fixes #11138 --- CHANGES/11138.bugfix.rst | 3 + aiohttp/payload.py | 14 +++- tests/test_payload.py | 152 ++++++++++++++++++++++++++++++++++++++- 3 files changed, 165 insertions(+), 4 deletions(-) create mode 100644 CHANGES/11138.bugfix.rst diff --git a/CHANGES/11138.bugfix.rst b/CHANGES/11138.bugfix.rst new file mode 100644 index 00000000000..6d8c634e51f --- /dev/null +++ b/CHANGES/11138.bugfix.rst @@ -0,0 +1,3 @@ +Fixed ``IOBasePayload`` and ``TextIOPayload`` reading entire files into memory when streaming large files -- by :user:`bdraco`. + +When using file-like objects with the aiohttp client, the entire file would be read into memory if the file size was provided in the ``Content-Length`` header. This could cause out-of-memory errors when uploading large files. The payload classes now correctly read data in chunks of ``READ_SIZE`` (64KB) regardless of the total content length. 
diff --git a/aiohttp/payload.py b/aiohttp/payload.py index 7180fd2b430..d119d9beefc 100644 --- a/aiohttp/payload.py +++ b/aiohttp/payload.py @@ -514,7 +514,7 @@ def _read_and_available_len( self._set_or_restore_start_position() size = self.size # Call size only once since it does I/O return size, self._value.read( - min(size or READ_SIZE, remaining_content_len or READ_SIZE) + min(READ_SIZE, size or READ_SIZE, remaining_content_len or READ_SIZE) ) def _read(self, remaining_content_len: Optional[int]) -> bytes: @@ -617,7 +617,15 @@ async def write_with_length( return # Read next chunk - chunk = await loop.run_in_executor(None, self._read, remaining_content_len) + chunk = await loop.run_in_executor( + None, + self._read, + ( + min(READ_SIZE, remaining_content_len) + if remaining_content_len is not None + else READ_SIZE + ), + ) def _should_stop_writing( self, @@ -760,7 +768,7 @@ def _read_and_available_len( self._set_or_restore_start_position() size = self.size chunk = self._value.read( - min(size or READ_SIZE, remaining_content_len or READ_SIZE) + min(READ_SIZE, size or READ_SIZE, remaining_content_len or READ_SIZE) ) return size, chunk.encode(self._encoding) if self._encoding else chunk.encode() diff --git a/tests/test_payload.py b/tests/test_payload.py index b810a68f8b7..2fd0a0f60d9 100644 --- a/tests/test_payload.py +++ b/tests/test_payload.py @@ -6,13 +6,14 @@ from collections.abc import AsyncIterator from io import StringIO from pathlib import Path -from typing import Optional, TextIO, Union +from typing import List, Optional, TextIO, Union import pytest from multidict import CIMultiDict from aiohttp import payload from aiohttp.abc import AbstractStreamWriter +from aiohttp.payload import READ_SIZE class BufferWriter(AbstractStreamWriter): @@ -365,6 +366,155 @@ async def test_iobase_payload_exact_chunk_size_limit() -> None: assert written == data[:chunk_size] +async def test_iobase_payload_reads_in_chunks() -> None: + """Test IOBasePayload reads data in chunks of 
READ_SIZE, not all at once.""" + # Create a large file that's multiple times larger than READ_SIZE + large_data = b"x" * (READ_SIZE * 3 + 1000) # ~192KB + 1000 bytes + + # Mock the file-like object to track read calls + mock_file = unittest.mock.Mock(spec=io.BytesIO) + mock_file.tell.return_value = 0 + mock_file.fileno.side_effect = AttributeError # Make size return None + + # Track the sizes of read() calls + read_sizes = [] + + def mock_read(size: int) -> bytes: + read_sizes.append(size) + # Return data based on how many times read was called + call_count = len(read_sizes) + if call_count == 1: + return large_data[:size] + elif call_count == 2: + return large_data[READ_SIZE : READ_SIZE + size] + elif call_count == 3: + return large_data[READ_SIZE * 2 : READ_SIZE * 2 + size] + else: + return large_data[READ_SIZE * 3 :] + + mock_file.read.side_effect = mock_read + + payload_obj = payload.IOBasePayload(mock_file) + writer = MockStreamWriter() + + # Write with a large content_length + await payload_obj.write_with_length(writer, len(large_data)) + + # Verify that reads were limited to READ_SIZE + assert len(read_sizes) > 1 # Should have multiple reads + for read_size in read_sizes: + assert ( + read_size <= READ_SIZE + ), f"Read size {read_size} exceeds READ_SIZE {READ_SIZE}" + + +async def test_iobase_payload_large_content_length() -> None: + """Test IOBasePayload with very large content_length doesn't read all at once.""" + data = b"x" * (READ_SIZE + 1000) + + # Create a custom file-like object that tracks read sizes + class TrackingBytesIO(io.BytesIO): + def __init__(self, data: bytes) -> None: + super().__init__(data) + self.read_sizes: List[int] = [] + + def read(self, size: Optional[int] = -1) -> bytes: + self.read_sizes.append(size if size is not None else -1) + return super().read(size) + + tracking_file = TrackingBytesIO(data) + payload_obj = payload.IOBasePayload(tracking_file) + writer = MockStreamWriter() + + # Write with a very large content_length 
(simulating the bug scenario) + large_content_length = 10 * 1024 * 1024 # 10MB + await payload_obj.write_with_length(writer, large_content_length) + + # Verify no single read exceeded READ_SIZE + for read_size in tracking_file.read_sizes: + assert ( + read_size <= READ_SIZE + ), f"Read size {read_size} exceeds READ_SIZE {READ_SIZE}" + + # Verify the correct amount of data was written + assert writer.get_written_bytes() == data + + +async def test_textio_payload_reads_in_chunks() -> None: + """Test TextIOPayload reads data in chunks of READ_SIZE, not all at once.""" + # Create a large text file that's multiple times larger than READ_SIZE + large_text = "x" * (READ_SIZE * 3 + 1000) # ~192KB + 1000 chars + + # Mock the file-like object to track read calls + mock_file = unittest.mock.Mock(spec=io.StringIO) + mock_file.tell.return_value = 0 + mock_file.fileno.side_effect = AttributeError # Make size return None + mock_file.encoding = "utf-8" + + # Track the sizes of read() calls + read_sizes = [] + + def mock_read(size: int) -> str: + read_sizes.append(size) + # Return data based on how many times read was called + call_count = len(read_sizes) + if call_count == 1: + return large_text[:size] + elif call_count == 2: + return large_text[READ_SIZE : READ_SIZE + size] + elif call_count == 3: + return large_text[READ_SIZE * 2 : READ_SIZE * 2 + size] + else: + return large_text[READ_SIZE * 3 :] + + mock_file.read.side_effect = mock_read + + payload_obj = payload.TextIOPayload(mock_file) + writer = MockStreamWriter() + + # Write with a large content_length + await payload_obj.write_with_length(writer, len(large_text.encode("utf-8"))) + + # Verify that reads were limited to READ_SIZE + assert len(read_sizes) > 1 # Should have multiple reads + for read_size in read_sizes: + assert ( + read_size <= READ_SIZE + ), f"Read size {read_size} exceeds READ_SIZE {READ_SIZE}" + + +async def test_textio_payload_large_content_length() -> None: + """Test TextIOPayload with very large 
content_length doesn't read all at once.""" + text_data = "x" * (READ_SIZE + 1000) + + # Create a custom file-like object that tracks read sizes + class TrackingStringIO(io.StringIO): + def __init__(self, data: str) -> None: + super().__init__(data) + self.read_sizes: List[int] = [] + + def read(self, size: Optional[int] = -1) -> str: + self.read_sizes.append(size if size is not None else -1) + return super().read(size) + + tracking_file = TrackingStringIO(text_data) + payload_obj = payload.TextIOPayload(tracking_file) + writer = MockStreamWriter() + + # Write with a very large content_length (simulating the bug scenario) + large_content_length = 10 * 1024 * 1024 # 10MB + await payload_obj.write_with_length(writer, large_content_length) + + # Verify no single read exceeded READ_SIZE + for read_size in tracking_file.read_sizes: + assert ( + read_size <= READ_SIZE + ), f"Read size {read_size} exceeds READ_SIZE {READ_SIZE}" + + # Verify the correct amount of data was written + assert writer.get_written_bytes() == text_data.encode("utf-8") + + async def test_async_iterable_payload_write_with_length_no_limit() -> None: """Test AsyncIterablePayload writing with no content length limit.""" From d40e7bb150ddf49958d9a63285716414f00b628e Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Wed, 4 Jun 2025 16:45:46 +0100 Subject: [PATCH 1489/1511] Release 3.12.9 (#11143) --- CHANGES.rst | 20 ++++++++++++++++++++ CHANGES/11138.bugfix.rst | 3 --- aiohttp/__init__.py | 2 +- 3 files changed, 21 insertions(+), 4 deletions(-) delete mode 100644 CHANGES/11138.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index 43e2173f8b8..fd27e959e23 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,26 @@ .. towncrier release notes start +3.12.9 (2025-06-04) +=================== + +Bug fixes +--------- + +- Fixed ``IOBasePayload`` and ``TextIOPayload`` reading entire files into memory when streaming large files -- by :user:`bdraco`. 
+ + When using file-like objects with the aiohttp client, the entire file would be read into memory if the file size was provided in the ``Content-Length`` header. This could cause out-of-memory errors when uploading large files. The payload classes now correctly read data in chunks of ``READ_SIZE`` (64KB) regardless of the total content length. + + + *Related issues and pull requests on GitHub:* + :issue:`11138`. + + + + +---- + + 3.12.8 (2025-06-04) =================== diff --git a/CHANGES/11138.bugfix.rst b/CHANGES/11138.bugfix.rst deleted file mode 100644 index 6d8c634e51f..00000000000 --- a/CHANGES/11138.bugfix.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fixed ``IOBasePayload`` and ``TextIOPayload`` reading entire files into memory when streaming large files -- by :user:`bdraco`. - -When using file-like objects with the aiohttp client, the entire file would be read into memory if the file size was provided in the ``Content-Length`` header. This could cause out-of-memory errors when uploading large files. The payload classes now correctly read data in chunks of ``READ_SIZE`` (64KB) regardless of the total content length. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 92b7a7b076a..4df59028912 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.8" +__version__ = "3.12.9" from typing import TYPE_CHECKING, Tuple From c0e04a23f0776bfa6c5573601ed41272fdfe6141 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Wed, 4 Jun 2025 17:45:35 +0100 Subject: [PATCH 1490/1511] Increment version to 3.12.10.dev0 (#11146) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 4df59028912..b86fbfc8167 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.9" +__version__ = "3.12.10.dev0" from typing import TYPE_CHECKING, Tuple From c1eea5e786ad38fd4576d55773f9ed7db478c8a2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Vojt=C4=9Bch=20Bo=C4=8Dek?= <vbocek@gmail.com> Date: Sat, 7 Jun 2025 13:12:26 +0200 Subject: [PATCH 1491/1511] [PR #11150/996ad00 backport][3.12] fix: leak of aiodns.DNSResolver when ClientSession is closed (#11152) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/11150.bugfix.rst | 3 +++ CONTRIBUTORS.txt | 1 + aiohttp/connector.py | 18 +++++++++++++++--- aiohttp/resolver.py | 5 +++-- tests/test_connector.py | 19 +++++++++++++++++++ 5 files changed, 41 insertions(+), 5 deletions(-) create mode 100644 CHANGES/11150.bugfix.rst diff --git a/CHANGES/11150.bugfix.rst b/CHANGES/11150.bugfix.rst new file mode 100644 index 00000000000..8a51b2e4f0c --- /dev/null +++ b/CHANGES/11150.bugfix.rst @@ -0,0 +1,3 @@ +Fixed leak of ``aiodns.DNSResolver`` when :py:class:`~aiohttp.TCPConnector` is closed and no resolver was passed when creating the connector -- by :user:`Tasssadar`. + +This was a regression introduced in version 3.12.0 (:pr:`10897`). diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 2e2ab140122..6b9f7b124bd 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -368,6 +368,7 @@ Vladimir Shulyak Vladimir Zakharov Vladyslav Bohaichuk Vladyslav Bondar +Vojtěch Boček W. 
Trevor King Wei Lin Weiwei Wang diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 62b418a4bed..075ef95c814 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -926,9 +926,14 @@ def __init__( ) self._ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) + + self._resolver: AbstractResolver if resolver is None: - resolver = DefaultResolver(loop=self._loop) - self._resolver = resolver + self._resolver = DefaultResolver(loop=self._loop) + self._resolver_owner = True + else: + self._resolver = resolver + self._resolver_owner = False self._use_dns_cache = use_dns_cache self._cached_hosts = _DNSCacheTable(ttl=ttl_dns_cache) @@ -956,6 +961,12 @@ def _close(self) -> List[Awaitable[object]]: return waiters + async def close(self) -> None: + """Close all opened transports.""" + if self._resolver_owner: + await self._resolver.close() + await super().close() + @property def family(self) -> int: """Socket family like AF_INET.""" @@ -1709,7 +1720,8 @@ def __init__( loop=loop, ) if not isinstance( - self._loop, asyncio.ProactorEventLoop # type: ignore[attr-defined] + self._loop, + asyncio.ProactorEventLoop, # type: ignore[attr-defined] ): raise RuntimeError( "Named Pipes only available in proactor loop under windows" diff --git a/aiohttp/resolver.py b/aiohttp/resolver.py index 118bf8cbff7..b20e5672ce5 100644 --- a/aiohttp/resolver.py +++ b/aiohttp/resolver.py @@ -258,9 +258,10 @@ def release_resolver( loop: The event loop the resolver was using. 
""" # Remove client from its loop's tracking - if loop not in self._loop_data: + current_loop_data = self._loop_data.get(loop) + if current_loop_data is None: return - resolver, client_set = self._loop_data[loop] + resolver, client_set = current_loop_data client_set.discard(client) # If no more clients for this loop, cancel and remove its resolver if not client_set: diff --git a/tests/test_connector.py b/tests/test_connector.py index 54da8743ed7..79776380297 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -1270,6 +1270,7 @@ async def test_tcp_connector_dns_cache_not_expired(loop, dns_response) -> None: with mock.patch("aiohttp.connector.DefaultResolver") as m_resolver: conn = aiohttp.TCPConnector(loop=loop, use_dns_cache=True, ttl_dns_cache=10) m_resolver().resolve.return_value = dns_response() + m_resolver().close = mock.AsyncMock() await conn._resolve_host("localhost", 8080) await conn._resolve_host("localhost", 8080) m_resolver().resolve.assert_called_once_with("localhost", 8080, family=0) @@ -1281,6 +1282,7 @@ async def test_tcp_connector_dns_cache_forever(loop, dns_response) -> None: with mock.patch("aiohttp.connector.DefaultResolver") as m_resolver: conn = aiohttp.TCPConnector(loop=loop, use_dns_cache=True, ttl_dns_cache=10) m_resolver().resolve.return_value = dns_response() + m_resolver().close = mock.AsyncMock() await conn._resolve_host("localhost", 8080) await conn._resolve_host("localhost", 8080) m_resolver().resolve.assert_called_once_with("localhost", 8080, family=0) @@ -1292,6 +1294,7 @@ async def test_tcp_connector_use_dns_cache_disabled(loop, dns_response) -> None: with mock.patch("aiohttp.connector.DefaultResolver") as m_resolver: conn = aiohttp.TCPConnector(loop=loop, use_dns_cache=False) m_resolver().resolve.side_effect = [dns_response(), dns_response()] + m_resolver().close = mock.AsyncMock() await conn._resolve_host("localhost", 8080) await conn._resolve_host("localhost", 8080) m_resolver().resolve.assert_has_calls( @@ 
-1308,6 +1311,7 @@ async def test_tcp_connector_dns_throttle_requests(loop, dns_response) -> None: with mock.patch("aiohttp.connector.DefaultResolver") as m_resolver: conn = aiohttp.TCPConnector(loop=loop, use_dns_cache=True, ttl_dns_cache=10) m_resolver().resolve.return_value = dns_response() + m_resolver().close = mock.AsyncMock() loop.create_task(conn._resolve_host("localhost", 8080)) loop.create_task(conn._resolve_host("localhost", 8080)) await asyncio.sleep(0) @@ -1322,6 +1326,7 @@ async def test_tcp_connector_dns_throttle_requests_exception_spread(loop) -> Non conn = aiohttp.TCPConnector(loop=loop, use_dns_cache=True, ttl_dns_cache=10) e = Exception() m_resolver().resolve.side_effect = e + m_resolver().close = mock.AsyncMock() r1 = loop.create_task(conn._resolve_host("localhost", 8080)) r2 = loop.create_task(conn._resolve_host("localhost", 8080)) await asyncio.sleep(0) @@ -1341,6 +1346,7 @@ async def test_tcp_connector_dns_throttle_requests_cancelled_when_close( with mock.patch("aiohttp.connector.DefaultResolver") as m_resolver: conn = aiohttp.TCPConnector(loop=loop, use_dns_cache=True, ttl_dns_cache=10) m_resolver().resolve.return_value = dns_response() + m_resolver().close = mock.AsyncMock() loop.create_task(conn._resolve_host("localhost", 8080)) f = loop.create_task(conn._resolve_host("localhost", 8080)) @@ -1384,6 +1390,7 @@ def exception_handler(loop, context): use_dns_cache=False, ) m_resolver().resolve.return_value = dns_response_error() + m_resolver().close = mock.AsyncMock() f = loop.create_task(conn._create_direct_connection(req, [], ClientTimeout(0))) await asyncio.sleep(0) @@ -1419,6 +1426,7 @@ async def test_tcp_connector_dns_tracing(loop, dns_response) -> None: conn = aiohttp.TCPConnector(loop=loop, use_dns_cache=True, ttl_dns_cache=10) m_resolver().resolve.return_value = dns_response() + m_resolver().close = mock.AsyncMock() await conn._resolve_host("localhost", 8080, traces=traces) on_dns_resolvehost_start.assert_called_once_with( @@ -1460,6 
+1468,7 @@ async def test_tcp_connector_dns_tracing_cache_disabled(loop, dns_response) -> N conn = aiohttp.TCPConnector(loop=loop, use_dns_cache=False) m_resolver().resolve.side_effect = [dns_response(), dns_response()] + m_resolver().close = mock.AsyncMock() await conn._resolve_host("localhost", 8080, traces=traces) @@ -1514,6 +1523,7 @@ async def test_tcp_connector_dns_tracing_throttle_requests(loop, dns_response) - with mock.patch("aiohttp.connector.DefaultResolver") as m_resolver: conn = aiohttp.TCPConnector(loop=loop, use_dns_cache=True, ttl_dns_cache=10) m_resolver().resolve.return_value = dns_response() + m_resolver().close = mock.AsyncMock() loop.create_task(conn._resolve_host("localhost", 8080, traces=traces)) loop.create_task(conn._resolve_host("localhost", 8080, traces=traces)) await asyncio.sleep(0) @@ -1528,6 +1538,14 @@ async def test_tcp_connector_dns_tracing_throttle_requests(loop, dns_response) - await conn.close() +async def test_tcp_connector_close_resolver() -> None: + m_resolver = mock.AsyncMock() + with mock.patch("aiohttp.connector.DefaultResolver", return_value=m_resolver): + conn = aiohttp.TCPConnector(use_dns_cache=True, ttl_dns_cache=10) + await conn.close() + m_resolver.close.assert_awaited_once() + + async def test_dns_error(loop) -> None: connector = aiohttp.TCPConnector(loop=loop) connector._resolve_host = mock.AsyncMock( @@ -3691,6 +3709,7 @@ async def resolve_response() -> List[ResolveResult]: with mock.patch("aiohttp.connector.DefaultResolver") as m_resolver: m_resolver().resolve.return_value = resolve_response() + m_resolver().close = mock.AsyncMock() connector = TCPConnector() traces = [DummyTracer()] From dcc0ba2122e7dda7e43692396fde8485426e8385 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sat, 7 Jun 2025 06:38:13 -0500 Subject: [PATCH 1492/1511] Release 3.12.10 (#11153) --- CHANGES.rst | 20 ++++++++++++++++++++ CHANGES/11150.bugfix.rst | 3 --- aiohttp/__init__.py | 2 +- 3 files changed, 21 insertions(+), 4 deletions(-) delete mode 100644 CHANGES/11150.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index fd27e959e23..6fb4135456c 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,26 @@ .. towncrier release notes start +3.12.10 (2025-06-07) +==================== + +Bug fixes +--------- + +- Fixed leak of ``aiodns.DNSResolver`` when :py:class:`~aiohttp.TCPConnector` is closed and no resolver was passed when creating the connector -- by :user:`Tasssadar`. + + This was a regression introduced in version 3.12.0 (:pr:`10897`). + + + *Related issues and pull requests on GitHub:* + :issue:`11150`. + + + + +---- + + 3.12.9 (2025-06-04) =================== diff --git a/CHANGES/11150.bugfix.rst b/CHANGES/11150.bugfix.rst deleted file mode 100644 index 8a51b2e4f0c..00000000000 --- a/CHANGES/11150.bugfix.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fixed leak of ``aiodns.DNSResolver`` when :py:class:`~aiohttp.TCPConnector` is closed and no resolver was passed when creating the connector -- by :user:`Tasssadar`. - -This was a regression introduced in version 3.12.0 (:pr:`10897`). diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index b86fbfc8167..dccff3e0040 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.10.dev0" +__version__ = "3.12.10" from typing import TYPE_CHECKING, Tuple From ca39f6a05d6805bc6de956d11dce7baa67e63e41 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sat, 7 Jun 2025 08:53:02 -0500 Subject: [PATCH 1493/1511] [PR #11148/86a9a38 backport][3.12] Abort ssl connections on close when ssl_shutdown_timeout is 0 (#11155) --- CHANGES/11148.deprecation.rst | 1 + CHANGES/11148.feature.rst | 10 + aiohttp/client.py | 9 +- aiohttp/client_proto.py | 9 + aiohttp/connector.py | 106 +++++++-- docs/client_reference.rst | 60 +++-- tests/test_client_functional.py | 5 +- tests/test_client_proto.py | 40 ++++ tests/test_client_session.py | 99 ++++++-- tests/test_connector.py | 396 +++++++++++++++++++++++++++++++- tests/test_proxy.py | 26 +-- 11 files changed, 670 insertions(+), 91 deletions(-) create mode 120000 CHANGES/11148.deprecation.rst create mode 100644 CHANGES/11148.feature.rst diff --git a/CHANGES/11148.deprecation.rst b/CHANGES/11148.deprecation.rst new file mode 120000 index 00000000000..f4ddfb298af --- /dev/null +++ b/CHANGES/11148.deprecation.rst @@ -0,0 +1 @@ +11148.feature.rst \ No newline at end of file diff --git a/CHANGES/11148.feature.rst b/CHANGES/11148.feature.rst new file mode 100644 index 00000000000..6c47c93c7ba --- /dev/null +++ b/CHANGES/11148.feature.rst @@ -0,0 +1,10 @@ +Improved SSL connection handling by changing the default ``ssl_shutdown_timeout`` +from ``0.1`` to ``0`` seconds. SSL connections now use Python's default graceful +shutdown during normal operation but are aborted immediately when the connector +is closed, providing optimal behavior for both cases. Also added support for +``ssl_shutdown_timeout=0`` on all Python versions. Previously, this value was +rejected on Python 3.11+ and ignored on earlier versions. Non-zero values on +Python < 3.11 now trigger a ``RuntimeWarning`` -- by :user:`bdraco`. + +The ``ssl_shutdown_timeout`` parameter is now deprecated and will be removed in +aiohttp 4.0 as there is no clear use case for changing the default. 
diff --git a/aiohttp/client.py b/aiohttp/client.py index 576a965ba5d..ce95e5cb39e 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -303,7 +303,7 @@ def __init__( max_field_size: int = 8190, fallback_charset_resolver: _CharsetResolver = lambda r, b: "utf-8", middlewares: Sequence[ClientMiddlewareType] = (), - ssl_shutdown_timeout: Optional[float] = 0.1, + ssl_shutdown_timeout: Union[_SENTINEL, None, float] = sentinel, ) -> None: # We initialise _connector to None immediately, as it's referenced in __del__() # and could cause issues if an exception occurs during initialisation. @@ -361,6 +361,13 @@ def __init__( "timeout.connect" ) + if ssl_shutdown_timeout is not sentinel: + warnings.warn( + "The ssl_shutdown_timeout parameter is deprecated and will be removed in aiohttp 4.0", + DeprecationWarning, + stacklevel=2, + ) + if connector is None: connector = TCPConnector(ssl_shutdown_timeout=ssl_shutdown_timeout) diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index 7d00b366a79..e2fb1ce64cb 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -95,6 +95,15 @@ def close(self) -> None: self._payload = None self._drop_timeout() + def abort(self) -> None: + self._exception = None # Break cyclic references + transport = self.transport + if transport is not None: + transport.abort() + self.transport = None + self._payload = None + self._drop_timeout() + def is_connected(self) -> bool: return self.transport is not None and not self.transport.is_closing() diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 075ef95c814..4479ae321bc 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -52,6 +52,7 @@ from .client_proto import ResponseHandler from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params from .helpers import ( + _SENTINEL, ceil_timeout, is_ip_address, noop, @@ -231,15 +232,19 @@ def closed(self) -> bool: class _TransportPlaceholder: """placeholder for BaseConnector.connect function""" - 
__slots__ = ("closed",) + __slots__ = ("closed", "transport") def __init__(self, closed_future: asyncio.Future[Optional[Exception]]) -> None: """Initialize a placeholder for a transport.""" self.closed = closed_future + self.transport = None def close(self) -> None: """Close the placeholder.""" + def abort(self) -> None: + """Abort the placeholder (does nothing).""" + class BaseConnector: """Base connector class. @@ -469,9 +474,14 @@ def _cleanup_closed(self) -> None: timeout_ceil_threshold=self._timeout_ceil_threshold, ) - def close(self) -> Awaitable[None]: - """Close all opened transports.""" - if not (waiters := self._close()): + def close(self, *, abort_ssl: bool = False) -> Awaitable[None]: + """Close all opened transports. + + :param abort_ssl: If True, SSL connections will be aborted immediately + without performing the shutdown handshake. This provides + faster cleanup at the cost of less graceful disconnection. + """ + if not (waiters := self._close(abort_ssl=abort_ssl)): # If there are no connections to close, we can return a noop # awaitable to avoid scheduling a task on the event loop. 
return _DeprecationWaiter(noop()) @@ -484,7 +494,7 @@ def close(self) -> Awaitable[None]: task = self._loop.create_task(coro) return _DeprecationWaiter(task) - def _close(self) -> List[Awaitable[object]]: + def _close(self, *, abort_ssl: bool = False) -> List[Awaitable[object]]: waiters: List[Awaitable[object]] = [] if self._closed: @@ -506,12 +516,26 @@ def _close(self) -> List[Awaitable[object]]: for data in self._conns.values(): for proto, _ in data: - proto.close() + if ( + abort_ssl + and proto.transport + and proto.transport.get_extra_info("sslcontext") is not None + ): + proto.abort() + else: + proto.close() if closed := proto.closed: waiters.append(closed) for proto in self._acquired: - proto.close() + if ( + abort_ssl + and proto.transport + and proto.transport.get_extra_info("sslcontext") is not None + ): + proto.abort() + else: + proto.close() if closed := proto.closed: waiters.append(closed) @@ -881,11 +905,12 @@ class TCPConnector(BaseConnector): socket_factory - A SocketFactoryType function that, if supplied, will be used to create sockets given an AddrInfoType. - ssl_shutdown_timeout - Grace period for SSL shutdown handshake on TLS - connections. Default is 0.1 seconds. This usually - allows for a clean SSL shutdown by notifying the - remote peer of connection closure, while avoiding - excessive delays during connector cleanup. + ssl_shutdown_timeout - DEPRECATED. Will be removed in aiohttp 4.0. + Grace period for SSL shutdown handshake on TLS + connections. Default is 0 seconds (immediate abort). + This parameter allowed for a clean SSL shutdown by + notifying the remote peer of connection closure, + while avoiding excessive delays during connector cleanup. Note: Only takes effect on Python 3.11+. 
""" @@ -913,7 +938,7 @@ def __init__( happy_eyeballs_delay: Optional[float] = 0.25, interleave: Optional[int] = None, socket_factory: Optional[SocketFactoryType] = None, - ssl_shutdown_timeout: Optional[float] = 0.1, + ssl_shutdown_timeout: Union[_SENTINEL, None, float] = sentinel, ): super().__init__( keepalive_timeout=keepalive_timeout, @@ -946,14 +971,36 @@ def __init__( self._interleave = interleave self._resolve_host_tasks: Set["asyncio.Task[List[ResolveResult]]"] = set() self._socket_factory = socket_factory - self._ssl_shutdown_timeout = ssl_shutdown_timeout + self._ssl_shutdown_timeout: Optional[float] + # Handle ssl_shutdown_timeout with warning for Python < 3.11 + if ssl_shutdown_timeout is sentinel: + self._ssl_shutdown_timeout = 0 + else: + # Deprecation warning for ssl_shutdown_timeout parameter + warnings.warn( + "The ssl_shutdown_timeout parameter is deprecated and will be removed in aiohttp 4.0", + DeprecationWarning, + stacklevel=2, + ) + if ( + sys.version_info < (3, 11) + and ssl_shutdown_timeout is not None + and ssl_shutdown_timeout != 0 + ): + warnings.warn( + f"ssl_shutdown_timeout={ssl_shutdown_timeout} is ignored on Python < 3.11; " + "only ssl_shutdown_timeout=0 is supported. The timeout will be ignored.", + RuntimeWarning, + stacklevel=2, + ) + self._ssl_shutdown_timeout = ssl_shutdown_timeout - def _close(self) -> List[Awaitable[object]]: + def _close(self, *, abort_ssl: bool = False) -> List[Awaitable[object]]: """Close all ongoing DNS calls.""" for fut in chain.from_iterable(self._throttle_dns_futures.values()): fut.cancel() - waiters = super()._close() + waiters = super()._close(abort_ssl=abort_ssl) for t in self._resolve_host_tasks: t.cancel() @@ -961,11 +1008,20 @@ def _close(self) -> List[Awaitable[object]]: return waiters - async def close(self) -> None: - """Close all opened transports.""" + async def close(self, *, abort_ssl: bool = False) -> None: + """ + Close all opened transports. 
+ + :param abort_ssl: If True, SSL connections will be aborted immediately + without performing the shutdown handshake. If False (default), + the behavior is determined by ssl_shutdown_timeout: + - If ssl_shutdown_timeout=0: connections are aborted + - If ssl_shutdown_timeout>0: graceful shutdown is performed + """ if self._resolver_owner: await self._resolver.close() - await super().close() + # Use abort_ssl param if explicitly set, otherwise use ssl_shutdown_timeout default + await super().close(abort_ssl=abort_ssl or self._ssl_shutdown_timeout == 0) @property def family(self) -> int: @@ -1200,7 +1256,7 @@ async def _wrap_create_connection( # Add ssl_shutdown_timeout for Python 3.11+ when SSL is used if ( kwargs.get("ssl") - and self._ssl_shutdown_timeout is not None + and self._ssl_shutdown_timeout and sys.version_info >= (3, 11) ): kwargs["ssl_shutdown_timeout"] = self._ssl_shutdown_timeout @@ -1343,10 +1399,7 @@ async def _start_tls_connection( ): try: # ssl_shutdown_timeout is only available in Python 3.11+ - if ( - sys.version_info >= (3, 11) - and self._ssl_shutdown_timeout is not None - ): + if sys.version_info >= (3, 11) and self._ssl_shutdown_timeout: tls_transport = await self._loop.start_tls( underlying_transport, tls_proto, @@ -1367,7 +1420,10 @@ async def _start_tls_connection( # We need to close the underlying transport since # `start_tls()` probably failed before it had a # chance to do this: - underlying_transport.close() + if self._ssl_shutdown_timeout == 0: + underlying_transport.abort() + else: + underlying_transport.close() raise if isinstance(tls_transport, asyncio.Transport): fingerprint = self._get_fingerprint(req) diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 1644c57054b..ab16e35aed5 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -58,7 +58,7 @@ The client session supports the context manager protocol for self closing. 
max_line_size=8190, \ max_field_size=8190, \ fallback_charset_resolver=lambda r, b: "utf-8", \ - ssl_shutdown_timeout=0.1) + ssl_shutdown_timeout=0) The class for creating client sessions and making requests. @@ -257,16 +257,31 @@ The client session supports the context manager protocol for self closing. .. versionadded:: 3.8.6 - :param float ssl_shutdown_timeout: Grace period for SSL shutdown handshake on TLS - connections (``0.1`` seconds by default). This usually provides sufficient time - to notify the remote peer of connection closure, helping prevent broken - connections on the server side, while minimizing delays during connector - cleanup. This timeout is passed to the underlying :class:`TCPConnector` - when one is created automatically. Note: This parameter only takes effect - on Python 3.11+. + :param float ssl_shutdown_timeout: **(DEPRECATED)** This parameter is deprecated + and will be removed in aiohttp 4.0. Grace period for SSL shutdown handshake on + TLS connections when the connector is closed (``0`` seconds by default). + By default (``0``), SSL connections are aborted immediately when the + connector is closed, without performing the shutdown handshake. During + normal operation, SSL connections use Python's default SSL shutdown + behavior. Setting this to a positive value (e.g., ``0.1``) will perform + a graceful shutdown when closing the connector, notifying the remote + peer which can help prevent "connection reset" errors at the cost of + additional cleanup time. This timeout is passed to the underlying + :class:`TCPConnector` when one is created automatically. + Note: On Python versions prior to 3.11, only a value of ``0`` is supported; + other values will trigger a warning. .. versionadded:: 3.12.5 + .. versionchanged:: 3.12.11 + Changed default from ``0.1`` to ``0`` to abort SSL connections + immediately when the connector is closed. Added support for + ``ssl_shutdown_timeout=0`` on all Python versions. 
A :exc:`RuntimeWarning` + is issued when non-zero values are passed on Python < 3.11. + + .. deprecated:: 3.12.11 + This parameter is deprecated and will be removed in aiohttp 4.0. + .. attribute:: closed ``True`` if the session has been closed, ``False`` otherwise. @@ -1196,7 +1211,7 @@ is controlled by *force_close* constructor's parameter). force_close=False, limit=100, limit_per_host=0, \ enable_cleanup_closed=False, timeout_ceil_threshold=5, \ happy_eyeballs_delay=0.25, interleave=None, loop=None, \ - socket_factory=None, ssl_shutdown_timeout=0.1) + socket_factory=None, ssl_shutdown_timeout=0) Connector for working with *HTTP* and *HTTPS* via *TCP* sockets. @@ -1323,16 +1338,29 @@ is controlled by *force_close* constructor's parameter). .. versionadded:: 3.12 - :param float ssl_shutdown_timeout: Grace period for SSL shutdown on TLS - connections (``0.1`` seconds by default). This parameter balances two - important considerations: usually providing sufficient time to notify - the remote server (which helps prevent "connection reset" errors), - while avoiding unnecessary delays during connector cleanup. - The default value provides a reasonable compromise for most use cases. - Note: This parameter only takes effect on Python 3.11+. + :param float ssl_shutdown_timeout: **(DEPRECATED)** This parameter is deprecated + and will be removed in aiohttp 4.0. Grace period for SSL shutdown on TLS + connections when the connector is closed (``0`` seconds by default). + By default (``0``), SSL connections are aborted immediately when the + connector is closed, without performing the shutdown handshake. During + normal operation, SSL connections use Python's default SSL shutdown + behavior. Setting this to a positive value (e.g., ``0.1``) will perform + a graceful shutdown when closing the connector, notifying the remote + server which can help prevent "connection reset" errors at the cost of + additional cleanup time. 
Note: On Python versions prior to 3.11, only + a value of ``0`` is supported; other values will trigger a warning. .. versionadded:: 3.12.5 + .. versionchanged:: 3.12.11 + Changed default from ``0.1`` to ``0`` to abort SSL connections + immediately when the connector is closed. Added support for + ``ssl_shutdown_timeout=0`` on all Python versions. A :exc:`RuntimeWarning` + is issued when non-zero values are passed on Python < 3.11. + + .. deprecated:: 3.12.11 + This parameter is deprecated and will be removed in aiohttp 4.0. + .. attribute:: family *TCP* socket family e.g. :data:`socket.AF_INET` or diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 5c18178b714..08cc5c97538 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -698,7 +698,10 @@ async def test_ssl_client_shutdown_timeout( ) -> None: # Test that ssl_shutdown_timeout is properly used during connection closure - connector = aiohttp.TCPConnector(ssl=client_ssl_ctx, ssl_shutdown_timeout=0.1) + with pytest.warns( + DeprecationWarning, match="ssl_shutdown_timeout parameter is deprecated" + ): + connector = aiohttp.TCPConnector(ssl=client_ssl_ctx, ssl_shutdown_timeout=0.1) async def streaming_handler(request: web.Request) -> NoReturn: # Create a streaming response that continuously sends data diff --git a/tests/test_client_proto.py b/tests/test_client_proto.py index 2a42996950f..b75ebae1137 100644 --- a/tests/test_client_proto.py +++ b/tests/test_client_proto.py @@ -303,3 +303,43 @@ async def test_closed_property_after_connection_lost( # After connection_lost, closed should return None if it was never accessed assert proto.closed is None + + +async def test_abort(loop: asyncio.AbstractEventLoop) -> None: + """Test the abort() method.""" + proto = ResponseHandler(loop=loop) + + # Create a mock transport + transport = mock.Mock() + proto.connection_made(transport) + + # Set up some state + proto._payload = mock.Mock() + + # Mock _drop_timeout 
method using patch.object + with mock.patch.object(proto, "_drop_timeout") as mock_drop_timeout: + # Call abort + proto.abort() + + # Verify transport.abort() was called + transport.abort.assert_called_once() + + # Verify cleanup + assert proto.transport is None + assert proto._payload is None + assert proto._exception is None # type: ignore[unreachable] + mock_drop_timeout.assert_called_once() + + +async def test_abort_without_transport(loop: asyncio.AbstractEventLoop) -> None: + """Test abort() when transport is None.""" + proto = ResponseHandler(loop=loop) + + # Mock _drop_timeout method using patch.object + with mock.patch.object(proto, "_drop_timeout") as mock_drop_timeout: + # Call abort without transport + proto.abort() + + # Should not raise and should still clean up + assert proto._exception is None + mock_drop_timeout.assert_not_called() diff --git a/tests/test_client_session.py b/tests/test_client_session.py index 2702350f132..754cac1b47e 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -3,6 +3,8 @@ import gc import io import json +import sys +import warnings from collections import deque from http.cookies import BaseCookie, SimpleCookie from typing import Any, Awaitable, Callable, Iterator, List, Optional, cast @@ -310,32 +312,91 @@ async def test_create_connector(create_session, loop, mocker) -> None: assert connector.close.called +@pytest.mark.skipif( + sys.version_info < (3, 11), + reason="Use test_ssl_shutdown_timeout_passed_to_connector_pre_311 for Python < 3.11", +) async def test_ssl_shutdown_timeout_passed_to_connector() -> None: - # Test default value + # Test default value (no warning expected) async with ClientSession() as session: assert isinstance(session.connector, TCPConnector) - assert session.connector._ssl_shutdown_timeout == 0.1 + assert session.connector._ssl_shutdown_timeout == 0 - # Test custom value - async with ClientSession(ssl_shutdown_timeout=1.0) as session: - assert 
isinstance(session.connector, TCPConnector) - assert session.connector._ssl_shutdown_timeout == 1.0 + # Test custom value - expect deprecation warning + with pytest.warns( + DeprecationWarning, match="ssl_shutdown_timeout parameter is deprecated" + ): + async with ClientSession(ssl_shutdown_timeout=1.0) as session: + assert isinstance(session.connector, TCPConnector) + assert session.connector._ssl_shutdown_timeout == 1.0 - # Test None value - async with ClientSession(ssl_shutdown_timeout=None) as session: - assert isinstance(session.connector, TCPConnector) - assert session.connector._ssl_shutdown_timeout is None + # Test None value - expect deprecation warning + with pytest.warns( + DeprecationWarning, match="ssl_shutdown_timeout parameter is deprecated" + ): + async with ClientSession(ssl_shutdown_timeout=None) as session: + assert isinstance(session.connector, TCPConnector) + assert session.connector._ssl_shutdown_timeout is None # Test that it doesn't affect when custom connector is provided - custom_conn = TCPConnector(ssl_shutdown_timeout=2.0) - async with ClientSession( - connector=custom_conn, ssl_shutdown_timeout=1.0 - ) as session: - assert session.connector is not None - assert isinstance(session.connector, TCPConnector) - assert ( - session.connector._ssl_shutdown_timeout == 2.0 - ) # Should use connector's value + with pytest.warns( + DeprecationWarning, match="ssl_shutdown_timeout parameter is deprecated" + ): + custom_conn = TCPConnector(ssl_shutdown_timeout=2.0) + with pytest.warns( + DeprecationWarning, match="ssl_shutdown_timeout parameter is deprecated" + ): + async with ClientSession( + connector=custom_conn, ssl_shutdown_timeout=1.0 + ) as session: + assert session.connector is not None + assert isinstance(session.connector, TCPConnector) + assert ( + session.connector._ssl_shutdown_timeout == 2.0 + ) # Should use connector's value + + +@pytest.mark.skipif( + sys.version_info >= (3, 11), + reason="This test is for Python < 3.11 runtime warning 
behavior", +) +async def test_ssl_shutdown_timeout_passed_to_connector_pre_311() -> None: + """Test that both deprecation and runtime warnings are issued on Python < 3.11.""" + # Test custom value - expect both deprecation and runtime warnings + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + async with ClientSession(ssl_shutdown_timeout=1.0) as session: + assert isinstance(session.connector, TCPConnector) + assert session.connector._ssl_shutdown_timeout == 1.0 + # Should have deprecation warnings (from ClientSession and TCPConnector) and runtime warning + # ClientSession emits 1 DeprecationWarning, TCPConnector emits 1 DeprecationWarning + 1 RuntimeWarning = 3 total + assert len(w) == 3 + deprecation_count = sum( + 1 for warn in w if issubclass(warn.category, DeprecationWarning) + ) + runtime_count = sum( + 1 for warn in w if issubclass(warn.category, RuntimeWarning) + ) + assert deprecation_count == 2 # One from ClientSession, one from TCPConnector + assert runtime_count == 1 # One from TCPConnector + + # Test with custom connector + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + custom_conn = TCPConnector(ssl_shutdown_timeout=2.0) + # Should have both deprecation and runtime warnings + assert len(w) == 2 + with pytest.warns( + DeprecationWarning, match="ssl_shutdown_timeout parameter is deprecated" + ): + async with ClientSession( + connector=custom_conn, ssl_shutdown_timeout=1.0 + ) as session: + assert session.connector is not None + assert isinstance(session.connector, TCPConnector) + assert ( + session.connector._ssl_shutdown_timeout == 2.0 + ) # Should use connector's value def test_connector_loop(loop: asyncio.AbstractEventLoop) -> None: diff --git a/tests/test_connector.py b/tests/test_connector.py index 79776380297..c7938ed08e4 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -8,6 +8,7 @@ import ssl import sys import uuid +import warnings from collections import 
defaultdict, deque from concurrent import futures from contextlib import closing, suppress @@ -2044,25 +2045,55 @@ async def test_tcp_connector_ctor() -> None: await conn.close() +@pytest.mark.skipif( + sys.version_info < (3, 11), + reason="Use test_tcp_connector_ssl_shutdown_timeout_pre_311 for Python < 3.11", +) async def test_tcp_connector_ssl_shutdown_timeout( loop: asyncio.AbstractEventLoop, ) -> None: - # Test default value + # Test default value (no warning expected) conn = aiohttp.TCPConnector() - assert conn._ssl_shutdown_timeout == 0.1 + assert conn._ssl_shutdown_timeout == 0 await conn.close() - # Test custom value - conn = aiohttp.TCPConnector(ssl_shutdown_timeout=1.0) + # Test custom value - expect deprecation warning + with pytest.warns( + DeprecationWarning, match="ssl_shutdown_timeout parameter is deprecated" + ): + conn = aiohttp.TCPConnector(ssl_shutdown_timeout=1.0) assert conn._ssl_shutdown_timeout == 1.0 await conn.close() - # Test None value - conn = aiohttp.TCPConnector(ssl_shutdown_timeout=None) + # Test None value - expect deprecation warning + with pytest.warns( + DeprecationWarning, match="ssl_shutdown_timeout parameter is deprecated" + ): + conn = aiohttp.TCPConnector(ssl_shutdown_timeout=None) assert conn._ssl_shutdown_timeout is None await conn.close() +@pytest.mark.skipif( + sys.version_info >= (3, 11), + reason="This test is for Python < 3.11 runtime warning behavior", +) +async def test_tcp_connector_ssl_shutdown_timeout_pre_311( + loop: asyncio.AbstractEventLoop, +) -> None: + """Test that both deprecation and runtime warnings are issued on Python < 3.11.""" + # Test custom value - expect both deprecation and runtime warnings + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + conn = aiohttp.TCPConnector(ssl_shutdown_timeout=1.0) + # Should have both deprecation and runtime warnings + assert len(w) == 2 + assert any(issubclass(warn.category, DeprecationWarning) for warn in w) + assert 
any(issubclass(warn.category, RuntimeWarning) for warn in w) + assert conn._ssl_shutdown_timeout == 1.0 + await conn.close() + + @pytest.mark.skipif( sys.version_info < (3, 11), reason="ssl_shutdown_timeout requires Python 3.11+" ) @@ -2070,7 +2101,10 @@ async def test_tcp_connector_ssl_shutdown_timeout_passed_to_create_connection( loop: asyncio.AbstractEventLoop, start_connection: mock.AsyncMock ) -> None: # Test that ssl_shutdown_timeout is passed to create_connection for SSL connections - conn = aiohttp.TCPConnector(ssl_shutdown_timeout=2.5) + with pytest.warns( + DeprecationWarning, match="ssl_shutdown_timeout parameter is deprecated" + ): + conn = aiohttp.TCPConnector(ssl_shutdown_timeout=2.5) with mock.patch.object( conn._loop, "create_connection", autospec=True, spec_set=True @@ -2085,7 +2119,10 @@ async def test_tcp_connector_ssl_shutdown_timeout_passed_to_create_connection( await conn.close() # Test with None value - conn = aiohttp.TCPConnector(ssl_shutdown_timeout=None) + with pytest.warns( + DeprecationWarning, match="ssl_shutdown_timeout parameter is deprecated" + ): + conn = aiohttp.TCPConnector(ssl_shutdown_timeout=None) with mock.patch.object( conn._loop, "create_connection", autospec=True, spec_set=True @@ -2101,7 +2138,10 @@ async def test_tcp_connector_ssl_shutdown_timeout_passed_to_create_connection( await conn.close() # Test that ssl_shutdown_timeout is NOT passed for non-SSL connections - conn = aiohttp.TCPConnector(ssl_shutdown_timeout=2.5) + with pytest.warns( + DeprecationWarning, match="ssl_shutdown_timeout parameter is deprecated" + ): + conn = aiohttp.TCPConnector(ssl_shutdown_timeout=2.5) with mock.patch.object( conn._loop, "create_connection", autospec=True, spec_set=True @@ -2122,7 +2162,178 @@ async def test_tcp_connector_ssl_shutdown_timeout_not_passed_pre_311( loop: asyncio.AbstractEventLoop, start_connection: mock.AsyncMock ) -> None: # Test that ssl_shutdown_timeout is NOT passed to create_connection on Python < 3.11 - conn = 
aiohttp.TCPConnector(ssl_shutdown_timeout=2.5) + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + conn = aiohttp.TCPConnector(ssl_shutdown_timeout=2.5) + # Should have both deprecation and runtime warnings + assert len(w) == 2 + assert any(issubclass(warn.category, DeprecationWarning) for warn in w) + assert any(issubclass(warn.category, RuntimeWarning) for warn in w) + + with mock.patch.object( + conn._loop, "create_connection", autospec=True, spec_set=True + ) as create_connection: + create_connection.return_value = mock.Mock(), mock.Mock() + + # Test with HTTPS + req = ClientRequest("GET", URL("https://example.com"), loop=loop) + with closing(await conn.connect(req, [], ClientTimeout())): + assert "ssl_shutdown_timeout" not in create_connection.call_args.kwargs + + # Test with HTTP + req = ClientRequest("GET", URL("http://example.com"), loop=loop) + with closing(await conn.connect(req, [], ClientTimeout())): + assert "ssl_shutdown_timeout" not in create_connection.call_args.kwargs + + await conn.close() + + +async def test_tcp_connector_close_abort_ssl_when_shutdown_timeout_zero( + loop: asyncio.AbstractEventLoop, +) -> None: + """Test that close() uses abort() for SSL connections when ssl_shutdown_timeout=0.""" + with pytest.warns( + DeprecationWarning, match="ssl_shutdown_timeout parameter is deprecated" + ): + conn = aiohttp.TCPConnector(ssl_shutdown_timeout=0) + + # Create a mock SSL protocol + proto = mock.create_autospec(ResponseHandler, instance=True) + proto.closed = None + + # Create mock SSL transport + transport = mock.Mock() + transport.get_extra_info.return_value = mock.Mock() # Returns SSL context + transport.is_closing.return_value = False + proto.transport = transport + + # Add the protocol to acquired connections + conn._acquired.add(proto) + + # Close the connector + await conn.close() + + # Verify abort was called instead of close for SSL connection + proto.abort.assert_called_once() + 
proto.close.assert_not_called() + + +async def test_tcp_connector_close_doesnt_abort_non_ssl_when_shutdown_timeout_zero( + loop: asyncio.AbstractEventLoop, +) -> None: + """Test that close() still uses close() for non-SSL connections even when ssl_shutdown_timeout=0.""" + with pytest.warns( + DeprecationWarning, match="ssl_shutdown_timeout parameter is deprecated" + ): + conn = aiohttp.TCPConnector(ssl_shutdown_timeout=0) + + # Create a mock non-SSL protocol + proto = mock.create_autospec(ResponseHandler, instance=True) + proto.closed = None + + # Create mock non-SSL transport + transport = mock.Mock() + transport.get_extra_info.return_value = None # No SSL context + transport.is_closing.return_value = False + proto.transport = transport + + # Add the protocol to acquired connections + conn._acquired.add(proto) + + # Close the connector + await conn.close() + + # Verify close was called for non-SSL connection + proto.close.assert_called_once() + proto.abort.assert_not_called() + + +async def test_tcp_connector_ssl_shutdown_timeout_warning_pre_311( + loop: asyncio.AbstractEventLoop, +) -> None: + """Test that a warning is issued for non-zero ssl_shutdown_timeout on Python < 3.11.""" + with ( + mock.patch.object(sys, "version_info", (3, 10, 0)), + warnings.catch_warnings(record=True) as w, + ): + warnings.simplefilter("always") + conn = aiohttp.TCPConnector(ssl_shutdown_timeout=5.0) + + # We should get two warnings: deprecation and runtime warning + assert len(w) == 2 + + # Find each warning type + deprecation_warning = next( + (warn for warn in w if issubclass(warn.category, DeprecationWarning)), None + ) + runtime_warning = next( + (warn for warn in w if issubclass(warn.category, RuntimeWarning)), None + ) + + assert deprecation_warning is not None + assert "ssl_shutdown_timeout parameter is deprecated" in str( + deprecation_warning.message + ) + + assert runtime_warning is not None + assert "ssl_shutdown_timeout=5.0 is ignored on Python < 3.11" in str( + 
runtime_warning.message + ) + assert "only ssl_shutdown_timeout=0 is supported" in str( + runtime_warning.message + ) + + # Verify the value is still stored + assert conn._ssl_shutdown_timeout == 5.0 + + await conn.close() + + +async def test_tcp_connector_ssl_shutdown_timeout_zero_no_warning_pre_311( + loop: asyncio.AbstractEventLoop, +) -> None: + """Test that no warning is issued for ssl_shutdown_timeout=0 on Python < 3.11.""" + with ( + mock.patch.object(sys, "version_info", (3, 10, 0)), + warnings.catch_warnings(record=True) as w, + ): + warnings.simplefilter("always") + conn = aiohttp.TCPConnector(ssl_shutdown_timeout=0) + + # We should get one warning: deprecation + assert len(w) == 1 + assert issubclass(w[0].category, DeprecationWarning) + assert "ssl_shutdown_timeout parameter is deprecated" in str(w[0].message) + assert conn._ssl_shutdown_timeout == 0 + + await conn.close() + + +async def test_tcp_connector_ssl_shutdown_timeout_sentinel_no_warning_pre_311( + loop: asyncio.AbstractEventLoop, +) -> None: + """Test that no warning is issued when sentinel is used on Python < 3.11.""" + with ( + mock.patch.object(sys, "version_info", (3, 10, 0)), + warnings.catch_warnings(record=True) as w, + ): + warnings.simplefilter("always") + conn = aiohttp.TCPConnector() # Uses sentinel by default + + assert len(w) == 0 + assert conn._ssl_shutdown_timeout == 0 # Default value + + await conn.close() + + +async def test_tcp_connector_ssl_shutdown_timeout_zero_not_passed( + loop: asyncio.AbstractEventLoop, start_connection: mock.AsyncMock +) -> None: + """Test that ssl_shutdown_timeout=0 is NOT passed to create_connection.""" + with pytest.warns( + DeprecationWarning, match="ssl_shutdown_timeout parameter is deprecated" + ): + conn = aiohttp.TCPConnector(ssl_shutdown_timeout=0) with mock.patch.object( conn._loop, "create_connection", autospec=True, spec_set=True @@ -2132,9 +2343,10 @@ async def test_tcp_connector_ssl_shutdown_timeout_not_passed_pre_311( # Test with HTTPS 
req = ClientRequest("GET", URL("https://example.com"), loop=loop) with closing(await conn.connect(req, [], ClientTimeout())): + # Verify ssl_shutdown_timeout was NOT passed assert "ssl_shutdown_timeout" not in create_connection.call_args.kwargs - # Test with HTTP + # Test with HTTP (should not have ssl_shutdown_timeout anyway) req = ClientRequest("GET", URL("http://example.com"), loop=loop) with closing(await conn.connect(req, [], ClientTimeout())): assert "ssl_shutdown_timeout" not in create_connection.call_args.kwargs @@ -2142,11 +2354,173 @@ async def test_tcp_connector_ssl_shutdown_timeout_not_passed_pre_311( await conn.close() +@pytest.mark.skipif( + sys.version_info < (3, 11), reason="ssl_shutdown_timeout requires Python 3.11+" +) +async def test_tcp_connector_ssl_shutdown_timeout_nonzero_passed( + loop: asyncio.AbstractEventLoop, start_connection: mock.AsyncMock +) -> None: + """Test that non-zero ssl_shutdown_timeout IS passed to create_connection on Python 3.11+.""" + with pytest.warns( + DeprecationWarning, match="ssl_shutdown_timeout parameter is deprecated" + ): + conn = aiohttp.TCPConnector(ssl_shutdown_timeout=5.0) + + with mock.patch.object( + conn._loop, "create_connection", autospec=True, spec_set=True + ) as create_connection: + create_connection.return_value = mock.Mock(), mock.Mock() + + # Test with HTTPS + req = ClientRequest("GET", URL("https://example.com"), loop=loop) + with closing(await conn.connect(req, [], ClientTimeout())): + # Verify ssl_shutdown_timeout WAS passed + assert create_connection.call_args.kwargs["ssl_shutdown_timeout"] == 5.0 + + # Test with HTTP (should not have ssl_shutdown_timeout) + req = ClientRequest("GET", URL("http://example.com"), loop=loop) + with closing(await conn.connect(req, [], ClientTimeout())): + assert "ssl_shutdown_timeout" not in create_connection.call_args.kwargs + + await conn.close() + + +async def test_tcp_connector_close_abort_ssl_connections_in_conns( + loop: asyncio.AbstractEventLoop, +) -> None: 
+ """Test that SSL connections in _conns are aborted when ssl_shutdown_timeout=0.""" + with pytest.warns( + DeprecationWarning, match="ssl_shutdown_timeout parameter is deprecated" + ): + conn = aiohttp.TCPConnector(ssl_shutdown_timeout=0) + + # Create mock SSL protocol + proto = mock.create_autospec(ResponseHandler, instance=True) + proto.closed = None + + # Create mock SSL transport + transport = mock.Mock() + transport.get_extra_info.return_value = mock.Mock() # Returns SSL context + proto.transport = transport + + # Add the protocol to _conns + key = ConnectionKey("host", 443, True, True, None, None, None) + conn._conns[key] = deque([(proto, loop.time())]) + + # Close the connector + await conn.close() + + # Verify abort was called for SSL connection + proto.abort.assert_called_once() + proto.close.assert_not_called() + + async def test_tcp_connector_allowed_protocols(loop: asyncio.AbstractEventLoop) -> None: conn = aiohttp.TCPConnector() assert conn.allowed_protocol_schema_set == {"", "tcp", "http", "https", "ws", "wss"} +async def test_start_tls_exception_with_ssl_shutdown_timeout_zero( + loop: asyncio.AbstractEventLoop, +) -> None: + """Test _start_tls_connection exception handling with ssl_shutdown_timeout=0.""" + with pytest.warns( + DeprecationWarning, match="ssl_shutdown_timeout parameter is deprecated" + ): + conn = aiohttp.TCPConnector(ssl_shutdown_timeout=0) + + underlying_transport = mock.Mock() + req = mock.Mock() + req.server_hostname = None + req.host = "example.com" + req.is_ssl = mock.Mock(return_value=True) + + # Patch _get_ssl_context to return a valid context and make start_tls fail + with ( + mock.patch.object( + conn, "_get_ssl_context", return_value=ssl.create_default_context() + ), + mock.patch.object(conn._loop, "start_tls", side_effect=OSError("TLS failed")), + ): + with pytest.raises(OSError): + await conn._start_tls_connection(underlying_transport, req, ClientTimeout()) + + # Should abort, not close + 
underlying_transport.abort.assert_called_once() + underlying_transport.close.assert_not_called() + + +@pytest.mark.skipif( + sys.version_info < (3, 11), + reason="Use test_start_tls_exception_with_ssl_shutdown_timeout_nonzero_pre_311 for Python < 3.11", +) +async def test_start_tls_exception_with_ssl_shutdown_timeout_nonzero( + loop: asyncio.AbstractEventLoop, +) -> None: + """Test _start_tls_connection exception handling with ssl_shutdown_timeout>0.""" + with pytest.warns( + DeprecationWarning, match="ssl_shutdown_timeout parameter is deprecated" + ): + conn = aiohttp.TCPConnector(ssl_shutdown_timeout=1.0) + + underlying_transport = mock.Mock() + req = mock.Mock() + req.server_hostname = None + req.host = "example.com" + req.is_ssl = mock.Mock(return_value=True) + + # Patch _get_ssl_context to return a valid context and make start_tls fail + with ( + mock.patch.object( + conn, "_get_ssl_context", return_value=ssl.create_default_context() + ), + mock.patch.object(conn._loop, "start_tls", side_effect=OSError("TLS failed")), + ): + with pytest.raises(OSError): + await conn._start_tls_connection(underlying_transport, req, ClientTimeout()) + + # Should close, not abort + underlying_transport.close.assert_called_once() + underlying_transport.abort.assert_not_called() + + +@pytest.mark.skipif( + sys.version_info >= (3, 11), + reason="This test is for Python < 3.11 runtime warning behavior", +) +async def test_start_tls_exception_with_ssl_shutdown_timeout_nonzero_pre_311( + loop: asyncio.AbstractEventLoop, +) -> None: + """Test _start_tls_connection exception handling with ssl_shutdown_timeout>0 on Python < 3.11.""" + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + conn = aiohttp.TCPConnector(ssl_shutdown_timeout=1.0) + # Should have both deprecation and runtime warnings + assert len(w) == 2 + assert any(issubclass(warn.category, DeprecationWarning) for warn in w) + assert any(issubclass(warn.category, RuntimeWarning) for warn in w) + + 
underlying_transport = mock.Mock() + req = mock.Mock() + req.server_hostname = None + req.host = "example.com" + req.is_ssl = mock.Mock(return_value=True) + + # Patch _get_ssl_context to return a valid context and make start_tls fail + with ( + mock.patch.object( + conn, "_get_ssl_context", return_value=ssl.create_default_context() + ), + mock.patch.object(conn._loop, "start_tls", side_effect=OSError("TLS failed")), + ): + with pytest.raises(OSError): + await conn._start_tls_connection(underlying_transport, req, ClientTimeout()) + + # Should close, not abort + underlying_transport.close.assert_called_once() + underlying_transport.abort.assert_not_called() + + async def test_invalid_ssl_param() -> None: with pytest.raises(TypeError): aiohttp.TCPConnector(ssl=object()) # type: ignore[arg-type] diff --git a/tests/test_proxy.py b/tests/test_proxy.py index f5ebf6adc4f..4c506cc5730 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -935,24 +935,14 @@ async def make_conn(): self.loop.run_until_complete( connector._create_connection(req, None, aiohttp.ClientTimeout()) ) - - if sys.version_info >= (3, 11): - self.loop.start_tls.assert_called_with( - mock.ANY, - mock.ANY, - _SSL_CONTEXT_VERIFIED, - server_hostname="www.python.org", - ssl_handshake_timeout=mock.ANY, - ssl_shutdown_timeout=0.1, - ) - else: - self.loop.start_tls.assert_called_with( - mock.ANY, - mock.ANY, - _SSL_CONTEXT_VERIFIED, - server_hostname="www.python.org", - ssl_handshake_timeout=mock.ANY, - ) + # ssl_shutdown_timeout=0 is not passed to start_tls + self.loop.start_tls.assert_called_with( + mock.ANY, + mock.ANY, + _SSL_CONTEXT_VERIFIED, + server_hostname="www.python.org", + ssl_handshake_timeout=mock.ANY, + ) self.assertEqual(req.url.path, "/") self.assertEqual(proxy_req.method, "CONNECT") From f9621dc9c9f902f83f20725d9f4ae09a63590516 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sat, 7 Jun 2025 10:08:32 -0500 Subject: [PATCH 1494/1511] Release 3.12.11 (#11157) --- CHANGES.rst | 48 +++++++++++++++++++++++++++++++++++ CHANGES/11148.deprecation.rst | 1 - CHANGES/11148.feature.rst | 10 -------- aiohttp/__init__.py | 2 +- 4 files changed, 49 insertions(+), 12 deletions(-) delete mode 120000 CHANGES/11148.deprecation.rst delete mode 100644 CHANGES/11148.feature.rst diff --git a/CHANGES.rst b/CHANGES.rst index 6fb4135456c..859cfd1f6b0 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,54 @@ .. towncrier release notes start +3.12.11 (2025-06-07) +==================== + +Features +-------- + +- Improved SSL connection handling by changing the default ``ssl_shutdown_timeout`` + from ``0.1`` to ``0`` seconds. SSL connections now use Python's default graceful + shutdown during normal operation but are aborted immediately when the connector + is closed, providing optimal behavior for both cases. Also added support for + ``ssl_shutdown_timeout=0`` on all Python versions. Previously, this value was + rejected on Python 3.11+ and ignored on earlier versions. Non-zero values on + Python < 3.11 now trigger a ``RuntimeWarning`` -- by :user:`bdraco`. + + The ``ssl_shutdown_timeout`` parameter is now deprecated and will be removed in + aiohttp 4.0 as there is no clear use case for changing the default. + + + *Related issues and pull requests on GitHub:* + :issue:`11148`. + + + + +Deprecations (removal in next major release) +-------------------------------------------- + +- Improved SSL connection handling by changing the default ``ssl_shutdown_timeout`` + from ``0.1`` to ``0`` seconds. SSL connections now use Python's default graceful + shutdown during normal operation but are aborted immediately when the connector + is closed, providing optimal behavior for both cases. Also added support for + ``ssl_shutdown_timeout=0`` on all Python versions. 
Previously, this value was + rejected on Python 3.11+ and ignored on earlier versions. Non-zero values on + Python < 3.11 now trigger a ``RuntimeWarning`` -- by :user:`bdraco`. + + The ``ssl_shutdown_timeout`` parameter is now deprecated and will be removed in + aiohttp 4.0 as there is no clear use case for changing the default. + + + *Related issues and pull requests on GitHub:* + :issue:`11148`. + + + + +---- + + 3.12.10 (2025-06-07) ==================== diff --git a/CHANGES/11148.deprecation.rst b/CHANGES/11148.deprecation.rst deleted file mode 120000 index f4ddfb298af..00000000000 --- a/CHANGES/11148.deprecation.rst +++ /dev/null @@ -1 +0,0 @@ -11148.feature.rst \ No newline at end of file diff --git a/CHANGES/11148.feature.rst b/CHANGES/11148.feature.rst deleted file mode 100644 index 6c47c93c7ba..00000000000 --- a/CHANGES/11148.feature.rst +++ /dev/null @@ -1,10 +0,0 @@ -Improved SSL connection handling by changing the default ``ssl_shutdown_timeout`` -from ``0.1`` to ``0`` seconds. SSL connections now use Python's default graceful -shutdown during normal operation but are aborted immediately when the connector -is closed, providing optimal behavior for both cases. Also added support for -``ssl_shutdown_timeout=0`` on all Python versions. Previously, this value was -rejected on Python 3.11+ and ignored on earlier versions. Non-zero values on -Python < 3.11 now trigger a ``RuntimeWarning`` -- by :user:`bdraco`. - -The ``ssl_shutdown_timeout`` parameter is now deprecated and will be removed in -aiohttp 4.0 as there is no clear use case for changing the default. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index dccff3e0040..9872fa1bb00 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.10" +__version__ = "3.12.11" from typing import TYPE_CHECKING, Tuple From 2a7ed29e3d92bc52502ff04fc80a38b54635165b Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sat, 7 Jun 2025 11:30:52 -0500 Subject: [PATCH 1495/1511] Increment version to 3.12.12.dev0 (#11159) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 9872fa1bb00..3f9b1234cb3 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.11" +__version__ = "3.12.12.dev0" from typing import TYPE_CHECKING, Tuple From 608d8ff454e5d0b54dbc4045e8b61fb3b4acfd0f Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 10 Jun 2025 03:17:17 +0000 Subject: [PATCH 1496/1511] [PR #11173/85b0df43 backport][3.12] Fix cookie unquoting regression (#11179) Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/11173.bugfix.rst | 1 + aiohttp/_cookie_helpers.py | 47 +++++-- docs/spelling_wordlist.txt | 1 + tests/test_cookie_helpers.py | 240 ++++++++++++++++++++++++++++++++++- 4 files changed, 279 insertions(+), 10 deletions(-) create mode 100644 CHANGES/11173.bugfix.rst diff --git a/CHANGES/11173.bugfix.rst b/CHANGES/11173.bugfix.rst new file mode 100644 index 00000000000..9214080d267 --- /dev/null +++ b/CHANGES/11173.bugfix.rst @@ -0,0 +1 @@ +Fixed cookie unquoting to properly handle octal escape sequences in cookie values (e.g., ``\012`` for newline) by vendoring the correct ``_unquote`` implementation from Python's ``http.cookies`` module -- by :user:`bdraco`. diff --git a/aiohttp/_cookie_helpers.py b/aiohttp/_cookie_helpers.py index 8184cc9bdc1..a5b4f81c78f 100644 --- a/aiohttp/_cookie_helpers.py +++ b/aiohttp/_cookie_helpers.py @@ -108,20 +108,49 @@ def preserve_morsel_with_coded_value(cookie: Morsel[str]) -> Morsel[str]: return mrsl_val -def _unquote(text: str) -> str: +_unquote_sub = re.compile(r"\\(?:([0-3][0-7][0-7])|(.))").sub + + +def _unquote_replace(m: re.Match[str]) -> str: + """ + Replace function for _unquote_sub regex substitution. 
+ + Handles escaped characters in cookie values: + - Octal sequences are converted to their character representation + - Other escaped characters are unescaped by removing the backslash + """ + if m[1]: + return chr(int(m[1], 8)) + return m[2] + + +def _unquote(value: str) -> str: """ Unquote a cookie value. Vendored from http.cookies._unquote to ensure compatibility. + + Note: The original implementation checked for None, but we've removed + that check since all callers already ensure the value is not None. """ - # If there are no quotes, return as-is - if len(text) < 2 or text[0] != '"' or text[-1] != '"': - return text - # Remove quotes and handle escaped characters - text = text[1:-1] - # Replace escaped quotes and backslashes - text = text.replace('\\"', '"').replace("\\\\", "\\") - return text + # If there aren't any doublequotes, + # then there can't be any special characters. See RFC 2109. + if len(value) < 2: + return value + if value[0] != '"' or value[-1] != '"': + return value + + # We have to assume that we must decode this string. + # Down to work. + + # Remove the "s + value = value[1:-1] + + # Check for special sequences. 
Examples: + # \012 --> \n + # \" --> " + # + return _unquote_sub(_unquote_replace, value) def parse_cookie_headers(headers: Sequence[str]) -> List[Tuple[str, Morsel[str]]]: diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index b495a07cb6f..3fd6cdd00fc 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -363,6 +363,7 @@ uvloop uWSGI vcvarsall vendored +vendoring waituntil wakeup wakeups diff --git a/tests/test_cookie_helpers.py b/tests/test_cookie_helpers.py index 7a2ac7493ee..41e3eed8085 100644 --- a/tests/test_cookie_helpers.py +++ b/tests/test_cookie_helpers.py @@ -1,11 +1,17 @@ """Tests for internal cookie helper functions.""" -from http.cookies import CookieError, Morsel, SimpleCookie +from http.cookies import ( + CookieError, + Morsel, + SimpleCookie, + _unquote as simplecookie_unquote, +) import pytest from aiohttp import _cookie_helpers as helpers from aiohttp._cookie_helpers import ( + _unquote, parse_cookie_headers, preserve_morsel_with_coded_value, ) @@ -1029,3 +1035,235 @@ def test_parse_cookie_headers_date_formats_with_attributes() -> None: assert result[1][1]["expires"] == "Wednesday, 09-Jun-30 10:18:14 GMT" assert result[1][1]["domain"] == ".example.com" assert result[1][1]["samesite"] == "Strict" + + +@pytest.mark.parametrize( + ("input_str", "expected"), + [ + # Unquoted strings should remain unchanged + ("simple", "simple"), + ("with spaces", "with spaces"), + ("", ""), + ('"', '"'), # String too short to be quoted + ('some"text', 'some"text'), # Quotes not at beginning/end + ('text"with"quotes', 'text"with"quotes'), + ], +) +def test_unquote_basic(input_str: str, expected: str) -> None: + """Test basic _unquote functionality.""" + assert _unquote(input_str) == expected + + +@pytest.mark.parametrize( + ("input_str", "expected"), + [ + # Basic quoted strings + ('"quoted"', "quoted"), + ('"with spaces"', "with spaces"), + ('""', ""), # Empty quoted string + # Quoted string with special characters + 
('"hello, world!"', "hello, world!"), + ('"path=/test"', "path=/test"), + ], +) +def test_unquote_quoted_strings(input_str: str, expected: str) -> None: + """Test _unquote with quoted strings.""" + assert _unquote(input_str) == expected + + +@pytest.mark.parametrize( + ("input_str", "expected"), + [ + # Escaped quotes should be unescaped + (r'"say \"hello\""', 'say "hello"'), + (r'"nested \"quotes\" here"', 'nested "quotes" here'), + # Multiple escaped quotes + (r'"\"start\" middle \"end\""', '"start" middle "end"'), + ], +) +def test_unquote_escaped_quotes(input_str: str, expected: str) -> None: + """Test _unquote with escaped quotes.""" + assert _unquote(input_str) == expected + + +@pytest.mark.parametrize( + ("input_str", "expected"), + [ + # Single escaped backslash + (r'"path\\to\\file"', "path\\to\\file"), + # Backslash before quote + (r'"end with slash\\"', "end with slash\\"), + # Mixed escaped characters + (r'"path\\to\\\"file\""', 'path\\to\\"file"'), + ], +) +def test_unquote_escaped_backslashes(input_str: str, expected: str) -> None: + """Test _unquote with escaped backslashes.""" + assert _unquote(input_str) == expected + + +@pytest.mark.parametrize( + ("input_str", "expected"), + [ + # Common octal sequences + (r'"\012"', "\n"), # newline + (r'"\011"', "\t"), # tab + (r'"\015"', "\r"), # carriage return + (r'"\040"', " "), # space + # Octal sequences in context + (r'"line1\012line2"', "line1\nline2"), + (r'"tab\011separated"', "tab\tseparated"), + # Multiple octal sequences + (r'"\012\011\015"', "\n\t\r"), + # Mixed octal and regular text + (r'"hello\040world\041"', "hello world!"), + ], +) +def test_unquote_octal_sequences(input_str: str, expected: str) -> None: + """Test _unquote with octal escape sequences.""" + assert _unquote(input_str) == expected + + +@pytest.mark.parametrize( + ("input_str", "expected"), + [ + # Test boundary values + (r'"\000"', "\x00"), # null character + (r'"\001"', "\x01"), + (r'"\177"', "\x7f"), # DEL character + 
(r'"\200"', "\x80"), # Extended ASCII + (r'"\377"', "\xff"), # Max octal value + # Invalid octal sequences (not 3 digits or > 377) are treated as regular escapes + (r'"\400"', "400"), # 400 octal = 256 decimal, too large + (r'"\777"', "777"), # 777 octal = 511 decimal, too large + ], +) +def test_unquote_octal_full_range(input_str: str, expected: str) -> None: + """Test _unquote with full range of valid octal sequences.""" + assert _unquote(input_str) == expected + + +@pytest.mark.parametrize( + ("input_str", "expected"), + [ + # Mix of quotes, backslashes, and octal + (r'"say \"hello\"\012new line"', 'say "hello"\nnew line'), + (r'"path\\to\\file\011\011data"', "path\\to\\file\t\tdata"), + # Complex mixed example + (r'"\042quoted\042 and \134backslash\134"', '"quoted" and \\backslash\\'), + # Escaped characters that aren't special + (r'"\a\b\c"', "abc"), # \a, \b, \c -> a, b, c + ], +) +def test_unquote_mixed_escapes(input_str: str, expected: str) -> None: + """Test _unquote with mixed escape sequences.""" + assert _unquote(input_str) == expected + + +@pytest.mark.parametrize( + ("input_str", "expected"), + [ + # String that starts with quote but doesn't end with one + ('"not closed', '"not closed'), + # String that ends with quote but doesn't start with one + ('not opened"', 'not opened"'), + # Multiple quotes + ('"""', '"'), + ('""""', '""'), + # Backslash at the end without anything to escape + (r'"ends with\"', "ends with\\"), + # Empty escape + (r'"test\"', "test\\"), + # Just escaped characters + (r'"\"\"\""', '"""'), + ], +) +def test_unquote_edge_cases(input_str: str, expected: str) -> None: + """Test _unquote edge cases.""" + assert _unquote(input_str) == expected + + +@pytest.mark.parametrize( + ("input_str", "expected"), + [ + # JSON-like data + (r'"{\"user\":\"john\",\"id\":123}"', '{"user":"john","id":123}'), + # URL-encoded then quoted + ('"hello%20world"', "hello%20world"), + # Path with backslashes (Windows-style) + 
(r'"C:\\Users\\John\\Documents"', "C:\\Users\\John\\Documents"), + # Complex session data + ( + r'"session_data=\"user123\";expires=2024"', + 'session_data="user123";expires=2024', + ), + ], +) +def test_unquote_real_world_examples(input_str: str, expected: str) -> None: + """Test _unquote with real-world cookie value examples.""" + assert _unquote(input_str) == expected + + +@pytest.mark.parametrize( + "test_value", + [ + '""', + '"simple"', + r'"with \"quotes\""', + r'"with \\backslash\\"', + r'"\012newline"', + r'"complex\042quote\134slash\012"', + '"not-quoted', + 'also-not-quoted"', + r'"mixed\011\042\134test"', + ], +) +def test_unquote_compatibility_with_simplecookie(test_value: str) -> None: + """Test that _unquote behaves like SimpleCookie's unquoting.""" + assert _unquote(test_value) == simplecookie_unquote(test_value), ( + f"Mismatch for {test_value!r}: " + f"our={_unquote(test_value)!r}, " + f"SimpleCookie={simplecookie_unquote(test_value)!r}" + ) + + +@pytest.mark.parametrize( + ("header", "expected_name", "expected_value", "expected_coded"), + [ + # Test cookie values with octal escape sequences + (r'name="\012newline\012"', "name", "\nnewline\n", r'"\012newline\012"'), + ( + r'tab="\011separated\011values"', + "tab", + "\tseparated\tvalues", + r'"\011separated\011values"', + ), + ( + r'mixed="hello\040world\041"', + "mixed", + "hello world!", + r'"hello\040world\041"', + ), + ( + r'complex="\042quoted\042 text with \012 newline"', + "complex", + '"quoted" text with \n newline', + r'"\042quoted\042 text with \012 newline"', + ), + ], +) +def test_parse_cookie_headers_uses_unquote_with_octal( + header: str, expected_name: str, expected_value: str, expected_coded: str +) -> None: + """Test that parse_cookie_headers correctly unquotes values with octal sequences and preserves coded_value.""" + result = parse_cookie_headers([header]) + + assert len(result) == 1 + name, morsel = result[0] + + # Check that octal sequences were properly decoded in the value 
+ assert name == expected_name + assert morsel.value == expected_value + + # Check that coded_value preserves the original quoted string + assert morsel.coded_value == expected_coded From e2b24d791b7a3a3437e3f7744c6351f0a94953d9 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 10 Jun 2025 04:15:53 +0000 Subject: [PATCH 1497/1511] [PR #11178/915338c7 backport][3.12] Fix cookie header parser ignoring reserved names (#11181) Co-authored-by: J. Nick Koston <nick@koston.org> Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- CHANGES/11178.bugfix.rst | 1 + aiohttp/_cookie_helpers.py | 63 +++- aiohttp/abc.py | 4 +- aiohttp/client_reqrep.py | 14 +- aiohttp/web_request.py | 9 +- tests/test_cookie_helpers.py | 617 ++++++++++++++++++++++++++--------- tests/test_web_request.py | 26 +- 7 files changed, 556 insertions(+), 178 deletions(-) create mode 100644 CHANGES/11178.bugfix.rst diff --git a/CHANGES/11178.bugfix.rst b/CHANGES/11178.bugfix.rst new file mode 100644 index 00000000000..dc74cddde06 --- /dev/null +++ b/CHANGES/11178.bugfix.rst @@ -0,0 +1 @@ +Fixed ``Cookie`` header parsing to treat attribute names as regular cookies per :rfc:`6265#section-5.4` -- by :user:`bdraco`. 
diff --git a/aiohttp/_cookie_helpers.py b/aiohttp/_cookie_helpers.py index a5b4f81c78f..4e9fc968814 100644 --- a/aiohttp/_cookie_helpers.py +++ b/aiohttp/_cookie_helpers.py @@ -12,7 +12,11 @@ from .log import internal_logger -__all__ = ("parse_cookie_headers", "preserve_morsel_with_coded_value") +__all__ = ( + "parse_set_cookie_headers", + "parse_cookie_header", + "preserve_morsel_with_coded_value", +) # Cookie parsing constants # Allow more characters in cookie names to handle real-world cookies @@ -153,7 +157,62 @@ def _unquote(value: str) -> str: return _unquote_sub(_unquote_replace, value) -def parse_cookie_headers(headers: Sequence[str]) -> List[Tuple[str, Morsel[str]]]: +def parse_cookie_header(header: str) -> List[Tuple[str, Morsel[str]]]: + """ + Parse a Cookie header according to RFC 6265 Section 5.4. + + Cookie headers contain only name-value pairs separated by semicolons. + There are no attributes in Cookie headers - even names that match + attribute names (like 'path' or 'secure') should be treated as cookies. + + This parser uses the same regex-based approach as parse_set_cookie_headers + to properly handle quoted values that may contain semicolons. 
+ + Args: + header: The Cookie header value to parse + + Returns: + List of (name, Morsel) tuples for compatibility with SimpleCookie.update() + """ + if not header: + return [] + + cookies: List[Tuple[str, Morsel[str]]] = [] + i = 0 + n = len(header) + + while i < n: + # Use the same pattern as parse_set_cookie_headers to find cookies + match = _COOKIE_PATTERN.match(header, i) + if not match: + break + + key = match.group("key") + value = match.group("val") or "" + i = match.end(0) + + # Validate the name + if not key or not _COOKIE_NAME_RE.match(key): + internal_logger.warning("Can not load cookie: Illegal cookie name %r", key) + continue + + # Create new morsel + morsel: Morsel[str] = Morsel() + # Preserve the original value as coded_value (with quotes if present) + # We use __setstate__ instead of the public set() API because it allows us to + # bypass validation and set already validated state. This is more stable than + # setting protected attributes directly and unlikely to change since it would + # break pickling. + morsel.__setstate__( # type: ignore[attr-defined] + {"key": key, "value": _unquote(value), "coded_value": value} + ) + + cookies.append((key, morsel)) + + return cookies + + +def parse_set_cookie_headers(headers: Sequence[str]) -> List[Tuple[str, Morsel[str]]]: """ Parse cookie headers using a vendored version of SimpleCookie parsing. 
diff --git a/aiohttp/abc.py b/aiohttp/abc.py index ba371c61b01..2574ff93621 100644 --- a/aiohttp/abc.py +++ b/aiohttp/abc.py @@ -23,7 +23,7 @@ from multidict import CIMultiDict from yarl import URL -from ._cookie_helpers import parse_cookie_headers +from ._cookie_helpers import parse_set_cookie_headers from .typedefs import LooseCookies if TYPE_CHECKING: @@ -198,7 +198,7 @@ def update_cookies_from_headers( self, headers: Sequence[str], response_url: URL ) -> None: """Update cookies from raw Set-Cookie headers.""" - if headers and (cookies_to_update := parse_cookie_headers(headers)): + if headers and (cookies_to_update := parse_set_cookie_headers(headers)): self.update_cookies(cookies_to_update, response_url) @abstractmethod diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 793864b95a5..3209440b53d 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -31,7 +31,11 @@ from yarl import URL from . import hdrs, helpers, http, multipart, payload -from ._cookie_helpers import parse_cookie_headers, preserve_morsel_with_coded_value +from ._cookie_helpers import ( + parse_cookie_header, + parse_set_cookie_headers, + preserve_morsel_with_coded_value, +) from .abc import AbstractStreamWriter from .client_exceptions import ( ClientConnectionError, @@ -376,9 +380,9 @@ def cookies(self) -> SimpleCookie: if self._raw_cookie_headers is not None: # Parse cookies for response.cookies (SimpleCookie for backward compatibility) cookies = SimpleCookie() - # Use parse_cookie_headers for more lenient parsing that handles + # Use parse_set_cookie_headers for more lenient parsing that handles # malformed cookies better than SimpleCookie.load - cookies.update(parse_cookie_headers(self._raw_cookie_headers)) + cookies.update(parse_set_cookie_headers(self._raw_cookie_headers)) self._cookies = cookies else: self._cookies = SimpleCookie() @@ -1093,8 +1097,8 @@ def update_cookies(self, cookies: Optional[LooseCookies]) -> None: c = SimpleCookie() if hdrs.COOKIE 
in self.headers: - # parse_cookie_headers already preserves coded values - c.update(parse_cookie_headers((self.headers.get(hdrs.COOKIE, ""),))) + # parse_cookie_header for RFC 6265 compliant Cookie header parsing + c.update(parse_cookie_header(self.headers.get(hdrs.COOKIE, ""))) del self.headers[hdrs.COOKIE] if isinstance(cookies, Mapping): diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index 0c5576823f1..0bc69b74db9 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -35,7 +35,7 @@ from yarl import URL from . import hdrs -from ._cookie_helpers import parse_cookie_headers +from ._cookie_helpers import parse_cookie_header from .abc import AbstractStreamWriter from .helpers import ( _SENTINEL, @@ -589,9 +589,10 @@ def cookies(self) -> Mapping[str, str]: A read-only dictionary-like object. """ - # Use parse_cookie_headers for more lenient parsing that accepts - # special characters in cookie names (fixes #2683) - parsed = parse_cookie_headers((self.headers.get(hdrs.COOKIE, ""),)) + # Use parse_cookie_header for RFC 6265 compliant Cookie header parsing + # that accepts special characters in cookie names (fixes #2683) + parsed = parse_cookie_header(self.headers.get(hdrs.COOKIE, "")) + # Extract values from Morsel objects return MappingProxyType({name: morsel.value for name, morsel in parsed}) @reify diff --git a/tests/test_cookie_helpers.py b/tests/test_cookie_helpers.py index 41e3eed8085..6deef6544c2 100644 --- a/tests/test_cookie_helpers.py +++ b/tests/test_cookie_helpers.py @@ -12,7 +12,8 @@ from aiohttp import _cookie_helpers as helpers from aiohttp._cookie_helpers import ( _unquote, - parse_cookie_headers, + parse_cookie_header, + parse_set_cookie_headers, preserve_morsel_with_coded_value, ) @@ -69,11 +70,11 @@ def test_preserve_morsel_with_coded_value_no_coded_value() -> None: assert result.coded_value == "simple_value" -def test_parse_cookie_headers_simple() -> None: - """Test parse_cookie_headers with simple cookies.""" +def 
test_parse_set_cookie_headers_simple() -> None: + """Test parse_set_cookie_headers with simple cookies.""" headers = ["name=value", "session=abc123"] - result = parse_cookie_headers(headers) + result = parse_set_cookie_headers(headers) assert len(result) == 2 assert result[0][0] == "name" @@ -84,14 +85,14 @@ def test_parse_cookie_headers_simple() -> None: assert result[1][1].value == "abc123" -def test_parse_cookie_headers_with_attributes() -> None: - """Test parse_cookie_headers with cookie attributes.""" +def test_parse_set_cookie_headers_with_attributes() -> None: + """Test parse_set_cookie_headers with cookie attributes.""" headers = [ "sessionid=value123; Path=/; HttpOnly; Secure", "user=john; Domain=.example.com; Max-Age=3600", ] - result = parse_cookie_headers(headers) + result = parse_set_cookie_headers(headers) assert len(result) == 2 @@ -111,8 +112,8 @@ def test_parse_cookie_headers_with_attributes() -> None: assert morsel2["max-age"] == "3600" -def test_parse_cookie_headers_special_chars_in_names() -> None: - """Test parse_cookie_headers accepts special characters in names (#2683).""" +def test_parse_set_cookie_headers_special_chars_in_names() -> None: + """Test parse_set_cookie_headers accepts special characters in names (#2683).""" # These should be accepted with relaxed validation headers = [ "ISAWPLB{A7F52349-3531-4DA9-8776-F74BC6F4F1BB}=value1", @@ -122,7 +123,7 @@ def test_parse_cookie_headers_special_chars_in_names() -> None: "cookie@domain=value5", ] - result = parse_cookie_headers(headers) + result = parse_set_cookie_headers(headers) assert len(result) == 5 expected_names = [ @@ -139,8 +140,8 @@ def test_parse_cookie_headers_special_chars_in_names() -> None: assert morsel.value == f"value{i+1}" -def test_parse_cookie_headers_invalid_names() -> None: - """Test parse_cookie_headers rejects truly invalid cookie names.""" +def test_parse_set_cookie_headers_invalid_names() -> None: + """Test parse_set_cookie_headers rejects truly invalid cookie 
names.""" # These should be rejected even with relaxed validation headers = [ "invalid\tcookie=value", # Tab character @@ -150,14 +151,14 @@ def test_parse_cookie_headers_invalid_names() -> None: "name with spaces=value", # Spaces in name ] - result = parse_cookie_headers(headers) + result = parse_set_cookie_headers(headers) # All should be skipped assert len(result) == 0 -def test_parse_cookie_headers_empty_and_invalid() -> None: - """Test parse_cookie_headers handles empty and invalid formats.""" +def test_parse_set_cookie_headers_empty_and_invalid() -> None: + """Test parse_set_cookie_headers handles empty and invalid formats.""" headers = [ "", # Empty header " ", # Whitespace only @@ -168,7 +169,7 @@ def test_parse_cookie_headers_empty_and_invalid() -> None: "Domain=.com", # Reserved attribute as name (should be skipped) ] - result = parse_cookie_headers(headers) + result = parse_set_cookie_headers(headers) # Only "name=" should be accepted assert len(result) == 1 @@ -176,15 +177,15 @@ def test_parse_cookie_headers_empty_and_invalid() -> None: assert result[0][1].value == "" -def test_parse_cookie_headers_quoted_values() -> None: - """Test parse_cookie_headers handles quoted values correctly.""" +def test_parse_set_cookie_headers_quoted_values() -> None: + """Test parse_set_cookie_headers handles quoted values correctly.""" headers = [ 'name="quoted value"', 'session="with;semicolon"', 'data="with\\"escaped\\""', ] - result = parse_cookie_headers(headers) + result = parse_set_cookie_headers(headers) assert len(result) == 3 assert result[0][1].value == "quoted value" @@ -200,7 +201,7 @@ def test_parse_cookie_headers_quoted_values() -> None: 'complex="a=b;c=d"; simple=value', ], ) -def test_parse_cookie_headers_semicolon_in_quoted_values(header: str) -> None: +def test_parse_set_cookie_headers_semicolon_in_quoted_values(header: str) -> None: """ Test that semicolons inside properly quoted values are handled correctly. 
@@ -212,7 +213,7 @@ def test_parse_cookie_headers_semicolon_in_quoted_values(header: str) -> None: sc.load(header) # Test with our parser - result = parse_cookie_headers([header]) + result = parse_set_cookie_headers([header]) # Should parse the same number of cookies assert len(result) == len(sc) @@ -223,12 +224,12 @@ def test_parse_cookie_headers_semicolon_in_quoted_values(header: str) -> None: assert morsel.value == sc_morsel.value -def test_parse_cookie_headers_multiple_cookies_same_header() -> None: - """Test parse_cookie_headers with multiple cookies in one header.""" +def test_parse_set_cookie_headers_multiple_cookies_same_header() -> None: + """Test parse_set_cookie_headers with multiple cookies in one header.""" # Note: SimpleCookie includes the comma as part of the first cookie's value headers = ["cookie1=value1, cookie2=value2"] - result = parse_cookie_headers(headers) + result = parse_set_cookie_headers(headers) # Should parse as two separate cookies assert len(result) == 2 @@ -253,14 +254,14 @@ def test_parse_cookie_headers_multiple_cookies_same_header() -> None: "complex=value; Domain=.example.com; Path=/app; Max-Age=3600", ], ) -def test_parse_cookie_headers_compatibility_with_simple_cookie(header: str) -> None: - """Test parse_cookie_headers is bug-for-bug compatible with SimpleCookie.load.""" +def test_parse_set_cookie_headers_compatibility_with_simple_cookie(header: str) -> None: + """Test parse_set_cookie_headers is bug-for-bug compatible with SimpleCookie.load.""" # Parse with SimpleCookie sc = SimpleCookie() sc.load(header) # Parse with our function - result = parse_cookie_headers([header]) + result = parse_set_cookie_headers([header]) # Should have same number of cookies assert len(result) == len(sc) @@ -286,8 +287,8 @@ def test_parse_cookie_headers_compatibility_with_simple_cookie(header: str) -> N assert morsel.get(bool_attr) is True -def test_parse_cookie_headers_relaxed_validation_differences() -> None: - """Test where parse_cookie_headers 
differs from SimpleCookie (relaxed validation).""" +def test_parse_set_cookie_headers_relaxed_validation_differences() -> None: + """Test where parse_set_cookie_headers differs from SimpleCookie (relaxed validation).""" # Test cookies that SimpleCookie rejects with CookieError rejected_by_simplecookie = [ ("cookie{with}braces=value1", "cookie{with}braces", "value1"), @@ -302,7 +303,7 @@ def test_parse_cookie_headers_relaxed_validation_differences() -> None: sc.load(header) # Our parser should accept them - result = parse_cookie_headers([header]) + result = parse_set_cookie_headers([header]) assert len(result) == 1 # We accept assert result[0][0] == expected_name assert result[0][1].value == expected_value @@ -320,20 +321,20 @@ def test_parse_cookie_headers_relaxed_validation_differences() -> None: # May or may not parse correctly in SimpleCookie # Our parser should accept them consistently - result = parse_cookie_headers([header]) + result = parse_set_cookie_headers([header]) assert len(result) == 1 assert result[0][0] == expected_name assert result[0][1].value == expected_value -def test_parse_cookie_headers_case_insensitive_attrs() -> None: +def test_parse_set_cookie_headers_case_insensitive_attrs() -> None: """Test that known attributes are handled case-insensitively.""" headers = [ "cookie1=value1; PATH=/test; DOMAIN=example.com", "cookie2=value2; Secure; HTTPONLY; max-AGE=60", ] - result = parse_cookie_headers(headers) + result = parse_set_cookie_headers(headers) assert len(result) == 2 @@ -347,13 +348,13 @@ def test_parse_cookie_headers_case_insensitive_attrs() -> None: assert result[1][1]["max-age"] == "60" -def test_parse_cookie_headers_unknown_attrs_ignored() -> None: +def test_parse_set_cookie_headers_unknown_attrs_ignored() -> None: """Test that unknown attributes are treated as new cookies (same as SimpleCookie).""" headers = [ "cookie=value; Path=/; unknownattr=ignored; HttpOnly", ] - result = parse_cookie_headers(headers) + result = 
parse_set_cookie_headers(headers) # SimpleCookie treats unknown attributes with values as new cookies assert len(result) == 2 @@ -369,8 +370,8 @@ def test_parse_cookie_headers_unknown_attrs_ignored() -> None: assert result[1][1]["httponly"] is True # HttpOnly applies to this cookie -def test_parse_cookie_headers_complex_real_world() -> None: - """Test parse_cookie_headers with complex real-world examples.""" +def test_parse_set_cookie_headers_complex_real_world() -> None: + """Test parse_set_cookie_headers with complex real-world examples.""" headers = [ # AWS ELB cookie "AWSELB=ABCDEF1234567890ABCDEF1234567890ABCDEF1234567890; Path=/", @@ -380,7 +381,7 @@ def test_parse_cookie_headers_complex_real_world() -> None: "session_id=s%3AabcXYZ123.signature123; Path=/; Secure; HttpOnly; SameSite=Strict", ] - result = parse_cookie_headers(headers) + result = parse_set_cookie_headers(headers) assert len(result) == 3 @@ -396,7 +397,7 @@ def test_parse_cookie_headers_complex_real_world() -> None: assert session_morsel.get("samesite") == "Strict" -def test_parse_cookie_headers_boolean_attrs() -> None: +def test_parse_set_cookie_headers_boolean_attrs() -> None: """Test that boolean attributes (secure, httponly) work correctly.""" # Test secure attribute variations headers = [ @@ -405,7 +406,7 @@ def test_parse_cookie_headers_boolean_attrs() -> None: "cookie3=value3; Secure=true", # Non-standard but might occur ] - result = parse_cookie_headers(headers) + result = parse_set_cookie_headers(headers) assert len(result) == 3 # All should have secure=True @@ -418,7 +419,7 @@ def test_parse_cookie_headers_boolean_attrs() -> None: "cookie5=value5; HttpOnly=", ] - result = parse_cookie_headers(headers) + result = parse_set_cookie_headers(headers) assert len(result) == 2 # All should have httponly=True @@ -426,7 +427,7 @@ def test_parse_cookie_headers_boolean_attrs() -> None: assert morsel.get("httponly") is True, f"{name} should have httponly=True" -def 
test_parse_cookie_headers_boolean_attrs_with_partitioned() -> None: +def test_parse_set_cookie_headers_boolean_attrs_with_partitioned() -> None: """Test that boolean attributes including partitioned work correctly.""" # Test secure attribute variations secure_headers = [ @@ -435,7 +436,7 @@ def test_parse_cookie_headers_boolean_attrs_with_partitioned() -> None: "cookie3=value3; Secure=true", # Non-standard but might occur ] - result = parse_cookie_headers(secure_headers) + result = parse_set_cookie_headers(secure_headers) assert len(result) == 3 for name, morsel in result: assert morsel.get("secure") is True, f"{name} should have secure=True" @@ -446,7 +447,7 @@ def test_parse_cookie_headers_boolean_attrs_with_partitioned() -> None: "cookie5=value5; HttpOnly=", ] - result = parse_cookie_headers(httponly_headers) + result = parse_set_cookie_headers(httponly_headers) assert len(result) == 2 for name, morsel in result: assert morsel.get("httponly") is True, f"{name} should have httponly=True" @@ -458,21 +459,21 @@ def test_parse_cookie_headers_boolean_attrs_with_partitioned() -> None: "cookie8=value8; Partitioned=yes", # Non-standard but might occur ] - result = parse_cookie_headers(partitioned_headers) + result = parse_set_cookie_headers(partitioned_headers) assert len(result) == 3 for name, morsel in result: assert morsel.get("partitioned") is True, f"{name} should have partitioned=True" -def test_parse_cookie_headers_encoded_values() -> None: - """Test that parse_cookie_headers preserves encoded values.""" +def test_parse_set_cookie_headers_encoded_values() -> None: + """Test that parse_set_cookie_headers preserves encoded values.""" headers = [ "encoded=hello%20world", "url=https%3A%2F%2Fexample.com%2Fpath", "special=%21%40%23%24%25%5E%26*%28%29", ] - result = parse_cookie_headers(headers) + result = parse_set_cookie_headers(headers) assert len(result) == 3 # Values should be preserved as-is (not decoded) @@ -481,9 +482,9 @@ def 
test_parse_cookie_headers_encoded_values() -> None: assert result[2][1].value == "%21%40%23%24%25%5E%26*%28%29" -def test_parse_cookie_headers_partitioned() -> None: +def test_parse_set_cookie_headers_partitioned() -> None: """ - Test that parse_cookie_headers handles partitioned attribute correctly. + Test that parse_set_cookie_headers handles partitioned attribute correctly. This tests the fix for issue #10380 - partitioned cookies support. The partitioned attribute is a boolean flag like secure and httponly. @@ -496,7 +497,7 @@ def test_parse_cookie_headers_partitioned() -> None: "cookie5=value5; Domain=.example.com; Path=/; Partitioned", ] - result = parse_cookie_headers(headers) + result = parse_set_cookie_headers(headers) assert len(result) == 5 @@ -517,7 +518,7 @@ def test_parse_cookie_headers_partitioned() -> None: assert result[4][1].get("path") == "/" -def test_parse_cookie_headers_partitioned_case_insensitive() -> None: +def test_parse_set_cookie_headers_partitioned_case_insensitive() -> None: """Test that partitioned attribute is recognized case-insensitively.""" headers = [ "cookie1=value1; partitioned", # lowercase @@ -526,7 +527,7 @@ def test_parse_cookie_headers_partitioned_case_insensitive() -> None: "cookie4=value4; PaRtItIoNeD", # mixed case ] - result = parse_cookie_headers(headers) + result = parse_set_cookie_headers(headers) assert len(result) == 4 @@ -537,14 +538,14 @@ def test_parse_cookie_headers_partitioned_case_insensitive() -> None: ), f"Cookie {i+1} should have partitioned=True" -def test_parse_cookie_headers_partitioned_not_set() -> None: +def test_parse_set_cookie_headers_partitioned_not_set() -> None: """Test that cookies without partitioned attribute don't have it set.""" headers = [ "normal=value; Secure; HttpOnly", "regular=cookie; Path=/", ] - result = parse_cookie_headers(headers) + result = parse_set_cookie_headers(headers) assert len(result) == 2 @@ -554,7 +555,7 @@ def test_parse_cookie_headers_partitioned_not_set() -> None: 
# Tests that don't require partitioned support in SimpleCookie -def test_parse_cookie_headers_partitioned_with_other_attrs_manual() -> None: +def test_parse_set_cookie_headers_partitioned_with_other_attrs_manual() -> None: """ Test parsing logic for partitioned cookies combined with all other attributes. @@ -567,7 +568,7 @@ def test_parse_cookie_headers_partitioned_with_other_attrs_manual() -> None: # Test a simple case that won't trigger SimpleCookie errors headers = ["session=abc123; Secure; HttpOnly"] - result = parse_cookie_headers(headers) + result = parse_set_cookie_headers(headers) assert len(result) == 1 assert result[0][0] == "session" @@ -601,7 +602,7 @@ def test_cookie_pattern_matches_partitioned_attribute(test_string: str) -> None: assert match.group("key").lower() == "partitioned" -def test_parse_cookie_headers_issue_7993_double_quotes() -> None: +def test_parse_set_cookie_headers_issue_7993_double_quotes() -> None: """ Test that cookies with unmatched opening quotes don't break parsing of subsequent cookies. 
@@ -614,7 +615,7 @@ def test_parse_cookie_headers_issue_7993_double_quotes() -> None: # Test case from the issue headers = ['foo=bar; baz="qux; foo2=bar2'] - result = parse_cookie_headers(headers) + result = parse_set_cookie_headers(headers) # Should parse all cookies correctly assert len(result) == 3 @@ -626,41 +627,41 @@ def test_parse_cookie_headers_issue_7993_double_quotes() -> None: assert result[2][1].value == "bar2" -def test_parse_cookie_headers_empty_headers() -> None: +def test_parse_set_cookie_headers_empty_headers() -> None: """Test handling of empty headers in the sequence.""" # Empty header should be skipped - result = parse_cookie_headers(["", "name=value"]) + result = parse_set_cookie_headers(["", "name=value"]) assert len(result) == 1 assert result[0][0] == "name" assert result[0][1].value == "value" # Multiple empty headers - result = parse_cookie_headers(["", "", ""]) + result = parse_set_cookie_headers(["", "", ""]) assert result == [] # Empty headers mixed with valid cookies - result = parse_cookie_headers(["", "a=1", "", "b=2", ""]) + result = parse_set_cookie_headers(["", "a=1", "", "b=2", ""]) assert len(result) == 2 assert result[0][0] == "a" assert result[1][0] == "b" -def test_parse_cookie_headers_invalid_cookie_syntax() -> None: +def test_parse_set_cookie_headers_invalid_cookie_syntax() -> None: """Test handling of invalid cookie syntax.""" # No valid cookie pattern - result = parse_cookie_headers(["@#$%^&*()"]) + result = parse_set_cookie_headers(["@#$%^&*()"]) assert result == [] # Cookie name without value - result = parse_cookie_headers(["name"]) + result = parse_set_cookie_headers(["name"]) assert result == [] # Multiple invalid patterns - result = parse_cookie_headers(["!!!!", "????", "name", "@@@"]) + result = parse_set_cookie_headers(["!!!!", "????", "name", "@@@"]) assert result == [] -def test_parse_cookie_headers_illegal_cookie_names( +def test_parse_set_cookie_headers_illegal_cookie_names( caplog: pytest.LogCaptureFixture, ) 
-> None: """ @@ -671,103 +672,105 @@ def test_parse_cookie_headers_illegal_cookie_names( logged when illegal names appear after a valid cookie. """ # Cookie name that is a known attribute (illegal) - parsing stops early - result = parse_cookie_headers(["path=value; domain=test"]) + result = parse_set_cookie_headers(["path=value; domain=test"]) assert result == [] # Cookie name that doesn't match the pattern - result = parse_cookie_headers(["=value"]) + result = parse_set_cookie_headers(["=value"]) assert result == [] # Valid cookie after illegal one - parsing stops at illegal - result = parse_cookie_headers(["domain=bad; good=value"]) + result = parse_set_cookie_headers(["domain=bad; good=value"]) assert result == [] # Illegal cookie name that appears after a valid cookie triggers warning - result = parse_cookie_headers(["good=value; Path=/; invalid,cookie=value;"]) + result = parse_set_cookie_headers(["good=value; Path=/; invalid,cookie=value;"]) assert len(result) == 1 assert result[0][0] == "good" assert "Illegal cookie name 'invalid,cookie'" in caplog.text -def test_parse_cookie_headers_attributes_before_cookie() -> None: +def test_parse_set_cookie_headers_attributes_before_cookie() -> None: """Test that attributes before any cookie are invalid.""" # Path attribute before cookie - result = parse_cookie_headers(["Path=/; name=value"]) + result = parse_set_cookie_headers(["Path=/; name=value"]) assert result == [] # Domain attribute before cookie - result = parse_cookie_headers(["Domain=.example.com; name=value"]) + result = parse_set_cookie_headers(["Domain=.example.com; name=value"]) assert result == [] # Multiple attributes before cookie - result = parse_cookie_headers(["Path=/; Domain=.example.com; Secure; name=value"]) + result = parse_set_cookie_headers( + ["Path=/; Domain=.example.com; Secure; name=value"] + ) assert result == [] -def test_parse_cookie_headers_attributes_without_values() -> None: +def 
test_parse_set_cookie_headers_attributes_without_values() -> None: """Test handling of attributes with missing values.""" # Boolean attribute without value (valid) - result = parse_cookie_headers(["name=value; Secure"]) + result = parse_set_cookie_headers(["name=value; Secure"]) assert len(result) == 1 assert result[0][1]["secure"] is True # Non-boolean attribute without value (invalid, stops parsing) - result = parse_cookie_headers(["name=value; Path"]) + result = parse_set_cookie_headers(["name=value; Path"]) assert len(result) == 1 # Path without value stops further attribute parsing # Multiple cookies, invalid attribute in middle - result = parse_cookie_headers(["name=value; Path; Secure"]) + result = parse_set_cookie_headers(["name=value; Path; Secure"]) assert len(result) == 1 # Secure is not parsed because Path without value stops parsing -def test_parse_cookie_headers_dollar_prefixed_names() -> None: +def test_parse_set_cookie_headers_dollar_prefixed_names() -> None: """Test handling of cookie names starting with $.""" # $Version without preceding cookie (ignored) - result = parse_cookie_headers(["$Version=1; name=value"]) + result = parse_set_cookie_headers(["$Version=1; name=value"]) assert len(result) == 1 assert result[0][0] == "name" # Multiple $ prefixed without cookie (all ignored) - result = parse_cookie_headers(["$Version=1; $Path=/; $Domain=.com; name=value"]) + result = parse_set_cookie_headers(["$Version=1; $Path=/; $Domain=.com; name=value"]) assert len(result) == 1 assert result[0][0] == "name" # $ prefix at start is ignored, cookie follows - result = parse_cookie_headers(["$Unknown=123; valid=cookie"]) + result = parse_set_cookie_headers(["$Unknown=123; valid=cookie"]) assert len(result) == 1 assert result[0][0] == "valid" -def test_parse_cookie_headers_dollar_attributes() -> None: +def test_parse_set_cookie_headers_dollar_attributes() -> None: """Test handling of $ prefixed attributes after cookies.""" # Test multiple $ attributes with 
cookie (case-insensitive like SimpleCookie) - result = parse_cookie_headers(["name=value; $Path=/test; $Domain=.example.com"]) + result = parse_set_cookie_headers(["name=value; $Path=/test; $Domain=.example.com"]) assert len(result) == 1 assert result[0][0] == "name" assert result[0][1]["path"] == "/test" assert result[0][1]["domain"] == ".example.com" # Test unknown $ attribute (should be ignored) - result = parse_cookie_headers(["name=value; $Unknown=test"]) + result = parse_set_cookie_headers(["name=value; $Unknown=test"]) assert len(result) == 1 assert result[0][0] == "name" # $Unknown should not be set # Test $ attribute with empty value - result = parse_cookie_headers(["name=value; $Path="]) + result = parse_set_cookie_headers(["name=value; $Path="]) assert len(result) == 1 assert result[0][1]["path"] == "" # Test case sensitivity compatibility with SimpleCookie - result = parse_cookie_headers(["test=value; $path=/lower; $PATH=/upper"]) + result = parse_set_cookie_headers(["test=value; $path=/lower; $PATH=/upper"]) assert len(result) == 1 # Last one wins, and it's case-insensitive assert result[0][1]["path"] == "/upper" -def test_parse_cookie_headers_attributes_after_illegal_cookie() -> None: +def test_parse_set_cookie_headers_attributes_after_illegal_cookie() -> None: """ Test that attributes after an illegal cookie name are handled correctly. @@ -775,25 +778,25 @@ def test_parse_cookie_headers_attributes_after_illegal_cookie() -> None: cookie name was encountered. 
""" # Illegal cookie followed by $ attribute - result = parse_cookie_headers(["good=value; invalid,cookie=bad; $Path=/test"]) + result = parse_set_cookie_headers(["good=value; invalid,cookie=bad; $Path=/test"]) assert len(result) == 1 assert result[0][0] == "good" # $Path should be ignored since current_morsel is None after illegal cookie # Illegal cookie followed by boolean attribute - result = parse_cookie_headers(["good=value; invalid,cookie=bad; HttpOnly"]) + result = parse_set_cookie_headers(["good=value; invalid,cookie=bad; HttpOnly"]) assert len(result) == 1 assert result[0][0] == "good" # HttpOnly should be ignored since current_morsel is None # Illegal cookie followed by regular attribute with value - result = parse_cookie_headers(["good=value; invalid,cookie=bad; Max-Age=3600"]) + result = parse_set_cookie_headers(["good=value; invalid,cookie=bad; Max-Age=3600"]) assert len(result) == 1 assert result[0][0] == "good" # Max-Age should be ignored since current_morsel is None # Multiple attributes after illegal cookie - result = parse_cookie_headers( + result = parse_set_cookie_headers( ["good=value; invalid,cookie=bad; $Path=/; HttpOnly; Max-Age=60; Domain=.com"] ) assert len(result) == 1 @@ -801,7 +804,7 @@ def test_parse_cookie_headers_attributes_after_illegal_cookie() -> None: # All attributes should be ignored after illegal cookie -def test_parse_cookie_headers_unmatched_quotes_compatibility() -> None: +def test_parse_set_cookie_headers_unmatched_quotes_compatibility() -> None: """ Test that most unmatched quote scenarios behave like SimpleCookie. 
@@ -823,7 +826,7 @@ def test_parse_cookie_headers_unmatched_quotes_compatibility() -> None: sc_cookies = list(sc.items()) # Test our parser behavior - result = parse_cookie_headers([header]) + result = parse_set_cookie_headers([header]) # Both should parse the same cookies (partial parsing) assert len(result) == len(sc_cookies), ( @@ -841,7 +844,7 @@ def test_parse_cookie_headers_unmatched_quotes_compatibility() -> None: assert len(sc) == 1 # Only cookie1 # Our parser handles it better - result = parse_cookie_headers([fixed_case]) + result = parse_set_cookie_headers([fixed_case]) assert len(result) == 3 # All three cookies assert result[0][0] == "cookie1" assert result[0][1].value == "value1" @@ -851,15 +854,15 @@ def test_parse_cookie_headers_unmatched_quotes_compatibility() -> None: assert result[2][1].value == "value3" -def test_parse_cookie_headers_expires_attribute() -> None: - """Test parse_cookie_headers handles expires attribute with date formats.""" +def test_parse_set_cookie_headers_expires_attribute() -> None: + """Test parse_set_cookie_headers handles expires attribute with date formats.""" headers = [ "session=abc; Expires=Wed, 09 Jun 2021 10:18:14 GMT", "user=xyz; expires=Wednesday, 09-Jun-21 10:18:14 GMT", "token=123; EXPIRES=Wed, 09 Jun 2021 10:18:14 GMT", ] - result = parse_cookie_headers(headers) + result = parse_set_cookie_headers(headers) assert len(result) == 3 for _, morsel in result: @@ -867,18 +870,18 @@ def test_parse_cookie_headers_expires_attribute() -> None: assert "GMT" in morsel["expires"] -def test_parse_cookie_headers_edge_cases() -> None: +def test_parse_set_cookie_headers_edge_cases() -> None: """Test various edge cases.""" # Very long cookie values long_value = "x" * 4096 - result = parse_cookie_headers([f"name={long_value}"]) + result = parse_set_cookie_headers([f"name={long_value}"]) assert len(result) == 1 assert result[0][1].value == long_value -def test_parse_cookie_headers_various_date_formats_issue_4327() -> None: +def 
test_parse_set_cookie_headers_various_date_formats_issue_4327() -> None: """ - Test that parse_cookie_headers handles various date formats per RFC 6265. + Test that parse_set_cookie_headers handles various date formats per RFC 6265. This tests the fix for issue #4327 - support for RFC 822, RFC 850, and ANSI C asctime() date formats in cookie expiration. @@ -899,7 +902,7 @@ def test_parse_cookie_headers_various_date_formats_issue_4327() -> None: "cookie7=value7; Expires=Tue, 01-Jan-30 00:00:00 GMT", ] - result = parse_cookie_headers(headers) + result = parse_set_cookie_headers(headers) # All cookies should be parsed assert len(result) == 7 @@ -923,7 +926,7 @@ def test_parse_cookie_headers_various_date_formats_issue_4327() -> None: assert morsel.get("expires") == exp_expires -def test_parse_cookie_headers_ansi_c_asctime_format() -> None: +def test_parse_set_cookie_headers_ansi_c_asctime_format() -> None: """ Test parsing of ANSI C asctime() format. @@ -932,7 +935,7 @@ def test_parse_cookie_headers_ansi_c_asctime_format() -> None: """ headers = ["cookie1=value1; Expires=Wed Jun 9 10:18:14 2021"] - result = parse_cookie_headers(headers) + result = parse_set_cookie_headers(headers) # Should parse correctly with the expires attribute preserved assert len(result) == 1 @@ -941,9 +944,9 @@ def test_parse_cookie_headers_ansi_c_asctime_format() -> None: assert result[0][1]["expires"] == "Wed Jun 9 10:18:14 2021" -def test_parse_cookie_headers_rfc2822_timezone_issue_4493() -> None: +def test_parse_set_cookie_headers_rfc2822_timezone_issue_4493() -> None: """ - Test that parse_cookie_headers handles RFC 2822 timezone formats. + Test that parse_set_cookie_headers handles RFC 2822 timezone formats. This tests the fix for issue #4493 - support for RFC 2822-compliant dates with timezone offsets like -0000, +0100, etc. 
@@ -960,7 +963,7 @@ def test_parse_cookie_headers_rfc2822_timezone_issue_4493() -> None: "classic=cookie; expires=Sat, 03 Apr 2026 12:00:00 GMT", ] - result = parse_cookie_headers(headers) + result = parse_set_cookie_headers(headers) # All cookies should be parsed assert len(result) == 4 @@ -983,14 +986,14 @@ def test_parse_cookie_headers_rfc2822_timezone_issue_4493() -> None: assert result[3][1]["expires"] == "Sat, 03 Apr 2026 12:00:00 GMT" -def test_parse_cookie_headers_rfc2822_with_attributes() -> None: +def test_parse_set_cookie_headers_rfc2822_with_attributes() -> None: """Test that RFC 2822 dates work correctly with other cookie attributes.""" headers = [ "session=abc123; expires=Wed, 15 Jan 2020 09:45:07 -0000; Path=/; HttpOnly; Secure", "token=xyz789; expires=Thu, 01 Feb 2024 14:30:00 +0100; Domain=.example.com; SameSite=Strict", ] - result = parse_cookie_headers(headers) + result = parse_set_cookie_headers(headers) assert len(result) == 2 @@ -1010,14 +1013,14 @@ def test_parse_cookie_headers_rfc2822_with_attributes() -> None: assert result[1][1]["samesite"] == "Strict" -def test_parse_cookie_headers_date_formats_with_attributes() -> None: +def test_parse_set_cookie_headers_date_formats_with_attributes() -> None: """Test that date formats work correctly with other cookie attributes.""" headers = [ "session=abc123; Expires=Wed, 09 Jun 2030 10:18:14 GMT; Path=/; HttpOnly; Secure", "token=xyz789; Expires=Wednesday, 09-Jun-30 10:18:14 GMT; Domain=.example.com; SameSite=Strict", ] - result = parse_cookie_headers(headers) + result = parse_set_cookie_headers(headers) assert len(result) == 2 @@ -1037,6 +1040,350 @@ def test_parse_cookie_headers_date_formats_with_attributes() -> None: assert result[1][1]["samesite"] == "Strict" +@pytest.mark.parametrize( + ("header", "expected_name", "expected_value", "expected_coded"), + [ + # Test cookie values with octal escape sequences + (r'name="\012newline\012"', "name", "\nnewline\n", r'"\012newline\012"'), + ( + 
r'tab="\011separated\011values"', + "tab", + "\tseparated\tvalues", + r'"\011separated\011values"', + ), + ( + r'mixed="hello\040world\041"', + "mixed", + "hello world!", + r'"hello\040world\041"', + ), + ( + r'complex="\042quoted\042 text with \012 newline"', + "complex", + '"quoted" text with \n newline', + r'"\042quoted\042 text with \012 newline"', + ), + ], +) +def test_parse_set_cookie_headers_uses_unquote_with_octal( + header: str, expected_name: str, expected_value: str, expected_coded: str +) -> None: + """Test that parse_set_cookie_headers correctly unquotes values with octal sequences and preserves coded_value.""" + result = parse_set_cookie_headers([header]) + + assert len(result) == 1 + name, morsel = result[0] + + # Check that octal sequences were properly decoded in the value + assert name == expected_name + assert morsel.value == expected_value + + # Check that coded_value preserves the original quoted string + assert morsel.coded_value == expected_coded + + +# Tests for parse_cookie_header (RFC 6265 compliant Cookie header parser) + + +def test_parse_cookie_header_simple() -> None: + """Test parse_cookie_header with simple cookies.""" + header = "name=value; session=abc123" + + result = parse_cookie_header(header) + + assert len(result) == 2 + assert result[0][0] == "name" + assert result[0][1].value == "value" + assert result[1][0] == "session" + assert result[1][1].value == "abc123" + + +def test_parse_cookie_header_empty() -> None: + """Test parse_cookie_header with empty header.""" + assert parse_cookie_header("") == [] + assert parse_cookie_header(" ") == [] + + +def test_parse_cookie_header_quoted_values() -> None: + """Test parse_cookie_header handles quoted values correctly.""" + header = 'name="quoted value"; session="with;semicolon"; data="with\\"escaped\\""' + + result = parse_cookie_header(header) + + assert len(result) == 3 + assert result[0][0] == "name" + assert result[0][1].value == "quoted value" + assert result[1][0] == "session" 
+ assert result[1][1].value == "with;semicolon" + assert result[2][0] == "data" + assert result[2][1].value == 'with"escaped"' + + +def test_parse_cookie_header_special_chars() -> None: + """Test parse_cookie_header accepts special characters in names.""" + header = ( + "ISAWPLB{A7F52349-3531-4DA9-8776-F74BC6F4F1BB}=value1; cookie[index]=value2" + ) + + result = parse_cookie_header(header) + + assert len(result) == 2 + assert result[0][0] == "ISAWPLB{A7F52349-3531-4DA9-8776-F74BC6F4F1BB}" + assert result[0][1].value == "value1" + assert result[1][0] == "cookie[index]" + assert result[1][1].value == "value2" + + +def test_parse_cookie_header_invalid_names() -> None: + """Test parse_cookie_header rejects invalid cookie names.""" + # Invalid names with control characters + header = "invalid\tcookie=value; valid=cookie; invalid\ncookie=bad" + + result = parse_cookie_header(header) + + # Parse_cookie_header uses same regex as parse_set_cookie_headers + # Tab and newline are treated as separators, not part of names + assert len(result) == 5 + assert result[0][0] == "invalid" + assert result[0][1].value == "" + assert result[1][0] == "cookie" + assert result[1][1].value == "value" + assert result[2][0] == "valid" + assert result[2][1].value == "cookie" + assert result[3][0] == "invalid" + assert result[3][1].value == "" + assert result[4][0] == "cookie" + assert result[4][1].value == "bad" + + +def test_parse_cookie_header_no_attributes() -> None: + """Test parse_cookie_header treats all pairs as cookies (no attributes).""" + # In Cookie headers, even reserved attribute names are treated as cookies + header = ( + "session=abc123; path=/test; domain=.example.com; secure=yes; httponly=true" + ) + + result = parse_cookie_header(header) + + assert len(result) == 5 + assert result[0][0] == "session" + assert result[0][1].value == "abc123" + assert result[1][0] == "path" + assert result[1][1].value == "/test" + assert result[2][0] == "domain" + assert result[2][1].value == 
".example.com" + assert result[3][0] == "secure" + assert result[3][1].value == "yes" + assert result[4][0] == "httponly" + assert result[4][1].value == "true" + + +def test_parse_cookie_header_empty_value() -> None: + """Test parse_cookie_header with empty cookie values.""" + header = "empty=; name=value; also_empty=" + + result = parse_cookie_header(header) + + assert len(result) == 3 + assert result[0][0] == "empty" + assert result[0][1].value == "" + assert result[1][0] == "name" + assert result[1][1].value == "value" + assert result[2][0] == "also_empty" + assert result[2][1].value == "" + + +def test_parse_cookie_header_spaces() -> None: + """Test parse_cookie_header handles spaces correctly.""" + header = "name1=value1 ; name2=value2 ; name3=value3" + + result = parse_cookie_header(header) + + assert len(result) == 3 + assert result[0][0] == "name1" + assert result[0][1].value == "value1" + assert result[1][0] == "name2" + assert result[1][1].value == "value2" + assert result[2][0] == "name3" + assert result[2][1].value == "value3" + + +def test_parse_cookie_header_encoded_values() -> None: + """Test parse_cookie_header preserves encoded values.""" + header = "encoded=hello%20world; url=https%3A%2F%2Fexample.com" + + result = parse_cookie_header(header) + + assert len(result) == 2 + assert result[0][0] == "encoded" + assert result[0][1].value == "hello%20world" + assert result[1][0] == "url" + assert result[1][1].value == "https%3A%2F%2Fexample.com" + + +def test_parse_cookie_header_malformed() -> None: + """Test parse_cookie_header handles malformed input.""" + # Missing value + header = "name1=value1; justname; name2=value2" + + result = parse_cookie_header(header) + + # Parser accepts cookies without values (empty value) + assert len(result) == 3 + assert result[0][0] == "name1" + assert result[0][1].value == "value1" + assert result[1][0] == "justname" + assert result[1][1].value == "" + assert result[2][0] == "name2" + assert result[2][1].value == 
"value2" + + # Missing name + header = "=value; name=value2" + result = parse_cookie_header(header) + assert len(result) == 2 + assert result[0][0] == "=value" + assert result[0][1].value == "" + assert result[1][0] == "name" + assert result[1][1].value == "value2" + + +def test_parse_cookie_header_complex_quoted() -> None: + """Test parse_cookie_header with complex quoted values.""" + header = 'session="abc;xyz"; data="value;with;multiple;semicolons"; simple=unquoted' + + result = parse_cookie_header(header) + + assert len(result) == 3 + assert result[0][0] == "session" + assert result[0][1].value == "abc;xyz" + assert result[1][0] == "data" + assert result[1][1].value == "value;with;multiple;semicolons" + assert result[2][0] == "simple" + assert result[2][1].value == "unquoted" + + +def test_parse_cookie_header_unmatched_quotes() -> None: + """Test parse_cookie_header handles unmatched quotes.""" + header = 'cookie1=value1; cookie2="unmatched; cookie3=value3' + + result = parse_cookie_header(header) + + # Should parse all cookies correctly + assert len(result) == 3 + assert result[0][0] == "cookie1" + assert result[0][1].value == "value1" + assert result[1][0] == "cookie2" + assert result[1][1].value == '"unmatched' + assert result[2][0] == "cookie3" + assert result[2][1].value == "value3" + + +def test_parse_cookie_header_vs_parse_set_cookie_headers() -> None: + """Test difference between parse_cookie_header and parse_set_cookie_headers.""" + # Cookie header with attribute-like pairs + cookie_header = "session=abc123; path=/test; secure=yes" + + # parse_cookie_header treats all as cookies + cookie_result = parse_cookie_header(cookie_header) + assert len(cookie_result) == 3 + assert cookie_result[0][0] == "session" + assert cookie_result[0][1].value == "abc123" + assert cookie_result[1][0] == "path" + assert cookie_result[1][1].value == "/test" + assert cookie_result[2][0] == "secure" + assert cookie_result[2][1].value == "yes" + + # parse_set_cookie_headers 
would treat path and secure as attributes + set_cookie_result = parse_set_cookie_headers([cookie_header]) + assert len(set_cookie_result) == 1 + assert set_cookie_result[0][0] == "session" + assert set_cookie_result[0][1].value == "abc123" + assert set_cookie_result[0][1]["path"] == "/test" + # secure with any value is treated as boolean True + assert set_cookie_result[0][1]["secure"] is True + + +def test_parse_cookie_header_compatibility_with_simple_cookie() -> None: + """Test parse_cookie_header output works with SimpleCookie.""" + header = "session=abc123; user=john; token=xyz789" + + # Parse with our function + parsed = parse_cookie_header(header) + + # Create SimpleCookie and update with our results + sc = SimpleCookie() + sc.update(parsed) + + # Verify all cookies are present + assert len(sc) == 3 + assert sc["session"].value == "abc123" + assert sc["user"].value == "john" + assert sc["token"].value == "xyz789" + + +def test_parse_cookie_header_real_world_examples() -> None: + """Test parse_cookie_header with real-world Cookie headers.""" + # Google Analytics style + header = "_ga=GA1.2.1234567890.1234567890; _gid=GA1.2.0987654321.0987654321" + result = parse_cookie_header(header) + assert len(result) == 2 + assert result[0][0] == "_ga" + assert result[0][1].value == "GA1.2.1234567890.1234567890" + assert result[1][0] == "_gid" + assert result[1][1].value == "GA1.2.0987654321.0987654321" + + # Session cookies + header = "PHPSESSID=abc123def456; csrf_token=xyz789; logged_in=true" + result = parse_cookie_header(header) + assert len(result) == 3 + assert result[0][0] == "PHPSESSID" + assert result[0][1].value == "abc123def456" + assert result[1][0] == "csrf_token" + assert result[1][1].value == "xyz789" + assert result[2][0] == "logged_in" + assert result[2][1].value == "true" + + # Complex values with proper quoting + header = r'preferences="{\"theme\":\"dark\",\"lang\":\"en\"}"; session_data=eyJhbGciOiJIUzI1NiJ9' + result = parse_cookie_header(header) + 
assert len(result) == 2 + assert result[0][0] == "preferences" + assert result[0][1].value == '{"theme":"dark","lang":"en"}' + assert result[1][0] == "session_data" + assert result[1][1].value == "eyJhbGciOiJIUzI1NiJ9" + + +def test_parse_cookie_header_issue_7993() -> None: + """Test parse_cookie_header handles issue #7993 correctly.""" + # This specific case from issue #7993 + header = 'foo=bar; baz="qux; foo2=bar2' + + result = parse_cookie_header(header) + + # All cookies should be parsed + assert len(result) == 3 + assert result[0][0] == "foo" + assert result[0][1].value == "bar" + assert result[1][0] == "baz" + assert result[1][1].value == '"qux' + assert result[2][0] == "foo2" + assert result[2][1].value == "bar2" + + +def test_parse_cookie_header_illegal_names(caplog: pytest.LogCaptureFixture) -> None: + """Test parse_cookie_header warns about illegal cookie names.""" + # Cookie name with comma (not allowed in _COOKIE_NAME_RE) + header = "good=value; invalid,cookie=bad; another=test" + result = parse_cookie_header(header) + # Should skip the invalid cookie but continue parsing + assert len(result) == 2 + assert result[0][0] == "good" + assert result[0][1].value == "value" + assert result[1][0] == "another" + assert result[1][1].value == "test" + assert "Can not load cookie: Illegal cookie name 'invalid,cookie'" in caplog.text + + @pytest.mark.parametrize( ("input_str", "expected"), [ @@ -1225,45 +1572,3 @@ def test_unquote_compatibility_with_simplecookie(test_value: str) -> None: f"our={_unquote(test_value)!r}, " f"SimpleCookie={simplecookie_unquote(test_value)!r}" ) - - -@pytest.mark.parametrize( - ("header", "expected_name", "expected_value", "expected_coded"), - [ - # Test cookie values with octal escape sequences - (r'name="\012newline\012"', "name", "\nnewline\n", r'"\012newline\012"'), - ( - r'tab="\011separated\011values"', - "tab", - "\tseparated\tvalues", - r'"\011separated\011values"', - ), - ( - r'mixed="hello\040world\041"', - "mixed", - "hello 
world!", - r'"hello\040world\041"', - ), - ( - r'complex="\042quoted\042 text with \012 newline"', - "complex", - '"quoted" text with \n newline', - r'"\042quoted\042 text with \012 newline"', - ), - ], -) -def test_parse_cookie_headers_uses_unquote_with_octal( - header: str, expected_name: str, expected_value: str, expected_coded: str -) -> None: - """Test that parse_cookie_headers correctly unquotes values with octal sequences and preserves coded_value.""" - result = parse_cookie_headers([header]) - - assert len(result) == 1 - name, morsel = result[0] - - # Check that octal sequences were properly decoded in the value - assert name == expected_name - assert morsel.value == expected_value - - # Check that coded_value preserves the original quoted string - assert morsel.coded_value == expected_coded diff --git a/tests/test_web_request.py b/tests/test_web_request.py index 758b8b1f98a..e706e18dee5 100644 --- a/tests/test_web_request.py +++ b/tests/test_web_request.py @@ -431,27 +431,35 @@ def test_request_cookies_quoted_values() -> None: def test_request_cookies_with_attributes() -> None: - """Test that cookie attributes don't affect value parsing. + """Test that cookie attributes are parsed as cookies per RFC 6265. - Related to issue #5397 - ensures that the presence of domain or other - attributes doesn't change how cookie values are parsed. + Per RFC 6265 Section 5.4, Cookie headers contain only name-value pairs. + Names that match attribute names (Domain, Path, etc.) should be treated + as regular cookies, not as attributes. 
""" - # Cookie with domain attribute - quotes should still be removed + # Cookie with domain - both should be parsed as cookies headers = CIMultiDict(COOKIE='sess="quoted_value"; Domain=.example.com') req = make_mocked_request("GET", "/", headers=headers) - assert req.cookies == {"sess": "quoted_value"} + assert req.cookies == {"sess": "quoted_value", "Domain": ".example.com"} - # Cookie with multiple attributes + # Cookie with multiple attribute names - all parsed as cookies headers = CIMultiDict(COOKIE='token="abc123"; Path=/; Secure; HttpOnly') req = make_mocked_request("GET", "/", headers=headers) - assert req.cookies == {"token": "abc123"} + assert req.cookies == {"token": "abc123", "Path": "/", "Secure": "", "HttpOnly": ""} - # Multiple cookies with different attributes + # Multiple cookies with attribute names mixed in headers = CIMultiDict( COOKIE='c1="v1"; Domain=.example.com; c2="v2"; Path=/api; c3=v3; Secure' ) req = make_mocked_request("GET", "/", headers=headers) - assert req.cookies == {"c1": "v1", "c2": "v2", "c3": "v3"} + assert req.cookies == { + "c1": "v1", + "Domain": ".example.com", + "c2": "v2", + "Path": "/api", + "c3": "v3", + "Secure": "", + } def test_match_info() -> None: From a15febd4bd2c5d17a35aa11b00d12087e5089871 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" <nick@koston.org> Date: Mon, 9 Jun 2025 23:39:07 -0500 Subject: [PATCH 1498/1511] Release 3.12.12 (#11183) --- CHANGES.rst | 26 ++++++++++++++++++++++++++ CHANGES/11173.bugfix.rst | 1 - CHANGES/11178.bugfix.rst | 1 - aiohttp/__init__.py | 2 +- 4 files changed, 27 insertions(+), 3 deletions(-) delete mode 100644 CHANGES/11173.bugfix.rst delete mode 100644 CHANGES/11178.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index 859cfd1f6b0..c554d11ecab 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,32 @@ .. 
towncrier release notes start +3.12.12 (2025-06-09) +==================== + +Bug fixes +--------- + +- Fixed cookie unquoting to properly handle octal escape sequences in cookie values (e.g., ``\012`` for newline) by vendoring the correct ``_unquote`` implementation from Python's ``http.cookies`` module -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`11173`. + + + +- Fixed ``Cookie`` header parsing to treat attribute names as regular cookies per :rfc:`6265#section-5.4` -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`11178`. + + + + +---- + + 3.12.11 (2025-06-07) ==================== diff --git a/CHANGES/11173.bugfix.rst b/CHANGES/11173.bugfix.rst deleted file mode 100644 index 9214080d267..00000000000 --- a/CHANGES/11173.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed cookie unquoting to properly handle octal escape sequences in cookie values (e.g., ``\012`` for newline) by vendoring the correct ``_unquote`` implementation from Python's ``http.cookies`` module -- by :user:`bdraco`. diff --git a/CHANGES/11178.bugfix.rst b/CHANGES/11178.bugfix.rst deleted file mode 100644 index dc74cddde06..00000000000 --- a/CHANGES/11178.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed ``Cookie`` header parsing to treat attribute names as regular cookies per :rfc:`6265#section-5.4` -- by :user:`bdraco`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 3f9b1234cb3..cf39af0d8eb 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.12.dev0" +__version__ = "3.12.12" from typing import TYPE_CHECKING, Tuple From 940480842baa3fb2dd6833cd879d5798be7750d2 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Tue, 10 Jun 2025 00:27:00 -0500 Subject: [PATCH 1499/1511] Increment version to 3.12.13.dev0 (#11184) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index cf39af0d8eb..76d46504b81 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.12" +__version__ = "3.12.13.dev0" from typing import TYPE_CHECKING, Tuple From bd374b177ba0d13734ab240ba4fd5d229ea8ba05 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 12 Jun 2025 21:19:31 +0000 Subject: [PATCH 1500/1511] [PR #11193/43ea3b1d backport][3.12] Pin CI Python version to 3.13.3 (#11194) Co-authored-by: J. Nick Koston <nick@koston.org> --- .github/workflows/ci-cd.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 1cae0bd57fe..2f0957306cd 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -122,7 +122,11 @@ jobs: needs: gen_llhttp strategy: matrix: - pyver: [3.9, '3.10', '3.11', '3.12', '3.13'] + # Note that 3.13.4 is broken on Windows which + # is why 3.13.5 was rushed out. When 3.13.5 is fully + # available, we can remove 3.13.4 from the matrix + # and switch it back to 3.13 + pyver: [3.9, '3.10', '3.11', '3.12', '3.13.3'] no-extensions: ['', 'Y'] os: [ubuntu, macos, windows] experimental: [false] From 2d4a28b344d58f1087da9dbcd7260b0bf401a9c6 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 12 Jun 2025 21:44:26 +0000 Subject: [PATCH 1501/1511] [PR #11192/b888dc5c backport][3.12] Add warning to release notes about the quote_cookie fix (#11196) Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES.rst | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/CHANGES.rst b/CHANGES.rst index c554d11ecab..713ded10177 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -153,6 +153,14 @@ Features 3.12.7 (2025-06-02) =================== +.. warning:: + + This release fixes an issue where the ``quote_cookie`` parameter was not being properly + respected for shared cookies (domain="", path=""). If your server does not handle quoted + cookies correctly, you may need to disable cookie quoting by setting ``quote_cookie=False`` + when creating your :class:`~aiohttp.ClientSession` or :class:`~aiohttp.CookieJar`. + See :ref:`aiohttp-client-cookie-quoting-routine` for details. + Bug fixes --------- From fc9b7208836db502afa6d3d7ba99cb31ae972166 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 12 Jun 2025 21:47:54 +0000 Subject: [PATCH 1502/1511] [PR #11198/b151d3fc backport][3.12] Fix auto-created TCPConnector not using session's event loop (#11199) Co-authored-by: J. Nick Koston <nick@koston.org> Fixes #11147 --- CHANGES/11147.bugfix.rst | 1 + aiohttp/client.py | 4 +++- tests/test_client_session.py | 20 ++++++++++++++++++++ 3 files changed, 24 insertions(+), 1 deletion(-) create mode 100644 CHANGES/11147.bugfix.rst diff --git a/CHANGES/11147.bugfix.rst b/CHANGES/11147.bugfix.rst new file mode 100644 index 00000000000..ab88541fcac --- /dev/null +++ b/CHANGES/11147.bugfix.rst @@ -0,0 +1 @@ +Fixed auto-created :py:class:`~aiohttp.TCPConnector` not using the session's event loop when :py:class:`~aiohttp.ClientSession` is created without an explicit connector -- by :user:`bdraco`. 
diff --git a/aiohttp/client.py b/aiohttp/client.py index ce95e5cb39e..26492cd15fe 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -369,7 +369,9 @@ def __init__( ) if connector is None: - connector = TCPConnector(ssl_shutdown_timeout=ssl_shutdown_timeout) + connector = TCPConnector( + loop=loop, ssl_shutdown_timeout=ssl_shutdown_timeout + ) if connector._loop is not loop: raise RuntimeError("Session and connector has to use same event loop") diff --git a/tests/test_client_session.py b/tests/test_client_session.py index 754cac1b47e..c296c9670b0 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -422,6 +422,26 @@ async def make_sess(): another_loop.run_until_complete(connector.close()) +def test_auto_created_connector_uses_session_loop( + loop: asyncio.AbstractEventLoop, +) -> None: + """Test that auto-created TCPConnector uses the session's loop.""" + # Create a ClientSession without providing a connector + # The session should auto-create a TCPConnector with the provided loop + session = ClientSession(loop=loop) + + # Verify the connector was created + assert session.connector is not None + assert isinstance(session.connector, TCPConnector) + + # Verify the connector uses the same loop as the session + assert session.connector._loop is loop + assert session.connector._loop is session._loop + + # Clean up + loop.run_until_complete(session.close()) + + def test_detach(loop, session) -> None: conn = session.connector try: From 2ff9b615c8bf0758b496b830438d8e0f11f4f515 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sat, 14 Jun 2025 08:41:27 -0500 Subject: [PATCH 1503/1511] Release 3.12.13 (#11214) --- CHANGES.rst | 18 ++++++++++++++++++ CHANGES/11147.bugfix.rst | 1 - aiohttp/__init__.py | 2 +- 3 files changed, 19 insertions(+), 2 deletions(-) delete mode 100644 CHANGES/11147.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index 713ded10177..d991d99cf5d 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,24 @@ .. towncrier release notes start +3.12.13 (2025-06-14) +==================== + +Bug fixes +--------- + +- Fixed auto-created :py:class:`~aiohttp.TCPConnector` not using the session's event loop when :py:class:`~aiohttp.ClientSession` is created without an explicit connector -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`11147`. + + + + +---- + + 3.12.12 (2025-06-09) ==================== diff --git a/CHANGES/11147.bugfix.rst b/CHANGES/11147.bugfix.rst deleted file mode 100644 index ab88541fcac..00000000000 --- a/CHANGES/11147.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed auto-created :py:class:`~aiohttp.TCPConnector` not using the session's event loop when :py:class:`~aiohttp.ClientSession` is created without an explicit connector -- by :user:`bdraco`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 76d46504b81..58ef7a9a565 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.13.dev0" +__version__ = "3.12.13" from typing import TYPE_CHECKING, Tuple From 4ad78b3d31bde9fdab22aac2692247f9746e8b48 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" <nick@koston.org> Date: Sat, 14 Jun 2025 10:48:08 -0500 Subject: [PATCH 1504/1511] Increment version to 3.12.14.dev0 (#11216) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 58ef7a9a565..cc73fcc2c8e 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.13" +__version__ = "3.12.14.dev0" from typing import TYPE_CHECKING, Tuple From 133e2542d0174691f6956e84b6ccdc7fe2bd03e9 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 3 Jul 2025 21:21:54 +0100 Subject: [PATCH 1505/1511] [PR #11234/a83597fa backport][3.12] Document exceptions raised by send_frame et al. (#11262) **This is a backport of PR #11234 as merged into master (a83597fa88be7ac7dd5f6081d236d751cb40fe4d).** Co-authored-by: Jonathan Ehwald <github@ehwald.info> --- CHANGES/11234.doc.rst | 2 ++ docs/web_reference.rst | 24 +++++++++++++++++++----- 2 files changed, 21 insertions(+), 5 deletions(-) create mode 100644 CHANGES/11234.doc.rst diff --git a/CHANGES/11234.doc.rst b/CHANGES/11234.doc.rst new file mode 100644 index 00000000000..900b56a771c --- /dev/null +++ b/CHANGES/11234.doc.rst @@ -0,0 +1,2 @@ +Clarified exceptions raised by ``WebSocketResponse.send_frame`` et al. +-- by :user:`DoctorJohn`. diff --git a/docs/web_reference.rst b/docs/web_reference.rst index bcf20817aab..2d1882da17c 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -1118,7 +1118,9 @@ and :ref:`aiohttp-web-signals` handlers:: :class:`str` (converted to *UTF-8* encoded bytes) or :class:`bytes`. - :raise RuntimeError: if connections is not started or closing. + :raise RuntimeError: if the connections is not started. + + :raise aiohttp.ClientConnectionResetError: if the connection is closing. .. 
versionchanged:: 3.0 @@ -1133,7 +1135,9 @@ and :ref:`aiohttp-web-signals` handlers:: :class:`str` (converted to *UTF-8* encoded bytes) or :class:`bytes`. - :raise RuntimeError: if connections is not started or closing. + :raise RuntimeError: if the connections is not started. + + :raise aiohttp.ClientConnectionResetError: if the connection is closing. .. versionchanged:: 3.0 @@ -1150,10 +1154,12 @@ and :ref:`aiohttp-web-signals` handlers:: single message, ``None`` for not overriding per-socket setting. - :raise RuntimeError: if connection is not started or closing + :raise RuntimeError: if the connection is not started. :raise TypeError: if data is not :class:`str` + :raise aiohttp.ClientConnectionResetError: if the connection is closing. + .. versionchanged:: 3.0 The method is converted into :term:`coroutine`, @@ -1170,11 +1176,13 @@ and :ref:`aiohttp-web-signals` handlers:: single message, ``None`` for not overriding per-socket setting. - :raise RuntimeError: if connection is not started or closing + :raise RuntimeError: if the connection is not started. :raise TypeError: if data is not :class:`bytes`, :class:`bytearray` or :class:`memoryview`. + :raise aiohttp.ClientConnectionResetError: if the connection is closing. + .. versionchanged:: 3.0 The method is converted into :term:`coroutine`, @@ -1195,12 +1203,14 @@ and :ref:`aiohttp-web-signals` handlers:: returns a JSON string (:func:`json.dumps` by default). - :raise RuntimeError: if connection is not started or closing + :raise RuntimeError: if the connection is not started. :raise ValueError: if data is not serializable object :raise TypeError: if value returned by ``dumps`` param is not :class:`str` + :raise aiohttp.ClientConnectionResetError: if the connection is closing. + .. versionchanged:: 3.0 The method is converted into :term:`coroutine`, @@ -1230,6 +1240,10 @@ and :ref:`aiohttp-web-signals` handlers:: single message, ``None`` for not overriding per-socket setting. 
+ :raise RuntimeError: if the connection is not started. + + :raise aiohttp.ClientConnectionResetError: if the connection is closing. + .. versionadded:: 3.11 .. method:: close(*, code=WSCloseCode.OK, message=b'', drain=True) From ce3c0a718c6bcec48fbbf3c656cc954b001d4cd4 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Mon, 7 Jul 2025 20:16:28 +0100 Subject: [PATCH 1506/1511] Bump aiosignal from 1.3.2 to 1.4.0 (#11267) (#11279) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [aiosignal](https://github.com/aio-libs/aiosignal) from 1.3.2 to 1.4.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/aiosignal/releases">aiosignal's releases</a>.</em></p> <blockquote> <h2>1.4.0</h2> <h2>Features</h2> <ul> <li> <p>Added decorator functionality to <code>Signal</code> as a convenient way to add a callback -- by <code>@Vizonex</code>. <code>[#699](https://github.com/aio-libs/aiosignal/issues/699) <https://github.com/aio-libs/aiosignal/pulls/699></code>_</p> </li> <li> <p>Improved type safety by allowing callback parameters to be type checked (typing-extensions is now required for Python <3.13). Parameters for a <code>Signal</code> callback should now be defined like <code>Signal[int, str]</code> -- by <a href="https://github.com/Vizonex"><code>@​Vizonex</code></a> and <a href="https://github.com/Dreamsorcerer"><code>@​Dreamsorcerer</code></a>. <code>[#699](https://github.com/aio-libs/aiosignal/issues/699) <https://github.com/aio-libs/aiosignal/pulls/699></code><em>, <code>[#710](https://github.com/aio-libs/aiosignal/issues/710) <https://github.com/aio-libs/aiosignal/pulls/710></code></em></p> </li> </ul> <h2>Misc</h2> <ul> <li>Removed the sphinxcontrib-asyncio documentation dependency. 
<code>[#528](https://github.com/aio-libs/aiosignal/issues/528) <https://github.com/aio-libs/aiosignal/pull/528></code>_</li> </ul> <hr /> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/aio-libs/aiosignal/blob/master/CHANGES.rst">aiosignal's changelog</a>.</em></p> <blockquote> <h1>1.4.0 (2025-07-03)</h1> <h2>Features</h2> <ul> <li> <p>Added decorator functionality to <code>Signal</code> as a convenient way to add a callback -- by <code>@Vizonex</code>. <code>[#699](https://github.com/aio-libs/aiosignal/issues/699) <https://github.com/aio-libs/aiosignal/pulls/699></code>_</p> </li> <li> <p>Improved type safety by allowing callback parameters to be type checked (typing-extensions is now required for Python <3.13). Parameters for a <code>Signal</code> callback should now be defined like <code>Signal[int, str]</code> -- by <a href="https://github.com/Vizonex"><code>@​Vizonex</code></a> and <a href="https://github.com/Dreamsorcerer"><code>@​Dreamsorcerer</code></a>. <code>[#699](https://github.com/aio-libs/aiosignal/issues/699) <https://github.com/aio-libs/aiosignal/pulls/699></code><em>, <code>[#710](https://github.com/aio-libs/aiosignal/issues/710) <https://github.com/aio-libs/aiosignal/pulls/710></code></em></p> </li> </ul> <h2>Misc</h2> <ul> <li>Removed the sphinxcontrib-asyncio documentation dependency. 
<code>[#528](https://github.com/aio-libs/aiosignal/issues/528) <https://github.com/aio-libs/aiosignal/pull/528></code>_</li> </ul> <hr /> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/aio-libs/aiosignal/commit/1cf80149c869d410c90a58e85e9c703be6ef8692"><code>1cf8014</code></a> Fix deploy</li> <li><a href="https://github.com/aio-libs/aiosignal/commit/892494c5c72553e281e57e924cf055e6125fb0fc"><code>892494c</code></a> Release v1.4 (<a href="https://redirect.github.com/aio-libs/aiosignal/issues/718">#718</a>)</li> <li><a href="https://github.com/aio-libs/aiosignal/commit/fa360821631383aed6a534da6f73ad149ec666f7"><code>fa36082</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/aio-libs/aiosignal/issues/719">#719</a>)</li> <li><a href="https://github.com/aio-libs/aiosignal/commit/b7f68f12fed7c4b84a6866984e7adafcd18fe265"><code>b7f68f1</code></a> [pre-commit.ci] pre-commit autoupdate (<a href="https://redirect.github.com/aio-libs/aiosignal/issues/717">#717</a>)</li> <li><a href="https://github.com/aio-libs/aiosignal/commit/2b1acac380eaaa91a643bfc9c24fa1f3942f0d45"><code>2b1acac</code></a> Build(deps): Bump sigstore/gh-action-sigstore-python from 3.0.0 to 3.0.1 (<a href="https://redirect.github.com/aio-libs/aiosignal/issues/716">#716</a>)</li> <li><a href="https://github.com/aio-libs/aiosignal/commit/17456ed5f968c0001a8823c2076f999fbf448157"><code>17456ed</code></a> Build(deps): Bump tox from 4.26.0 to 4.27.0 (<a href="https://redirect.github.com/aio-libs/aiosignal/issues/715">#715</a>)</li> <li><a href="https://github.com/aio-libs/aiosignal/commit/4c236903da2e71b85652c07bed69013189a39406"><code>4c23690</code></a> Build(deps): Bump pytest from 8.4.0 to 8.4.1 (<a href="https://redirect.github.com/aio-libs/aiosignal/issues/714">#714</a>)</li> <li><a href="https://github.com/aio-libs/aiosignal/commit/7be2f6833be08bb14207bc627f9931665cd947ce"><code>7be2f68</code></a> Build(deps): Bump mypy 
from 1.16.0 to 1.16.1 (<a href="https://redirect.github.com/aio-libs/aiosignal/issues/713">#713</a>)</li> <li><a href="https://github.com/aio-libs/aiosignal/commit/5d62945d07c9413720e968cc3f25c66307d9a337"><code>5d62945</code></a> Build(deps): Bump coverage from 7.9.0 to 7.9.1 (<a href="https://redirect.github.com/aio-libs/aiosignal/issues/712">#712</a>)</li> <li><a href="https://github.com/aio-libs/aiosignal/commit/a6d85c1c3430621814d6163ea442828e7f31b34b"><code>a6d85c1</code></a> Build(deps): Bump dependabot/fetch-metadata from 2.3.0 to 2.4.0 (<a href="https://redirect.github.com/aio-libs/aiosignal/issues/694">#694</a>)</li> <li>Additional commits viewable in <a href="https://github.com/aio-libs/aiosignal/compare/v1.3.2...v1.4.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=aiosignal&package-manager=pip&previous-version=1.3.2&new-version=1.4.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> --------- (cherry picked from commit 9571860347ef7570549415358f0a29d72f0852c8) <!-- Thank you for your contribution! --> ## What do these changes do? <!-- Please give a short brief about these changes. --> ## Are there changes in behavior for the user? <!-- Outline any notable behaviour for the end users. --> ## Is it a substantial burden for the maintainers to support this? <!-- Stop right there! Pause. Just for a minute... Can you think of anything obvious that would complicate the ongoing development of this project? 
Try to consider if you'd be able to maintain it throughout the next 5 years. Does it seem viable? Tell us your thoughts! We'd very much love to hear what the consequences of merging this patch might be... This will help us assess if your change is something we'd want to entertain early in the review process. Thank you in advance! --> ## Related issue number <!-- Will this resolve any open issues? --> <!-- Remember to prefix with 'Fixes' if it closes an issue (e.g. 'Fixes #123'). --> ## Checklist - [ ] I think the code is well written - [ ] Unit tests for the changes exist - [ ] Documentation reflects the changes - [ ] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. - [ ] Add a new news fragment into the `CHANGES/` folder * name it `<issue_or_pr_num>.<type>.rst` (e.g. `588.bugfix.rst`) * if you don't have an issue number, change it to the pull request number after creating the PR * `.bugfix`: A bug fix for something the maintainers deemed an improper undesired behavior that got corrected to match pre-agreed expectations. * `.feature`: A new behavior, public APIs. That sort of stuff. * `.deprecation`: A declaration of future API removals and breaking changes in behavior. * `.breaking`: When something public is removed in a breaking way. Could be deprecated in an earlier release. * `.doc`: Notable updates to the documentation structure or build process. * `.packaging`: Notes for downstreams about unobvious side effects and tooling. Changes in the test invocation considerations and runtime assumptions. * `.contrib`: Stuff that affects the contributor experience. e.g. Running tests, building the docs, setting up the development environment. * `.misc`: Changes that are hard to assign to any of the above categories. 
* Make sure to use full sentences with correct case and punctuation, for example: ```rst Fixed issue with non-ascii contents in doctest text files -- by :user:`contributor-gh-handle`. ``` Use the past tense or the present tense a non-imperative mood, referring to what's changed compared to the last released version of this project. Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- aiohttp/tracing.py | 101 +++++++++++++++------------------- aiohttp/web_app.py | 4 +- requirements/base.txt | 8 ++- requirements/constraints.txt | 3 +- requirements/dev.txt | 3 +- requirements/runtime-deps.txt | 9 +-- requirements/test.txt | 3 +- 7 files changed, 61 insertions(+), 70 deletions(-) diff --git a/aiohttp/tracing.py b/aiohttp/tracing.py index 012ed7bdaf6..568fa7f9e38 100644 --- a/aiohttp/tracing.py +++ b/aiohttp/tracing.py @@ -1,5 +1,5 @@ from types import SimpleNamespace -from typing import TYPE_CHECKING, Awaitable, Mapping, Optional, Protocol, Type, TypeVar +from typing import TYPE_CHECKING, Mapping, Optional, Type, TypeVar import attr from aiosignal import Signal @@ -12,14 +12,7 @@ from .client import ClientSession _ParamT_contra = TypeVar("_ParamT_contra", contravariant=True) - - class _SignalCallback(Protocol[_ParamT_contra]): - def __call__( - self, - __client_session: ClientSession, - __trace_config_ctx: SimpleNamespace, - __params: _ParamT_contra, - ) -> Awaitable[None]: ... 
+ _TracingSignal = Signal[ClientSession, SimpleNamespace, _ParamT_contra] __all__ = ( @@ -49,54 +42,46 @@ class TraceConfig: def __init__( self, trace_config_ctx_factory: Type[SimpleNamespace] = SimpleNamespace ) -> None: - self._on_request_start: Signal[_SignalCallback[TraceRequestStartParams]] = ( + self._on_request_start: _TracingSignal[TraceRequestStartParams] = Signal(self) + self._on_request_chunk_sent: _TracingSignal[TraceRequestChunkSentParams] = ( Signal(self) ) - self._on_request_chunk_sent: Signal[ - _SignalCallback[TraceRequestChunkSentParams] - ] = Signal(self) - self._on_response_chunk_received: Signal[ - _SignalCallback[TraceResponseChunkReceivedParams] + self._on_response_chunk_received: _TracingSignal[ + TraceResponseChunkReceivedParams ] = Signal(self) - self._on_request_end: Signal[_SignalCallback[TraceRequestEndParams]] = Signal( + self._on_request_end: _TracingSignal[TraceRequestEndParams] = Signal(self) + self._on_request_exception: _TracingSignal[TraceRequestExceptionParams] = ( + Signal(self) + ) + self._on_request_redirect: _TracingSignal[TraceRequestRedirectParams] = Signal( self ) - self._on_request_exception: Signal[ - _SignalCallback[TraceRequestExceptionParams] + self._on_connection_queued_start: _TracingSignal[ + TraceConnectionQueuedStartParams ] = Signal(self) - self._on_request_redirect: Signal[ - _SignalCallback[TraceRequestRedirectParams] + self._on_connection_queued_end: _TracingSignal[ + TraceConnectionQueuedEndParams ] = Signal(self) - self._on_connection_queued_start: Signal[ - _SignalCallback[TraceConnectionQueuedStartParams] + self._on_connection_create_start: _TracingSignal[ + TraceConnectionCreateStartParams ] = Signal(self) - self._on_connection_queued_end: Signal[ - _SignalCallback[TraceConnectionQueuedEndParams] + self._on_connection_create_end: _TracingSignal[ + TraceConnectionCreateEndParams ] = Signal(self) - self._on_connection_create_start: Signal[ - _SignalCallback[TraceConnectionCreateStartParams] + 
self._on_connection_reuseconn: _TracingSignal[ + TraceConnectionReuseconnParams ] = Signal(self) - self._on_connection_create_end: Signal[ - _SignalCallback[TraceConnectionCreateEndParams] + self._on_dns_resolvehost_start: _TracingSignal[ + TraceDnsResolveHostStartParams ] = Signal(self) - self._on_connection_reuseconn: Signal[ - _SignalCallback[TraceConnectionReuseconnParams] - ] = Signal(self) - self._on_dns_resolvehost_start: Signal[ - _SignalCallback[TraceDnsResolveHostStartParams] - ] = Signal(self) - self._on_dns_resolvehost_end: Signal[ - _SignalCallback[TraceDnsResolveHostEndParams] - ] = Signal(self) - self._on_dns_cache_hit: Signal[_SignalCallback[TraceDnsCacheHitParams]] = ( + self._on_dns_resolvehost_end: _TracingSignal[TraceDnsResolveHostEndParams] = ( Signal(self) ) - self._on_dns_cache_miss: Signal[_SignalCallback[TraceDnsCacheMissParams]] = ( + self._on_dns_cache_hit: _TracingSignal[TraceDnsCacheHitParams] = Signal(self) + self._on_dns_cache_miss: _TracingSignal[TraceDnsCacheMissParams] = Signal(self) + self._on_request_headers_sent: _TracingSignal[TraceRequestHeadersSentParams] = ( Signal(self) ) - self._on_request_headers_sent: Signal[ - _SignalCallback[TraceRequestHeadersSentParams] - ] = Signal(self) self._trace_config_ctx_factory = trace_config_ctx_factory @@ -125,91 +110,91 @@ def freeze(self) -> None: self._on_request_headers_sent.freeze() @property - def on_request_start(self) -> "Signal[_SignalCallback[TraceRequestStartParams]]": + def on_request_start(self) -> "_TracingSignal[TraceRequestStartParams]": return self._on_request_start @property def on_request_chunk_sent( self, - ) -> "Signal[_SignalCallback[TraceRequestChunkSentParams]]": + ) -> "_TracingSignal[TraceRequestChunkSentParams]": return self._on_request_chunk_sent @property def on_response_chunk_received( self, - ) -> "Signal[_SignalCallback[TraceResponseChunkReceivedParams]]": + ) -> "_TracingSignal[TraceResponseChunkReceivedParams]": return self._on_response_chunk_received 
@property - def on_request_end(self) -> "Signal[_SignalCallback[TraceRequestEndParams]]": + def on_request_end(self) -> "_TracingSignal[TraceRequestEndParams]": return self._on_request_end @property def on_request_exception( self, - ) -> "Signal[_SignalCallback[TraceRequestExceptionParams]]": + ) -> "_TracingSignal[TraceRequestExceptionParams]": return self._on_request_exception @property def on_request_redirect( self, - ) -> "Signal[_SignalCallback[TraceRequestRedirectParams]]": + ) -> "_TracingSignal[TraceRequestRedirectParams]": return self._on_request_redirect @property def on_connection_queued_start( self, - ) -> "Signal[_SignalCallback[TraceConnectionQueuedStartParams]]": + ) -> "_TracingSignal[TraceConnectionQueuedStartParams]": return self._on_connection_queued_start @property def on_connection_queued_end( self, - ) -> "Signal[_SignalCallback[TraceConnectionQueuedEndParams]]": + ) -> "_TracingSignal[TraceConnectionQueuedEndParams]": return self._on_connection_queued_end @property def on_connection_create_start( self, - ) -> "Signal[_SignalCallback[TraceConnectionCreateStartParams]]": + ) -> "_TracingSignal[TraceConnectionCreateStartParams]": return self._on_connection_create_start @property def on_connection_create_end( self, - ) -> "Signal[_SignalCallback[TraceConnectionCreateEndParams]]": + ) -> "_TracingSignal[TraceConnectionCreateEndParams]": return self._on_connection_create_end @property def on_connection_reuseconn( self, - ) -> "Signal[_SignalCallback[TraceConnectionReuseconnParams]]": + ) -> "_TracingSignal[TraceConnectionReuseconnParams]": return self._on_connection_reuseconn @property def on_dns_resolvehost_start( self, - ) -> "Signal[_SignalCallback[TraceDnsResolveHostStartParams]]": + ) -> "_TracingSignal[TraceDnsResolveHostStartParams]": return self._on_dns_resolvehost_start @property def on_dns_resolvehost_end( self, - ) -> "Signal[_SignalCallback[TraceDnsResolveHostEndParams]]": + ) -> "_TracingSignal[TraceDnsResolveHostEndParams]": return 
self._on_dns_resolvehost_end @property - def on_dns_cache_hit(self) -> "Signal[_SignalCallback[TraceDnsCacheHitParams]]": + def on_dns_cache_hit(self) -> "_TracingSignal[TraceDnsCacheHitParams]": return self._on_dns_cache_hit @property - def on_dns_cache_miss(self) -> "Signal[_SignalCallback[TraceDnsCacheMissParams]]": + def on_dns_cache_miss(self) -> "_TracingSignal[TraceDnsCacheMissParams]": return self._on_dns_cache_miss @property def on_request_headers_sent( self, - ) -> "Signal[_SignalCallback[TraceRequestHeadersSentParams]]": + ) -> "_TracingSignal[TraceRequestHeadersSentParams]": return self._on_request_headers_sent diff --git a/aiohttp/web_app.py b/aiohttp/web_app.py index 854f9bce88d..619c0085da1 100644 --- a/aiohttp/web_app.py +++ b/aiohttp/web_app.py @@ -62,8 +62,8 @@ if TYPE_CHECKING: - _AppSignal = Signal[Callable[["Application"], Awaitable[None]]] - _RespPrepareSignal = Signal[Callable[[Request, StreamResponse], Awaitable[None]]] + _AppSignal = Signal["Application"] + _RespPrepareSignal = Signal[Request, StreamResponse] _Middlewares = FrozenList[Middleware] _MiddlewaresHandlers = Optional[Sequence[Tuple[Middleware, bool]]] _Subapps = List["Application"] diff --git a/requirements/base.txt b/requirements/base.txt index 2cd73f52418..74f528d67bc 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -8,7 +8,7 @@ aiodns==3.4.0 # via -r requirements/runtime-deps.in aiohappyeyeballs==2.6.1 # via -r requirements/runtime-deps.in -aiosignal==1.3.2 +aiosignal==1.4.0 # via -r requirements/runtime-deps.in async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in @@ -40,8 +40,10 @@ pycares==4.8.0 # via aiodns pycparser==2.22 # via cffi -typing-extensions==4.13.2 - # via multidict +typing-extensions==4.14.0 + # via + # aiosignal + # multidict uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpython" winloop==0.1.8; platform_system == "Windows" and implementation_name == "cpython" # via -r 
requirements/base.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 9bcdeb5ff8b..4457788efc0 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -12,7 +12,7 @@ aiohappyeyeballs==2.6.1 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.7 # via -r requirements/doc.in -aiosignal==1.3.2 +aiosignal==1.4.0 # via -r requirements/runtime-deps.in alabaster==1.0.0 # via sphinx @@ -266,6 +266,7 @@ trustme==1.2.1 ; platform_machine != "i686" # -r requirements/test.in typing-extensions==4.13.2 # via + # aiosignal # exceptiongroup # multidict # mypy diff --git a/requirements/dev.txt b/requirements/dev.txt index 26728928cee..c9ab0cb822b 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -12,7 +12,7 @@ aiohappyeyeballs==2.6.1 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.7 # via -r requirements/doc.in -aiosignal==1.3.2 +aiosignal==1.4.0 # via -r requirements/runtime-deps.in alabaster==1.0.0 # via sphinx @@ -257,6 +257,7 @@ trustme==1.2.1 ; platform_machine != "i686" # -r requirements/test.in typing-extensions==4.13.2 # via + # aiosignal # exceptiongroup # multidict # mypy diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 58263ab61ed..4dca87c1362 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -8,7 +8,7 @@ aiodns==3.4.0 # via -r requirements/runtime-deps.in aiohappyeyeballs==2.6.1 # via -r requirements/runtime-deps.in -aiosignal==1.3.2 +aiosignal==1.4.0 # via -r requirements/runtime-deps.in async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in @@ -36,7 +36,8 @@ pycares==4.8.0 # via aiodns pycparser==2.22 # via cffi -typing-extensions==4.13.2 - # via multidict -yarl==1.20.0 +typing-extensions==4.14.0 + # via + # aiosignal + # multidict # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 007852dbcaa..b1ff140b7cc 100644 --- a/requirements/test.txt +++ 
b/requirements/test.txt @@ -8,7 +8,7 @@ aiodns==3.4.0 # via -r requirements/runtime-deps.in aiohappyeyeballs==2.6.1 # via -r requirements/runtime-deps.in -aiosignal==1.3.2 +aiosignal==1.4.0 # via -r requirements/runtime-deps.in annotated-types==0.7.0 # via pydantic @@ -129,6 +129,7 @@ trustme==1.2.1 ; platform_machine != "i686" # via -r requirements/test.in typing-extensions==4.13.2 # via + # aiosignal # exceptiongroup # multidict # mypy From 03893711d35f3588a7e8891ffbf2b5a6d3319fae Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 7 Jul 2025 21:31:55 +0100 Subject: [PATCH 1507/1511] [PR #11280/91108c90 backport][3.12] Bump the minimum supported version of aiosignal to 1.4 (#11281) **This is a backport of PR #11280 as merged into master (91108c905f6265bd19e8d1aafbaf2826a33180d2).** Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/11280.misc.rst | 1 + requirements/runtime-deps.in | 2 +- setup.cfg | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 CHANGES/11280.misc.rst diff --git a/CHANGES/11280.misc.rst b/CHANGES/11280.misc.rst new file mode 100644 index 00000000000..6750918bda7 --- /dev/null +++ b/CHANGES/11280.misc.rst @@ -0,0 +1 @@ +Bumped minimum version of aiosignal to 1.4+ to resolve typing issues -- by :user:`Dreamsorcerer`. 
diff --git a/requirements/runtime-deps.in b/requirements/runtime-deps.in index 7b0382a7a2b..d748eab9fac 100644 --- a/requirements/runtime-deps.in +++ b/requirements/runtime-deps.in @@ -2,7 +2,7 @@ aiodns >= 3.3.0 aiohappyeyeballs >= 2.5.0 -aiosignal >= 1.1.2 +aiosignal >= 1.4.0 async-timeout >= 4.0, < 6.0 ; python_version < "3.11" attrs >= 17.3.0 Brotli; platform_python_implementation == 'CPython' diff --git a/setup.cfg b/setup.cfg index 4adfde579a0..1f70301856b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -50,7 +50,7 @@ include_package_data = True install_requires = aiohappyeyeballs >= 2.5.0 - aiosignal >= 1.1.2 + aiosignal >= 1.4.0 async-timeout >= 4.0, < 6.0 ; python_version < "3.11" attrs >= 17.3.0 frozenlist >= 1.1.1 From e8d774f635dc6d1cd3174d0e38891da5de0e2b6a Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Wed, 9 Jul 2025 19:55:22 +0100 Subject: [PATCH 1508/1511] Add trailer parsing logic (#11269) (#11287) (cherry picked from commit 7dd4b5535e6bf9c2d2f05fde638517bff065ba74) --- CHANGES/11269.feature.rst | 1 + aiohttp/http_parser.py | 70 +++++++++--------- aiohttp/multipart.py | 2 +- tests/test_http_parser.py | 148 ++++++++++++++++---------------------- 4 files changed, 100 insertions(+), 121 deletions(-) create mode 100644 CHANGES/11269.feature.rst diff --git a/CHANGES/11269.feature.rst b/CHANGES/11269.feature.rst new file mode 100644 index 00000000000..92cf173be14 --- /dev/null +++ b/CHANGES/11269.feature.rst @@ -0,0 +1 @@ +Added initial trailer parsing logic to Python HTTP parser -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index db61ab5264c..9f864b27876 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -142,8 +142,8 @@ def parse_headers( # note: "raw" does not mean inclusion of OWS before/after the field value raw_headers = [] - lines_idx = 1 - line = lines[1] + lines_idx = 0 + line = lines[lines_idx] line_count = len(lines) while line: @@ -400,6 +400,7 @@ def get_content_length() -> Optional[int]: response_with_body=self.response_with_body, auto_decompress=self._auto_decompress, lax=self.lax, + headers_parser=self._headers_parser, ) if not payload_parser.done: self._payload_parser = payload_parser @@ -418,6 +419,7 @@ def get_content_length() -> Optional[int]: compression=msg.compression, auto_decompress=self._auto_decompress, lax=self.lax, + headers_parser=self._headers_parser, ) elif not empty_body and length is None and self.read_until_eof: payload = StreamReader( @@ -436,6 +438,7 @@ def get_content_length() -> Optional[int]: response_with_body=self.response_with_body, auto_decompress=self._auto_decompress, lax=self.lax, + headers_parser=self._headers_parser, ) if not payload_parser.done: self._payload_parser = payload_parser @@ -473,6 +476,10 @@ def get_content_length() -> Optional[int]: eof = True data = b"" + if isinstance( + underlying_exc, (InvalidHeader, TransferEncodingError) + ): + raise if eof: start_pos = 0 @@ -635,7 +642,7 @@ def parse_message(self, lines: List[bytes]) -> RawRequestMessage: compression, upgrade, chunked, - ) = self.parse_headers(lines) + ) = self.parse_headers(lines[1:]) if close is None: # then the headers weren't set in the request if version_o <= HttpVersion10: # HTTP 1.0 must asks to not close @@ -721,7 +728,7 @@ def parse_message(self, lines: List[bytes]) -> RawResponseMessage: compression, upgrade, chunked, - ) = self.parse_headers(lines) + ) = self.parse_headers(lines[1:]) if close is None: if version_o <= HttpVersion10: @@ -764,6 +771,8 @@ def __init__( 
response_with_body: bool = True, auto_decompress: bool = True, lax: bool = False, + *, + headers_parser: HeadersParser, ) -> None: self._length = 0 self._type = ParseState.PARSE_UNTIL_EOF @@ -772,6 +781,8 @@ def __init__( self._chunk_tail = b"" self._auto_decompress = auto_decompress self._lax = lax + self._headers_parser = headers_parser + self._trailer_lines: list[bytes] = [] self.done = False # payload decompression wrapper @@ -848,7 +859,7 @@ def feed_data( size_b = chunk[:i] # strip chunk-extensions # Verify no LF in the chunk-extension if b"\n" in (ext := chunk[i:pos]): - exc = BadHttpMessage( + exc = TransferEncodingError( f"Unexpected LF in chunk-extension: {ext!r}" ) set_exception(self.payload, exc) @@ -869,7 +880,7 @@ def feed_data( chunk = chunk[pos + len(SEP) :] if size == 0: # eof marker - self._chunk = ChunkState.PARSE_MAYBE_TRAILERS + self._chunk = ChunkState.PARSE_TRAILERS if self._lax and chunk.startswith(b"\r"): chunk = chunk[1:] else: @@ -907,38 +918,31 @@ def feed_data( self._chunk_tail = chunk return False, b"" - # if stream does not contain trailer, after 0\r\n - # we should get another \r\n otherwise - # trailers needs to be skipped until \r\n\r\n - if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS: - head = chunk[: len(SEP)] - if head == SEP: - # end of stream - self.payload.feed_eof() - return True, chunk[len(SEP) :] - # Both CR and LF, or only LF may not be received yet. It is - # expected that CRLF or LF will be shown at the very first - # byte next time, otherwise trailers should come. The last - # CRLF which marks the end of response might not be - # contained in the same TCP segment which delivered the - # size indicator. 
- if not head: - return False, b"" - if head == SEP[:1]: - self._chunk_tail = head - return False, b"" - self._chunk = ChunkState.PARSE_TRAILERS - - # read and discard trailer up to the CRLF terminator if self._chunk == ChunkState.PARSE_TRAILERS: pos = chunk.find(SEP) - if pos >= 0: - chunk = chunk[pos + len(SEP) :] - self._chunk = ChunkState.PARSE_MAYBE_TRAILERS - else: + if pos < 0: # No line found self._chunk_tail = chunk return False, b"" + line = chunk[:pos] + chunk = chunk[pos + len(SEP) :] + if SEP == b"\n": # For lax response parsing + line = line.rstrip(b"\r") + self._trailer_lines.append(line) + + # \r\n\r\n found, end of stream + if self._trailer_lines[-1] == b"": + # Headers and trailers are defined the same way, + # so we reuse the HeadersParser here. + try: + trailers, raw_trailers = self._headers_parser.parse_headers( + self._trailer_lines + ) + finally: + self._trailer_lines.clear() + self.payload.feed_eof() + return True, chunk + # Read all bytes until eof elif self._type == ParseState.PARSE_UNTIL_EOF: self.payload.feed_data(chunk, len(chunk)) diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index 79f8481ee30..02605146720 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -777,7 +777,7 @@ async def _read_boundary(self) -> None: raise ValueError(f"Invalid boundary {chunk!r}, expected {self._boundary!r}") async def _read_headers(self) -> "CIMultiDictProxy[str]": - lines = [b""] + lines = [] while True: chunk = await self._content.readline() chunk = chunk.strip() diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index 58fef625f82..385452c1cfb 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -17,6 +17,7 @@ from aiohttp.http_parser import ( NO_EXTENSIONS, DeflateBuffer, + HeadersParser, HttpPayloadParser, HttpRequestParser, HttpRequestParserPy, @@ -244,41 +245,13 @@ def test_content_length_transfer_encoding(parser: Any) -> None: parser.feed_data(text) -def test_bad_chunked_py(loop: Any, 
protocol: Any) -> None: +def test_bad_chunked(parser: HttpRequestParser) -> None: """Test that invalid chunked encoding doesn't allow content-length to be used.""" - parser = HttpRequestParserPy( - protocol, - loop, - 2**16, - max_line_size=8190, - max_field_size=8190, - ) - text = ( - b"GET / HTTP/1.1\r\nHost: a\r\nTransfer-Encoding: chunked\r\n\r\n0_2e\r\n\r\n" - + b"GET / HTTP/1.1\r\nHost: a\r\nContent-Length: 5\r\n\r\n0\r\n\r\n" - ) - messages, upgrade, tail = parser.feed_data(text) - assert isinstance(messages[0][1].exception(), http_exceptions.TransferEncodingError) - - -@pytest.mark.skipif( - "HttpRequestParserC" not in dir(aiohttp.http_parser), - reason="C based HTTP parser not available", -) -def test_bad_chunked_c(loop: Any, protocol: Any) -> None: - """C parser behaves differently. Maybe we should align them later.""" - parser = HttpRequestParserC( - protocol, - loop, - 2**16, - max_line_size=8190, - max_field_size=8190, - ) text = ( b"GET / HTTP/1.1\r\nHost: a\r\nTransfer-Encoding: chunked\r\n\r\n0_2e\r\n\r\n" + b"GET / HTTP/1.1\r\nHost: a\r\nContent-Length: 5\r\n\r\n0\r\n\r\n" ) - with pytest.raises(http_exceptions.BadHttpMessage): + with pytest.raises(http_exceptions.BadHttpMessage, match="0_2e"): parser.feed_data(text) @@ -1158,8 +1131,8 @@ async def test_http_response_parser_bad_chunked_strict_py(loop, protocol) -> Non text = ( b"HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked\r\n\r\n5 \r\nabcde\r\n0\r\n\r\n" ) - messages, upgrade, tail = response.feed_data(text) - assert isinstance(messages[0][1].exception(), http_exceptions.TransferEncodingError) + with pytest.raises(http_exceptions.TransferEncodingError, match="5"): + response.feed_data(text) @pytest.mark.dev_mode @@ -1295,7 +1268,27 @@ def test_parse_chunked_payload_chunk_extension(parser) -> None: assert payload.is_eof() -def test_parse_no_length_or_te_on_post(loop: Any, protocol: Any, request_cls: Any): +async def test_request_chunked_with_trailer(parser: HttpRequestParser) -> None: + text = 
b"GET /test HTTP/1.1\r\nTransfer-Encoding: chunked\r\n\r\n4\r\ntest\r\n0\r\ntest: trailer\r\nsecond: test trailer\r\n\r\n" + messages, upgraded, tail = parser.feed_data(text) + assert not tail + msg, payload = messages[0] + assert await payload.read() == b"test" + + # TODO: Add assertion of trailers when API added. + + +async def test_request_chunked_reject_bad_trailer(parser: HttpRequestParser) -> None: + text = b"GET /test HTTP/1.1\r\nTransfer-Encoding: chunked\r\n\r\n0\r\nbad\ntrailer\r\n\r\n" + with pytest.raises(http_exceptions.BadHttpMessage, match=r"b'bad\\ntrailer'"): + parser.feed_data(text) + + +def test_parse_no_length_or_te_on_post( + loop: asyncio.AbstractEventLoop, + protocol: BaseProtocol, + request_cls: type[HttpRequestParser], +) -> None: parser = request_cls(protocol, loop, limit=2**16) text = b"POST /test HTTP/1.1\r\n\r\n" msg, payload = parser.feed_data(text)[0][0] @@ -1478,19 +1471,10 @@ async def test_parse_chunked_payload_split_chunks(response: Any) -> None: assert await reader.read() == b"firstsecond" -@pytest.mark.skipif(NO_EXTENSIONS, reason="Only tests C parser.") -async def test_parse_chunked_payload_with_lf_in_extensions_c_parser( - loop: asyncio.AbstractEventLoop, protocol: BaseProtocol +async def test_parse_chunked_payload_with_lf_in_extensions( + parser: HttpRequestParser, ) -> None: - """Test the C-parser with a chunked payload that has a LF in the chunk extensions.""" - # The C parser will raise a BadHttpMessage from feed_data - parser = HttpRequestParserC( - protocol, - loop, - 2**16, - max_line_size=8190, - max_field_size=8190, - ) + """Test chunked payload that has a LF in the chunk extensions.""" payload = ( b"GET / HTTP/1.1\r\nHost: localhost:5001\r\n" b"Transfer-Encoding: chunked\r\n\r\n2;\nxx\r\n4c\r\n0\r\n\r\n" @@ -1501,31 +1485,6 @@ async def test_parse_chunked_payload_with_lf_in_extensions_c_parser( parser.feed_data(payload) -async def test_parse_chunked_payload_with_lf_in_extensions_py_parser( - loop: 
asyncio.AbstractEventLoop, protocol: BaseProtocol -) -> None: - """Test the py-parser with a chunked payload that has a LF in the chunk extensions.""" - # The py parser will not raise the BadHttpMessage directly, but instead - # it will set the exception on the StreamReader. - parser = HttpRequestParserPy( - protocol, - loop, - 2**16, - max_line_size=8190, - max_field_size=8190, - ) - payload = ( - b"GET / HTTP/1.1\r\nHost: localhost:5001\r\n" - b"Transfer-Encoding: chunked\r\n\r\n2;\nxx\r\n4c\r\n0\r\n\r\n" - b"GET /admin HTTP/1.1\r\nHost: localhost:5001\r\n" - b"Transfer-Encoding: chunked\r\n\r\n0\r\n\r\n" - ) - messages, _, _ = parser.feed_data(payload) - reader = messages[0][1] - assert isinstance(reader.exception(), http_exceptions.BadHttpMessage) - assert "\\nxx" in str(reader.exception()) - - def test_partial_url(parser: HttpRequestParser) -> None: messages, upgrade, tail = parser.feed_data(b"GET /te") assert len(messages) == 0 @@ -1612,7 +1571,7 @@ def test_parse_bad_method_for_c_parser_raises(loop, protocol): class TestParsePayload: async def test_parse_eof_payload(self, protocol: BaseProtocol) -> None: out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) - p = HttpPayloadParser(out) + p = HttpPayloadParser(out, headers_parser=HeadersParser()) p.feed_data(b"data") p.feed_eof() @@ -1622,7 +1581,7 @@ async def test_parse_eof_payload(self, protocol: BaseProtocol) -> None: async def test_parse_length_payload_eof(self, protocol: BaseProtocol) -> None: out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) - p = HttpPayloadParser(out, length=4) + p = HttpPayloadParser(out, length=4, headers_parser=HeadersParser()) p.feed_data(b"da") with pytest.raises(http_exceptions.ContentLengthError): @@ -1632,7 +1591,7 @@ async def test_parse_chunked_payload_size_error( self, protocol: BaseProtocol ) -> None: out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) - p = HttpPayloadParser(out, chunked=True) + p 
= HttpPayloadParser(out, chunked=True, headers_parser=HeadersParser()) with pytest.raises(http_exceptions.TransferEncodingError): p.feed_data(b"blah\r\n") assert isinstance(out.exception(), http_exceptions.TransferEncodingError) @@ -1641,7 +1600,7 @@ async def test_parse_chunked_payload_split_end( self, protocol: BaseProtocol ) -> None: out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) - p = HttpPayloadParser(out, chunked=True) + p = HttpPayloadParser(out, chunked=True, headers_parser=HeadersParser()) p.feed_data(b"4\r\nasdf\r\n0\r\n") p.feed_data(b"\r\n") @@ -1652,7 +1611,7 @@ async def test_parse_chunked_payload_split_end2( self, protocol: BaseProtocol ) -> None: out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) - p = HttpPayloadParser(out, chunked=True) + p = HttpPayloadParser(out, chunked=True, headers_parser=HeadersParser()) p.feed_data(b"4\r\nasdf\r\n0\r\n\r") p.feed_data(b"\n") @@ -1663,7 +1622,7 @@ async def test_parse_chunked_payload_split_end_trailers( self, protocol: BaseProtocol ) -> None: out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) - p = HttpPayloadParser(out, chunked=True) + p = HttpPayloadParser(out, chunked=True, headers_parser=HeadersParser()) p.feed_data(b"4\r\nasdf\r\n0\r\n") p.feed_data(b"Content-MD5: 912ec803b2ce49e4a541068d495ab570\r\n") p.feed_data(b"\r\n") @@ -1675,7 +1634,7 @@ async def test_parse_chunked_payload_split_end_trailers2( self, protocol: BaseProtocol ) -> None: out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) - p = HttpPayloadParser(out, chunked=True) + p = HttpPayloadParser(out, chunked=True, headers_parser=HeadersParser()) p.feed_data(b"4\r\nasdf\r\n0\r\n") p.feed_data(b"Content-MD5: 912ec803b2ce49e4a541068d495ab570\r\n\r") p.feed_data(b"\n") @@ -1687,7 +1646,7 @@ async def test_parse_chunked_payload_split_end_trailers3( self, protocol: BaseProtocol ) -> None: out = aiohttp.StreamReader(protocol, 2**16, 
loop=asyncio.get_running_loop()) - p = HttpPayloadParser(out, chunked=True) + p = HttpPayloadParser(out, chunked=True, headers_parser=HeadersParser()) p.feed_data(b"4\r\nasdf\r\n0\r\nContent-MD5: ") p.feed_data(b"912ec803b2ce49e4a541068d495ab570\r\n\r\n") @@ -1698,7 +1657,7 @@ async def test_parse_chunked_payload_split_end_trailers4( self, protocol: BaseProtocol ) -> None: out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) - p = HttpPayloadParser(out, chunked=True) + p = HttpPayloadParser(out, chunked=True, headers_parser=HeadersParser()) p.feed_data(b"4\r\nasdf\r\n0\r\nC") p.feed_data(b"ontent-MD5: 912ec803b2ce49e4a541068d495ab570\r\n\r\n") @@ -1707,7 +1666,7 @@ async def test_parse_chunked_payload_split_end_trailers4( async def test_http_payload_parser_length(self, protocol: BaseProtocol) -> None: out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) - p = HttpPayloadParser(out, length=2) + p = HttpPayloadParser(out, length=2, headers_parser=HeadersParser()) eof, tail = p.feed_data(b"1245") assert eof @@ -1720,7 +1679,9 @@ async def test_http_payload_parser_deflate(self, protocol: BaseProtocol) -> None length = len(COMPRESSED) out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) - p = HttpPayloadParser(out, length=length, compression="deflate") + p = HttpPayloadParser( + out, length=length, compression="deflate", headers_parser=HeadersParser() + ) p.feed_data(COMPRESSED) assert b"data" == out._buffer[0] assert out.is_eof() @@ -1734,7 +1695,9 @@ async def test_http_payload_parser_deflate_no_hdrs( length = len(COMPRESSED) out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) - p = HttpPayloadParser(out, length=length, compression="deflate") + p = HttpPayloadParser( + out, length=length, compression="deflate", headers_parser=HeadersParser() + ) p.feed_data(COMPRESSED) assert b"data" == out._buffer[0] assert out.is_eof() @@ -1747,7 +1710,9 @@ async def 
test_http_payload_parser_deflate_light( length = len(COMPRESSED) out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) - p = HttpPayloadParser(out, length=length, compression="deflate") + p = HttpPayloadParser( + out, length=length, compression="deflate", headers_parser=HeadersParser() + ) p.feed_data(COMPRESSED) assert b"data" == out._buffer[0] @@ -1757,7 +1722,9 @@ async def test_http_payload_parser_deflate_split( self, protocol: BaseProtocol ) -> None: out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) - p = HttpPayloadParser(out, compression="deflate") + p = HttpPayloadParser( + out, compression="deflate", headers_parser=HeadersParser() + ) # Feeding one correct byte should be enough to choose exact # deflate decompressor p.feed_data(b"x") @@ -1769,7 +1736,9 @@ async def test_http_payload_parser_deflate_split_err( self, protocol: BaseProtocol ) -> None: out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) - p = HttpPayloadParser(out, compression="deflate") + p = HttpPayloadParser( + out, compression="deflate", headers_parser=HeadersParser() + ) # Feeding one wrong byte should be enough to choose exact # deflate decompressor p.feed_data(b"K") @@ -1781,7 +1750,7 @@ async def test_http_payload_parser_length_zero( self, protocol: BaseProtocol ) -> None: out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) - p = HttpPayloadParser(out, length=0) + p = HttpPayloadParser(out, length=0, headers_parser=HeadersParser()) assert p.done assert out.is_eof() @@ -1789,7 +1758,12 @@ async def test_http_payload_parser_length_zero( async def test_http_payload_brotli(self, protocol: BaseProtocol) -> None: compressed = brotli.compress(b"brotli data") out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) - p = HttpPayloadParser(out, length=len(compressed), compression="br") + p = HttpPayloadParser( + out, + length=len(compressed), + compression="br", + 
headers_parser=HeadersParser(), + ) p.feed_data(compressed) assert b"brotli data" == out._buffer[0] assert out.is_eof() From edf2abd2609a24cf1e7ac76da986af363aebf210 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 10 Jul 2025 00:42:05 +0100 Subject: [PATCH 1509/1511] [PR #11289/e38220fc backport][3.12] Fix ClientSession.close() hanging with HTTPS proxy connections (#11291) **This is a backport of PR #11289 as merged into master (e38220fc4ed59c9de0dbe23da48e9cfd287c2ed7).** --------- Co-authored-by: J. Nick Koston <nick@koston.org> --- CHANGES/11273.bugfix.rst | 1 + aiohttp/connector.py | 22 ++++++++++++++++- tests/test_connector.py | 29 ++++++++++++++++++++++ tests/test_proxy_functional.py | 45 ++++++++++++++++++++++++++++++++++ 4 files changed, 96 insertions(+), 1 deletion(-) create mode 100644 CHANGES/11273.bugfix.rst diff --git a/CHANGES/11273.bugfix.rst b/CHANGES/11273.bugfix.rst new file mode 100644 index 00000000000..b4d9948fbcd --- /dev/null +++ b/CHANGES/11273.bugfix.rst @@ -0,0 +1 @@ +Fixed :py:meth:`ClientSession.close() <aiohttp.ClientSession.close>` hanging indefinitely when using HTTPS requests through HTTP proxies -- by :user:`bdraco`. diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 4479ae321bc..0fbacde3b42 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -229,6 +229,26 @@ def closed(self) -> bool: return self._protocol is None or not self._protocol.is_connected() +class _ConnectTunnelConnection(Connection): + """Special connection wrapper for CONNECT tunnels that must never be pooled. + + This connection wraps the proxy connection that will be upgraded with TLS. + It must never be released to the pool because: + 1. Its 'closed' future will never complete, causing session.close() to hang + 2. It represents an intermediate state, not a reusable connection + 3. 
The real connection (with TLS) will be created separately + """ + + def release(self) -> None: + """Do nothing - don't pool or close the connection. + + These connections are an intermediate state during the CONNECT tunnel + setup and will be cleaned up naturally after the TLS upgrade. If they + were to be pooled, they would never be properly closed, causing + session.close() to wait forever for their 'closed' future. + """ + + class _TransportPlaceholder: """placeholder for BaseConnector.connect function""" @@ -1612,7 +1632,7 @@ async def _create_proxy_connection( key = req.connection_key._replace( proxy=None, proxy_auth=None, proxy_headers_hash=None ) - conn = Connection(self, key, proto, self._loop) + conn = _ConnectTunnelConnection(self, key, proto, self._loop) proxy_resp = await proxy_req.send(conn) try: protocol = conn._protocol diff --git a/tests/test_connector.py b/tests/test_connector.py index c7938ed08e4..9932dee581b 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -40,6 +40,7 @@ AddrInfoType, Connection, TCPConnector, + _ConnectTunnelConnection, _DNSCacheTable, ) from aiohttp.resolver import ResolveResult @@ -4311,3 +4312,31 @@ async def test_available_connections_no_limits( connection1.close() assert conn._available_connections(key) == 1 assert conn._available_connections(other_host_key2) == 1 + + +async def test_connect_tunnel_connection_release( + loop: asyncio.AbstractEventLoop, +) -> None: + """Test _ConnectTunnelConnection.release() does not pool the connection.""" + connector = mock.create_autospec( + aiohttp.BaseConnector, spec_set=True, instance=True + ) + key = mock.create_autospec(ConnectionKey, spec_set=True, instance=True) + protocol = mock.create_autospec(ResponseHandler, spec_set=True, instance=True) + + # Create a connect tunnel connection + conn = _ConnectTunnelConnection(connector, key, protocol, loop) + + # Verify protocol is set + assert conn._protocol is protocol + + # Release should do nothing (not pool the 
connection) + conn.release() + + # Protocol should still be there (not released to pool) + assert conn._protocol is protocol + # Connector._release should NOT have been called + connector._release.assert_not_called() + + # Clean up to avoid resource warning + conn.close() diff --git a/tests/test_proxy_functional.py b/tests/test_proxy_functional.py index 5b33ed6ca3b..f4bc020d1f0 100644 --- a/tests/test_proxy_functional.py +++ b/tests/test_proxy_functional.py @@ -4,6 +4,7 @@ import platform import ssl import sys +from contextlib import suppress from re import match as match_regex from typing import Awaitable, Callable from unittest import mock @@ -17,6 +18,7 @@ from aiohttp import ClientResponse, web from aiohttp.client_exceptions import ClientConnectionError from aiohttp.helpers import IS_MACOS, IS_WINDOWS +from aiohttp.pytest_plugin import AiohttpServer ASYNCIO_SUPPORTS_TLS_IN_TLS = sys.version_info >= (3, 11) @@ -884,3 +886,46 @@ async def test_proxy_auth() -> None: proxy_auth=("user", "pass"), ): pass + + +async def test_https_proxy_connect_tunnel_session_close_no_hang( + aiohttp_server: AiohttpServer, +) -> None: + """Test that CONNECT tunnel connections are not pooled.""" + # Regression test for issue #11273. 
+ + # Create a minimal proxy server + # The CONNECT method is handled at the protocol level, not by the handler + proxy_app = web.Application() + proxy_server = await aiohttp_server(proxy_app) + proxy_url = f"http://{proxy_server.host}:{proxy_server.port}" + + # Create session and make HTTPS request through proxy + session = aiohttp.ClientSession() + + try: + # This will fail during TLS upgrade because proxy doesn't establish tunnel + with suppress(aiohttp.ClientError): + async with session.get("https://example.com/test", proxy=proxy_url) as resp: + await resp.read() + + # The critical test: Check if any connections were pooled with proxy=None + # This is the root cause of the hang - CONNECT tunnel connections + # should NOT be pooled + connector = session.connector + assert connector is not None + + # Count connections with proxy=None in the pool + proxy_none_keys = [key for key in connector._conns if key.proxy is None] + proxy_none_count = len(proxy_none_keys) + + # Before the fix, there would be a connection with proxy=None + # After the fix, CONNECT tunnel connections are not pooled + assert proxy_none_count == 0, ( + f"Found {proxy_none_count} connections with proxy=None in pool. " + f"CONNECT tunnel connections should not be pooled - this is bug #11273" + ) + + finally: + # Clean close + await session.close() From 13b20a1b0af87b86816355a9090de191723858fc Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 10 Jul 2025 12:41:06 +0100 Subject: [PATCH 1510/1511] [PR #11290/16703bb9 backport][3.12] Fix file uploads failing with HTTP 422 on 307/308 redirects (#11296) **This is a backport of PR #11290 as merged into master (16703bb955ae4a11a131cedbbbf3ec7aa55f4bb4).** --------- Co-authored-by: J. 
Nick Koston <nick@koston.org> --- CHANGES/11270.bugfix.rst | 1 + aiohttp/client.py | 6 + aiohttp/payload.py | 31 ++++- tests/test_client_functional.py | 225 ++++++++++++++++++++++++++++++++ tests/test_payload.py | 76 +++++++++++ 5 files changed, 335 insertions(+), 4 deletions(-) create mode 100644 CHANGES/11270.bugfix.rst diff --git a/CHANGES/11270.bugfix.rst b/CHANGES/11270.bugfix.rst new file mode 100644 index 00000000000..d1e0992b949 --- /dev/null +++ b/CHANGES/11270.bugfix.rst @@ -0,0 +1 @@ +Fixed file uploads failing with HTTP 422 errors when encountering 307/308 redirects, and 301/302 redirects for non-POST methods, by preserving the request body when appropriate per :rfc:`9110#section-15.4.3-3.1` -- by :user:`bdraco`. diff --git a/aiohttp/client.py b/aiohttp/client.py index 26492cd15fe..0c72d5948ce 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -821,6 +821,12 @@ async def _connect_and_send_request( data = None if headers.get(hdrs.CONTENT_LENGTH): headers.pop(hdrs.CONTENT_LENGTH) + else: + # For 307/308, always preserve the request body + # For 301/302 with non-POST methods, preserve the request body + # https://www.rfc-editor.org/rfc/rfc9110#section-15.4.3-3.1 + # Use the existing payload to avoid recreating it from a potentially consumed file + data = req._body r_url = resp.headers.get(hdrs.LOCATION) or resp.headers.get( hdrs.URI diff --git a/aiohttp/payload.py b/aiohttp/payload.py index d119d9beefc..3affa710b63 100644 --- a/aiohttp/payload.py +++ b/aiohttp/payload.py @@ -486,10 +486,14 @@ def _set_or_restore_start_position(self) -> None: if self._start_position is None: try: self._start_position = self._value.tell() - except OSError: + except (OSError, AttributeError): self._consumed = True # Cannot seek, mark as consumed return - self._value.seek(self._start_position) + try: + self._value.seek(self._start_position) + except (OSError, AttributeError): + # Failed to seek back - mark as consumed since we've already read + self._consumed = True def 
_read_and_available_len( self, remaining_content_len: Optional[int] @@ -540,11 +544,30 @@ def size(self) -> Optional[int]: """ Size of the payload in bytes. - Returns the number of bytes remaining to be read from the file. + Returns the total size of the payload content from the initial position. + This ensures consistent Content-Length for requests, including 307/308 redirects + where the same payload instance is reused. + Returns None if the size cannot be determined (e.g., for unseekable streams). """ try: - return os.fstat(self._value.fileno()).st_size - self._value.tell() + # Store the start position on first access. + # This is critical when the same payload instance is reused (e.g., 307/308 + # redirects). Without storing the initial position, after the payload is + # read once, the file position would be at EOF, which would cause the + # size calculation to return 0 (file_size - EOF position). + # By storing the start position, we ensure the size calculation always + # returns the correct total size for any subsequent use. 
+ if self._start_position is None: + try: + self._start_position = self._value.tell() + except (OSError, AttributeError): + # Can't get position, can't determine size + return None + + # Return the total size from the start position + # This ensures Content-Length is correct even after reading + return os.fstat(self._value.fileno()).st_size - self._start_position except (AttributeError, OSError): return None diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 08cc5c97538..230d47389c5 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -5286,3 +5286,228 @@ async def handler(request: web.Request) -> web.Response: assert ( len(resp._raw_cookie_headers) == 12 ), "All raw headers should be preserved" + + +@pytest.mark.parametrize("status", (307, 308)) +async def test_file_upload_307_308_redirect( + aiohttp_client: AiohttpClient, tmp_path: pathlib.Path, status: int +) -> None: + """Test that file uploads work correctly with 307/308 redirects. + + This demonstrates the bug where file payloads get incorrect Content-Length + on redirect because the file position isn't reset. 
+ """ + received_bodies: list[bytes] = [] + + async def handler(request: web.Request) -> web.Response: + # Store the body content + body = await request.read() + received_bodies.append(body) + + if str(request.url.path).endswith("/"): + # Redirect URLs ending with / to remove the trailing slash + return web.Response( + status=status, + headers={ + "Location": str(request.url.with_path(request.url.path.rstrip("/"))) + }, + ) + + # Return success with the body size + return web.json_response( + { + "received_size": len(body), + "content_length": request.headers.get("Content-Length"), + } + ) + + app = web.Application() + app.router.add_post("/upload/", handler) + app.router.add_post("/upload", handler) + + client = await aiohttp_client(app) + + # Create a test file + test_file = tmp_path / f"test_upload_{status}.txt" + content = b"This is test file content for upload." + await asyncio.to_thread(test_file.write_bytes, content) + expected_size = len(content) + + # Upload file to URL with trailing slash (will trigger redirect) + f = await asyncio.to_thread(open, test_file, "rb") + try: + async with client.post("/upload/", data=f) as resp: + assert resp.status == 200 + result = await resp.json() + + # The server should receive the full file content + assert result["received_size"] == expected_size + assert result["content_length"] == str(expected_size) + + # Both requests should have received the same content + assert len(received_bodies) == 2 + assert received_bodies[0] == content # First request + assert received_bodies[1] == content # After redirect + finally: + await asyncio.to_thread(f.close) + + +@pytest.mark.parametrize("status", [301, 302]) +@pytest.mark.parametrize("method", ["PUT", "PATCH", "DELETE"]) +async def test_file_upload_301_302_redirect_non_post( + aiohttp_client: AiohttpClient, tmp_path: pathlib.Path, status: int, method: str +) -> None: + """Test that file uploads work correctly with 301/302 redirects for non-POST methods. 
+ + Per RFC 9110, 301/302 redirects should preserve the method and body for non-POST requests. + """ + received_bodies: list[bytes] = [] + + async def handler(request: web.Request) -> web.Response: + # Store the body content + body = await request.read() + received_bodies.append(body) + + if str(request.url.path).endswith("/"): + # Redirect URLs ending with / to remove the trailing slash + return web.Response( + status=status, + headers={ + "Location": str(request.url.with_path(request.url.path.rstrip("/"))) + }, + ) + + # Return success with the body size + return web.json_response( + { + "method": request.method, + "received_size": len(body), + "content_length": request.headers.get("Content-Length"), + } + ) + + app = web.Application() + app.router.add_route(method, "/upload/", handler) + app.router.add_route(method, "/upload", handler) + + client = await aiohttp_client(app) + + # Create a test file + test_file = tmp_path / f"test_upload_{status}_{method.lower()}.txt" + content = f"Test {method} file content for {status} redirect.".encode() + await asyncio.to_thread(test_file.write_bytes, content) + expected_size = len(content) + + # Upload file to URL with trailing slash (will trigger redirect) + f = await asyncio.to_thread(open, test_file, "rb") + try: + async with client.request(method, "/upload/", data=f) as resp: + assert resp.status == 200 + result = await resp.json() + + # The server should receive the full file content after redirect + assert result["method"] == method # Method should be preserved + assert result["received_size"] == expected_size + assert result["content_length"] == str(expected_size) + + # Both requests should have received the same content + assert len(received_bodies) == 2 + assert received_bodies[0] == content # First request + assert received_bodies[1] == content # After redirect + finally: + await asyncio.to_thread(f.close) + + +async def test_file_upload_307_302_redirect_chain( + aiohttp_client: AiohttpClient, tmp_path: 
pathlib.Path +) -> None: + """Test that file uploads work correctly with 307->302->200 redirect chain. + + This verifies that: + 1. 307 preserves POST method and file body + 2. 302 changes POST to GET and drops the body + 3. No body leaks to the final GET request + """ + received_requests: list[dict[str, Any]] = [] + + async def handler(request: web.Request) -> web.Response: + # Store request details + body = await request.read() + received_requests.append( + { + "path": str(request.url.path), + "method": request.method, + "body_size": len(body), + "content_length": request.headers.get("Content-Length"), + } + ) + + if request.url.path == "/upload307": + # First redirect: 307 should preserve method and body + return web.Response(status=307, headers={"Location": "/upload302"}) + elif request.url.path == "/upload302": + # Second redirect: 302 should change POST to GET + return web.Response(status=302, headers={"Location": "/final"}) + else: + # Final destination + return web.json_response( + { + "final_method": request.method, + "final_body_size": len(body), + "requests_received": len(received_requests), + } + ) + + app = web.Application() + app.router.add_route("*", "/upload307", handler) + app.router.add_route("*", "/upload302", handler) + app.router.add_route("*", "/final", handler) + + client = await aiohttp_client(app) + + # Create a test file + test_file = tmp_path / "test_redirect_chain.txt" + content = b"Test file content that should not leak to GET request" + await asyncio.to_thread(test_file.write_bytes, content) + expected_size = len(content) + + # Upload file to URL that triggers 307->302->final redirect chain + f = await asyncio.to_thread(open, test_file, "rb") + try: + async with client.post("/upload307", data=f) as resp: + assert resp.status == 200 + result = await resp.json() + + # Verify the redirect chain + assert len(resp.history) == 2 + assert resp.history[0].status == 307 + assert resp.history[1].status == 302 + + # Verify final request is GET 
with no body + assert result["final_method"] == "GET" + assert result["final_body_size"] == 0 + assert result["requests_received"] == 3 + + # Verify the request sequence + assert len(received_requests) == 3 + + # First request (307): POST with full body + assert received_requests[0]["path"] == "/upload307" + assert received_requests[0]["method"] == "POST" + assert received_requests[0]["body_size"] == expected_size + assert received_requests[0]["content_length"] == str(expected_size) + + # Second request (302): POST with preserved body from 307 + assert received_requests[1]["path"] == "/upload302" + assert received_requests[1]["method"] == "POST" + assert received_requests[1]["body_size"] == expected_size + assert received_requests[1]["content_length"] == str(expected_size) + + # Third request (final): GET with no body (302 changed method and dropped body) + assert received_requests[2]["path"] == "/final" + assert received_requests[2]["method"] == "GET" + assert received_requests[2]["body_size"] == 0 + assert received_requests[2]["content_length"] is None + + finally: + await asyncio.to_thread(f.close) diff --git a/tests/test_payload.py b/tests/test_payload.py index 2fd0a0f60d9..e749881cc82 100644 --- a/tests/test_payload.py +++ b/tests/test_payload.py @@ -1278,3 +1278,79 @@ def open_file() -> TextIO: assert len(writer.buffer) == utf16_file_size finally: await loop.run_in_executor(None, f.close) + + +async def test_iobase_payload_size_after_reading(tmp_path: Path) -> None: + """Test that IOBasePayload.size returns correct size after file has been read. + + This demonstrates the bug where size calculation doesn't account for + the current file position, causing issues with 307/308 redirects. + """ + # Create a test file with known content + test_file = tmp_path / "test.txt" + content = b"Hello, World! This is test content." 
+ await asyncio.to_thread(test_file.write_bytes, content) + expected_size = len(content) + + # Open the file and create payload + f = await asyncio.to_thread(open, test_file, "rb") + try: + p = payload.BufferedReaderPayload(f) + + # First size check - should return full file size + assert p.size == expected_size + + # Read the file (simulating first request) + writer = BufferWriter() + await p.write(writer) + assert len(writer.buffer) == expected_size + + # Second size check - should still return full file size + # but currently returns 0 because file position is at EOF + assert p.size == expected_size # This assertion fails! + + # Attempting to write again should write the full content + # but currently writes nothing because file is at EOF + writer2 = BufferWriter() + await p.write(writer2) + assert len(writer2.buffer) == expected_size # This also fails! + finally: + await asyncio.to_thread(f.close) + + +async def test_iobase_payload_size_unseekable() -> None: + """Test that IOBasePayload.size returns None for unseekable files.""" + + class UnseekableFile: + """Mock file object that doesn't support seeking.""" + + def __init__(self, content: bytes) -> None: + self.content = content + self.pos = 0 + + def read(self, size: int) -> bytes: + result = self.content[self.pos : self.pos + size] + self.pos += len(result) + return result + + def tell(self) -> int: + raise OSError("Unseekable file") + + content = b"Unseekable content" + f = UnseekableFile(content) + p = payload.IOBasePayload(f) # type: ignore[arg-type] + + # Size should return None for unseekable files + assert p.size is None + + # Payload should not be consumed before writing + assert p.consumed is False + + # Writing should still work + writer = BufferWriter() + await p.write(writer) + assert writer.buffer == content + + # For unseekable files that can't tell() or seek(), + # they are marked as consumed after the first write + assert p.consumed is True From 90b6cf6f3e303309db6d388f1e53d0f30997e1c8 Mon Sep 
17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Thu, 10 Jul 2025 13:08:46 +0100 Subject: [PATCH 1511/1511] Release 3.12.14 (#11298) --- CHANGES.rst | 59 +++++++++++++++++++++++++++++++++++++++ CHANGES/11234.doc.rst | 2 -- CHANGES/11269.feature.rst | 1 - CHANGES/11270.bugfix.rst | 1 - CHANGES/11273.bugfix.rst | 1 - CHANGES/11280.misc.rst | 1 - aiohttp/__init__.py | 2 +- 7 files changed, 60 insertions(+), 7 deletions(-) delete mode 100644 CHANGES/11234.doc.rst delete mode 100644 CHANGES/11269.feature.rst delete mode 100644 CHANGES/11270.bugfix.rst delete mode 100644 CHANGES/11273.bugfix.rst delete mode 100644 CHANGES/11280.misc.rst diff --git a/CHANGES.rst b/CHANGES.rst index d991d99cf5d..c701167b33a 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,65 @@ .. towncrier release notes start +3.12.14 (2025-07-10) +==================== + +Bug fixes +--------- + +- Fixed file uploads failing with HTTP 422 errors when encountering 307/308 redirects, and 301/302 redirects for non-POST methods, by preserving the request body when appropriate per :rfc:`9110#section-15.4.3-3.1` -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`11270`. + + + +- Fixed :py:meth:`ClientSession.close() <aiohttp.ClientSession.close>` hanging indefinitely when using HTTPS requests through HTTP proxies -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`11273`. + + + +- Bumped minimum version of aiosignal to 1.4+ to resolve typing issues -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`11280`. + + + + +Features +-------- + +- Added initial trailer parsing logic to Python HTTP parser -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`11269`. + + + + +Improved documentation +---------------------- + +- Clarified exceptions raised by ``WebSocketResponse.send_frame`` et al. + -- by :user:`DoctorJohn`. 
+ + + *Related issues and pull requests on GitHub:* + :issue:`11234`. + + + + +---- + + 3.12.13 (2025-06-14) ==================== diff --git a/CHANGES/11234.doc.rst b/CHANGES/11234.doc.rst deleted file mode 100644 index 900b56a771c..00000000000 --- a/CHANGES/11234.doc.rst +++ /dev/null @@ -1,2 +0,0 @@ -Clarified exceptions raised by ``WebSocketResponse.send_frame`` at al. --- by :user:`DoctorJohn`. diff --git a/CHANGES/11269.feature.rst b/CHANGES/11269.feature.rst deleted file mode 100644 index 92cf173be14..00000000000 --- a/CHANGES/11269.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Added initial trailer parsing logic to Python HTTP parser -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/11270.bugfix.rst b/CHANGES/11270.bugfix.rst deleted file mode 100644 index d1e0992b949..00000000000 --- a/CHANGES/11270.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed file uploads failing with HTTP 422 errors when encountering 307/308 redirects, and 301/302 redirects for non-POST methods, by preserving the request body when appropriate per :rfc:`9110#section-15.4.3-3.1` -- by :user:`bdraco`. diff --git a/CHANGES/11273.bugfix.rst b/CHANGES/11273.bugfix.rst deleted file mode 100644 index b4d9948fbcd..00000000000 --- a/CHANGES/11273.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed :py:meth:`ClientSession.close() <aiohttp.ClientSession.close>` hanging indefinitely when using HTTPS requests through HTTP proxies -- by :user:`bdraco`. diff --git a/CHANGES/11280.misc.rst b/CHANGES/11280.misc.rst deleted file mode 100644 index 6750918bda7..00000000000 --- a/CHANGES/11280.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Bumped minimum version of aiosignal to 1.4+ to resolve typing issues -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index cc73fcc2c8e..a3ab781e984 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.12.14.dev0" +__version__ = "3.12.14" from typing import TYPE_CHECKING, Tuple